# -*- coding: utf-8 -*-
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2023, Chris Caron <lead2gold@gmail.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import asyncio
import concurrent.futures as cf
import os
from itertools import chain

from . import common
from .conversion import convert_between
from .utils import is_exclusive_match
from .utils import parse_list
from .utils import parse_urls
from .utils import cwe312_url
from .logger import logger
from .AppriseAsset import AppriseAsset
from .AppriseConfig import AppriseConfig
from .AppriseAttachment import AppriseAttachment
from .AppriseLocale import AppriseLocale
from .config.ConfigBase import ConfigBase
from .plugins.NotifyBase import NotifyBase

from . import plugins
from . import __version__


class Apprise:
    """
    Our Notification Manager
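
    A minimal usage sketch (the 'json://localhost' URL below is just an
    illustrative placeholder for any supported service URL):

        apobj = Apprise()
        apobj.add('json://localhost')
        apobj.notify(body='Hello world', title='Greetings')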

    """

    def __init__(self, servers=None, asset=None, location=None, debug=False):
        """
        Loads a set of server urls while applying the Asset() module to each
        if specified.

        If no asset is provided, then the default asset is used.

        Optionally specify a global ContentLocation for a more strict means
        of handling Attachments.
        """

        # Initialize a server list of URLs
        self.servers = list()

        # Assigns a central asset object that will later be passed into each
        # notification plugin.  Assets contain information such as the local
        # directory images can be found in. It can also identify remote
        # URL paths that contain the images you want to present to the end
        # user. If no asset is specified, then the default one is used.
        self.asset = \
            asset if isinstance(asset, AppriseAsset) else AppriseAsset()

        if servers:
            self.add(servers)

        # Initialize our locale object
        self.locale = AppriseLocale()

        # Set our debug flag
        self.debug = debug

        # Store our hosting location for optional strict rule handling
        # of Attachments.  Setting this to None removes any attachment
        # restrictions.
        self.location = location

    @staticmethod
    def instantiate(url, asset=None, tag=None, suppress_exceptions=True):
        """
        Returns the instance of an instantiated plugin based on the provided
        Server URL.  If the url fails to be parsed, then None is returned.

        The specified url can be either a string (the URL itself) or a
        dictionary containing all of the components needed to instantiate
        the notification service.  If identifying a dictionary, at the bare
        minimum, one must specify the schema.

        An example of a url dictionary object might look like:
          {
            schema: 'mailto',
            host: 'google.com',
            user: 'myuser',
            password: 'mypassword',
          }

        Alternatively the string is much easier to specify:
          mailto://user:mypassword@google.com

        The dictionary works well for people who are calling details() to
        extract the components they need to build the URL manually.
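
        A minimal call sketch (reusing the illustrative mailto URL above):

            plugin = Apprise.instantiate(
                'mailto://myuser:mypassword@google.com')
            if plugin:
                plugin.notify(body='test')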
        """

        # Initialize our result set
        results = None

        # Prepare our Asset Object
        asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()

        if isinstance(url, str):
            # Acquire our url tokens
            results = plugins.url_to_dict(
                url, secure_logging=asset.secure_logging)

            if results is None:
                # Failed to parse the server URL; detailed logging handled
                # inside url_to_dict - nothing to report here.
                return None

        elif isinstance(url, dict):
            # We already have our result set
            results = url

            if results.get('schema') not in common.NOTIFY_SCHEMA_MAP:
                # schema is a mandatory dictionary item as it is the only way
                # we can index into our loaded plugins
                logger.error('Dictionary does not include a "schema" entry.')
                logger.trace(
                    'Invalid dictionary unpacked as:{}{}'.format(
                        os.linesep, os.linesep.join(
                            ['{}="{}"'.format(k, v)
                             for k, v in results.items()])))
                return None

            logger.trace(
                'Dictionary unpacked as:{}{}'.format(
                    os.linesep, os.linesep.join(
                        ['{}="{}"'.format(k, v) for k, v in results.items()])))

        # Otherwise we handle the invalid input specified
        else:
            logger.error(
                'An invalid URL type (%s) was specified for instantiation',
                type(url))
            return None

        if not common.NOTIFY_SCHEMA_MAP[results['schema']].enabled:
            #
            # First Plugin Enable Check (Pre Initialization)
            #

            # Plugin has been disabled at a global level
            logger.error(
                '%s:// is disabled on this system.', results['schema'])
            return None

        # Build a list of tags to associate with the newly added notifications
        results['tag'] = set(parse_list(tag))

        # Set our Asset Object
        results['asset'] = asset

        if suppress_exceptions:
            try:
                # Attempt to create an instance of our plugin using the parsed
                # URL information
                plugin = common.NOTIFY_SCHEMA_MAP[results['schema']](**results)

                # Create log entry of loaded URL
                logger.debug(
                    'Loaded {} URL: {}'.format(
                        common.
                        NOTIFY_SCHEMA_MAP[results['schema']].service_name,
                        plugin.url(privacy=asset.secure_logging)))

            except Exception:
                # CWE-312 (Secure Logging) Handling
                loggable_url = url if not asset.secure_logging \
                    else cwe312_url(url)

                # the arguments are invalid or can not be used.
                logger.error(
                    'Could not load {} URL: {}'.format(
                        common.
                        NOTIFY_SCHEMA_MAP[results['schema']].service_name,
                        loggable_url))
                return None

        else:
            # Attempt to create an instance of our plugin using the parsed
            # URL information but don't wrap it in a try catch
            plugin = common.NOTIFY_SCHEMA_MAP[results['schema']](**results)

        if not plugin.enabled:
            #
            # Second Plugin Enable Check (Post Initialization)
            #

            # Service/Plugin is disabled (on a more local level).  This is a
            # case where the plugin was initially enabled but then, after
            # __init__() was called, something under the hood determined
            # that it could no longer be used.

            # The only downside to doing it this way is services are
            # initialized prior to returning the details() if 3rd party tools
            # are polling what is available.  These services that become
            # disabled thereafter are shown initially that they can be used.
            logger.error(
                '%s:// has become disabled on this system.', results['schema'])
            return None

        return plugin

    def add(self, servers, asset=None, tag=None):
        """
        Adds one or more server URLs into our list.

        You can override the global asset if you wish by including it with the
        server(s) that you add.

        The tag allows you to associate 1 or more tag values to the server(s)
        being added.  Tagging a service allows you to exclusively access them
        when calling the notify() function.
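
        For example (URLs and tag names here are illustrative only):

            apobj.add('json://localhost', tag='devops')
            apobj.add(['form://localhost', 'xml://localhost'],
                      tag=['testing', 'web'])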
        """

        # Initialize our return status
        return_status = True

        if asset is None:
            # prepare default asset
            asset = self.asset

        if isinstance(servers, str):
            # build our server list
            servers = parse_urls(servers)
            if len(servers) == 0:
                return False

        elif isinstance(servers, dict):
            # no problem, we support kwargs, convert it to a list
            servers = [servers]

        elif isinstance(servers, (ConfigBase, NotifyBase, AppriseConfig)):
            # Go ahead and just add our plugin into our list
            self.servers.append(servers)
            return True

        elif not isinstance(servers, (tuple, set, list)):
            logger.error(
                "An invalid notification (type={}) was specified.".format(
                    type(servers)))
            return False

        for _server in servers:

            if isinstance(_server, (ConfigBase, NotifyBase, AppriseConfig)):
                # Go ahead and just add our plugin into our list
                self.servers.append(_server)
                continue

            elif not isinstance(_server, (str, dict)):
                logger.error(
                    "An invalid notification (type={}) was specified.".format(
                        type(_server)))
                return_status = False
                continue

            # Instantiate ourselves an object, this function throws or
            # returns None if it fails
            instance = Apprise.instantiate(_server, asset=asset, tag=tag)
            if not isinstance(instance, NotifyBase):
                # No logging is required as instantiate() handles failure
                # and/or success reasons for us
                return_status = False
                continue

            # Add our initialized plugin to our server listings
            self.servers.append(instance)

        # Return our status
        return return_status

    def clear(self):
        """
        Empties our server list

        """
        self.servers[:] = []

    def find(self, tag=common.MATCH_ALL_TAG, match_always=True):
        """
        Yields all of the loaded servers matching against the tag specified.
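
        For example (tag names arbitrary; apobj is an Apprise instance), to
        act only on servers carrying both the 'prod' and 'db' tags:

            for server in apobj.find(tag=[('prod', 'db')]):
                server.notify(body='database backup complete')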

        """

        # Build our tag setup
        #   - top level entries are treated as an 'or'
        #   - second level (or more) entries are treated as 'and'
        #
        #   examples:
        #     tag="tagA, tagB"                  = tagA or tagB
        #     tag=['tagA', 'tagB']              = tagA or tagB
        #     tag=[('tagA', 'tagC'), 'tagB']    = (tagA and tagC) or tagB
        #     tag=[('tagB', 'tagC')]            = tagB and tagC

        # A match_always flag allows us to pick up on our 'any' keyword
        # and notify these services under all circumstances
        match_always = common.MATCH_ALWAYS_TAG if match_always else None

        # Iterate over our loaded plugins
        for entry in self.servers:

            if isinstance(entry, (ConfigBase, AppriseConfig)):
                # load our servers
                servers = entry.servers()

            else:
                servers = [entry, ]

            for server in servers:
                # Apply our tag matching based on our defined logic
                if is_exclusive_match(
                        logic=tag, data=server.tags,
                        match_all=common.MATCH_ALL_TAG,
                        match_always=match_always):
                    yield server

        return

    def notify(self, body, title='', notify_type=common.NotifyType.INFO,
               body_format=None, tag=common.MATCH_ALL_TAG, match_always=True,
               attach=None, interpret_escapes=None):
        """
        Send a notification to all the plugins previously loaded.

        If the body_format specified is NotifyFormat.MARKDOWN, it will
        be converted to HTML if the Notification type expects this.

        If the tag is specified (either a string or a set/list/tuple
        of strings), then only the notifications flagged with that
        tagged value are notified.  By default, all added services
        are notified (tag=MATCH_ALL_TAG).

        This function returns True if all notifications were successfully
        sent, False if even just one of them fails, and None if no
        notifications were sent at all as a result of tag filtering and/or
        simply having empty configuration files that were read.

        Attach can contain a list of attachment URLs.  attach can also be
        represented by an AttachBase() (or list of) object(s).  This
        identifies the attachment(s) you wish to deliver with your
        notification.

        Set interpret_escapes to True if you want to pre-escape a string
        such as turning a \n into an actual new line, etc.
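
        A small illustrative call (the tag name is arbitrary):

            if apobj.notify(body='backup complete', tag='admins') is False:
                # at least one tagged service failed to be notified
                ...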
        """

        try:
            # Process arguments and build synchronous and asynchronous calls
            # (this step can throw internal errors).
            sequential_calls, parallel_calls = self._create_notify_calls(
                body, title,
                notify_type=notify_type, body_format=body_format,
                tag=tag, match_always=match_always, attach=attach,
                interpret_escapes=interpret_escapes
            )

        except TypeError:
            # No notifications sent, and there was an internal error.
            return False

        if not sequential_calls and not parallel_calls:
            # Nothing to send
            return None

        sequential_result = Apprise._notify_sequential(*sequential_calls)
        parallel_result = Apprise._notify_parallel_threadpool(*parallel_calls)
        return sequential_result and parallel_result

    async def async_notify(self, *args, **kwargs):
        """
        Send a notification to all the plugins previously loaded, for
        asynchronous callers.

        The arguments are identical to those of Apprise.notify().
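
        For example, from within a coroutine (sketch):

            await apobj.async_notify(body='Hello world')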

        """
        try:
            # Process arguments and build synchronous and asynchronous calls
            # (this step can throw internal errors).
            sequential_calls, parallel_calls = self._create_notify_calls(
                *args, **kwargs)

        except TypeError:
            # No notifications sent, and there was an internal error.
            return False

        if not sequential_calls and not parallel_calls:
            # Nothing to send
            return None

        sequential_result = Apprise._notify_sequential(*sequential_calls)
        parallel_result = \
            await Apprise._notify_parallel_asyncio(*parallel_calls)
        return sequential_result and parallel_result

    def _create_notify_calls(self, *args, **kwargs):
        """
        Creates notifications for all the plugins loaded.

        Returns a list of (server, notify() kwargs) tuples for plugins with
        parallelism disabled and another list for plugins with parallelism
        enabled.
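
        Illustratively, the return value takes the shape:

            ([(server, kwargs), ...],   # async_mode disabled (sequential)
             [(server, kwargs), ...])   # async_mode enabled (parallel)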
        """

        all_calls = list(self._create_notify_gen(*args, **kwargs))

        # Split into sequential and parallel notify() calls.
        sequential, parallel = [], []
        for (server, notify_kwargs) in all_calls:
            if server.asset.async_mode:
                parallel.append((server, notify_kwargs))
            else:
                sequential.append((server, notify_kwargs))

        return sequential, parallel

    def _create_notify_gen(self, body, title='',
                           notify_type=common.NotifyType.INFO,
                           body_format=None, tag=common.MATCH_ALL_TAG,
                           match_always=True, attach=None,
                           interpret_escapes=None):
        """
        Internal generator function for _create_notify_calls().
        """

        if len(self) == 0:
            # Nothing to notify
            msg = "There are no service(s) to notify"
            logger.error(msg)
            raise TypeError(msg)

        if not (title or body or attach):
            msg = "No message content specified to deliver"
            logger.error(msg)
            raise TypeError(msg)

        try:
            if title and isinstance(title, bytes):
                title = title.decode(self.asset.encoding)

            if body and isinstance(body, bytes):
                body = body.decode(self.asset.encoding)

        except UnicodeDecodeError:
            msg = 'The content passed into Apprise was not of encoding ' \
                  'type: {}'.format(self.asset.encoding)
            logger.error(msg)
            raise TypeError(msg)

        # Tracks conversions
        conversion_body_map = dict()
        conversion_title_map = dict()

        # Prepare attachments if required
        if attach is not None and not isinstance(attach, AppriseAttachment):
            attach = AppriseAttachment(
                attach, asset=self.asset, location=self.location)

        # Allow Asset default value
        body_format = self.asset.body_format \
            if body_format is None else body_format

        # Allow Asset default value
        interpret_escapes = self.asset.interpret_escapes \
            if interpret_escapes is None else interpret_escapes

        # Iterate over our loaded plugins
        for server in self.find(tag, match_always=match_always):
            # If our code reaches here, we either did not define a tag (it
            # was set to None), or we did define a tag and the logic above
            # determined we need to notify the service it's associated with

            # First we need to generate a key we will use to determine if we
            # need to build our data out.  Entries without a title are merged
            # with the body at this stage.
            key = server.notify_format if server.title_maxlen > 0 \
                else f'_{server.notify_format}'

            if key not in conversion_title_map:

                # Prepare our title
                conversion_title_map[key] = '' if not title else title

                # Conversion of title only occurs for services where the title
                # is blended with the body (title_maxlen <= 0)
                if conversion_title_map[key] and server.title_maxlen <= 0:
                    conversion_title_map[key] = convert_between(
                        body_format, server.notify_format,
                        content=conversion_title_map[key])

                # Our body is always converted no matter what
                conversion_body_map[key] = \
                    convert_between(
                        body_format, server.notify_format, content=body)

                if interpret_escapes:
                    #
                    # Escape our content
                    #

                    try:
                        # Added overhead required due to Python 3 Encoding Bug
                        # identified here: https://bugs.python.org/issue21331
                        conversion_body_map[key] = \
                            conversion_body_map[key]\
                            .encode('ascii', 'backslashreplace')\
                            .decode('unicode-escape')

                        conversion_title_map[key] = \
                            conversion_title_map[key]\
                            .encode('ascii', 'backslashreplace')\
                            .decode('unicode-escape')

                    except AttributeError:
                        # Must be of string type
                        msg = 'Failed to escape message body'
                        logger.error(msg)
                        raise TypeError(msg)

            kwargs = dict(
                body=conversion_body_map[key],
                title=conversion_title_map[key],
                notify_type=notify_type,
                attach=attach,
                body_format=body_format
            )
            yield (server, kwargs)

    @staticmethod
    def _notify_sequential(*servers_kwargs):
        """
        Process a list of notify() calls sequentially and synchronously.
        """

        success = True

        for (server, kwargs) in servers_kwargs:
            try:
                # Send notification
                result = server.notify(**kwargs)
                success = success and result

            except TypeError:
                # These are our internally thrown notifications.
                success = False

            except Exception:
                # A catch all so we don't have to abort early
                # just because one of our plugins has a bug in it.
                logger.exception("Unhandled Notification Exception")
                success = False

        return success

    @staticmethod
    def _notify_parallel_threadpool(*servers_kwargs):
        """
        Process a list of notify() calls in parallel and synchronously.
        """

        n_calls = len(servers_kwargs)

        # 0-length case
        if n_calls == 0:
            return True

        # There's no need to use a thread pool for just a single notification
        if n_calls == 1:
            return Apprise._notify_sequential(servers_kwargs[0])

        # Create log entry
        logger.info(
            'Notifying %d service(s) with threads.', len(servers_kwargs))

        with cf.ThreadPoolExecutor() as executor:
            success = True
            futures = [executor.submit(server.notify, **kwargs)
                       for (server, kwargs) in servers_kwargs]

            for future in cf.as_completed(futures):
                try:
                    result = future.result()
                    success = success and result

                except TypeError:
                    # These are our internally thrown notifications.
                    success = False

                except Exception:
                    # A catch all so we don't have to abort early
                    # just because one of our plugins has a bug in it.
                    logger.exception("Unhandled Notification Exception")
                    success = False

        return success

    @staticmethod
    async def _notify_parallel_asyncio(*servers_kwargs):
        """
        Process a list of async_notify() calls in parallel and asynchronously.
        """

        n_calls = len(servers_kwargs)

        # 0-length case
        if n_calls == 0:
            return True

        # (Unlike with the thread pool, we don't optimize for the single-
        # notification case because asyncio can do useful work while waiting
        # for that single notification to complete)

        # Create log entry
        logger.info(
            'Notifying %d service(s) asynchronously.', len(servers_kwargs))

        async def do_call(server, kwargs):
            return await server.async_notify(**kwargs)

        cors = (do_call(server, kwargs) for (server, kwargs) in servers_kwargs)
        results = await asyncio.gather(*cors, return_exceptions=True)

        if any(isinstance(status, Exception)
               and not isinstance(status, TypeError) for status in results):
            # A catch all so we don't have to abort early just because
            # one of our plugins has a bug in it.
            logger.exception("Unhandled Notification Exception")
            return False

        if any(isinstance(status, TypeError) for status in results):
            # These are our internally thrown notifications.
            return False

        return all(results)

    def details(self, lang=None, show_requirements=False, show_disabled=False):
        """
        Returns the details associated with the Apprise object
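
        The response is a dictionary of the rough shape (illustrative):

            {
                'version': '<apprise version>',
                'asset': {...},
                'schemas': [
                    {'service_name': ..., 'protocols': ...,
                     'secure_protocols': ..., 'details': ..., ...},
                ],
            }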

        """

        # general object returned
        response = {
            # Defines the current version of Apprise
            'version': __version__,
            # Lists all of the currently supported Notifications
            'schemas': [],
            # Includes the configured asset details
            'asset': self.asset.details(),
        }

        for plugin in set(common.NOTIFY_SCHEMA_MAP.values()):
            # Iterate over our hashed plugins and dynamically build details on
            # their status:

            content = {
                'service_name': getattr(plugin, 'service_name', None),
                'service_url': getattr(plugin, 'service_url', None),
                'setup_url': getattr(plugin, 'setup_url', None),
                # Placeholder - populated below
                'details': None,

                # Let upstream service know of the plugins that support
                # attachments
                'attachment_support': getattr(
                    plugin, 'attachment_support', False),

                # Differentiate between what is a custom loaded plugin and
                # what is native.
                'category': getattr(plugin, 'category', None)
            }

            # Only share the plugin if it is enabled (or we were asked to
            # include disabled entries too)
            enabled = getattr(plugin, 'enabled', True)
            if not show_disabled and not enabled:
                # Do not show inactive plugins
                continue

            elif show_disabled:
                # Add current state to response
                content['enabled'] = enabled

            # Standard protocol(s) should be None or a tuple
            protocols = getattr(plugin, 'protocol', None)
            if isinstance(protocols, str):
                protocols = (protocols, )

            # Secure protocol(s) should be None or a tuple
            secure_protocols = getattr(plugin, 'secure_protocol', None)
            if isinstance(secure_protocols, str):
                secure_protocols = (secure_protocols, )

            # Add our protocol details to our content
            content.update({
                'protocols': protocols,
                'secure_protocols': secure_protocols,
            })

            if not lang:
                # Simply return our results
                content['details'] = plugins.details(plugin)
                if show_requirements:
                    content['requirements'] = plugins.requirements(plugin)

            else:
                # Emulate the specified language when returning our results
                with self.locale.lang_at(lang):
                    content['details'] = plugins.details(plugin)
                    if show_requirements:
                        content['requirements'] = plugins.requirements(plugin)

            # Build our response object
            response['schemas'].append(content)

        return response

    def urls(self, privacy=False):
        """
        Returns all of the loaded URLs defined in this apprise object.
        """
        return [x.url(privacy=privacy) for x in self.servers]

    def pop(self, index):
        """
        Removes an indexed Notification Service from the stack and returns it.

        Note that we can never pop AppriseConfig() entries themselves, only
        what was loaded within them.  So pop needs to carefully iterate over
        our list and only track actual entries.
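
        For example (illustrative): if this object holds one Config entry
        that itself loaded three services, plus one directly added service,
        then pop(3) removes and returns the directly added service.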
        """

        # Tracking variables
        prev_offset = -1
        offset = prev_offset

        for idx, s in enumerate(self.servers):
            if isinstance(s, (ConfigBase, AppriseConfig)):
                servers = s.servers()
                if len(servers) > 0:
                    # Acquire a new maximum offset to work with
                    offset = prev_offset + len(servers)

                    if offset >= index:
                        # we can pop an element from our config stack
                        fn = s.pop if isinstance(s, ConfigBase) \
                            else s.server_pop

                        return fn(index if prev_offset == -1
                                  else (index - prev_offset - 1))

            else:
                offset = prev_offset + 1
                if offset == index:
                    return self.servers.pop(idx)

            # Update our old offset
            prev_offset = offset

        # If we reach here, then we indexed out of range
        raise IndexError('list index out of range')

    def __getitem__(self, index):
        """
        Returns the indexed server entry of a loaded notification server
        """
        # Tracking variables
        prev_offset = -1
        offset = prev_offset

        for idx, s in enumerate(self.servers):
            if isinstance(s, (ConfigBase, AppriseConfig)):
                # Get our list of servers associated with our config object
                servers = s.servers()
                if len(servers) > 0:
                    # Acquire a new maximum offset to work with
                    offset = prev_offset + len(servers)

                    if offset >= index:
                        return servers[index if prev_offset == -1
                                       else (index - prev_offset - 1)]

            else:
                offset = prev_offset + 1
                if offset == index:
                    return self.servers[idx]

            # Update our old offset
            prev_offset = offset

        # If we reach here, then we indexed out of range
        raise IndexError('list index out of range')

    def __getstate__(self):
        """
        Pickle Support dumps()
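
        Together with __setstate__() below, this allows a pickle round trip
        of the rough form (illustrative):

            import pickle
            clone = pickle.loads(pickle.dumps(apobj))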
        """
        attributes = {
            'asset': self.asset,
            # Prepare our URL list as we need to extract the associated tags
            # and asset details associated with it
            'urls': [{
                'url': server.url(privacy=False),
                'tag': server.tags if server.tags else None,
                'asset': server.asset} for server in self.servers],
            'locale': self.locale,
            'debug': self.debug,
            'location': self.location,
        }

        return attributes

    def __setstate__(self, state):
        """
        Pickle Support loads()
        """
        self.servers = list()
        self.asset = state['asset']
        self.locale = state['locale']
        # Restore our debug flag (stored by __getstate__ above)
        self.debug = state['debug']
        self.location = state['location']
        for entry in state['urls']:
            self.add(entry['url'], asset=entry['asset'], tag=entry['tag'])

    def __bool__(self):
        """
        Allows the Apprise object to be wrapped in an 'if statement'.
        True is returned if at least one service has been loaded.
        """
        return len(self) > 0

    def __iter__(self):
        """
        Returns an iterator to each of our servers loaded. This includes those
        found inside configuration.
        """
        return chain(*[[s] if not isinstance(s, (ConfigBase, AppriseConfig))
                       else iter(s.servers()) for s in self.servers])

    def __len__(self):
        """
        Returns the number of servers loaded; this includes those found within
        loaded configuration.  This function never actually counts the Config
        entries themselves (if they exist), only what they contain.
        """
        return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig))
                    else len(s.servers()) for s in self.servers])