Updated Apprise notification module to the latest version, picking up its newest providers.

morpheus65535 2021-12-01 21:19:18 -05:00
parent 402c82d84f
commit d51dc68ebb
91 changed files with 11623 additions and 1429 deletions

View file

@ -23,11 +23,11 @@ def update_notifier():
notifiers_current.append([notifier['name']])
for x in results['schemas']:
if [x['service_name']] not in notifiers_current:
notifiers_new.append({'name': x['service_name'], 'enabled': 0})
logging.debug('Adding new notifier agent: ' + x['service_name'])
if [str(x['service_name'])] not in notifiers_current:
notifiers_new.append({'name': str(x['service_name']), 'enabled': 0})
logging.debug('Adding new notifier agent: ' + str(x['service_name']))
else:
notifiers_old.append([x['service_name']])
notifiers_old.append([str(x['service_name'])])
notifiers_to_delete = [item for item in notifiers_current if item not in notifiers_old]

View file

@ -34,6 +34,7 @@ from .common import MATCH_ALL_TAG
from .utils import is_exclusive_match
from .utils import parse_list
from .utils import parse_urls
from .utils import cwe312_url
from .logger import logger
from .AppriseAsset import AppriseAsset
@ -58,13 +59,15 @@ class Apprise(object):
"""
def __init__(self, servers=None, asset=None, debug=False):
def __init__(self, servers=None, asset=None, location=None, debug=False):
"""
Loads a set of server urls while applying the Asset() module to each
if specified.
If no asset is provided, then the default asset is used.
Optionally specify a global ContentLocation for a more strict means
of handling Attachments.
"""
# Initialize a server list of URLs
@ -87,6 +90,11 @@ class Apprise(object):
# Set our debug flag
self.debug = debug
# Store our hosting location for optional strict rule handling
# of Attachments. Setting this to None removes any attachment
# restrictions.
self.location = location
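A minimal sketch of how the new location argument can be used once this version is installed (ContentLocation is exported from apprise.common by this commit):

from apprise import Apprise, ContentLocation

# Restrict this instance to content reachable on the local machine; a
# hosted deployment could pass ContentLocation.HOSTED instead so that
# local file:// style attachments are rejected on behalf of remote callers.
apobj = Apprise(location=ContentLocation.LOCAL)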
@staticmethod
def instantiate(url, asset=None, tag=None, suppress_exceptions=True):
"""
@ -116,9 +124,14 @@ class Apprise(object):
# Initialize our result set
results = None
# Prepare our Asset Object
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
if isinstance(url, six.string_types):
# Acquire our url tokens
results = plugins.url_to_dict(url)
results = plugins.url_to_dict(
url, secure_logging=asset.secure_logging)
if results is None:
# Failed to parse the server URL; detailed logging handled
# inside url_to_dict - nothing to report here.
@ -132,25 +145,40 @@ class Apprise(object):
# schema is a mandatory dictionary item as it is the only way
# we can index into our loaded plugins
logger.error('Dictionary does not include a "schema" entry.')
logger.trace('Invalid dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v) for k, v in results.items()])))
logger.trace(
'Invalid dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v)
for k, v in results.items()])))
return None
logger.trace('Dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v) for k, v in results.items()])))
logger.trace(
'Dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v) for k, v in results.items()])))
# Otherwise we handle the invalid input specified
else:
logger.error('Invalid URL specified: {}'.format(url))
logger.error(
'An invalid URL type (%s) was specified for instantiation',
type(url))
return None
if not plugins.SCHEMA_MAP[results['schema']].enabled:
#
# First Plugin Enable Check (Pre Initialization)
#
# Plugin has been disabled at a global level
logger.error(
'%s:// is disabled on this system.', results['schema'])
return None
# Build a list of tags to associate with the newly added notifications
results['tag'] = set(parse_list(tag))
# Prepare our Asset Object
results['asset'] = \
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
# Set our Asset Object
results['asset'] = asset
if suppress_exceptions:
try:
@ -159,14 +187,21 @@ class Apprise(object):
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
# Create log entry of loaded URL
logger.debug('Loaded {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name,
plugin.url()))
logger.debug(
'Loaded {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name,
plugin.url(privacy=asset.secure_logging)))
except Exception:
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
# the arguments are invalid or can not be used.
logger.error('Could not load {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name, url))
logger.error(
'Could not load {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name,
loggable_url))
return None
else:
@ -174,6 +209,24 @@ class Apprise(object):
# URL information but don't wrap it in a try catch
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
if not plugin.enabled:
#
# Second Plugin Enable Check (Post Initialization)
#
# Service/Plugin is disabled (on a more local level). This is a
# case where the plugin was initially enabled but then after the
# __init__() was called under the hood something pre-determined
# that it could no longer be used.
# The only downside to doing it this way is that services are
initialized before details() is returned when 3rd-party tools
poll what is available; services that become disabled afterwards
are still shown initially as usable.
logger.error(
'%s:// has become disabled on this system.', results['schema'])
return None
return plugin
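A short sketch of the static method above; 'json://localhost' is only a placeholder URL:

from apprise import Apprise

# instantiate() turns a single URL into a plugin instance; with the default
# suppress_exceptions=True a URL that cannot be loaded simply yields None.
plugin = Apprise.instantiate('json://localhost')
if plugin:
    print(plugin.url(privacy=True))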
def add(self, servers, asset=None, tag=None):
@ -286,7 +339,8 @@ class Apprise(object):
return
def notify(self, body, title='', notify_type=NotifyType.INFO,
body_format=None, tag=MATCH_ALL_TAG, attach=None):
body_format=None, tag=MATCH_ALL_TAG, attach=None,
interpret_escapes=None):
"""
Send a notification to all of the plugins previously loaded.
@ -306,47 +360,158 @@ class Apprise(object):
Attach can contain a list of attachment URLs. attach can also be
represented by an AttachBase() (or list of) object(s). This
identifies the products you wish to notify
Set interpret_escapes to True if you want to pre-escape a string
such as turning a \n into an actual new line, etc.
"""
if ASYNCIO_SUPPORT:
return py3compat.asyncio.tosync(
self.async_notify(
body, title,
notify_type=notify_type, body_format=body_format,
tag=tag, attach=attach,
interpret_escapes=interpret_escapes,
),
debug=self.debug
)
else:
try:
results = list(
self._notifyall(
Apprise._notifyhandler,
body, title,
notify_type=notify_type, body_format=body_format,
tag=tag, attach=attach,
interpret_escapes=interpret_escapes,
)
)
except TypeError:
# No notifications sent, and there was an internal error.
return False
else:
if len(results) > 0:
# All notifications sent, return False if any failed.
return all(results)
else:
# No notifications sent.
return None
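A minimal sketch of the new interpret_escapes keyword on notify(); the json:// URL is a placeholder:

from apprise import Apprise

apobj = Apprise()
apobj.add('json://localhost')  # placeholder notification URL

# interpret_escapes=True converts literal "\n" sequences in the body into
# real new lines before the message is handed to the plugins.
apobj.notify(
    body='First line\\nSecond line',
    title='Escape handling',
    interpret_escapes=True,
)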
def async_notify(self, *args, **kwargs):
"""
Send a notification to all of the plugins previously loaded, for
asynchronous callers. This method is an async method that should be
awaited on, even if it is missing the async keyword in its signature.
(This is omitted to preserve syntax compatibility with Python 2.)
The arguments are identical to those of Apprise.notify(). This method
is not available in Python 2.
"""
try:
coroutines = list(
self._notifyall(
Apprise._notifyhandlerasync, *args, **kwargs))
except TypeError:
# No notifications sent, and there was an internal error.
return py3compat.asyncio.toasyncwrap(False)
else:
if len(coroutines) > 0:
# All notifications sent, return False if any failed.
return py3compat.asyncio.notify(coroutines)
else:
# No notifications sent.
return py3compat.asyncio.toasyncwrap(None)
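A sketch of awaiting async_notify() under Python 3.7+ (placeholder URL; asyncio.run used for brevity):

import asyncio

from apprise import Apprise

async def send():
    apobj = Apprise()
    apobj.add('json://localhost')  # placeholder notification URL
    # The returned object is awaitable on Python 3, as the docstring notes.
    return await apobj.async_notify(body='hello', title='async demo')

asyncio.run(send())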
@staticmethod
def _notifyhandler(server, **kwargs):
"""
The synchronous notification sender. Returns True if the notification
sent successfully.
"""
try:
# Send notification
return server.notify(**kwargs)
except TypeError:
# These are our internally thrown notifications
return False
except Exception:
# A catch all so we don't have to abort early
# just because one of our plugins has a bug in it.
logger.exception("Unhandled Notification Exception")
return False
@staticmethod
def _notifyhandlerasync(server, **kwargs):
"""
The asynchronous notification sender. Returns a coroutine that yields
True if the notification sent successfully.
"""
if server.asset.async_mode:
return server.async_notify(**kwargs)
else:
# Send the notification immediately, and wrap the result in a
# coroutine.
status = Apprise._notifyhandler(server, **kwargs)
return py3compat.asyncio.toasyncwrap(status)
def _notifyall(self, handler, body, title='', notify_type=NotifyType.INFO,
body_format=None, tag=MATCH_ALL_TAG, attach=None,
interpret_escapes=None):
"""
Creates notifications for all of the plugins loaded.
Returns a generator that calls handler for each notification. The first
and only argument supplied to handler is the server, and the keyword
arguments are exactly as they would be passed to server.notify().
"""
if len(self) == 0:
# Nothing to notify
return False
# Initialize our return result which only turns to True if we send
# at least one valid notification
status = None
raise TypeError("No service(s) to notify")
if not (title or body):
return False
raise TypeError("No message content specified to deliver")
if six.PY2:
# Python 2.7.x Unicode Character Handling
# Ensure we're working with utf-8
if isinstance(title, unicode): # noqa: F821
title = title.encode('utf-8')
if isinstance(body, unicode): # noqa: F821
body = body.encode('utf-8')
# Tracks conversions
conversion_map = dict()
# Prepare attachments if required
if attach is not None and not isinstance(attach, AppriseAttachment):
try:
attach = AppriseAttachment(attach, asset=self.asset)
except TypeError:
# bad attachments
return False
attach = AppriseAttachment(
attach, asset=self.asset, location=self.location)
# Allow Asset default value
body_format = self.asset.body_format \
if body_format is None else body_format
# for asyncio support; we track a list of our servers to notify
# sequentially
coroutines = []
# Allow Asset default value
interpret_escapes = self.asset.interpret_escapes \
if interpret_escapes is None else interpret_escapes
# Iterate over our loaded plugins
for server in self.find(tag):
if status is None:
# We have at least one server to notify; change status
# to be a default value of True from now (purely an
# initialization at this point)
status = True
# If our code reaches here, we either did not define a tag (it
# was set to None), or we did define a tag and the logic above
# determined we need to notify the service it's associated with
@ -396,48 +561,59 @@ class Apprise(object):
# Store entry directly
conversion_map[server.notify_format] = body
if ASYNCIO_SUPPORT and server.asset.async_mode:
# Build a list of servers requiring notification
# that will be triggered asynchronously afterwards
coroutines.append(server.async_notify(
body=conversion_map[server.notify_format],
title=title,
notify_type=notify_type,
attach=attach))
if interpret_escapes:
#
# Escape our content
#
# We gather at this point and notify at the end
continue
try:
# Added overhead required due to Python 3 Encoding Bug
# identified here: https://bugs.python.org/issue21331
conversion_map[server.notify_format] = \
conversion_map[server.notify_format]\
.encode('ascii', 'backslashreplace')\
.decode('unicode-escape')
try:
# Send notification
if not server.notify(
body=conversion_map[server.notify_format],
title=title,
notify_type=notify_type,
attach=attach):
except UnicodeDecodeError: # pragma: no cover
# This occurs using a very old version of Python 2.7 such
# as the one that ships with CentOS/RedHat 7.x (v2.7.5).
conversion_map[server.notify_format] = \
conversion_map[server.notify_format] \
.decode('string_escape')
# Toggle our return status flag
status = False
except AttributeError:
# Must be of string type
logger.error('Failed to escape message body')
raise TypeError
except TypeError:
# These are our internally thrown notifications
status = False
if title:
try:
# Added overhead required due to Python 3 Encoding Bug
# identified here: https://bugs.python.org/issue21331
title = title\
.encode('ascii', 'backslashreplace')\
.decode('unicode-escape')
except Exception:
# A catch all so we don't have to abort early
# just because one of our plugins has a bug in it.
logger.exception("Notification Exception")
status = False
except UnicodeDecodeError: # pragma: no cover
# This occurs using a very old version of Python 2.7
# such as the one that ships with CentOS/RedHat 7.x
# (v2.7.5).
title = title.decode('string_escape')
if coroutines:
# perform our async notification(s)
if not py3compat.asyncio.notify(coroutines, debug=self.debug):
# Toggle our status only if we had a failure
status = False
except AttributeError:
# Must be of string type
logger.error('Failed to escape message title')
raise TypeError
return status
yield handler(
server,
body=conversion_map[server.notify_format],
title=title,
notify_type=notify_type,
attach=attach
)
def details(self, lang=None):
def details(self, lang=None, show_requirements=False, show_disabled=False):
"""
Returns the details associated with the Apprise object
@ -453,8 +629,27 @@ class Apprise(object):
'asset': self.asset.details(),
}
# to add it's mapping to our hash table
for plugin in set(plugins.SCHEMA_MAP.values()):
# Iterate over our hashed plugins and dynamically build details on
# their status:
content = {
'service_name': getattr(plugin, 'service_name', None),
'service_url': getattr(plugin, 'service_url', None),
'setup_url': getattr(plugin, 'setup_url', None),
# Placeholder - populated below
'details': None
}
# Standard protocol(s) should be None or a tuple
enabled = getattr(plugin, 'enabled', True)
if not show_disabled and not enabled:
# Do not show inactive plugins
continue
elif show_disabled:
# Add current state to response
content['enabled'] = enabled
# Standard protocol(s) should be None or a tuple
protocols = getattr(plugin, 'protocol', None)
@ -466,31 +661,35 @@ class Apprise(object):
if isinstance(secure_protocols, six.string_types):
secure_protocols = (secure_protocols, )
# Add our protocol details to our content
content.update({
'protocols': protocols,
'secure_protocols': secure_protocols,
})
if not lang:
# Simply return our results
details = plugins.details(plugin)
content['details'] = plugins.details(plugin)
if show_requirements:
content['requirements'] = plugins.requirements(plugin)
else:
# Emulate the specified language when returning our results
with self.locale.lang_at(lang):
details = plugins.details(plugin)
content['details'] = plugins.details(plugin)
if show_requirements:
content['requirements'] = plugins.requirements(plugin)
# Build our response object
response['schemas'].append({
'service_name': getattr(plugin, 'service_name', None),
'service_url': getattr(plugin, 'service_url', None),
'setup_url': getattr(plugin, 'setup_url', None),
'protocols': protocols,
'secure_protocols': secure_protocols,
'details': details,
})
response['schemas'].append(content)
return response
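A sketch of consuming the extended details() output; the key names follow the dictionary assembled above:

from apprise import Apprise

info = Apprise().details(show_requirements=True, show_disabled=True)
for schema in info['schemas']:
    # 'enabled' is present because show_disabled=True was passed
    state = 'enabled' if schema.get('enabled', True) else 'disabled'
    # 'requirements' is present because show_requirements=True was passed
    print(schema['service_name'], state, schema.get('requirements'))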
def urls(self):
def urls(self, privacy=False):
"""
Returns all of the loaded URLs defined in this apprise object.
"""
return [x.url() for x in self.servers]
return [x.url(privacy=privacy) for x in self.servers]
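A sketch of the new privacy flag; the URL and credentials are placeholders:

from apprise import Apprise

apobj = Apprise()
apobj.add('json://user:s3cr3t@localhost')  # placeholder credentials

# privacy=True asks each plugin to mask its secrets in the URLs it reports
for url in apobj.urls(privacy=True):
    print(url)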
def pop(self, index):
"""
@ -592,3 +791,7 @@ class Apprise(object):
"""
return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig))
else len(s.servers()) for s in self.servers])
if six.PY2:
del Apprise.async_notify

libs/apprise/Apprise.pyi (new file, 63 lines)
View file

@ -0,0 +1,63 @@
from typing import Any, Dict, List, Iterable, Iterator, Optional, Union
from . import (AppriseAsset, AppriseAttachment, AppriseConfig, ConfigBase,
NotifyBase, NotifyFormat, NotifyType)
from .common import ContentLocation
_Server = Union[str, ConfigBase, NotifyBase, AppriseConfig]
_Servers = Union[_Server, Dict[Any, _Server], Iterable[_Server]]
# Can't define this recursively as mypy doesn't support recursive types:
# https://github.com/python/mypy/issues/731
_Tag = Union[str, Iterable[Union[str, Iterable[str]]]]
class Apprise:
def __init__(
self,
servers: _Servers = ...,
asset: Optional[AppriseAsset] = ...,
location: Optional[ContentLocation] = ...,
debug: bool = ...
) -> None: ...
@staticmethod
def instantiate(
url: Union[str, Dict[str, NotifyBase]],
asset: Optional[AppriseAsset] = ...,
tag: Optional[_Tag] = ...,
suppress_exceptions: bool = ...
) -> NotifyBase: ...
def add(
self,
servers: _Servers = ...,
asset: Optional[AppriseAsset] = ...,
tag: Optional[_Tag] = ...
) -> bool: ...
def clear(self) -> None: ...
def find(self, tag: str = ...) -> Iterator[Apprise]: ...
def notify(
self,
body: str,
title: str = ...,
notify_type: NotifyType = ...,
body_format: NotifyFormat = ...,
tag: _Tag = ...,
attach: Optional[AppriseAttachment] = ...,
interpret_escapes: Optional[bool] = ...
) -> bool: ...
async def async_notify(
self,
body: str,
title: str = ...,
notify_type: NotifyType = ...,
body_format: NotifyFormat = ...,
tag: _Tag = ...,
attach: Optional[AppriseAttachment] = ...,
interpret_escapes: Optional[bool] = ...
) -> bool: ...
def details(self, lang: Optional[str] = ...) -> Dict[str, Any]: ...
def urls(self, privacy: bool = ...) -> Iterable[str]: ...
def pop(self, index: int) -> ConfigBase: ...
def __getitem__(self, index: int) -> ConfigBase: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...
def __iter__(self) -> Iterator[ConfigBase]: ...
def __len__(self) -> int: ...

View file

@ -24,7 +24,7 @@
# THE SOFTWARE.
import re
from uuid import uuid4
from os.path import join
from os.path import dirname
from os.path import isfile
@ -105,6 +105,36 @@ class AppriseAsset(object):
# notifications are sent sequentially (one after another)
async_mode = True
# Whether or not to interpret escapes found within the input text prior
# to passing it upstream. Such as converting \t to an actual tab and \n
# to a new line.
interpret_escapes = False
# For more detail see CWE-312 @
# https://cwe.mitre.org/data/definitions/312.html
#
# By enabling this, the logging output has additional overhead applied to
# it preventing secure password and secret information from being
# displayed in the logging. Since there is overhead involved in performing
# this cleanup; system owners who run in a very isolated environment may
# choose to disable this for a slight performance bump. It is recommended
# that you leave this option as is otherwise.
secure_logging = True
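A sketch of toggling the two new asset flags at construction time, assuming AppriseAsset(**kwargs) maps keywords onto these attributes as it does for the existing ones:

from apprise import Apprise, AppriseAsset

# Interpret escape sequences in message bodies, and (for an isolated
# system) trade the CWE-312 log scrubbing away for a small speed-up.
asset = AppriseAsset(interpret_escapes=True, secure_logging=False)
apobj = Apprise(asset=asset)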
# All internal/system flags are prefixed with an underscore (_)
# These can only be initialized using Python libraries and are not picked
# up from (yaml) configuration files (if set)
# An internal counter that is used by AppriseAPI
# (https://github.com/caronc/apprise-api). The idea is to allow one
# instance of AppriseAPI to call another, but to track how many times
this occurs. Its intent is to prevent a loop where an AppriseAPI
# Server calls itself (or loops indefinitely)
_recursion = 0
# A unique identifier we can use to associate our calling source
_uid = str(uuid4())
def __init__(self, **kwargs):
"""
Asset Initialization

View file

@ -0,0 +1,34 @@
from typing import Dict, Optional
from . import NotifyFormat, NotifyType
class AppriseAsset:
app_id: str
app_desc: str
app_url: str
html_notify_map: Dict[NotifyType, str]
default_html_color: str
default_extension: str
theme: Optional[str]
image_url_mask: str
image_url_logo: str
image_path_mask: Optional[str]
body_format: Optional[NotifyFormat]
async_mode: bool
interpret_escapes: bool
def __init__(
self,
app_id: str = ...,
app_desc: str = ...,
app_url: str = ...,
html_notify_map: Dict[NotifyType, str] = ...,
default_html_color: str = ...,
default_extension: str = ...,
theme: Optional[str] = ...,
image_url_mask: str = ...,
image_url_logo: str = ...,
image_path_mask: Optional[str] = ...,
body_format: Optional[NotifyFormat] = ...,
async_mode: bool = ...,
interpret_escapes: bool = ...
) -> None: ...

View file

@ -29,6 +29,8 @@ from . import attachment
from . import URLBase
from .AppriseAsset import AppriseAsset
from .logger import logger
from .common import ContentLocation
from .common import CONTENT_LOCATIONS
from .utils import GET_SCHEMA_RE
@ -38,7 +40,8 @@ class AppriseAttachment(object):
"""
def __init__(self, paths=None, asset=None, cache=True, **kwargs):
def __init__(self, paths=None, asset=None, cache=True, location=None,
**kwargs):
"""
Loads all of the paths/urls specified (if any).
@ -59,6 +62,25 @@ class AppriseAttachment(object):
It's also worth noting that the cache value is only set on elements
that are not already of subclass AttachBase()
Optionally set your current ContentLocation in the location argument.
This is used to further handle attachments. The rules are as follows:
- INACCESSIBLE: You simply have disabled use of the object; no
attachments will be retrieved/handled.
- HOSTED: You are hosting an attachment service for others.
In these circumstances all attachments that are LOCAL
based (such as file://) will not be allowed.
- LOCAL: The least restrictive mode as local files can be
referenced in addition to hosted.
In both HOSTED and LOCAL modes, INACCESSIBLE attachment types will
continue to be inaccessible. However, if you set this field (location)
to None (its default value) the attachment location category will not
be tested in any way (all attachment types will be allowed).
The location field is also a global option that can be set when
initializing the Apprise object.
"""
# Initialize our attachment listings
@ -71,6 +93,15 @@ class AppriseAttachment(object):
self.asset = \
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
if location is not None and location not in CONTENT_LOCATIONS:
msg = "An invalid Attachment location ({}) was specified." \
.format(location)
logger.warning(msg)
raise TypeError(msg)
# Store our location
self.location = location
# Now parse any paths specified
if paths is not None:
# Store our path(s)
@ -123,26 +154,45 @@ class AppriseAttachment(object):
# Iterate over our attachments
for _attachment in attachments:
if isinstance(_attachment, attachment.AttachBase):
# Go ahead and just add our attachment into our list
self.attachments.append(_attachment)
if self.location == ContentLocation.INACCESSIBLE:
logger.warning(
"Attachments are disabled; ignoring {}"
.format(_attachment))
return_status = False
continue
elif not isinstance(_attachment, six.string_types):
if isinstance(_attachment, six.string_types):
logger.debug("Loading attachment: {}".format(_attachment))
# Instantiate ourselves an object, this function throws or
# returns None if it fails
instance = AppriseAttachment.instantiate(
_attachment, asset=asset, cache=cache)
if not isinstance(instance, attachment.AttachBase):
return_status = False
continue
elif not isinstance(_attachment, attachment.AttachBase):
logger.warning(
"An invalid attachment (type={}) was specified.".format(
type(_attachment)))
return_status = False
continue
logger.debug("Loading attachment: {}".format(_attachment))
else:
# our entry is of type AttachBase, so just go ahead and point
# our instance to it for some post processing below
instance = _attachment
# Instantiate ourselves an object, this function throws or
# returns None if it fails
instance = AppriseAttachment.instantiate(
_attachment, asset=asset, cache=cache)
if not isinstance(instance, attachment.AttachBase):
# Apply some simple logic if our location flag is set
if self.location and ((
self.location == ContentLocation.HOSTED
and instance.location != ContentLocation.HOSTED)
or instance.location == ContentLocation.INACCESSIBLE):
logger.warning(
"Attachment was disallowed due to accessibility "
"restrictions ({}->{}): {}".format(
self.location, instance.location,
instance.url(privacy=True)))
return_status = False
continue

View file

@ -0,0 +1,38 @@
from typing import Any, Iterable, Iterator, Optional, Union
from . import AppriseAsset, ContentLocation
from .attachment import AttachBase
_Attachment = Union[str, AttachBase]
_Attachments = Iterable[_Attachment]
class AppriseAttachment:
def __init__(
self,
paths: Optional[_Attachments] = ...,
asset: Optional[AppriseAsset] = ...,
cache: bool = ...,
location: Optional[ContentLocation] = ...,
**kwargs: Any
) -> None: ...
def add(
self,
attachments: _Attachments,
asset: Optional[AppriseAsset] = ...,
cache: Optional[bool] = ...
) -> bool: ...
@staticmethod
def instantiate(
url: str,
asset: Optional[AppriseAsset] = ...,
cache: Optional[bool] = ...,
suppress_exceptions: bool = ...
) -> AttachBase: ...
def clear(self) -> None: ...
def size(self) -> int: ...
def pop(self, index: int = ...) -> AttachBase: ...
def __getitem__(self, index: int) -> AttachBase: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...
def __iter__(self) -> Iterator[AttachBase]: ...
def __len__(self) -> int: ...

View file

@ -0,0 +1,49 @@
from typing import Any, Iterable, Iterator, List, Optional, Union
from . import AppriseAsset, NotifyBase
from .config import ConfigBase
_Configs = Union[ConfigBase, str, Iterable[str]]
class AppriseConfig:
def __init__(
self,
paths: Optional[_Configs] = ...,
asset: Optional[AppriseAsset] = ...,
cache: bool = ...,
recursion: int = ...,
insecure_includes: bool = ...,
**kwargs: Any
) -> None: ...
def add(
self,
configs: _Configs,
asset: Optional[AppriseAsset] = ...,
cache: bool = ...,
recursion: Optional[bool] = ...,
insecure_includes: Optional[bool] = ...
) -> bool: ...
def add_config(
self,
content: str,
asset: Optional[AppriseAsset] = ...,
tag: Optional[str] = ...,
format: Optional[str] = ...,
recursion: Optional[int] = ...,
insecure_includes: Optional[bool] = ...
) -> bool: ...
def servers(self, tag: str = ..., *args: Any, **kwargs: Any) -> List[ConfigBase]: ...
def instantiate(
url: str,
asset: Optional[AppriseAsset] = ...,
tag: Optional[str] = ...,
cache: Optional[bool] = ...
) -> NotifyBase: ...
def clear(self) -> None: ...
def server_pop(self, index: int) -> ConfigBase: ...
def pop(self, index: int = ...) -> ConfigBase: ...
def __getitem__(self, index: int) -> ConfigBase: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...
def __iter__(self) -> Iterator[ConfigBase]: ...
def __len__(self) -> int: ...

View file

@ -25,7 +25,7 @@
import re
import six
import logging
from .logger import logger
from time import sleep
from datetime import datetime
from xml.sax.saxutils import escape as sax_escape
@ -47,6 +47,7 @@ from .AppriseAsset import AppriseAsset
from .utils import parse_url
from .utils import parse_bool
from .utils import parse_list
from .utils import parse_phone_no
# Used to break a path list into parts
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
@ -115,8 +116,8 @@ class URLBase(object):
# Secure sites should be verified against a Certificate Authority
verify_certificate = True
# Logging
logger = logging.getLogger(__name__)
# Logging to our global logger
logger = logger
# Define a default set of template arguments used for dynamically building
# details about our individual plugins for developers.
@ -280,7 +281,7 @@ class URLBase(object):
self._last_io_datetime = reference
return
if self.request_rate_per_sec <= 0.0:
if self.request_rate_per_sec <= 0.0 and not wait:
# We're done if there is no throttle limit set
return
@ -560,6 +561,39 @@ class URLBase(object):
return content
@staticmethod
def parse_phone_no(content, unquote=True):
"""A wrapper to utils.parse_phone_no() with unquoting support
Parses a specified set of data and breaks it into a list.
Args:
content (str): The path to split up into a list. If a list is
provided, then its individual entries are processed.
unquote (:obj:`bool`, optional): call unquote on each element
added to the returned list.
Returns:
list: A unique list containing all of the elements in the path
"""
if unquote:
try:
content = URLBase.unquote(content)
except TypeError:
# Nothing further to do
return []
except AttributeError:
# This exception ONLY gets thrown under Python v2.7 if an
# object() is passed in place of the content
return []
content = parse_phone_no(content)
return content
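A quick sketch of the new wrapper; the exact normalisation is delegated to utils.parse_phone_no(), so the printed output is indicative only:

from apprise.URLBase import URLBase

# Unquotes the input and splits it into a list of phone number candidates
print(URLBase.parse_phone_no('1-800-555-1223, +1 800 555 1234'))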
@property
def app_id(self):
return self.asset.app_id if self.asset.app_id else ''
@ -636,6 +670,8 @@ class URLBase(object):
results['qsd'].get('verify', True))
# Password overrides
if 'password' in results['qsd']:
results['password'] = results['qsd']['password']
if 'pass' in results['qsd']:
results['password'] = results['qsd']['pass']
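A sketch of the query-string password handling above (placeholder URLs and credentials); when both keys are supplied, 'pass' is applied last:

from apprise import Apprise

apobj = Apprise()
apobj.add('json://user@localhost?pass=s3cr3t')
apobj.add('json://user@localhost?password=s3cr3t')
print(apobj.urls(privacy=True))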

libs/apprise/URLBase.pyi (new file, 16 lines)
View file

@ -0,0 +1,16 @@
from logging import Logger
from typing import Any, Iterable, Set, Optional
class URLBase:
service_name: Optional[str]
protocol: Optional[str]
secure_protocol: Optional[str]
request_rate_per_sec: int
socket_connect_timeout: float
socket_read_timeout: float
tags: Set[str]
verify_certificate: bool
logger: Logger
def url(self, privacy: bool = ..., *args: Any, **kwargs: Any) -> str: ...
def __contains__(self, tags: Iterable[str]) -> bool: ...
def __str__(self) -> str: ...

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
@ -23,11 +23,11 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__title__ = 'apprise'
__version__ = '0.8.8'
__title__ = 'Apprise'
__version__ = '0.9.6'
__author__ = 'Chris Caron'
__license__ = 'MIT'
__copywrite__ = 'Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>'
__copywrite__ = 'Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>'
__email__ = 'lead2gold@gmail.com'
__status__ = 'Production'
@ -41,8 +41,10 @@ from .common import OverflowMode
from .common import OVERFLOW_MODES
from .common import ConfigFormat
from .common import CONFIG_FORMATS
from .common import ConfigIncludeMode
from .common import CONFIG_INCLUDE_MODES
from .common import ContentIncludeMode
from .common import CONTENT_INCLUDE_MODES
from .common import ContentLocation
from .common import CONTENT_LOCATIONS
from .URLBase import URLBase
from .URLBase import PrivacyMode
@ -55,10 +57,13 @@ from .AppriseAsset import AppriseAsset
from .AppriseConfig import AppriseConfig
from .AppriseAttachment import AppriseAttachment
# Inherit our logging with our additional entries added to it
from .logger import logging
from .logger import logger
from .logger import LogCapture
# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
logging.getLogger(__name__).addHandler(logging.NullHandler())
__all__ = [
# Core
@ -69,6 +74,10 @@ __all__ = [
'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode',
'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES',
'ConfigFormat', 'CONFIG_FORMATS',
'ConfigIncludeMode', 'CONFIG_INCLUDE_MODES',
'ContentIncludeMode', 'CONTENT_INCLUDE_MODES',
'ContentLocation', 'CONTENT_LOCATIONS',
'PrivacyMode',
# Logging
'logging', 'logger', 'LogCapture',
]

View file

@ -1,22 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:import namespace="http://schemas.xmlsoap.org/soap/envelope/" schemaLocation="http://schemas.xmlsoap.org/soap/envelope/"/>
<xs:element name="Notification">
<xs:complexType>
<xs:sequence>
<xs:element name="Version" type="xs:string" />
<xs:element name="MessageType" type="xs:string" />
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:enumeration value="success" />
<xs:enumeration value="failure" />
<xs:enumeration value="info" />
<xs:enumeration value="warning" />
</xs:restriction>
</xs:simpleType>
</xs:element>
<xs:element name="Subject" type="xs:string" />
<xs:element name="Message" type="xs:string" />
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:complexType>
<xs:sequence>
<xs:element name="Version" type="xs:string" />
<xs:element name="Subject" type="xs:string" />
<xs:element name="MessageType">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:enumeration value="success" />
<xs:enumeration value="failure" />
<xs:enumeration value="info" />
<xs:enumeration value="warning" />
</xs:restriction>
</xs:simpleType>
</xs:element>
<xs:element name="Message" type="xs:string" />
</xs:sequence>
</xs:complexType>
</xs:element>
</xs:schema>

View file

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:import namespace="http://schemas.xmlsoap.org/soap/envelope/" schemaLocation="http://schemas.xmlsoap.org/soap/envelope/"/>
<xs:element name="Notification">
<xs:complexType>
<xs:sequence>
<xs:element name="Version" type="xs:string" />
<xs:element name="Subject" type="xs:string" />
<xs:element name="MessageType">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:enumeration value="success" />
<xs:enumeration value="failure" />
<xs:enumeration value="info" />
<xs:enumeration value="warning" />
</xs:restriction>
</xs:simpleType>
</xs:element>
<xs:element name="Message" type="xs:string" />
<xs:element name="Attachments" minOccurs="0">
<xs:complexType>
<xs:sequence>
<xs:element name="Attachment" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="mimetype" type="xs:string" use="required"/>
<xs:attribute name="filename" type="xs:string" use="required"/>
</xs:extension>
</xs:simpleContent>
</xs:complexType>
</xs:element>
</xs:sequence>
<xs:attribute name="encoding" type="xs:string" use="required"/>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
</xs:element>
</xs:schema>

View file

@ -28,6 +28,7 @@ import time
import mimetypes
from ..URLBase import URLBase
from ..utils import parse_bool
from ..common import ContentLocation
from ..AppriseLocale import gettext_lazy as _
@ -62,6 +63,11 @@ class AttachBase(URLBase):
# 5 MB = 5242880 bytes
max_file_size = 5242880
# By default all attachment types are inaccessible.
# Developers of items identified in the attachment plugin directory
are required to set a location
location = ContentLocation.INACCESSIBLE
# Here is where we define all of the arguments we accept on the url
# such as: schema://whatever/?overflow=upstream&format=text
# These act the same way as tokens except they are optional and/or

View file

@ -0,0 +1,37 @@
from typing import Any, Dict, Optional
from .. import ContentLocation
class AttachBase:
max_detect_buffer_size: int
unknown_mimetype: str
unknown_filename: str
unknown_filename_extension: str
strict: bool
max_file_size: int
location: ContentLocation
template_args: Dict[str, Any]
def __init__(
self,
name: Optional[str] = ...,
mimetype: Optional[str] = ...,
cache: Optional[bool] = ...,
**kwargs: Any
) -> None: ...
@property
def path(self) -> Optional[str]: ...
@property
def name(self) -> Optional[str]: ...
@property
def mimetype(self) -> Optional[str]: ...
def exists(self) -> bool: ...
def invalidate(self) -> None: ...
def download(self) -> bool: ...
@staticmethod
def parse_url(
url: str,
verify_host: bool = ...
) -> Dict[str, Any]: ...
def __len__(self) -> int: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...

View file

@ -26,6 +26,7 @@
import re
import os
from .AttachBase import AttachBase
from ..common import ContentLocation
from ..AppriseLocale import gettext_lazy as _
@ -40,6 +41,10 @@ class AttachFile(AttachBase):
# The default protocol
protocol = 'file'
# Content is local to the same location as the apprise instance
# being called (server-side)
location = ContentLocation.LOCAL
def __init__(self, path, **kwargs):
"""
Initialize Local File Attachment Object
@ -81,6 +86,10 @@ class AttachFile(AttachBase):
validate it.
"""
if self.location == ContentLocation.INACCESSIBLE:
# our content is inaccessible
return False
# Ensure any existing content set has been invalidated
self.invalidate()

View file

@ -29,6 +29,7 @@ import six
import requests
from tempfile import NamedTemporaryFile
from .AttachBase import AttachBase
from ..common import ContentLocation
from ..URLBase import PrivacyMode
from ..AppriseLocale import gettext_lazy as _
@ -50,6 +51,9 @@ class AttachHTTP(AttachBase):
# The number of bytes in memory to read from the remote source at a time
chunk_size = 8192
# Web based requests are remote/external to our current location
location = ContentLocation.HOSTED
def __init__(self, headers=None, **kwargs):
"""
Initialize HTTP Object
@ -86,6 +90,10 @@ class AttachHTTP(AttachBase):
Perform retrieval of the configuration based on the specified request
"""
if self.location == ContentLocation.INACCESSIBLE:
# our content is inaccessible
return False
# Ensure any existing content set has been invalidated
self.invalidate()

View file

@ -26,7 +26,11 @@
import click
import logging
import platform
import six
import sys
import os
import re
from os.path import isfile
from os.path import expanduser
from os.path import expandvars
@ -39,6 +43,7 @@ from . import AppriseConfig
from .utils import parse_list
from .common import NOTIFY_TYPES
from .common import NOTIFY_FORMATS
from .common import ContentLocation
from .logger import logger
from . import __title__
@ -133,6 +138,9 @@ def print_version_msg():
help='Perform a trial run but only prints the notification '
'services to-be triggered to stdout. Notifications are never '
'sent using this mode.')
@click.option('--details', '-l', is_flag=True,
help='Prints details about the current services supported by '
'Apprise.')
@click.option('--recursion-depth', '-R', default=DEFAULT_RECURSION_DEPTH,
type=int,
help='The number of recursive import entries that can be '
@ -141,6 +149,8 @@ def print_version_msg():
@click.option('--verbose', '-v', count=True,
help='Makes the operation more talkative. Use multiple v to '
'increase the verbosity. I.e.: -vvvv')
@click.option('--interpret-escapes', '-e', is_flag=True,
help='Enable interpretation of backslash escapes')
@click.option('--debug', '-D', is_flag=True, help='Debug mode')
@click.option('--version', '-V', is_flag=True,
help='Display the apprise version and exit.')
@ -148,7 +158,7 @@ def print_version_msg():
metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',)
def main(body, title, config, attach, urls, notification_type, theme, tag,
input_format, dry_run, recursion_depth, verbose, disable_async,
debug, version):
details, interpret_escapes, debug, version):
"""
Send a notification to all of the specified servers identified by their
URLs, using the content provided within the title, body and notification-type.
@ -224,8 +234,15 @@ def main(body, title, config, attach, urls, notification_type, theme, tag,
# Prepare our asset
asset = AppriseAsset(
# Our body format
body_format=input_format,
# Interpret Escapes
interpret_escapes=interpret_escapes,
# Set the theme
theme=theme,
# Async mode is only used for Python v3+ and allows a user to send
# all of their notifications asynchronously. This was made an option
# in case there are problems in the future where it's better that
@ -234,18 +251,132 @@ def main(body, title, config, attach, urls, notification_type, theme, tag,
)
# Create our Apprise object
a = Apprise(asset=asset, debug=debug)
a = Apprise(asset=asset, debug=debug, location=ContentLocation.LOCAL)
# Load our configuration if no URLs or specified configuration was
# identified on the command line
a.add(AppriseConfig(
paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))]
if not (config or urls) else config,
asset=asset, recursion=recursion_depth))
if details:
# Print details and exit
results = a.details(show_requirements=True, show_disabled=True)
# Load our inventory up
for url in urls:
a.add(url)
# Sort our results:
plugins = sorted(
results['schemas'], key=lambda i: str(i['service_name']))
for entry in plugins:
protocols = [] if not entry['protocols'] else \
[p for p in entry['protocols']
if isinstance(p, six.string_types)]
protocols.extend(
[] if not entry['secure_protocols'] else
[p for p in entry['secure_protocols']
if isinstance(p, six.string_types)])
if len(protocols) == 1:
# Simplify view by swapping {schema} with the single
# protocol value
# Convert tuple to list
entry['details']['templates'] = \
list(entry['details']['templates'])
for x in range(len(entry['details']['templates'])):
entry['details']['templates'][x] = \
re.sub(
r'^[^}]+}://',
'{}://'.format(protocols[0]),
entry['details']['templates'][x])
click.echo(click.style(
'{} {:<30} '.format(
'+' if entry['enabled'] else '-',
str(entry['service_name'])),
fg="green" if entry['enabled'] else "red", bold=True),
nl=(not entry['enabled'] or len(protocols) == 1))
if not entry['enabled']:
if entry['requirements']['details']:
click.echo(
' ' + str(entry['requirements']['details']))
if entry['requirements']['packages_required']:
click.echo(' Python Packages Required:')
for req in entry['requirements']['packages_required']:
click.echo(' - ' + req)
if entry['requirements']['packages_recommended']:
click.echo(' Python Packages Recommended:')
for req in entry['requirements']['packages_recommended']:
click.echo(' - ' + req)
# new line padding between entries
click.echo()
continue
if len(protocols) > 1:
click.echo('| Schema(s): {}'.format(
', '.join(protocols),
))
prefix = ' - '
click.echo('{}{}'.format(
prefix,
'\n{}'.format(prefix).join(entry['details']['templates'])))
# new line padding between entries
click.echo()
sys.exit(0)
# The priorities of what is accepted are parsed in order below:
# 1. URLs by command line
# 2. Configuration by command line
# 3. URLs by environment variable: APPRISE_URLS
# 4. Configuration by environment variable: APPRISE_CONFIG
# 5. Default Configuration File(s) (if found)
#
if urls:
if tag:
# Ignore any tags specified
logger.warning(
'--tag (-g) entries are ignored when using specified URLs')
tag = None
# Load our URLs (if any defined)
for url in urls:
a.add(url)
if config:
# Provide a warning to the end user if they specified both
logger.warning(
'You defined both URLs and a --config (-c) entry; '
'Only the URLs will be referenced.')
elif config:
# We load our configuration file(s) now only if no URLs were specified
# Specified config entries trump all
a.add(AppriseConfig(
paths=config, asset=asset, recursion=recursion_depth))
elif os.environ.get('APPRISE_URLS', '').strip():
logger.debug('Loading provided APPRISE_URLS environment variable')
if tag:
# Ignore any tags specified
logger.warning(
'--tag (-g) entries are ignored when using specified URLs')
tag = None
# Attempt to use our APPRISE_URLS environment variable (if populated)
a.add(os.environ['APPRISE_URLS'].strip())
elif os.environ.get('APPRISE_CONFIG', '').strip():
logger.debug('Loading provided APPRISE_CONFIG environment variable')
# Fall back to config environment variable (if populated)
a.add(AppriseConfig(
paths=os.environ['APPRISE_CONFIG'].strip(),
asset=asset, recursion=recursion_depth))
else:
# Load default configuration
a.add(AppriseConfig(
paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))],
asset=asset, recursion=recursion_depth))
if len(a) == 0:
logger.error(

View file

@ -130,28 +130,58 @@ CONFIG_FORMATS = (
)
class ConfigIncludeMode(object):
class ContentIncludeMode(object):
"""
The different Cofiguration inclusion modes. All Configuration
plugins will have one of these associated with it.
The different Content inclusion modes. All content based plugins will
have one of these associated with it.
"""
# - Configuration inclusion of same type only; hence a file:// can include
# - Content inclusion of same type only; hence a file:// can include
# a file://
# - Cross file inclusion is not allowed unless insecure_includes (a flag)
# is set to True. In these cases STRICT acts as type ALWAYS
STRICT = 'strict'
# This configuration type can never be included
# This content type can never be included
NEVER = 'never'
# File configuration can always be included
# This content can always be included
ALWAYS = 'always'
CONFIG_INCLUDE_MODES = (
ConfigIncludeMode.STRICT,
ConfigIncludeMode.NEVER,
ConfigIncludeMode.ALWAYS,
CONTENT_INCLUDE_MODES = (
ContentIncludeMode.STRICT,
ContentIncludeMode.NEVER,
ContentIncludeMode.ALWAYS,
)
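The renamed constants keep their previous meaning for the bundled configuration plugins (see the ConfigFile/ConfigHTTP hunks further down); a small check:

from apprise.common import ContentIncludeMode
from apprise.config.ConfigFile import ConfigFile
from apprise.config.ConfigHTTP import ConfigHTTP

# file:// configuration may only include other file:// configuration,
# while http(s):// configuration can always be included.
assert ConfigFile.allow_cross_includes == ContentIncludeMode.STRICT
assert ConfigHTTP.allow_cross_includes == ContentIncludeMode.ALWAYS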
class ContentLocation(object):
"""
This is primarily used for handling file attachments. The idea is
to track the source of the attachment itself. We don't want
remote calls to a server to access local attachments for example.
By knowing the attachment type and cross-associating it with how
we plan on accessing the content, we can make a judgement call
(for security reasons) if we will allow it.
Obviously local uses of apprise can access both local and remote
type files.
"""
# Content is located locally (on the same server as apprise)
LOCAL = 'local'
# Content is located in a remote location
HOSTED = 'hosted'
# Content is inaccessible
INACCESSIBLE = 'n/a'
CONTENT_LOCATIONS = (
ContentLocation.LOCAL,
ContentLocation.HOSTED,
ContentLocation.INACCESSIBLE,
)
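How the constants map onto the attachment plugins touched by this commit:

from apprise.common import ContentLocation
from apprise.attachment.AttachBase import AttachBase
from apprise.attachment.AttachFile import AttachFile
from apprise.attachment.AttachHTTP import AttachHTTP

# Each attachment plugin now declares where its content lives; the base
# class stays INACCESSIBLE until a plugin explicitly opts in.
assert AttachBase.location == ContentLocation.INACCESSIBLE
assert AttachFile.location == ContentLocation.LOCAL
assert AttachHTTP.location == ContentLocation.HOSTED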
# This is a reserved tag that is automatically assigned to every

libs/apprise/common.pyi (new file, 15 lines)
View file

@ -0,0 +1,15 @@
class NotifyType:
INFO: NotifyType
SUCCESS: NotifyType
WARNING: NotifyType
FAILURE: NotifyType
class NotifyFormat:
TEXT: NotifyFormat
HTML: NotifyFormat
MARKDOWN: NotifyFormat
class ContentLocation:
LOCAL: ContentLocation
HOSTED: ContentLocation
INACCESSIBLE: ContentLocation

View file

@ -34,13 +34,18 @@ from ..AppriseAsset import AppriseAsset
from ..URLBase import URLBase
from ..common import ConfigFormat
from ..common import CONFIG_FORMATS
from ..common import ConfigIncludeMode
from ..common import ContentIncludeMode
from ..utils import GET_SCHEMA_RE
from ..utils import parse_list
from ..utils import parse_bool
from ..utils import parse_urls
from ..utils import cwe312_url
from . import SCHEMA_MAP
# Test whether token is valid or not
VALID_TOKEN = re.compile(
r'(?P<token>[a-z0-9][a-z0-9_]+)', re.I)
class ConfigBase(URLBase):
"""
@ -65,7 +70,7 @@ class ConfigBase(URLBase):
# By default all configuration is not includable using the 'include'
# line found in configuration files.
allow_cross_includes = ConfigIncludeMode.NEVER
allow_cross_includes = ContentIncludeMode.NEVER
# the config path manages the handling of relative include
config_path = os.getcwd()
@ -205,8 +210,8 @@ class ConfigBase(URLBase):
# Configuration files were detected; recursively populate them
# If we have been configured to do so
for url in configs:
if self.recursion > 0:
if self.recursion > 0:
# Attempt to acquire the schema at the very least to allow
# our configuration based urls.
schema = GET_SCHEMA_RE.match(url)
@ -219,6 +224,7 @@ class ConfigBase(URLBase):
url = os.path.join(self.config_path, url)
url = '{}://{}'.format(schema, URLBase.quote(url))
else:
# Ensure our schema is always in lower case
schema = schema.group('schema').lower()
@ -229,27 +235,31 @@ class ConfigBase(URLBase):
'Unsupported include schema {}.'.format(schema))
continue
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
# Parse our url details of the server object as dictionary
# containing all of the information parsed from our URL
results = SCHEMA_MAP[schema].parse_url(url)
if not results:
# Failed to parse the server URL
self.logger.warning(
'Unparseable include URL {}'.format(url))
'Unparseable include URL {}'.format(loggable_url))
continue
# Handle cross inclusion based on allow_cross_includes rules
if (SCHEMA_MAP[schema].allow_cross_includes ==
ConfigIncludeMode.STRICT
ContentIncludeMode.STRICT
and schema not in self.schemas()
and not self.insecure_includes) or \
SCHEMA_MAP[schema].allow_cross_includes == \
ConfigIncludeMode.NEVER:
ContentIncludeMode.NEVER:
# Prevent the loading if insecure base protocols
ConfigBase.logger.warning(
'Including {}:// based configuration is prohibited. '
'Ignoring URL {}'.format(schema, url))
'Ignoring URL {}'.format(schema, loggable_url))
continue
# Prepare our Asset Object
@ -275,7 +285,7 @@ class ConfigBase(URLBase):
except Exception as e:
# the arguments are invalid or can not be used.
self.logger.warning(
'Could not load include URL: {}'.format(url))
'Could not load include URL: {}'.format(loggable_url))
self.logger.debug('Loading Exception: {}'.format(str(e)))
continue
@ -288,16 +298,23 @@ class ConfigBase(URLBase):
del cfg_plugin
else:
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
self.logger.debug(
'Recursion limit reached; ignoring Include URL: %s' % url)
'Recursion limit reached; ignoring Include URL: %s',
loggable_url)
if self._cached_servers:
self.logger.info('Loaded {} entries from {}'.format(
len(self._cached_servers), self.url()))
self.logger.info(
'Loaded {} entries from {}'.format(
len(self._cached_servers),
self.url(privacy=asset.secure_logging)))
else:
self.logger.warning(
'Failed to load Apprise configuration from {}'.format(
self.url()))
self.url(privacy=asset.secure_logging)))
# Set the time our content was cached at
self._cached_time = time.time()
@ -527,6 +544,9 @@ class ConfigBase(URLBase):
# the include keyword
configs = list()
# Prepare our Asset Object
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
# Define what a valid line should look like
valid_line_re = re.compile(
r'^\s*(?P<line>([;#]+(?P<comment>.*))|'
@ -563,27 +583,37 @@ class ConfigBase(URLBase):
continue
if config:
ConfigBase.logger.debug('Include URL: {}'.format(config))
# CWE-312 (Secure Logging) Handling
loggable_url = config if not asset.secure_logging \
else cwe312_url(config)
ConfigBase.logger.debug(
'Include URL: {}'.format(loggable_url))
# Store our include line
configs.append(config.strip())
continue
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
# Acquire our url tokens
results = plugins.url_to_dict(url)
results = plugins.url_to_dict(
url, secure_logging=asset.secure_logging)
if results is None:
# Failed to parse the server URL
ConfigBase.logger.warning(
'Unparseable URL {} on line {}.'.format(url, line))
'Unparseable URL {} on line {}.'.format(
loggable_url, line))
continue
# Build a list of tags to associate with the newly added
# notifications if any were set
results['tag'] = set(parse_list(result.group('tags')))
# Prepare our Asset Object
results['asset'] = \
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
# Set our Asset Object
results['asset'] = asset
try:
# Attempt to create an instance of our plugin using the
@ -591,13 +621,14 @@ class ConfigBase(URLBase):
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
# Create log entry of loaded URL
ConfigBase.logger.debug('Loaded URL: {}'.format(plugin.url()))
ConfigBase.logger.debug(
'Loaded URL: %s', plugin.url(privacy=asset.secure_logging))
except Exception as e:
# the arguments are invalid or can not be used.
ConfigBase.logger.warning(
'Could not load URL {} on line {}.'.format(
url, line))
loggable_url, line))
ConfigBase.logger.debug('Loading Exception: %s' % str(e))
continue
@ -633,7 +664,9 @@ class ConfigBase(URLBase):
# Load our data (safely)
result = yaml.load(content, Loader=yaml.SafeLoader)
except (AttributeError, yaml.error.MarkedYAMLError) as e:
except (AttributeError,
yaml.parser.ParserError,
yaml.error.MarkedYAMLError) as e:
# Invalid content
ConfigBase.logger.error(
'Invalid Apprise YAML data specified.')
@ -671,7 +704,9 @@ class ConfigBase(URLBase):
continue
if not (hasattr(asset, k) and
isinstance(getattr(asset, k), six.string_types)):
isinstance(getattr(asset, k),
(bool, six.string_types))):
# We can't set a function or non-string set value
ConfigBase.logger.warning(
'Invalid asset key "{}".'.format(k))
@ -681,15 +716,23 @@ class ConfigBase(URLBase):
# Convert to an empty string
v = ''
if not isinstance(v, six.string_types):
if (isinstance(v, (bool, six.string_types))
and isinstance(getattr(asset, k), bool)):
# If the object in the Asset is a boolean, then
# we want to convert the specified string to
# match that.
setattr(asset, k, parse_bool(v))
elif isinstance(v, six.string_types):
# Set our asset object with the new value
setattr(asset, k, v.strip())
else:
# we must set strings with a string
ConfigBase.logger.warning(
'Invalid asset value to "{}".'.format(k))
continue
# Set our asset object with the new value
setattr(asset, k, v.strip())
#
# global tag root directive
#
@ -740,6 +783,10 @@ class ConfigBase(URLBase):
# we can. Reset it to None on each iteration
results = list()
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
if isinstance(url, six.string_types):
# We're just a simple URL string...
schema = GET_SCHEMA_RE.match(url)
@ -748,16 +795,18 @@ class ConfigBase(URLBase):
# config file at least has something to take action
# with.
ConfigBase.logger.warning(
'Invalid URL {}, entry #{}'.format(url, no + 1))
'Invalid URL {}, entry #{}'.format(
loggable_url, no + 1))
continue
# We found a valid schema worthy of tracking; store its
# details:
_results = plugins.url_to_dict(url)
_results = plugins.url_to_dict(
url, secure_logging=asset.secure_logging)
if _results is None:
ConfigBase.logger.warning(
'Unparseable URL {}, entry #{}'.format(
url, no + 1))
loggable_url, no + 1))
continue
# add our results to our global set
@ -791,19 +840,20 @@ class ConfigBase(URLBase):
.format(key, no + 1))
continue
# Store our URL and Schema Regex
_url = key
# Store our schema
schema = _schema.group('schema').lower()
# Store our URL and Schema Regex
_url = key
if _url is None:
# the loop above failed to match anything
ConfigBase.logger.warning(
'Unsupported schema in urls, entry #{}'.format(no + 1))
'Unsupported URL, entry #{}'.format(no + 1))
continue
_results = plugins.url_to_dict(_url)
_results = plugins.url_to_dict(
_url, secure_logging=asset.secure_logging)
if _results is None:
# Setup dictionary
_results = {
@ -830,12 +880,33 @@ class ConfigBase(URLBase):
if 'schema' in entries:
del entries['schema']
# support our special tokens (if they're present)
if schema in plugins.SCHEMA_MAP:
entries = ConfigBase._special_token_handler(
schema, entries)
# Extend our dictionary with our new entries
r.update(entries)
# add our results to our global set
results.append(r)
elif isinstance(tokens, dict):
# support our special tokens (if they're present)
if schema in plugins.SCHEMA_MAP:
tokens = ConfigBase._special_token_handler(
schema, tokens)
# Copy ourselves a template of our parsed URL as a base to
# work with
r = _results.copy()
# add our result set
r.update(tokens)
# add our results to our global set
results.append(r)
else:
# add our results to our global set
results.append(_results)
@ -867,6 +938,17 @@ class ConfigBase(URLBase):
# Just use the global settings
_results['tag'] = global_tags
for key in list(_results.keys()):
# Strip out any tokens we know that we can't accept and
# warn the user
match = VALID_TOKEN.match(key)
if not match:
ConfigBase.logger.warning(
'Ignoring invalid token ({}) found in YAML '
'configuration entry #{}, item #{}'
.format(key, no + 1, entry))
del _results[key]
ConfigBase.logger.trace(
'URL #{}: {} unpacked as:{}{}'
.format(no + 1, url, os.linesep, os.linesep.join(
@ -883,7 +965,8 @@ class ConfigBase(URLBase):
# Create log entry of loaded URL
ConfigBase.logger.debug(
'Loaded URL: {}'.format(plugin.url()))
'Loaded URL: {}'.format(
plugin.url(privacy=asset.secure_logging)))
except Exception as e:
# the arguments are invalid or can not be used.
@ -913,6 +996,135 @@ class ConfigBase(URLBase):
# Pop the element off of the stack
return self._cached_servers.pop(index)
@staticmethod
def _special_token_handler(schema, tokens):
"""
This function takes a list of tokens and updates them to no longer
include any special tokens such as +,-, and :
- schema must be a valid schema of a supported plugin type
- tokens must be a dictionary containing the yaml entries parsed.
The idea here is we can post process a set of tokens provided in
a YAML file where the user provided some of the special keywords.
We effectively look up what these keywords map to and replace them
with the values they're expected to hold.
"""
# Create a copy of our dictionary
tokens = tokens.copy()
for kw, meta in plugins.SCHEMA_MAP[schema]\
.template_kwargs.items():
# Determine our prefix:
prefix = meta.get('prefix', '+')
# Detect any matches
matches = \
{k[1:]: str(v) for k, v in tokens.items()
if k.startswith(prefix)}
if not matches:
# we're done with this entry
continue
if not isinstance(tokens.get(kw), dict):
# Invalid; correct it
tokens[kw] = dict()
# strip out processed tokens
tokens = {k: v for k, v in tokens.items()
if not k.startswith(prefix)}
# Update our entries
tokens[kw].update(matches)
# Now map our tokens according to the class templates defined by
# each service.
#
# This is specifically used for YAML file parsing. It allows a user to
# define an entry such as:
#
# urls:
#   - mailto://user:pass@domain:
#       - to: user1@hotmail.com
#       - to: user2@hotmail.com
#
# Under the hood, the NotifyEmail() class does not parse the `to`
# argument. Its contents need to be mapped to `targets`. This is
# defined in the class via the `template_args` and `template_tokens`
# sections.
#
# This function allows these mappings to take place within the
# YAML file as independent arguments (a short sketch follows this
# function).
class_templates = \
plugins.details(plugins.SCHEMA_MAP[schema])
for key in list(tokens.keys()):
if key not in class_templates['args']:
# No need to handle non-arg entries
continue
# get our `map_to` and/or 'alias_of' value (if it exists)
map_to = class_templates['args'][key].get(
'alias_of', class_templates['args'][key].get('map_to', ''))
if map_to == key:
# We're already good as we are now
continue
if map_to in class_templates['tokens']:
meta = class_templates['tokens'][map_to]
else:
meta = class_templates['args'].get(
map_to, class_templates['args'][key])
# Perform a translation/mapping if our code reaches here
value = tokens[key]
del tokens[key]
# Detect if we're dealing with a list or not
is_list = re.search(
r'^(list|choice):.*',
meta.get('type'),
re.IGNORECASE)
if map_to not in tokens:
tokens[map_to] = [] if is_list \
else meta.get('default')
elif is_list and not isinstance(tokens.get(map_to), list):
# Convert ourselves to a list if we aren't already
tokens[map_to] = [tokens[map_to]]
# Type Conversion
if re.search(
r'^(choice:)?string',
meta.get('type'),
re.IGNORECASE) \
and not isinstance(value, six.string_types):
# Ensure our format is as expected
value = str(value)
# Apply any further translations if required (absolute map)
# This is the case when an arg maps to a token which further
# maps to a different function arg on the class constructor
abs_map = meta.get('map_to', map_to)
# Set our token as how it was provided by the configuration
if isinstance(tokens.get(map_to), list):
tokens[abs_map].append(value)
else:
tokens[abs_map] = value
# Return our tokens
return tokens
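As a minimal sketch of the mapping described in the comments above (assuming ConfigBase is importable from apprise.config.ConfigBase and the Apprise API plugin shown later in this commit is loaded; the token and header values are hypothetical):

from apprise.config.ConfigBase import ConfigBase

# 'to' is declared as an alias_of 'token' for the 'apprise' schema, and
# '+' prefixed keys are folded into the 'headers' template kwarg
tokens = ConfigBase._special_token_handler(
    'apprise', {'to': 'mytoken123', '+X-Custom-Header': '1'})

# tokens now resembles:
#   {'token': 'mytoken123', 'headers': {'X-Custom-Header': '1'}}
print(tokens)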
def __getitem__(self, index):
"""
Returns the indexed server entry associated with the loaded

View file

@ -0,0 +1,3 @@
from .. import URLBase
class ConfigBase(URLBase): ...

View file

@ -28,7 +28,7 @@ import io
import os
from .ConfigBase import ConfigBase
from ..common import ConfigFormat
from ..common import ConfigIncludeMode
from ..common import ContentIncludeMode
from ..AppriseLocale import gettext_lazy as _
@ -44,7 +44,7 @@ class ConfigFile(ConfigBase):
protocol = 'file'
# Configuration file inclusion can only be of the same type
allow_cross_includes = ConfigIncludeMode.STRICT
allow_cross_includes = ContentIncludeMode.STRICT
def __init__(self, path, **kwargs):
"""

View file

@ -28,7 +28,7 @@ import six
import requests
from .ConfigBase import ConfigBase
from ..common import ConfigFormat
from ..common import ConfigIncludeMode
from ..common import ContentIncludeMode
from ..URLBase import PrivacyMode
from ..AppriseLocale import gettext_lazy as _
@ -66,7 +66,7 @@ class ConfigHTTP(ConfigBase):
max_error_buffer_size = 2048
# Configuration file inclusion can always include this type
allow_cross_includes = ConfigIncludeMode.ALWAYS
allow_cross_includes = ContentIncludeMode.ALWAYS
def __init__(self, headers=None, **kwargs):
"""

View file

@ -1,21 +1,27 @@
# Translations template for apprise.
# Copyright (C) 2020 Chris Caron
# Copyright (C) 2021 Chris Caron
# This file is distributed under the same license as the apprise project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2021.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: apprise 0.8.8\n"
"Project-Id-Version: apprise 0.9.6\n"
"Report-Msgid-Bugs-To: lead2gold@gmail.com\n"
"POT-Creation-Date: 2020-09-02 07:46-0400\n"
"POT-Creation-Date: 2021-12-01 18:56-0500\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.7.0\n"
"Generated-By: Babel 2.9.1\n"
msgid "A local Gnome environment is required."
msgstr ""
msgid "A local Microsoft Windows environment is required."
msgstr ""
msgid "API Key"
msgstr ""
@ -44,6 +50,27 @@ msgstr ""
msgid "Add Tokens"
msgstr ""
msgid "Alert Type"
msgstr ""
msgid "Alias"
msgstr ""
msgid "Amount"
msgstr ""
msgid "App Access Token"
msgstr ""
msgid "App ID"
msgstr ""
msgid "App Version"
msgstr ""
msgid "Application ID"
msgstr ""
msgid "Application Key"
msgstr ""
@ -83,6 +110,9 @@ msgstr ""
msgid "Cache Results"
msgstr ""
msgid "Call"
msgstr ""
msgid "Carbon Copy"
msgstr ""
@ -104,15 +134,27 @@ msgstr ""
msgid "Country"
msgstr ""
msgid "Currency"
msgstr ""
msgid "Custom Icon"
msgstr ""
msgid "Cycles"
msgstr ""
msgid "DBus Notification"
msgstr ""
msgid "Details"
msgstr ""
msgid "Detect Bot Owner"
msgstr ""
msgid "Device"
msgstr ""
msgid "Device API Key"
msgstr ""
@ -134,12 +176,18 @@ msgstr ""
msgid "Email"
msgstr ""
msgid "Email Header"
msgstr ""
msgid "Encrypted Password"
msgstr ""
msgid "Encrypted Salt"
msgstr ""
msgid "Entity"
msgstr ""
msgid "Event"
msgstr ""
@ -152,6 +200,12 @@ msgstr ""
msgid "Facility"
msgstr ""
msgid "Flair ID"
msgstr ""
msgid "Flair Text"
msgstr ""
msgid "Footer Logo"
msgstr ""
@ -170,6 +224,9 @@ msgstr ""
msgid "From Phone No"
msgstr ""
msgid "Gnome Notification"
msgstr ""
msgid "Group"
msgstr ""
@ -185,12 +242,33 @@ msgstr ""
msgid "Icon Type"
msgstr ""
msgid "Identifier"
msgstr ""
msgid "Image Link"
msgstr ""
msgid "Include Footer"
msgstr ""
msgid "Include Image"
msgstr ""
msgid "Include Segment"
msgstr ""
msgid "Is Ad?"
msgstr ""
msgid "Is Spoiler"
msgstr ""
msgid "Kind"
msgstr ""
msgid "Language"
msgstr ""
msgid "Local File"
msgstr ""
@ -200,6 +278,15 @@ msgstr ""
msgid "Log to STDERR"
msgstr ""
msgid "Long-Lived Access Token"
msgstr ""
msgid "MacOSX Notification"
msgstr ""
msgid "Master Key"
msgstr ""
msgid "Memory"
msgstr ""
@ -209,18 +296,41 @@ msgstr ""
msgid "Message Mode"
msgstr ""
msgid "Message Type"
msgstr ""
msgid "Modal"
msgstr ""
msgid "Mode"
msgstr ""
msgid "NSFW"
msgstr ""
msgid "Name"
msgstr ""
msgid "No dependencies."
msgstr ""
msgid "Notification ID"
msgstr ""
msgid "Notify Format"
msgstr ""
msgid "OAuth Access Token"
msgstr ""
msgid "OAuth2 KeyFile"
msgstr ""
msgid ""
"Only works with Mac OS X 10.8 and higher. Additionally requires that "
"/usr/local/bin/terminal-notifier is locally accessible."
msgstr ""
msgid "Organization"
msgstr ""
@ -230,6 +340,12 @@ msgstr ""
msgid "Overflow Mode"
msgstr ""
msgid "Packages are recommended to improve functionality."
msgstr ""
msgid "Packages are required to function."
msgstr ""
msgid "Password"
msgstr ""
@ -254,6 +370,9 @@ msgstr ""
msgid "Provider Key"
msgstr ""
msgid "QOS"
msgstr ""
msgid "Region"
msgstr ""
@ -263,6 +382,9 @@ msgstr ""
msgid "Remove Tokens"
msgstr ""
msgid "Resubmit Flag"
msgstr ""
msgid "Retry"
msgstr ""
@ -287,6 +409,9 @@ msgstr ""
msgid "Secure Mode"
msgstr ""
msgid "Send Replies"
msgstr ""
msgid "Sender ID"
msgstr ""
@ -296,6 +421,9 @@ msgstr ""
msgid "Server Timeout"
msgstr ""
msgid "Silent Notification"
msgstr ""
msgid "Socket Connect Timeout"
msgstr ""
@ -305,6 +433,9 @@ msgstr ""
msgid "Sound"
msgstr ""
msgid "Sound Link"
msgstr ""
msgid "Source Email"
msgstr ""
@ -314,12 +445,21 @@ msgstr ""
msgid "Source Phone No"
msgstr ""
msgid "Special Text Color"
msgstr ""
msgid "Sticky"
msgstr ""
msgid "Subtitle"
msgstr ""
msgid "Syslog Mode"
msgstr ""
msgid "Tags"
msgstr ""
msgid "Target Channel"
msgstr ""
@ -344,24 +484,45 @@ msgstr ""
msgid "Target Encoded ID"
msgstr ""
msgid "Target Escalation"
msgstr ""
msgid "Target JID"
msgstr ""
msgid "Target Phone No"
msgstr ""
msgid "Target Player ID"
msgstr ""
msgid "Target Queue"
msgstr ""
msgid "Target Room Alias"
msgstr ""
msgid "Target Room ID"
msgstr ""
msgid "Target Schedule"
msgstr ""
msgid "Target Short Code"
msgstr ""
msgid "Target Stream"
msgstr ""
msgid "Target Subreddit"
msgstr ""
msgid "Target Tag ID"
msgstr ""
msgid "Target Team"
msgstr ""
msgid "Target Topic"
msgstr ""
@ -371,12 +532,24 @@ msgstr ""
msgid "Targets"
msgstr ""
msgid "Targets "
msgstr ""
msgid "Team Name"
msgstr ""
msgid "Template"
msgstr ""
msgid "Template Data"
msgstr ""
msgid "Template Path"
msgstr ""
msgid "Template Tokens"
msgstr ""
msgid "Tenant Domain"
msgstr ""
@ -404,12 +577,27 @@ msgstr ""
msgid "Token C"
msgstr ""
msgid "URL"
msgstr ""
msgid "URL Title"
msgstr ""
msgid "Urgency"
msgstr ""
msgid "Use Avatar"
msgstr ""
msgid "Use Blocks"
msgstr ""
msgid "Use Fields"
msgstr ""
msgid "Use Session"
msgstr ""
msgid "User ID"
msgstr ""
@ -434,18 +622,27 @@ msgstr ""
msgid "Web Based"
msgstr ""
msgid "Web Page Preview"
msgstr ""
msgid "Webhook"
msgstr ""
msgid "Webhook ID"
msgstr ""
msgid "Webhook Key"
msgstr ""
msgid "Webhook Mode"
msgstr ""
msgid "Webhook Token"
msgstr ""
msgid "Workspace"
msgstr ""
msgid "X-Axis"
msgstr ""
@ -455,6 +652,9 @@ msgstr ""
msgid "Y-Axis"
msgstr ""
msgid "libdbus-1.so.x must be installed."
msgstr ""
msgid "ttl"
msgstr ""

View file

@ -23,7 +23,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import logging
from io import StringIO
# The root identifier needed to monitor 'apprise' logging
LOGGER_NAME = 'apprise'
# Define a verbosity level that is noisier than debug mode
logging.TRACE = logging.DEBUG - 1
@ -57,5 +62,136 @@ def deprecate(self, message, *args, **kwargs):
logging.Logger.trace = trace
logging.Logger.deprecate = deprecate
# Create ourselve a generic logging reference
logger = logging.getLogger('apprise')
# Create ourselves a generic (singleton) logging reference
logger = logging.getLogger(LOGGER_NAME)
class LogCapture(object):
"""
A class used to allow one to instantiate loggers that write to
memory for temporary purposes. e.g.:
1. with LogCapture() as captured:
2.
3. # Send our notification(s)
4. aobj.notify("hello world")
5.
6. # retrieve our logs produced by the above call via our
7. # `captured` StringIO object we have access to within the `with`
8. # block here:
9. print(captured.getvalue())
"""
def __init__(self, path=None, level=None, name=LOGGER_NAME, delete=True,
fmt='%(asctime)s - %(levelname)s - %(message)s'):
"""
Instantiate a temporary log capture object
If a path is specified, then log content is sent to that file instead
of a StringIO object.
You can optionally specify a logging level such as logging.INFO if you
wish, otherwise by default the script uses whatever logging has been
set globally. If you set delete to `False` then when using log files,
they are not automatically cleaned up afterwards.
Optionally over-ride the fmt as well if you wish.
"""
# Our memory buffer placeholder
self.__buffer_ptr = StringIO()
# Store our file path as it will determine whether we write to
# memory or to a file
self.__path = path
self.__delete = delete
# Our logging level tracking
self.__level = level
self.__restore_level = None
# Acquire a pointer to our logger
self.__logger = logging.getLogger(name)
# Prepare our handler
self.__handler = logging.StreamHandler(self.__buffer_ptr) \
if not self.__path else logging.FileHandler(
self.__path, mode='a', encoding='utf-8')
# Use the specified level, otherwise take on the already
# effective level of our logger
self.__handler.setLevel(
self.__level if self.__level is not None
else self.__logger.getEffectiveLevel())
# Prepare our formatter
self.__handler.setFormatter(logging.Formatter(fmt))
def __enter__(self):
"""
Allows logger manipulation within a 'with' block
"""
if self.__level is not None:
# Temporary adjust our log level if required
self.__restore_level = self.__logger.getEffectiveLevel()
if self.__restore_level > self.__level:
# Bump our log level up for the duration of our `with`
self.__logger.setLevel(self.__level)
else:
# No restoration required
self.__restore_level = None
else:
# Do nothing but enforce that we have nothing to restore to
self.__restore_level = None
if self.__path:
# If a path has been identified, ensure we can write to the path
# and that the file exists
with open(self.__path, 'a'):
os.utime(self.__path, None)
# Update our buffer pointer
self.__buffer_ptr = open(self.__path, 'r')
# Add our handler
self.__logger.addHandler(self.__handler)
# return our memory pointer
return self.__buffer_ptr
def __exit__(self, exc_type, exc_value, tb):
"""
removes the handler gracefully when the with block has completed
"""
# Flush our content
self.__handler.flush()
self.__buffer_ptr.flush()
# Drop our handler
self.__logger.removeHandler(self.__handler)
if self.__restore_level is not None:
# Restore level
self.__logger.setLevel(self.__restore_level)
if self.__path:
# Close our file pointer
self.__buffer_ptr.close()
if self.__delete:
try:
# Always remove file afterwards
os.unlink(self.__path)
except OSError:
# It's okay if the file does not exist
pass
if exc_type is not None:
# pass exception on if one was generated
return False
return True
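A small usage sketch of the file-based mode described in the constructor docstring above (the path is hypothetical; delete defaults to True, so the file is removed again on exit):

import logging
from apprise.logger import LogCapture, logger

# Capture DEBUG (and higher) records to a file for the duration of the
# block; the handle returned by the context manager is open for reading.
with LogCapture(path='/tmp/apprise-debug.log', level=logging.DEBUG) as captured:
    logger.debug('hello world')
    print(captured.read())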

View file

@ -0,0 +1,382 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import re
import six
import requests
from json import dumps
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
class NotifyAppriseAPI(NotifyBase):
"""
A wrapper for Apprise (Persistent) API Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'Apprise API'
# The services URL
service_url = 'https://github.com/caronc/apprise-api'
# The default protocol
protocol = 'apprise'
# The default secure protocol
secure_protocol = 'apprises'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_apprise_api'
# Depending on the number of transactions/notifications taking place, this
# could take a while. 30 seconds should be enough to perform the task
socket_connect_timeout = 30.0
# Disable throttle rate for Apprise API requests since they are normally
# local anyway
request_rate_per_sec = 0.0
# Define object templates
templates = (
'{schema}://{host}/{token}',
'{schema}://{host}:{port}/{token}',
'{schema}://{user}@{host}/{token}',
'{schema}://{user}@{host}:{port}/{token}',
'{schema}://{user}:{password}@{host}/{token}',
'{schema}://{user}:{password}@{host}:{port}/{token}',
)
# Define our tokens; these are the minimum tokens required to
# be passed into this function (as arguments). The syntax appends any
# previously defined in the base package and builds onto them
template_tokens = dict(NotifyBase.template_tokens, **{
'host': {
'name': _('Hostname'),
'type': 'string',
'required': True,
},
'port': {
'name': _('Port'),
'type': 'int',
'min': 1,
'max': 65535,
},
'user': {
'name': _('Username'),
'type': 'string',
},
'password': {
'name': _('Password'),
'type': 'string',
'private': True,
},
'token': {
'name': _('Token'),
'type': 'string',
'required': True,
'private': True,
'regex': (r'^[A-Z0-9_-]{1,32}$', 'i'),
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'tags': {
'name': _('Tags'),
'type': 'string',
},
'to': {
'alias_of': 'token',
},
})
# Define any kwargs we're using
template_kwargs = {
'headers': {
'name': _('HTTP Header'),
'prefix': '+',
},
}
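As a usage illustration of the templates and arguments above, a minimal sketch (the host, credentials and token are hypothetical):

import apprise

apobj = apprise.Apprise()
# Relay the notification through an Apprise API server, adding a custom
# HTTP header via the '+' prefix declared in template_kwargs above
apobj.add('apprises://user:pass@api.example.com/mytoken123/'
          '?+X-Custom-Header=1')
apobj.notify(title='hello', body='world')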
def __init__(self, token=None, tags=None, headers=None, **kwargs):
"""
Initialize Apprise API Object
headers can be a dictionary of key/value pairs that you want to
additionally include as part of the server headers to post with
"""
super(NotifyAppriseAPI, self).__init__(**kwargs)
self.fullpath = kwargs.get('fullpath')
if not isinstance(self.fullpath, six.string_types):
self.fullpath = '/'
self.token = validate_regex(
token, *self.template_tokens['token']['regex'])
if not self.token:
msg = 'The Apprise API token specified ({}) is invalid.'\
.format(token)
self.logger.warning(msg)
raise TypeError(msg)
# Build list of tags
self.__tags = parse_list(tags)
self.headers = {}
if headers:
# Store our extra headers
self.headers.update(headers)
return
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Our URL parameters
params = self.url_parameters(privacy=privacy, *args, **kwargs)
# Append our headers into our parameters
params.update({'+{}'.format(k): v for k, v in self.headers.items()})
if self.__tags:
params['tags'] = ','.join([x for x in self.__tags])
# Determine Authentication
auth = ''
if self.user and self.password:
auth = '{user}:{password}@'.format(
user=NotifyAppriseAPI.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
)
elif self.user:
auth = '{user}@'.format(
user=NotifyAppriseAPI.quote(self.user, safe=''),
)
default_port = 443 if self.secure else 80
fullpath = self.fullpath.strip('/')
return '{schema}://{auth}{hostname}{port}{fullpath}{token}' \
'/?{params}'.format(
schema=self.secure_protocol
if self.secure else self.protocol,
auth=auth,
# never encode hostname since we're expecting it to be a
# valid one
hostname=self.host,
port='' if self.port is None or self.port == default_port
else ':{}'.format(self.port),
fullpath='/{}/'.format(NotifyAppriseAPI.quote(
fullpath, safe='/')) if fullpath else '/',
token=self.pprint(self.token, privacy, safe=''),
params=NotifyAppriseAPI.urlencode(params))
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform Apprise API Notification
"""
headers = {}
# Apply any/all header over-rides defined
headers.update(self.headers)
# prepare Apprise API Object
payload = {
# Apprise API Payload
'title': title,
'body': body,
'type': notify_type,
'format': self.notify_format,
}
if self.__tags:
payload['tag'] = self.__tags
auth = None
if self.user:
auth = (self.user, self.password)
# Set our schema
schema = 'https' if self.secure else 'http'
url = '%s://%s' % (schema, self.host)
if isinstance(self.port, int):
url += ':%d' % self.port
fullpath = self.fullpath.strip('/')
url += '/{}/'.format(fullpath) if fullpath else '/'
url += 'notify/{}'.format(self.token)
# Some entries can not be over-ridden
headers.update({
'User-Agent': self.app_id,
'Content-Type': 'application/json',
# Pass our Source UUID4 Identifier
'X-Apprise-ID': self.asset._uid,
# Pass our current recursion count to our upstream server
'X-Apprise-Recursion-Count': str(self.asset._recursion + 1),
})
self.logger.debug('Apprise API POST URL: %s (cert_verify=%r)' % (
url, self.verify_certificate,
))
self.logger.debug('Apprise API Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
url,
data=dumps(payload),
headers=headers,
auth=auth,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyAppriseAPI.http_response_code_lookup(r.status_code)
self.logger.warning(
'Failed to send Apprise API notification: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug('Response Details:\r\n{}'.format(r.content))
# Return; we're done
return False
else:
self.logger.info('Sent Apprise API notification.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Apprise API '
'notification to %s.' % self.host)
self.logger.debug('Socket Exception: %s' % str(e))
# Return; we're done
return False
return True
@staticmethod
def parse_native_url(url):
"""
Support http://hostname/notify/token and
http://hostname/path/notify/token
"""
result = re.match(
r'^http(?P<secure>s?)://(?P<hostname>[A-Z0-9._-]+)'
r'(:(?P<port>[0-9]+))?'
r'(?P<path>/[^?]+?)?/notify/(?P<token>[A-Z0-9_-]{1,32})/?'
r'(?P<params>\?.+)?$', url, re.I)
if result:
return NotifyAppriseAPI.parse_url(
'{schema}://{hostname}{port}{path}/{token}/{params}'.format(
schema=NotifyAppriseAPI.secure_protocol
if result.group('secure') else NotifyAppriseAPI.protocol,
hostname=result.group('hostname'),
port='' if not result.group('port')
else ':{}'.format(result.group('port')),
path='' if not result.group('path')
else result.group('path'),
token=result.group('token'),
params='' if not result.group('params')
else '?{}'.format(result.group('params'))))
return None
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url)
if not results:
# We're done early as we couldn't load the results
return results
# Add our headers to our returned result set; the user can
# potentially over-ride them if they wish
results['headers'] = results['qsd+']
if results['qsd-']:
results['headers'].update(results['qsd-'])
NotifyBase.logger.deprecate(
"minus (-) based Apprise API header tokens are being "
" removed; use the plus (+) symbol instead.")
# Tidy our header entries by unquoting them
results['headers'] = \
{NotifyAppriseAPI.unquote(x): NotifyAppriseAPI.unquote(y)
for x, y in results['headers'].items()}
# Support the passing of tags in the URL
if 'tags' in results['qsd'] and len(results['qsd']['tags']):
results['tags'] = \
NotifyAppriseAPI.parse_list(results['qsd']['tags'])
# Support the 'to' & 'token' variable so that we can support rooms
# this way too.
if 'token' in results['qsd'] and len(results['qsd']['token']):
results['token'] = \
NotifyAppriseAPI.unquote(results['qsd']['token'])
elif 'to' in results['qsd'] and len(results['qsd']['to']):
results['token'] = NotifyAppriseAPI.unquote(results['qsd']['to'])
else:
# Start with a list of path entries to work with
entries = NotifyAppriseAPI.split_path(results['fullpath'])
if entries:
# use our last entry found
results['token'] = entries[-1]
# pop our last entry off
entries = entries[:-1]
# re-assemble our full path
results['fullpath'] = '/'.join(entries)
return results

View file

@ -52,6 +52,54 @@ class NotifyBase(BASE_OBJECT):
This is the base class for all notification services
"""
# An internal flag used to test the state of the plugin. If set to
# False, then the plugin is not used. Plugins can disable themselves
# due to environment issues (such as missing libraries, or platform
# dependencies that are not present). By default all plugins are
# enabled.
enabled = True
# Some plugins may require additional packages above what is provided
# already by Apprise.
#
# Use this section to relay this information to the users of the script to
# help guide them with what they need to know if they plan on using your
# plugin. The below configuration should otherwise accommodate all normal
# situations and will not require any updating:
requirements = {
# Use the description to provide a human interpretable description of
# what is required to make the plugin work. This is only necessary
# if there are package dependencies. Setting this to default will
# cause a general response to be returned. Only set this if you plan
# on over-riding the default. Always consider language support here.
# So before providing a value do the following in your code base:
#
# from apprise.AppriseLocale import gettext_lazy as _
#
# 'details': _('My detailed requirements')
'details': None,
# Define any required packages needed for the plugin to run. This is
# an array of strings that simply look like lines residing in a
# `requirements.txt` file...
#
# As an example, an entry may look like:
# 'packages_required': [
# 'cryptography < 3.4',
# ]
'packages_required': [],
# Recommended packages identify packages that are not required to make
# your plugin work, but would improve its use or grant it access to
# full functionality (that might otherwise be limited).
# Similar to `packages_required`, you would identify each entry in
# the array as you would in a `requirements.txt` file.
#
# - Do not re-provide entries already in the `packages_required`
'packages_recommended': [],
}
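As a brief illustration of the block above, a hypothetical plugin might declare its needs as follows (the class and package names are illustrative only):

from apprise.plugins.NotifyBase import NotifyBase
from apprise.AppriseLocale import gettext_lazy as _

class NotifyExample(NotifyBase):
    # Advertise what must be installed, and what would merely help
    requirements = {
        'details': _('Packages are required to function.'),
        'packages_required': ['cryptography'],
        'packages_recommended': ['PyYAML'],
    }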
# The services URL
service_url = None
@ -153,7 +201,8 @@ class NotifyBase(BASE_OBJECT):
# Provide override
self.overflow_mode = overflow
def image_url(self, notify_type, logo=False, extension=None):
def image_url(self, notify_type, logo=False, extension=None,
image_size=None):
"""
Returns Image URL if possible
"""
@ -166,7 +215,7 @@ class NotifyBase(BASE_OBJECT):
return self.asset.image_url(
notify_type=notify_type,
image_size=self.image_size,
image_size=self.image_size if image_size is None else image_size,
logo=logo,
extension=extension,
)
@ -222,6 +271,13 @@ class NotifyBase(BASE_OBJECT):
"""
if not self.enabled:
# Deny notifications issued to services that are disabled
self.logger.warning(
"{} is currently disabled on this system.".format(
self.service_name))
return False
# Prepare attachments if required
if attach is not None and not isinstance(attach, AppriseAttachment):
try:

View file

@ -0,0 +1 @@
class NotifyBase: ...

View file

@ -36,7 +36,6 @@
# The API reference used to build this plugin was documented here:
# https://developers.clicksend.com/docs/rest/v3/
#
import re
import requests
from json import dumps
from base64 import b64encode
@ -44,7 +43,8 @@ from base64 import b64encode
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import parse_bool
from ..AppriseLocale import gettext_lazy as _
@ -53,12 +53,6 @@ CLICKSEND_HTTP_ERROR_MAP = {
401: 'Unauthorized - Invalid Token.',
}
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
# Used to break path apart into list of channels
TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
class NotifyClickSend(NotifyBase):
"""
@ -151,26 +145,18 @@ class NotifyClickSend(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
for target in parse_list(targets):
for target in parse_phone_no(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target))
# store valid phone number
self.targets.append(result['full'])
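A small sketch of the helper behaviour relied upon above (the numbers are illustrative): is_phone_no() returns a dictionary describing a valid number, including a normalized 'full' entry, or False when the value should be dropped.

from apprise.utils import is_phone_no

for candidate in ('+1 555 123 4567', '12345'):
    result = is_phone_no(candidate)
    # Valid entries expose their normalized digits under result['full']
    print(result['full'] if result else 'dropped: {}'.format(candidate))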
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
@ -321,8 +307,7 @@ class NotifyClickSend(NotifyBase):
# Support the 'to' variable so that we can support rooms this way too
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += [x for x in filter(
bool, TARGET_LIST_DELIM.split(
NotifyClickSend.unquote(results['qsd']['to'])))]
results['targets'] += \
NotifyClickSend.parse_phone_no(results['qsd']['to'])
return results

View file

@ -30,7 +30,6 @@
# (both user and password) from the API Details section from within your
# account profile area: https://d7networks.com/accounts/profile/
import re
import six
import requests
import base64
@ -40,7 +39,8 @@ from json import loads
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import parse_bool
from ..AppriseLocale import gettext_lazy as _
@ -52,9 +52,6 @@ D7NETWORKS_HTTP_ERROR_MAP = {
500: 'A Serverside Error Occured Handling the Request.',
}
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
# Priorities
class D7SMSPriority(object):
@ -197,36 +194,26 @@ class NotifyD7Networks(NotifyBase):
self.source = None \
if not isinstance(source, six.string_types) else source.strip()
# Parse our targets
self.targets = list()
for target in parse_list(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
# if it's less than 10, then we can assume it's
# a poorly specified phone no and spit a warning
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
continue
self.logger.warning(
'Dropped invalid phone # ({}) specified.'.format(target))
if len(self.targets) == 0:
msg = 'There are no valid targets identified to notify.'
if not (self.user and self.password):
msg = 'A D7 Networks user/pass was not provided.'
self.logger.warning(msg)
raise TypeError(msg)
# Parse our targets
self.targets = list()
for target in parse_phone_no(targets):
# Validate targets and drop bad ones:
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result['full'])
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
@ -235,6 +222,11 @@ class NotifyD7Networks(NotifyBase):
redirects to the appropriate handling
"""
if len(self.targets) == 0:
# There were no services to notify
self.logger.warning('There were no D7 Networks targets to notify.')
return False
# error tracking (used for function return)
has_error = False
@ -479,6 +471,6 @@ class NotifyD7Networks(NotifyBase):
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyD7Networks.parse_list(results['qsd']['to'])
NotifyD7Networks.parse_phone_no(results['qsd']['to'])
return results

View file

@ -38,10 +38,6 @@ NOTIFY_DBUS_SUPPORT_ENABLED = False
# Image support is dependant on the GdkPixbuf library being available
NOTIFY_DBUS_IMAGE_SUPPORT = False
# The following are required to hook into the notifications:
NOTIFY_DBUS_INTERFACE = 'org.freedesktop.Notifications'
NOTIFY_DBUS_SETTING_LOCATION = '/org/freedesktop/Notifications'
# Initialize our mainloops
LOOP_GLIB = None
LOOP_QT = None
@ -132,8 +128,19 @@ class NotifyDBus(NotifyBase):
A wrapper for local DBus/Qt Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_DBUS_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'details': _('libdbus-1.so.x must be installed.')
}
# The default descriptive name associated with the Notification
service_name = 'DBus Notification'
service_name = _('DBus Notification')
# The services URL
service_url = 'http://www.freedesktop.org/Software/dbus/'
# The default protocols
# Python 3 keys() does not return a list object; it's its own dict_keys() type
@ -158,14 +165,9 @@ class NotifyDBus(NotifyBase):
# content to display
body_max_line_count = 10
# This entry is a bit hacky, but it allows us to unit-test this library
# in an environment that simply doesn't have the gnome packages
# available to us. It also allows us to handle situations where the
# packages actually are present but we need to test that they aren't.
# If anyone is seeing this had knows a better way of testing this
# outside of what is defined in test/test_glib_plugin.py, please
# let me know! :)
_enabled = NOTIFY_DBUS_SUPPORT_ENABLED
# The following are required to hook into the notifications:
dbus_interface = 'org.freedesktop.Notifications'
dbus_setting_location = '/org/freedesktop/Notifications'
# Define object templates
templates = (
@ -241,12 +243,6 @@ class NotifyDBus(NotifyBase):
"""
Perform DBus Notification
"""
if not self._enabled or MAINLOOP_MAP[self.schema] is None:
self.logger.warning(
"{} notifications could not be loaded.".format(self.schema))
return False
# Acquire our session
try:
session = SessionBus(mainloop=MAINLOOP_MAP[self.schema])
@ -265,14 +261,14 @@ class NotifyDBus(NotifyBase):
# acquire our dbus object
dbus_obj = session.get_object(
NOTIFY_DBUS_INTERFACE,
NOTIFY_DBUS_SETTING_LOCATION,
self.dbus_interface,
self.dbus_setting_location,
)
# Acquire our dbus interface
dbus_iface = Interface(
dbus_obj,
dbus_interface=NOTIFY_DBUS_INTERFACE,
dbus_interface=self.dbus_interface,
)
# image path

View file

@ -0,0 +1,343 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import re
import time
import hmac
import hashlib
import base64
import requests
from json import dumps
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyFormat
from ..common import NotifyType
from ..utils import parse_list
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Register at https://dingtalk.com
# - Download their PC based software as it is the only way you can create
# a custom robot. You can create a custom robot per group. You will
# be provided an access_token that Apprise will need.
# Syntax:
# dingtalk://{access_token}/
# dingtalk://{access_token}/{optional_phone_no}
# dingtalk://{access_token}/{phone_no_1}/{phone_no_2}/{phone_no_N}/
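To illustrate the syntax above, a minimal usage sketch (the token, secret and phone number are hypothetical); the optional secret placed in the user position enables the signed requests produced by get_signature() below:

import apprise

apobj = apprise.Apprise()
apobj.add('dingtalk://mysecret123@0123456789abcdef/+15551234567')
apobj.notify(title='build complete', body='all tests passed')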
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class NotifyDingTalk(NotifyBase):
"""
A wrapper for DingTalk Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'DingTalk'
# The services URL
service_url = 'https://www.dingtalk.com/'
# All notification requests are secure
secure_protocol = 'dingtalk'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dingtalk'
# DingTalk API
notify_url = 'https://oapi.dingtalk.com/robot/send?access_token={token}'
# Do not set title_maxlen as it is set in a property value below
# since the length varies depending on whether we are sending a
# markdown based message or a text based one.
# title_maxlen = see the @property defined below
# Define object templates
templates = (
'{schema}://{token}/',
'{schema}://{token}/{targets}/',
'{schema}://{secret}@{token}/',
'{schema}://{secret}@{token}/{targets}/',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'token': {
'name': _('Token'),
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[a-z0-9]+$', 'i'),
},
'secret': {
'name': _('Token'),
'type': 'string',
'private': True,
'regex': (r'^[a-z0-9]+$', 'i'),
},
'targets': {
'name': _('Target Phone No'),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'to': {
'alias_of': 'targets',
},
'token': {
'alias_of': 'token',
},
'secret': {
'alias_of': 'secret',
},
})
def __init__(self, token, targets=None, secret=None, **kwargs):
"""
Initialize DingTalk Object
"""
super(NotifyDingTalk, self).__init__(**kwargs)
# Secret Key (associated with project)
self.token = validate_regex(
token, *self.template_tokens['token']['regex'])
if not self.token:
msg = 'An invalid DingTalk API Token ' \
'({}) was specified.'.format(token)
self.logger.warning(msg)
raise TypeError(msg)
self.secret = None
if secret:
self.secret = validate_regex(
secret, *self.template_tokens['secret']['regex'])
if not self.secret:
msg = 'An invalid DingTalk Secret ' \
'({}) was specified.'.format(secret)
self.logger.warning(msg)
raise TypeError(msg)
# Parse our targets
self.targets = list()
for target in parse_list(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for its digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
continue
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
return
def get_signature(self):
"""
Calculates time-based signature so that we can send arbitrary messages.
"""
timestamp = str(round(time.time() * 1000))
secret_enc = self.secret.encode('utf-8')
str_to_sign_enc = \
"{}\n{}".format(timestamp, self.secret).encode('utf-8')
hmac_code = hmac.new(
secret_enc, str_to_sign_enc, digestmod=hashlib.sha256).digest()
signature = NotifyDingTalk.quote(base64.b64encode(hmac_code), safe='')
return timestamp, signature
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform DingTalk Notification
"""
payload = {
'msgtype': 'text',
'at': {
'atMobiles': self.targets,
'isAtAll': False,
}
}
if self.notify_format == NotifyFormat.MARKDOWN:
payload['markdown'] = {
'title': title,
'text': body,
}
else:
payload['text'] = {
'content': body,
}
# Our Notification URL
notify_url = self.notify_url.format(token=self.token)
params = None
if self.secret:
timestamp, signature = self.get_signature()
params = {
'timestamp': timestamp,
'sign': signature,
}
# Prepare our headers
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json'
}
# Some Debug Logging
self.logger.debug('DingTalk URL: {} (cert_verify={})'.format(
notify_url, self.verify_certificate))
self.logger.debug('DingTalk Payload: {}' .format(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
notify_url,
data=dumps(payload),
headers=headers,
params=params,
verify=self.verify_certificate,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyDingTalk.http_response_code_lookup(
r.status_code)
self.logger.warning(
'Failed to send DingTalk notification: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
return False
else:
self.logger.info('Sent DingTalk notification.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending DingTalk '
'notification.'
)
self.logger.debug('Socket Exception: %s' % str(e))
return False
return True
@property
def title_maxlen(self):
"""
The title isn't used when not in markdown mode.
"""
return NotifyBase.title_maxlen \
if self.notify_format == NotifyFormat.MARKDOWN else 0
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any arguments set
args = {
'format': self.notify_format,
'overflow': self.overflow_mode,
'verify': 'yes' if self.verify_certificate else 'no',
}
return '{schema}://{secret}{token}/{targets}/?{args}'.format(
schema=self.secure_protocol,
secret='' if not self.secret else '{}@'.format(self.pprint(
self.secret, privacy, mode=PrivacyMode.Secret, safe='')),
token=self.pprint(self.token, privacy, safe=''),
targets='/'.join(
[NotifyDingTalk.quote(x, safe='') for x in self.targets]),
args=NotifyDingTalk.urlencode(args))
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
results['token'] = NotifyDingTalk.unquote(results['host'])
# if a user has been defined, use its value as the secret
if results.get('user'):
results['secret'] = results.get('user')
# Get our entries; split_path() looks after unquoting content for us
# by default
results['targets'] = NotifyDingTalk.split_path(results['fullpath'])
# Support the use of the `token` keyword argument
if 'token' in results['qsd'] and len(results['qsd']['token']):
results['token'] = \
NotifyDingTalk.unquote(results['qsd']['token'])
# Support the use of the `secret` keyword argument
if 'secret' in results['qsd'] and len(results['qsd']['secret']):
results['secret'] = \
NotifyDingTalk.unquote(results['qsd']['secret'])
# Support the 'to' variable so that we can support targets this way too
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyDingTalk.parse_list(results['qsd']['to'])
return results

View file

@ -80,6 +80,11 @@ class NotifyDiscord(NotifyBase):
# The maximum allowable characters allowed in the body per message
body_maxlen = 2000
# Discord has a limit on the number of fields you can include in an
# embed message. This value allows the Discord message to safely
# break into multiple messages to handle these cases.
discord_max_fields = 10
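A small sketch of the batching rule this constant drives (values are illustrative): fields beyond the limit are re-sent in follow-up messages of at most ten entries each, mirroring the slicing performed further below.

fields = [{'name': 'heading {}'.format(i), 'value': '...'} for i in range(25)]
batches = [fields[i:i + 10] for i in range(0, len(fields), 10)]
# 25 parsed fields are delivered as batches of 10, 10 and 5
assert [len(b) for b in batches] == [10, 10, 5]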
# Define object templates
templates = (
'{schema}://{webhook_id}/{webhook_token}',
@ -133,6 +138,11 @@ class NotifyDiscord(NotifyBase):
'type': 'bool',
'default': True,
},
'fields': {
'name': _('Use Fields'),
'type': 'bool',
'default': True,
},
'image': {
'name': _('Include Image'),
'type': 'bool',
@ -143,7 +153,7 @@ class NotifyDiscord(NotifyBase):
def __init__(self, webhook_id, webhook_token, tts=False, avatar=True,
footer=False, footer_logo=True, include_image=False,
avatar_url=None, **kwargs):
fields=True, avatar_url=None, **kwargs):
"""
Initialize Discord Object
@ -181,6 +191,9 @@ class NotifyDiscord(NotifyBase):
# Place a thumbnail image inline with the message body
self.include_image = include_image
# Use Fields
self.fields = fields
# Avatar URL
# This allows a user to provide an over-ride to the otherwise
# dynamically generated avatar url images
@ -206,32 +219,23 @@ class NotifyDiscord(NotifyBase):
# Acquire image_url
image_url = self.image_url(notify_type)
# our fields variable
fields = []
if self.notify_format == NotifyFormat.MARKDOWN:
# Use embeds for payload
payload['embeds'] = [{
'provider': {
'author': {
'name': self.app_id,
'url': self.app_url,
},
'title': title,
'type': 'rich',
'description': body,
# Our color associated with our notification
'color': self.color(notify_type, int),
}]
# Break titles out so that we can sort them in embeds
fields = self.extract_markdown_sections(body)
if len(fields) > 0:
# Apply our additional parsing for a better presentation
# Swap first entry for description
payload['embeds'][0]['description'] = \
fields[0].get('name') + fields[0].get('value')
payload['embeds'][0]['fields'] = fields[1:]
if self.footer:
# Acquire logo URL
logo_url = self.image_url(notify_type, logo=True)
@ -251,6 +255,20 @@ class NotifyDiscord(NotifyBase):
'width': 256,
}
if self.fields:
# Break titles out so that we can sort them in embeds
description, fields = self.extract_markdown_sections(body)
# Swap first entry for description
payload['embeds'][0]['description'] = description
if fields:
# Apply our additional parsing for a better presentation
payload['embeds'][0]['fields'] = \
fields[:self.discord_max_fields]
# Remove entry from head of fields
fields = fields[self.discord_max_fields:]
else:
# not markdown
payload['content'] = \
@ -268,6 +286,16 @@ class NotifyDiscord(NotifyBase):
# We failed to post our message
return False
# Process any remaining fields IF set
if fields:
payload['embeds'][0]['description'] = ''
for i in range(0, len(fields), self.discord_max_fields):
payload['embeds'][0]['fields'] = \
fields[i:i + self.discord_max_fields]
if not self._send(payload):
# We failed to post our message
return False
if attach:
# Update our payload; the idea is to preserve its other detected
# and assigned values for re-use here too
@ -413,8 +441,12 @@ class NotifyDiscord(NotifyBase):
'footer': 'yes' if self.footer else 'no',
'footer_logo': 'yes' if self.footer_logo else 'no',
'image': 'yes' if self.include_image else 'no',
'fields': 'yes' if self.fields else 'no',
}
if self.avatar_url:
params['avatar_url'] = self.avatar_url
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
@ -459,6 +491,11 @@ class NotifyDiscord(NotifyBase):
# Text To Speech
results['tts'] = parse_bool(results['qsd'].get('tts', False))
# Use sections
# effectively detect multiple fields and break them off
# into sections
results['fields'] = parse_bool(results['qsd'].get('fields', True))
# Use Footer
results['footer'] = parse_bool(results['qsd'].get('footer', False))
@ -513,6 +550,18 @@ class NotifyDiscord(NotifyBase):
fields that get passed as an embed entry to Discord.
"""
# Search for any header information found without its own section
# identifier
match = re.match(
r'^\s*(?P<desc>[^\s#]+.*?)(?=\s*$|[\r\n]+\s*#)',
markdown, flags=re.S)
description = match.group('desc').strip() if match else ''
if description:
# Strip description from our string since it has been handled
# now.
markdown = re.sub(description, '', markdown, count=1)
regex = re.compile(
r'\s*#[# \t\v]*(?P<name>[^\n]+)(\n|\s*$)'
r'\s*((?P<value>[^#].+?)(?=\s*$|[\r\n]+\s*#))?', flags=re.S)
@ -523,9 +572,11 @@ class NotifyDiscord(NotifyBase):
d = el.groupdict()
fields.append({
'name': d.get('name', '').strip('# \r\n\t\v'),
'value': '```md\n' +
(d.get('value').strip() if d.get('value') else '') + '\n```'
'name': d.get('name', '').strip('#`* \r\n\t\v'),
'value': '```{}\n{}```'.format(
'md' if d.get('value') else '',
d.get('value').strip() + '\n' if d.get('value') else '',
),
})
return fields
return description, fields
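A small sketch of the revised return signature (assuming NotifyDiscord remains importable as shown): the leading text becomes the embed description, while each '#' heading becomes a field whose value is wrapped in a short markdown code block.

from apprise.plugins.NotifyDiscord import NotifyDiscord

desc, fields = NotifyDiscord.extract_markdown_sections(
    'intro text\n# Header\ncontent')
assert desc == 'intro text'
assert fields[0]['name'] == 'Header'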

View file

@ -106,6 +106,21 @@ EMAIL_TEMPLATES = (
},
),
# Yandex
(
'Yandex',
re.compile(
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
r'(?P<domain>yandex\.(com|ru|ua|by|kz|uz|tr|fr))$', re.I),
{
'port': 465,
'smtp_host': 'smtp.yandex.ru',
'secure': True,
'secure_mode': SecureMailMode.SSL,
'login_type': (WebBaseLogin.USERID, )
},
),
# Microsoft Hotmail
(
'Microsoft Hotmail',
@ -205,21 +220,22 @@ EMAIL_TEMPLATES = (
},
),
# Zoho Mail
# Zoho Mail (Free)
(
'Zoho Mail',
re.compile(
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
r'(?P<domain>zoho\.com)$', re.I),
r'(?P<domain>zoho(mail)?\.com)$', re.I),
{
'port': 465,
'port': 587,
'smtp_host': 'smtp.zoho.com',
'secure': True,
'secure_mode': SecureMailMode.SSL,
'secure_mode': SecureMailMode.STARTTLS,
'login_type': (WebBaseLogin.EMAIL, )
},
),
# SendGrid (Email Server)
# You must specify an authenticated sender address in the from= settings
# and a valid email in the to= to deliver your emails to
@ -285,7 +301,7 @@ class NotifyEmail(NotifyBase):
default_secure_mode = SecureMailMode.STARTTLS
# Default SMTP Timeout (in seconds)
connect_timeout = 15
socket_connect_timeout = 15
# Define object templates
templates = (
@ -347,10 +363,6 @@ class NotifyEmail(NotifyBase):
'type': 'string',
'map_to': 'from_name',
},
'smtp_host': {
'name': _('SMTP Server'),
'type': 'string',
},
'cc': {
'name': _('Carbon Copy'),
'type': 'list:string',
@ -359,6 +371,11 @@ class NotifyEmail(NotifyBase):
'name': _('Blind Carbon Copy'),
'type': 'list:string',
},
'smtp': {
'name': _('SMTP Server'),
'type': 'string',
'map_to': 'smtp_host',
},
'mode': {
'name': _('Secure Mode'),
'type': 'choice:string',
@ -366,17 +383,19 @@ class NotifyEmail(NotifyBase):
'default': SecureMailMode.STARTTLS,
'map_to': 'secure_mode',
},
'timeout': {
'name': _('Server Timeout'),
'type': 'int',
'default': 15,
'min': 5,
},
})
def __init__(self, timeout=15, smtp_host=None, from_name=None,
# Define any kwargs we're using
template_kwargs = {
'headers': {
'name': _('Email Header'),
'prefix': '+',
},
}
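As a usage illustration of the arguments above, a minimal sketch (addresses and credentials are hypothetical): smtp= and mode= map onto the entries above, and the '+' prefix attaches an outbound email header as declared in template_kwargs.

import apprise

apobj = apprise.Apprise()
apobj.add('mailtos://user:pass@example.com/?smtp=mail.example.com'
          '&mode=starttls&+X-Priority=1')
apobj.notify(title='hello', body='world')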
def __init__(self, smtp_host=None, from_name=None,
from_addr=None, secure_mode=None, targets=None, cc=None,
bcc=None, **kwargs):
bcc=None, headers=None, **kwargs):
"""
Initialize Email Object
@ -393,13 +412,6 @@ class NotifyEmail(NotifyBase):
else:
self.port = self.default_port
# Email SMTP Server Timeout
try:
self.timeout = int(timeout)
except (ValueError, TypeError):
self.timeout = self.connect_timeout
# Acquire Email 'To'
self.targets = list()
@ -412,6 +424,11 @@ class NotifyEmail(NotifyBase):
# For tracking our email -> name lookups
self.names = {}
self.headers = {}
if headers:
# Store our extra headers
self.headers.update(headers)
# Now we want to construct the To and From email
# addresses from the URL provided
self.from_addr = from_addr
@ -620,11 +637,11 @@ class NotifyEmail(NotifyBase):
except TypeError:
# Python v2.x Support (no charset keyword)
# Format our cc addresses to support the Name field
cc = [formataddr(
cc = [formataddr( # pragma: no branch
(self.names.get(addr, False), addr)) for addr in cc]
# Format our bcc addresses to support the Name field
bcc = [formataddr(
bcc = [formataddr( # pragma: no branch
(self.names.get(addr, False), addr)) for addr in bcc]
self.logger.debug(
@ -646,6 +663,11 @@ class NotifyEmail(NotifyBase):
content = MIMEText(body, 'plain', 'utf-8')
base = MIMEMultipart() if attach else content
# Apply any provided custom headers
for k, v in self.headers.items():
base[k] = Header(v, 'utf-8')
base['Subject'] = Header(title, 'utf-8')
try:
base['From'] = formataddr(
@ -714,7 +736,7 @@ class NotifyEmail(NotifyBase):
self.smtp_host,
self.port,
None,
timeout=self.timeout,
timeout=self.socket_connect_timeout,
)
if self.secure and self.secure_mode == SecureMailMode.STARTTLS:
@ -762,10 +784,12 @@ class NotifyEmail(NotifyBase):
'from': self.from_addr,
'mode': self.secure_mode,
'smtp': self.smtp_host,
'timeout': self.timeout,
'user': self.user,
}
# Append our headers into our parameters
params.update({'+{}'.format(k): v for k, v in self.headers.items()})
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
@ -864,8 +888,11 @@ class NotifyEmail(NotifyBase):
results['from_name'] = NotifyEmail.unquote(results['qsd']['name'])
if 'timeout' in results['qsd'] and len(results['qsd']['timeout']):
# Extract the timeout to associate with smtp server
results['timeout'] = results['qsd']['timeout']
# Deprecated in favor of cto= flag
NotifyBase.logger.deprecate(
"timeout= argument is deprecated; use cto= instead.")
results['qsd']['cto'] = results['qsd']['timeout']
del results['qsd']['timeout']
# Store SMTP Host if specified
if 'smtp' in results['qsd'] and len(results['qsd']['smtp']):
@ -887,4 +914,9 @@ class NotifyEmail(NotifyBase):
results['from_addr'] = from_addr
results['smtp_host'] = smtp_host
# Add our Meta Headers that the user can provide with their outbound
# emails
results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y)
for x, y in results['qsd+'].items()}
return results

View file

@ -697,3 +697,28 @@ class NotifyEmby(NotifyBase):
# ticket system as unresolved and has provided work-arounds
# - https://github.com/kennethreitz/requests/issues/3578
pass
except ImportError: # pragma: no cover
# The actual exception is `ModuleNotFoundError`; however, ImportError
# grants us backwards compatibility with versions of Python older
# than v3.6
# Python code that makes early calls to sys.exit() can cause
# the __del__() code to run. However in some newer versions of
# Python, this causes the `sys` library to no longer be
# available. The Stack Overflow post also goes on to suggest that
# it's not wise to use __del__() as a destructor,
# which is the case here.
# https://stackoverflow.com/questions/67218341/\
# modulenotfounderror-import-of-time-halted-none-in-sys-\
# modules-occured-when-obj?noredirect=1&lq=1
#
#
# Also see: https://stackoverflow.com/questions\
# /1481488/what-is-the-del-method-and-how-do-i-call-it
# At this time it seems clean to try to log out (if we can)
# but not throw any unnecessary exceptions (like this one) to
# the end user if we don't have to.
pass

View file

@ -338,8 +338,12 @@ class NotifyEnigma2(NotifyBase):
# Add our headers to our returned result set; the user can
# potentially over-ride them if they wish
results['headers'] = results['qsd-']
results['headers'].update(results['qsd+'])
results['headers'] = results['qsd+']
if results['qsd-']:
results['headers'].update(results['qsd-'])
NotifyBase.logger.deprecate(
"minus (-) based Enigma header tokens are being "
" removed; use the plus (+) symbol instead.")
# Tidy our header entries by unquoting them
results['headers'] = {

View file

@ -0,0 +1,510 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# For this plugin to work correctly, the FCM server must be set up to allow
# for remote connections.
# Firebase Cloud Messaging
# Visit your console page: https://console.firebase.google.com
# 1. Create a project if you haven't already. If you did, the
# {project} ID will be listed as name-XXXXX.
# 2. Click on your project from here to open it up.
# 3. Access your Web API Key by clicking on:
# - The (gear-next-to-project-name) > Project Settings > Cloud Messaging
# Visit the following site to get your Project information:
# - https://console.cloud.google.com/project/_/settings/general/
#
# Docs: https://firebase.google.com/docs/cloud-messaging/send-message
# Legacy Docs:
# https://firebase.google.com/docs/cloud-messaging/http-server-ref\
# #send-downstream
#
# If you generate a new private key, it will provide a .json file.
# You will need this in order to send an apprise message.
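To illustrate the two modes described above, a minimal usage sketch (the API key, project ID, device token and keyfile path are all hypothetical, and the OAuth2 form additionally requires the cryptography package noted below):

import apprise

apobj = apprise.Apprise()
# Legacy mode: the Web API Key followed by one or more device tokens
apobj.add('fcm://AAAAabc123/device-token-one')
# OAuth2 mode: the project ID plus the downloaded keyfile
apobj.add('fcm://my-project-12345/device-token-one/'
          '?keyfile=/path/to/keyfile.json')
apobj.notify(title='hello', body='world')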
import six
import requests
from json import dumps
from ..NotifyBase import NotifyBase
from ...common import NotifyType
from ...utils import validate_regex
from ...utils import parse_list
from ...AppriseAttachment import AppriseAttachment
from ...AppriseLocale import gettext_lazy as _
# Default our global support flag
NOTIFY_FCM_SUPPORT_ENABLED = False
try:
from .oauth import GoogleOAuth
# We're good to go
NOTIFY_FCM_SUPPORT_ENABLED = True
except ImportError:
# cryptography is a dependency of the .oauth library
# Create a dummy object for init() call to work
class GoogleOAuth(object):
pass
# Our lookup map
FCM_HTTP_ERROR_MAP = {
400: 'A bad request was made to the server.',
401: 'The provided API Key was not valid.',
404: 'The token could not be registered.',
}
class FCMMode(object):
"""
Define the Firebase Cloud Messaging Modes
"""
# The legacy way of sending a message
Legacy = "legacy"
# The new API
OAuth2 = "oauth2"
# FCM Modes
FCM_MODES = (
# Legacy API
FCMMode.Legacy,
# HTTP v1 URL
FCMMode.OAuth2,
)
class NotifyFCM(NotifyBase):
"""
A wrapper for Google's Firebase Cloud Messaging Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_FCM_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'packages_required': 'cryptography'
}
# The default descriptive name associated with the Notification
service_name = 'Firebase Cloud Messaging'
# The services URL
service_url = 'https://firebase.google.com'
# The default protocol
secure_protocol = 'fcm'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_fcm'
# Project Notification
# https://firebase.google.com/docs/cloud-messaging/send-message
notify_oauth2_url = \
"https://fcm.googleapis.com/v1/projects/{project}/messages:send"
notify_legacy_url = "https://fcm.googleapis.com/fcm/send"
# There is no reason we should exceed 5KB when reading in a JSON file.
# If it is more than this, then it is not accepted.
max_fcm_keyfile_size = 5000
# The maximum length of the body
body_maxlen = 1024
# A title can not be used for SMS Messages. Setting this to zero will
# cause any title (if defined) to get placed into the message body.
title_maxlen = 0
# Define object templates
templates = (
# OAuth2
'{schema}://{project}/{targets}?keyfile={keyfile}',
# Legacy Mode
'{schema}://{apikey}/{targets}',
)
# Define our template
template_tokens = dict(NotifyBase.template_tokens, **{
'apikey': {
'name': _('API Key'),
'type': 'string',
'private': True,
},
'keyfile': {
'name': _('OAuth2 KeyFile'),
'type': 'string',
'private': True,
},
'mode': {
'name': _('Mode'),
'type': 'choice:string',
'values': FCM_MODES,
'default': FCMMode.Legacy,
},
'project': {
'name': _('Project ID'),
'type': 'string',
'required': True,
},
'target_device': {
'name': _('Target Device'),
'type': 'string',
'map_to': 'targets',
},
'target_topic': {
'name': _('Target Topic'),
'type': 'string',
'prefix': '#',
'map_to': 'targets',
},
'targets': {
'name': _('Targets'),
'type': 'list:string',
},
})
template_args = dict(NotifyBase.template_args, **{
'to': {
'alias_of': 'targets',
},
})
def __init__(self, project, apikey, targets=None, mode=None, keyfile=None,
**kwargs):
"""
Initialize Firebase Cloud Messaging
"""
super(NotifyFCM, self).__init__(**kwargs)
if mode is None:
# Detect our mode
self.mode = FCMMode.OAuth2 if keyfile else FCMMode.Legacy
else:
# Setup our mode
self.mode = NotifyFCM.template_tokens['mode']['default'] \
if not isinstance(mode, six.string_types) else mode.lower()
if self.mode and self.mode not in FCM_MODES:
msg = 'The mode specified ({}) is invalid.'.format(mode)
self.logger.warning(msg)
raise TypeError(msg)
# Used for Legacy Mode; this is the Web API Key retrieved from the
# User Panel
self.apikey = None
# Path to our Keyfile
self.keyfile = None
# Our Project ID is required to verify against the keyfile
# specified
self.project = None
# Initialize our Google OAuth module we can work with
self.oauth = GoogleOAuth(
user_agent=self.app_id, timeout=self.request_timeout,
verify_certificate=self.verify_certificate)
if self.mode == FCMMode.OAuth2:
# The project ID associated with the account
self.project = validate_regex(project)
if not self.project:
msg = 'An invalid FCM Project ID ' \
'({}) was specified.'.format(project)
self.logger.warning(msg)
raise TypeError(msg)
if not keyfile:
msg = 'No FCM JSON KeyFile was specified.'
self.logger.warning(msg)
raise TypeError(msg)
# Our keyfile object is just an AppriseAttachment object
self.keyfile = AppriseAttachment(asset=self.asset)
# Add our definition to our template
self.keyfile.add(keyfile)
# Enforce maximum file size
self.keyfile[0].max_file_size = self.max_fcm_keyfile_size
else: # Legacy Mode
# The apikey associated with the account
self.apikey = validate_regex(apikey)
if not self.apikey:
msg = 'An invalid FCM API key ' \
'({}) was specified.'.format(apikey)
self.logger.warning(msg)
raise TypeError(msg)
# Acquire Device IDs to notify
self.targets = parse_list(targets)
return
@property
def access_token(self):
"""
Generates an access_token based on the keyfile provided
"""
keyfile = self.keyfile[0]
if not keyfile:
# We could not access the keyfile
self.logger.error(
'Could not access FCM keyfile {}.'.format(
keyfile.url(privacy=True)))
return None
if not self.oauth.load(keyfile.path):
self.logger.error(
'FCM keyfile {} could not be loaded.'.format(
keyfile.url(privacy=True)))
return None
# Verify our project id against the one provided in our keyfile
if self.project != self.oauth.project_id:
self.logger.error(
'FCM keyfile {} identifies itself for a different project'
.format(keyfile.url(privacy=True)))
return None
# Return our generated key; the below returns None if a token could
# not be acquired
return self.oauth.access_token
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform FCM Notification
"""
if not self.targets:
# There is no one to notify; we're done
self.logger.warning('There are no FCM devices or topics to notify')
return False
if self.mode == FCMMode.OAuth2:
access_token = self.access_token
if not access_token:
# Error message is generated in the access_token property, so
# there is no reason to additionally write anything here
return False
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json',
"Authorization": "Bearer {}".format(access_token),
}
# Prepare our notify URL
notify_url = self.notify_oauth2_url
else: # FCMMode.Legacy
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json',
"Authorization": "key={}".format(self.apikey),
}
# Prepare our notify URL
notify_url = self.notify_legacy_url
has_error = False
# Create a copy of the targets list
targets = list(self.targets)
while len(targets):
recipient = targets.pop(0)
if self.mode == FCMMode.OAuth2:
payload = {
'message': {
'token': None,
'notification': {
'title': title,
'body': body,
}
}
}
if recipient[0] == '#':
payload['message']['topic'] = recipient[1:]
self.logger.debug(
"FCM recipient %s parsed as a topic",
recipient[1:])
else:
payload['message']['token'] = recipient
self.logger.debug(
"FCM recipient %s parsed as a device token",
recipient)
else: # FCMMode.Legacy
payload = {
'notification': {
'notification': {
'title': title,
'body': body,
}
}
}
if recipient[0] == '#':
payload['to'] = '/topics/{}'.format(recipient)
self.logger.debug(
"FCM recipient %s parsed as a topic",
recipient[1:])
else:
payload['to'] = recipient
self.logger.debug(
"FCM recipient %s parsed as a device token",
recipient)
self.logger.debug(
'FCM %s POST URL: %s (cert_verify=%r)',
self.mode, notify_url, self.verify_certificate,
)
self.logger.debug('FCM %s Payload: %s', self.mode, str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
notify_url.format(project=self.project),
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code not in (
requests.codes.ok, requests.codes.no_content):
# We had a problem
status_str = \
NotifyBase.http_response_code_lookup(
r.status_code, FCM_HTTP_ERROR_MAP)
self.logger.warning(
'Failed to send {} FCM notification: '
'{}{}error={}.'.format(
self.mode,
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n%s', r.content)
has_error = True
else:
self.logger.info('Sent %s FCM notification.', self.mode)
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending FCM '
'notification.'
)
self.logger.debug('Socket Exception: %s', str(e))
has_error = True
return not has_error
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'mode': self.mode,
}
if self.keyfile:
# Include our keyfile if specified
params['keyfile'] = NotifyFCM.quote(
self.keyfile[0].url(privacy=privacy), safe='')
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
reference = NotifyFCM.quote(self.project) \
if self.mode == FCMMode.OAuth2 \
else self.pprint(self.apikey, privacy, safe='')
return '{schema}://{reference}/{targets}?{params}'.format(
schema=self.secure_protocol,
reference=reference,
targets='/'.join(
[NotifyFCM.quote(x) for x in self.targets]),
params=NotifyFCM.urlencode(params),
)
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# The apikey/project is stored in the hostname
results['apikey'] = NotifyFCM.unquote(results['host'])
results['project'] = results['apikey']
# Get our Device IDs
results['targets'] = NotifyFCM.split_path(results['fullpath'])
# Get our mode
results['mode'] = results['qsd'].get('mode')
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyFCM.parse_list(results['qsd']['to'])
# Our Project ID
if 'project' in results['qsd'] and results['qsd']['project']:
results['project'] = \
NotifyFCM.unquote(results['qsd']['project'])
# Our Web API Key
if 'apikey' in results['qsd'] and results['qsd']['apikey']:
results['apikey'] = \
NotifyFCM.unquote(results['qsd']['apikey'])
# Our Keyfile (JSON)
if 'keyfile' in results['qsd'] and results['qsd']['keyfile']:
results['keyfile'] = \
NotifyFCM.unquote(results['qsd']['keyfile'])
return results

View file

@ -0,0 +1,329 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# To generate a private key file for your service account:
#
# 1. In the Firebase console, open Settings > Service Accounts.
# 2. Click Generate New Private Key, then confirm by clicking Generate Key.
# 3. Securely store the JSON file containing the key.
import io
import requests
import base64
import json
import calendar
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives import asymmetric
from cryptography.exceptions import UnsupportedAlgorithm
from datetime import datetime
from datetime import timedelta
from ...logger import logger
try:
# Python 2.7
from urllib import urlencode as _urlencode
except ImportError:
# Python 3.x
from urllib.parse import urlencode as _urlencode
try:
# Python 3.x
from json.decoder import JSONDecodeError
except ImportError:
# Python v2.7 Backwards Compatibility support
JSONDecodeError = ValueError
class GoogleOAuth(object):
"""
A simplified OAuth implementation for Google's Firebase Cloud Messaging
"""
scopes = [
'https://www.googleapis.com/auth/firebase.messaging',
]
# 1 hour in seconds (the lifetime of our token)
access_token_lifetime_sec = timedelta(seconds=3600)
# The default URI to use if one is not found
default_token_uri = 'https://oauth2.googleapis.com/token'
# Taken right from google.auth.helpers:
clock_skew = timedelta(seconds=10)
def __init__(self, user_agent=None, timeout=(5, 4),
verify_certificate=True):
"""
Initialize our OAuth object
"""
# Whether or not to verify SSL
self.verify_certificate = verify_certificate
# Our (connect, read) timeout
self.request_timeout = timeout
# assign our user-agent if defined
self.user_agent = user_agent
# initialize our other object variables
self.__reset()
def __reset(self):
"""
Reset object internal variables
"""
# Google Keyfile Encoding
self.encoding = 'utf-8'
# Our retrieved JSON content (unmangled)
self.content = None
# Our generated key information we cache once loaded
self.private_key = None
# Our keys we build using the provided content
self.__refresh_token = None
self.__access_token = None
self.__access_token_expiry = datetime.utcnow()
def load(self, path):
"""
Generate our SSL details
"""
# Reset our objects
self.content = None
self.private_key = None
self.__access_token = None
self.__access_token_expiry = datetime.utcnow()
try:
with io.open(path, mode="r", encoding=self.encoding) as fp:
self.content = json.loads(fp.read())
except (OSError, IOError):
logger.debug('FCM keyfile {} could not be accessed'.format(path))
return False
except JSONDecodeError as e:
logger.debug(
'FCM keyfile {} generated a JSONDecodeError: {}'.format(
path, e))
return False
if not isinstance(self.content, dict):
logger.debug(
'FCM keyfile {} is incorrectly structured'.format(path))
self.__reset()
return False
# Verify we've got the correct tokens in our content to work with
is_valid = next((False for k in (
'client_email', 'private_key_id', 'private_key',
'type', 'project_id') if not self.content.get(k)), True)
if not is_valid:
logger.debug(
'FCM keyfile {} is missing required information'.format(path))
self.__reset()
return False
# Verify our service_account type
if self.content.get('type') != 'service_account':
logger.debug(
'FCM keyfile {} is not of type service_account'.format(path))
self.__reset()
return False
# Prepare our private key which is in PKCS8 PEM format
try:
self.private_key = serialization.load_pem_private_key(
self.content.get('private_key').encode(self.encoding),
password=None, backend=backends.default_backend())
except (TypeError, ValueError):
# ValueError: If the PEM data could not be decrypted or if its
# structure could not be decoded successfully.
# TypeError: If a password was given and the private key was
# not encrypted. Or if the key was encrypted but
# no password was supplied.
logger.error('FCM provided private key is invalid.')
self.__reset()
return False
except UnsupportedAlgorithm:
# If the serialized key is of a type that is not supported by
# the backend.
logger.error('FCM provided private key is not supported')
self.__reset()
return False
# We've done enough validation to move on
return True
@property
def access_token(self):
"""
Returns our access token (if it hasn't expired yet)
- if we do not have one we'll fetch one.
- if it expired, we'll renew it
- if a key simply can't be acquired, then we return None
"""
if not self.private_key or not self.content:
# invalid content (or not loaded)
logger.error(
'No FCM JSON keyfile content loaded to generate an access '
'token with.')
return None
if self.__access_token_expiry > datetime.utcnow():
# Return our non-expired key
return self.__access_token
# If we reach here we need to prepare our payload
token_uri = self.content.get('token_uri', self.default_token_uri)
service_email = self.content.get('client_email')
key_identifier = self.content.get('private_key_id')
# Generate our Assertion
now = datetime.utcnow()
expiry = now + self.access_token_lifetime_sec
payload = {
# The number of seconds since the UNIX epoch.
"iat": calendar.timegm(now.utctimetuple()),
"exp": calendar.timegm(expiry.utctimetuple()),
# The issuer must be the service account email.
"iss": service_email,
# The audience must be the auth token endpoint's URI
"aud": token_uri,
# Our token scopes
"scope": " ".join(self.scopes),
}
# JWT Details
header = {
'typ': 'JWT',
'alg': 'RS256' if isinstance(
self.private_key, asymmetric.rsa.RSAPrivateKey) else 'ES256',
# Key Identifier
'kid': key_identifier,
}
# Encode the header and payload as URL-safe base64, stripping padding.
segments = [
base64.urlsafe_b64encode(
json.dumps(header).encode(self.encoding)).rstrip(b"="),
base64.urlsafe_b64encode(
json.dumps(payload).encode(self.encoding)).rstrip(b"="),
]
signing_input = b".".join(segments)
signature = self.private_key.sign(
signing_input,
asymmetric.padding.PKCS1v15(),
hashes.SHA256(),
)
# Finally append our segment
segments.append(base64.urlsafe_b64encode(signature).rstrip(b"="))
assertion = b".".join(segments)
http_payload = _urlencode({
'assertion': assertion,
'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
})
http_headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
if self.user_agent:
http_headers['User-Agent'] = self.user_agent
logger.info('Refreshing FCM Access Token')
try:
r = requests.post(
token_uri,
data=http_payload,
headers=http_headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code != requests.codes.ok:
# We had a problem
logger.warning(
'Failed to update FCM Access Token error={}.'
.format(r.status_code))
logger.debug(
'Response Details:\r\n%s', r.content)
return None
except requests.RequestException as e:
logger.warning(
'A Connection error occurred refreshing FCM '
'Access Token.'
)
logger.debug('Socket Exception: %s', str(e))
return None
# If we get here, we made our request successfully, now we need
# to parse out the data
response = json.loads(r.content)
self.__access_token = response['access_token']
self.__refresh_token = response.get(
'refresh_token', self.__refresh_token)
if 'expires_in' in response:
delta = timedelta(seconds=int(response['expires_in']))
self.__access_token_expiry = \
delta + datetime.utcnow() - self.clock_skew
else:
# Allow some grace before we expire
self.__access_token_expiry = expiry - self.clock_skew
logger.debug(
'Access Token successfully acquired: %s', self.__access_token)
# Return our token
return self.__access_token
@property
def project_id(self):
"""
Returns the project id found in the file
"""
return None if not self.content \
else self.content.get('project_id')
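
A minimal sketch of how NotifyFCM drives this class, assuming the module is importable as apprise.plugins.NotifyFCM.oauth (as the relative imports above suggest) and that the path points at a valid service account JSON export:
from apprise.plugins.NotifyFCM.oauth import GoogleOAuth
oauth = GoogleOAuth(user_agent='my-app', timeout=(5, 4))
if oauth.load('/path/to/service_account.json'):
    # project_id is read straight from the keyfile content
    print(oauth.project_id)
    # the first access_token call performs the JWT assertion exchange;
    # the result is cached until shortly before it expires
    print(oauth.access_token)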

View file

@ -284,7 +284,7 @@ class NotifyGitter(NotifyBase):
# By default set wait to None
wait = None
if self.ratelimit_remaining == 0:
if self.ratelimit_remaining <= 0:
# Determine how long we should wait for or if we should wait at
# all. This isn't fool-proof because we can't be sure the client
# time (calling this script) is completely synced up with the

View file

@ -78,8 +78,19 @@ class NotifyGnome(NotifyBase):
A wrapper for local Gnome Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_GNOME_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'details': _('A local Gnome environment is required.')
}
# The default descriptive name associated with the Notification
service_name = 'Gnome Notification'
service_name = _('Gnome Notification')
# The service URL
service_url = 'https://www.gnome.org/'
# The default protocol
protocol = 'gnome'
@ -102,15 +113,6 @@ class NotifyGnome(NotifyBase):
# cause any title (if defined) to get placed into the message body.
title_maxlen = 0
# This entry is a bit hacky, but it allows us to unit-test this library
# in an environment that simply doesn't have the gnome packages
# available to us. It also allows us to handle situations where the
# packages actually are present but we need to test that they aren't.
# If anyone is seeing this had knows a better way of testing this
# outside of what is defined in test/test_gnome_plugin.py, please
# let me know! :)
_enabled = NOTIFY_GNOME_SUPPORT_ENABLED
# Define object templates
templates = (
'{schema}://',
@ -157,11 +159,6 @@ class NotifyGnome(NotifyBase):
Perform Gnome Notification
"""
if not self._enabled:
self.logger.warning(
"Gnome Notifications are not supported by this system.")
return False
try:
# App initialization
Notify.init(self.app_id)

View file

@ -0,0 +1,315 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# For this to work correctly you need to create a webhook. You'll also
# need a GSuite account (there are free trials if you don't have one)
#
# - Open Google Chat in your browser:
# Link: https://chat.google.com/
# - Go to the room to which you want to add a bot.
# - From the room menu at the top of the page, select Manage webhooks.
# - Provide it a name and optional avatar and click SAVE
# - Copy the URL listed next to your new webhook in the Webhook URL column.
# - Click outside the dialog box to close.
#
# When you've completed these steps, you'll get a URL that looks a little
# like this:
# https://chat.googleapis.com/v1/spaces/AAAAk6lGXyM/\
# messages?key=AIzaSyDdI0hCZtE6vySjMm-WEfRq3CPzqKqqsHI&\
# token=O7b1nyri_waOpLMSzbFILAGRzgtQofPW71fEEXKcyFk%3D
#
# Simplified, it looks like this:
# https://chat.googleapis.com/v1/spaces/WORKSPACE/messages?\
# key=WEBHOOK_KEY&token=WEBHOOK_TOKEN
#
# This plugin will simply work using the url of:
# gchat://WORKSPACE/WEBHOOK_KEY/WEBHOOK_TOKEN
#
# API Documentation on Webhooks:
# - https://developers.google.com/hangouts/chat/quickstart/\
# incoming-bot-python
# - https://developers.google.com/hangouts/chat/reference/rest
#
import re
import requests
from json import dumps
from .NotifyBase import NotifyBase
from ..common import NotifyFormat
from ..common import NotifyType
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
class NotifyGoogleChat(NotifyBase):
"""
A wrapper to Google Chat Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'Google Chat'
# The services URL
service_url = 'https://chat.google.com/'
# The default secure protocol
secure_protocol = 'gchat'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_googlechat'
# Google Chat Webhook
notify_url = 'https://chat.googleapis.com/v1/spaces/{workspace}/messages' \
'?key={key}&token={token}'
# Default Notify Format
notify_format = NotifyFormat.MARKDOWN
# A title can not be used for Google Chat Messages. Setting this to zero
# will cause any title (if defined) to get placed into the message body.
title_maxlen = 0
# The maximum allowable characters allowed in the body per message
body_maxlen = 4000
# Define object templates
templates = (
'{schema}://{workspace}/{webhook_key}/{webhook_token}',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'workspace': {
'name': _('Workspace'),
'type': 'string',
'private': True,
'required': True,
},
'webhook_key': {
'name': _('Webhook Key'),
'type': 'string',
'private': True,
'required': True,
},
'webhook_token': {
'name': _('Webhook Token'),
'type': 'string',
'private': True,
'required': True,
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'workspace': {
'alias_of': 'workspace',
},
'key': {
'alias_of': 'webhook_key',
},
'token': {
'alias_of': 'webhook_token',
},
})
def __init__(self, workspace, webhook_key, webhook_token, **kwargs):
"""
Initialize Google Chat Object
"""
super(NotifyGoogleChat, self).__init__(**kwargs)
# Workspace (associated with project)
self.workspace = validate_regex(workspace)
if not self.workspace:
msg = 'An invalid Google Chat Workspace ' \
'({}) was specified.'.format(workspace)
self.logger.warning(msg)
raise TypeError(msg)
# Webhook Key (associated with project)
self.webhook_key = validate_regex(webhook_key)
if not self.webhook_key:
msg = 'An invalid Google Chat Webhook Key ' \
'({}) was specified.'.format(webhook_key)
self.logger.warning(msg)
raise TypeError(msg)
# Webhook Token (associated with project)
self.webhook_token = validate_regex(webhook_token)
if not self.webhook_token:
msg = 'An invalid Google Chat Webhook Token ' \
'({}) was specified.'.format(webhook_token)
self.logger.warning(msg)
raise TypeError(msg)
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform Google Chat Notification
"""
# Our headers
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json; charset=utf-8',
}
payload = {
# Our Message
'text': body,
}
# Construct Notify URL
notify_url = self.notify_url.format(
workspace=self.workspace,
key=self.webhook_key,
token=self.webhook_token,
)
self.logger.debug('Google Chat POST URL: %s (cert_verify=%r)' % (
notify_url, self.verify_certificate,
))
self.logger.debug('Google Chat Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
notify_url,
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code not in (
requests.codes.ok, requests.codes.no_content):
# We had a problem
status_str = \
NotifyBase.http_response_code_lookup(r.status_code)
self.logger.warning(
'Failed to send Google Chat notification: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug('Response Details:\r\n{}'.format(r.content))
# Return; we're done
return False
else:
self.logger.info('Sent Google Chat notification.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred posting to Google Chat.')
self.logger.debug('Socket Exception: %s' % str(e))
return False
return True
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Set our parameters
params = self.url_parameters(privacy=privacy, *args, **kwargs)
return '{schema}://{workspace}/{key}/{token}/?{params}'.format(
schema=self.secure_protocol,
workspace=self.pprint(self.workspace, privacy, safe=''),
key=self.pprint(self.webhook_key, privacy, safe=''),
token=self.pprint(self.webhook_token, privacy, safe=''),
params=NotifyGoogleChat.urlencode(params),
)
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
Syntax:
gchat://workspace/webhook_key/webhook_token
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# Store our Workspace
results['workspace'] = NotifyGoogleChat.unquote(results['host'])
# Acquire our tokens
tokens = NotifyGoogleChat.split_path(results['fullpath'])
# Store our Webhook Key
results['webhook_key'] = tokens.pop(0) if tokens else None
# Store our Webhook Token
results['webhook_token'] = tokens.pop(0) if tokens else None
# Support arguments as overrides (if specified)
if 'workspace' in results['qsd']:
results['workspace'] = \
NotifyGoogleChat.unquote(results['qsd']['workspace'])
if 'key' in results['qsd']:
results['webhook_key'] = \
NotifyGoogleChat.unquote(results['qsd']['key'])
if 'token' in results['qsd']:
results['webhook_token'] = \
NotifyGoogleChat.unquote(results['qsd']['token'])
return results
@staticmethod
def parse_native_url(url):
"""
Support
https://chat.googleapis.com/v1/spaces/{workspace}/messages
'?key={key}&token={token}
"""
result = re.match(
r'^https://chat\.googleapis\.com/v1/spaces/'
r'(?P<workspace>[A-Z0-9_-]+)/messages/*(?P<params>.+)$',
url, re.I)
if result:
return NotifyGoogleChat.parse_url(
'{schema}://{workspace}/{params}'.format(
schema=NotifyGoogleChat.secure_protocol,
workspace=result.group('workspace'),
params=result.group('params')))
return None
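
A usage sketch of the new plugin (WORKSPACE, WEBHOOK_KEY and WEBHOOK_TOKEN are the placeholders from the notes above, not real credentials):
import apprise
apobj = apprise.Apprise()
# the gchat:// form assembled from the webhook URL pieces described above
apobj.add('gchat://WORKSPACE/WEBHOOK_KEY/WEBHOOK_TOKEN')
# the raw https://chat.googleapis.com/... webhook URL is also accepted and
# is mapped onto the same form by parse_native_url() above
apobj.notify(body='Build finished')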

View file

@ -35,7 +35,7 @@ import requests
from json import dumps
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..common import NotifyType, NotifyFormat
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
@ -182,6 +182,13 @@ class NotifyGotify(NotifyBase):
'message': body,
}
if self.notify_format == NotifyFormat.MARKDOWN:
payload["extras"] = {
"client::display": {
"contentType": "text/markdown"
}
}
# Our headers
headers = {
'User-Agent': self.app_id,
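
A sketch of how the new branch is reached (hostname and token are placeholders; format=markdown is the standard Apprise argument that selects NotifyFormat.MARKDOWN):
import apprise
apobj = apprise.Apprise()
# format=markdown sets notify_format, which now adds the
# client::display / text/markdown extras to the payload
apobj.add('gotify://hostname/token?format=markdown')
apobj.notify(body='**bold** and _plain_ markdown')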

View file

@ -68,6 +68,13 @@ class NotifyGrowl(NotifyBase):
A wrapper to Growl Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_GROWL_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'packages_required': 'gntp'
}
# The default descriptive name associated with the Notification
service_name = 'Growl'
@ -84,15 +91,6 @@ class NotifyGrowl(NotifyBase):
# Allows the user to specify the NotifyImageSize object
image_size = NotifyImageSize.XY_72
# This entry is a bit hacky, but it allows us to unit-test this library
# in an environment that simply doesn't have the windows packages
# available to us. It also allows us to handle situations where the
# packages actually are present but we need to test that they aren't.
# If anyone is seeing this had knows a better way of testing this
# outside of what is defined in test/test_growl_plugin.py, please
# let me know! :)
_enabled = NOTIFY_GROWL_SUPPORT_ENABLED
# Disable throttle rate for Growl requests since they are normally
# local anyway
request_rate_per_sec = 0
@ -251,13 +249,6 @@ class NotifyGrowl(NotifyBase):
"""
Perform Growl Notification
"""
if not self._enabled:
self.logger.warning(
"Growl Notifications are not supported by this system; "
"`pip install gntp`.")
return False
# Register ourselves with the server if we haven't done so already
if not self.growl and not self.register():
# We failed to register
@ -395,15 +386,27 @@ class NotifyGrowl(NotifyBase):
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
_map = {
# Letter Assignments
'l': GrowlPriority.LOW,
'm': GrowlPriority.MODERATE,
'n': GrowlPriority.NORMAL,
'h': GrowlPriority.HIGH,
'e': GrowlPriority.EMERGENCY,
'lo': GrowlPriority.LOW,
'me': GrowlPriority.MODERATE,
'no': GrowlPriority.NORMAL,
'hi': GrowlPriority.HIGH,
'em': GrowlPriority.EMERGENCY,
# Support 3rd Party Documented Scale
'-2': GrowlPriority.LOW,
'-1': GrowlPriority.MODERATE,
'0': GrowlPriority.NORMAL,
'1': GrowlPriority.HIGH,
'2': GrowlPriority.EMERGENCY,
}
try:
results['priority'] = \
_map[results['qsd']['priority'][0].lower()]
_map[results['qsd']['priority'][0:2].lower()]
except KeyError:
# No priority was set
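
A standalone sketch of the widened lookup: slicing the first two characters lets both the word form and the documented numeric scale resolve to the same entries (plain strings stand in for the GrowlPriority constants):
_map = {
    'lo': 'LOW', 'me': 'MODERATE', 'no': 'NORMAL', 'hi': 'HIGH',
    'em': 'EMERGENCY',
    '-2': 'LOW', '-1': 'MODERATE', '0': 'NORMAL',
    '1': 'HIGH', '2': 'EMERGENCY',
}
for value in ('high', 'moderate', '-2', '2'):
    # 'high'[0:2] == 'hi', '-2'[0:2] == '-2'
    print(value, '->', _map[value[0:2].lower()])
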

View file

@ -0,0 +1,310 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# You must generate a "Long-Lived Access Token". This can be done from your
# Home Assistant Profile page.
import requests
from json import dumps
from uuid import uuid4
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
class NotifyHomeAssistant(NotifyBase):
"""
A wrapper for Home Assistant Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'HomeAssistant'
# The services URL
service_url = 'https://www.home-assistant.io/'
# Insecure Protocol Access
protocol = 'hassio'
# Secure Protocol
secure_protocol = 'hassios'
# Default to Home Assistant Default Insecure port of 8123 instead of 80
default_insecure_port = 8123
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_homeassistant'
# Define object templates
templates = (
'{schema}://{host}/{accesstoken}',
'{schema}://{host}:{port}/{accesstoken}',
'{schema}://{user}@{host}/{accesstoken}',
'{schema}://{user}@{host}:{port}/{accesstoken}',
'{schema}://{user}:{password}@{host}/{accesstoken}',
'{schema}://{user}:{password}@{host}:{port}/{accesstoken}',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'host': {
'name': _('Hostname'),
'type': 'string',
'required': True,
},
'port': {
'name': _('Port'),
'type': 'int',
'min': 1,
'max': 65535,
},
'user': {
'name': _('Username'),
'type': 'string',
},
'password': {
'name': _('Password'),
'type': 'string',
'private': True,
},
'accesstoken': {
'name': _('Long-Lived Access Token'),
'type': 'string',
'private': True,
'required': True,
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'nid': {
# Optional Unique Notification ID
'name': _('Notification ID'),
'type': 'string',
'regex': (r'^[a-f0-9_-]+$', 'i'),
},
})
def __init__(self, accesstoken, nid=None, **kwargs):
"""
Initialize Home Assistant Object
"""
super(NotifyHomeAssistant, self).__init__(**kwargs)
self.fullpath = kwargs.get('fullpath', '')
if not (self.secure or self.port):
# Use default insecure port
self.port = self.default_insecure_port
# Long-Lived Access token (generated from User Profile)
self.accesstoken = validate_regex(accesstoken)
if not self.accesstoken:
msg = 'An invalid Home Assistant Long-Lived Access Token ' \
'({}) was specified.'.format(accesstoken)
self.logger.warning(msg)
raise TypeError(msg)
# An Optional Notification Identifier
self.nid = None
if nid:
self.nid = validate_regex(
nid, *self.template_args['nid']['regex'])
if not self.nid:
msg = 'An invalid Home Assistant Notification Identifier ' \
'({}) was specified.'.format(nid)
self.logger.warning(msg)
raise TypeError(msg)
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Sends Message
"""
# Prepare our persistent_notification.create payload
payload = {
'title': title,
'message': body,
# Use a unique ID so we don't over-write the last message
# we posted. Otherwise use the notification id specified
'notification_id': self.nid if self.nid else str(uuid4()),
}
# Prepare our headers
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json',
'Authorization': 'Bearer {}'.format(self.accesstoken),
}
auth = None
if self.user:
auth = (self.user, self.password)
# Set our schema
schema = 'https' if self.secure else 'http'
url = '{}://{}'.format(schema, self.host)
if isinstance(self.port, int):
url += ':%d' % self.port
url += '' if not self.fullpath else '/' + self.fullpath.strip('/')
url += '/api/services/persistent_notification/create'
self.logger.debug('Home Assistant POST URL: %s (cert_verify=%r)' % (
url, self.verify_certificate,
))
self.logger.debug('Home Assistant Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
url,
data=dumps(payload),
headers=headers,
auth=auth,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyHomeAssistant.http_response_code_lookup(
r.status_code)
self.logger.warning(
'Failed to send Home Assistant notification: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug('Response Details:\r\n{}'.format(r.content))
# Return; we're done
return False
else:
self.logger.info('Sent Home Assistant notification.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Home Assistant '
'notification to %s.' % self.host)
self.logger.debug('Socket Exception: %s' % str(e))
# Return; we're done
return False
return True
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {}
if self.nid:
params['nid'] = self.nid
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
# Determine Authentication
auth = ''
if self.user and self.password:
auth = '{user}:{password}@'.format(
user=NotifyHomeAssistant.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
)
elif self.user:
auth = '{user}@'.format(
user=NotifyHomeAssistant.quote(self.user, safe=''),
)
default_port = 443 if self.secure else self.default_insecure_port
url = '{schema}://{auth}{hostname}{port}{fullpath}' \
'{accesstoken}/?{params}'
return url.format(
schema=self.secure_protocol if self.secure else self.protocol,
auth=auth,
# never encode hostname since we're expecting it to be a valid one
hostname=self.host,
port='' if not self.port or self.port == default_port
else ':{}'.format(self.port),
fullpath='/' if not self.fullpath else '/{}/'.format(
NotifyHomeAssistant.quote(self.fullpath.strip('/'), safe='/')),
accesstoken=self.pprint(self.accesstoken, privacy, safe=''),
params=NotifyHomeAssistant.urlencode(params),
)
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# Get our Long-Lived Access Token
if 'accesstoken' in results['qsd'] and \
len(results['qsd']['accesstoken']):
results['accesstoken'] = \
NotifyHomeAssistant.unquote(results['qsd']['accesstoken'])
else:
# Acquire our full path
fullpath = NotifyHomeAssistant.split_path(results['fullpath'])
# Otherwise pop the last element from our path to be it
results['accesstoken'] = fullpath.pop() if fullpath else None
# Re-assemble our full path
results['fullpath'] = '/'.join(fullpath)
# Allow the specification of a unique notification_id so that
# it will always replace the last one sent.
if 'nid' in results['qsd'] and len(results['qsd']['nid']):
results['nid'] = \
NotifyHomeAssistant.unquote(results['qsd']['nid'])
return results
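
A usage sketch (hostname and token are placeholders; note that nid must match the [a-f0-9_-] pattern declared above):
import apprise
apobj = apprise.Apprise()
# hassio:// uses http on port 8123 by default; hassios:// uses https
apobj.add(
    'hassio://homeassistant.local/LONG_LIVED_ACCESS_TOKEN?nid=0badc0de')
apobj.notify(title='Backup', body='Nightly backup completed')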

View file

@ -325,6 +325,10 @@ class NotifyIFTTT(NotifyBase):
# Unquote our API Key
results['webhook_id'] = NotifyIFTTT.unquote(results['webhook_id'])
# Parse our add_token and del_token arguments (if specified)
results['add_token'] = results['qsd+']
results['del_token'] = results['qsd-']
# Our Event
results['events'] = list()
if results['user']:
@ -351,7 +355,7 @@ class NotifyIFTTT(NotifyBase):
result = re.match(
r'^https?://maker\.ifttt\.com/use/'
r'(?P<webhook_id>[A-Z0-9_-]+)'
r'/?(?P<events>([A-Z0-9_-]+/?)+)?'
r'((?P<events>(/[A-Z0-9_-]+)+))?'
r'/?(?P<params>\?.+)?$', url, re.I)
if result:
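
A standalone check of the updated expression (the webhook ID and event names below are made up):
import re
pattern = re.compile(
    r'^https?://maker\.ifttt\.com/use/'
    r'(?P<webhook_id>[A-Z0-9_-]+)'
    r'((?P<events>(/[A-Z0-9_-]+)+))?'
    r'/?(?P<params>\?.+)?$', re.I)
m = pattern.match('https://maker.ifttt.com/use/abcDEF123/my_event/other_event')
print(m.group('webhook_id'))  # abcDEF123
print(m.group('events'))      # /my_event/other_event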

View file

@ -25,6 +25,7 @@
import six
import requests
import base64
from json import dumps
from .NotifyBase import NotifyBase
@ -160,11 +161,50 @@ class NotifyJSON(NotifyBase):
params=NotifyJSON.urlencode(params),
)
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
def send(self, body, title='', notify_type=NotifyType.INFO, attach=None,
**kwargs):
"""
Perform JSON Notification
"""
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json'
}
# Apply any/all header over-rides defined
headers.update(self.headers)
# Track our potential attachments
attachments = []
if attach:
for attachment in attach:
# Perform some simple error checking
if not attachment:
# We could not access the attachment
self.logger.error(
'Could not access attachment {}.'.format(
attachment.url(privacy=True)))
return False
try:
with open(attachment.path, 'rb') as f:
# Attachment content is inlined using a base64 encoded
# (DataURL-style) structure:
attachments.append({
'filename': attachment.name,
'base64': base64.b64encode(f.read())
.decode('utf-8'),
'mimetype': attachment.mimetype,
})
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while reading {}.'.format(
attachment.name if attachment else 'attachment'))
self.logger.debug('I/O Exception: %s' % str(e))
return False
# prepare JSON Object
payload = {
# Version: Major.Minor, Major is only updated if the entire
@ -173,17 +213,10 @@ class NotifyJSON(NotifyBase):
'version': '1.0',
'title': title,
'message': body,
'attachments': attachments,
'type': notify_type,
}
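
A rough sketch of what one entry in the new 'attachments' list looks like on the wire (the file name and content below are made up):
import base64
from json import dumps
attachment = {
    'filename': 'report.txt',
    'base64': base64.b64encode(b'example file content').decode('utf-8'),
    'mimetype': 'text/plain',
}
payload = {
    'version': '1.0',
    'title': 'Nightly report',
    'message': 'See attached',
    'attachments': [attachment],
    'type': 'info',
}
print(dumps(payload, indent=2))
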
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json'
}
# Apply any/all header over-rides defined
headers.update(self.headers)
auth = None
if self.user:
auth = (self.user, self.password)
@ -259,8 +292,12 @@ class NotifyJSON(NotifyBase):
# Add our headers that the user can potentially over-ride if they wish
# to to our returned result set
results['headers'] = results['qsd-']
results['headers'].update(results['qsd+'])
results['headers'] = results['qsd+']
if results['qsd-']:
results['headers'].update(results['qsd-'])
NotifyBase.logger.deprecate(
"minus (-) based JSON header tokens are being "
" removed; use the plus (+) symbol instead.")
# Tidy our header entries by unquoting them
results['headers'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y)

View file

@ -32,13 +32,13 @@
# This provider does not accept +1 (for example) as a country code. You need
# to specify 001 instead.
#
import re
import requests
from json import loads
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
@ -68,9 +68,6 @@ KAVENEGAR_HTTP_ERROR_MAP = {
501: 'SMS can only be sent to the account holder number',
}
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class NotifyKavenegar(NotifyBase):
"""
@ -165,53 +162,31 @@ class NotifyKavenegar(NotifyBase):
self.source = None
if source is not None:
result = IS_PHONE_NO.match(source)
result = is_phone_no(source)
if not result:
msg = 'The Kavenegar source specified ({}) is invalid.'\
.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Further check our phone # for it's digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
msg = 'The MessageBird source # specified ({}) is invalid.'\
.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Store our source
self.source = result
self.source = result['full']
# Parse our targets
self.targets = list()
for target in parse_list(targets):
for target in parse_phone_no(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
# if it's less than 10, then we can assume it's
# a poorly specified phone no and spit a warning
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
self.logger.warning(
'Dropped invalid phone # ({}) specified.'.format(target))
if len(self.targets) == 0:
msg = 'There are no valid targets identified to notify.'
self.logger.warning(msg)
raise TypeError(msg)
# store valid phone number
self.targets.append(result['full'])
return
@ -220,6 +195,11 @@ class NotifyKavenegar(NotifyBase):
Sends SMS Message
"""
if len(self.targets) == 0:
# There were no services to notify
self.logger.warning('There were no Kavenegar targets to notify.')
return False
# error tracking (used for function return)
has_error = False
@ -364,7 +344,7 @@ class NotifyKavenegar(NotifyBase):
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyKavenegar.parse_list(results['qsd']['to'])
NotifyKavenegar.parse_phone_no(results['qsd']['to'])
if 'from' in results['qsd'] and len(results['qsd']['from']):
results['source'] = \
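
A brief sketch of the shared helper this change switches to; is_phone_no() returns False for garbage and a dictionary whose 'full' key carries the normalized number used above:
from apprise.utils import is_phone_no
result = is_phone_no('0018005550100')
if result:
    # 'full' is the digits-only, normalized number stored as the target
    print(result['full'])
else:
    print('dropped invalid phone number')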

View file

@ -27,15 +27,39 @@
# website. it can be done as follows:
# Cloud Mode:
# 1. Sign Up and login to the developer webpage https://developer.lametric.com
# 2. Create a **Notification App** if you haven't already done so from:
# https://developer.lametric.com/applications/sources
# 3. Provide it an app name, a description and privacy URL (which can point to
# anywhere; I set mine to `http://localhost`). No permissions are
# required.
# 4. Access your newly created app so that you can acquire both the
# **Client ID** and the **Client Secret** here:
# https://developer.lametric.com/applications/sources
# - Sign Up and login to the developer webpage https://developer.lametric.com
#
# - Create an **Indicator App** if you haven't already done so from here:
# https://developer.lametric.com/applications/sources
#
# There is a great official tutorial on how to do this here:
# https://lametric-documentation.readthedocs.io/en/latest/\
# guides/first-steps/first-lametric-indicator-app.html
#
# - Make sure to set the **Communication Type** to **PUSH**.
#
# - You will be able to **Publish** your app once you've finished setting it
# up. This will allow it to be accessible from the internet using the
# `cloud` mode of this Apprise Plugin. The **Publish** button shows up
# from within the settings of your Lametric App upon clicking on the
# **Draft Vx** folder (where `x` is the version - usually a 1)
#
# When you've completed the setup, the site will have provided you with a
# **PUSH URL** that
# looks like this:
# https://developer.lametric.com/api/v1/dev/widget/update/\
# com.lametric.{app_id}/{app_ver}
#
# You will need to record the `{app_id}` and `{app_ver}` to use the `cloud`
# mode.
#
# The same page should also provide you with an **Access Token**. It's
# approximately 86 characters with two equal (`=`) characters at the end of it.
# This becomes your `{app_token}`. Here is an example of what one might
# look like:
# K2MxWI0NzU0ZmI2NjJlZYTgViMDgDRiN8YjlmZjRmNTc4NDVhJzk0RiNjNh0EyKWW==`
#
# The syntax for the cloud mode is:
# * `lametric://{app_token}@{app_id}/{app_ver}?mode=cloud`
# Device Mode:
# - Sign Up and login to the developer webpage https://developer.lametric.com
@ -44,11 +68,14 @@
# - From here you can get your API Key for the device you plan to notify.
# - Your devices IP Address can be found in LaMetric Time app at:
# Settings -> Wi-Fi -> IP Address
#
# The syntax for the device mode is:
# * `lametric://{apikey}@{host}`
# A great source for API examples (Device Mode):
# - https://lametric-documentation.readthedocs.io/en/latest/reference-docs\
# /device-notifications.html
#
# A great source for API examples (Cloud Mode):
# - https://lametric-documentation.readthedocs.io/en/latest/reference-docs\
# /lametric-cloud-reference.html
@ -56,18 +83,26 @@
# A great source for the icon reference:
# - https://developer.lametric.com/icons
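
Putting the two documented syntaxes side by side (all identifiers below are placeholders; see the notes above for where the real values come from):
import apprise
apobj = apprise.Apprise()
# Device mode: the device API Key @ the device's IP address or hostname
apobj.add('lametric://DEVICE_API_KEY@192.168.1.20')
# Cloud mode: the ~86 character app access token @ app id / app version
apobj.add('lametric://APP_ACCESS_TOKEN@com.lametric.ABC123/1?mode=cloud')
apobj.notify(body='Build passed')
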
import re
import six
import requests
from json import dumps
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
from ..utils import is_hostname
from ..utils import is_ipaddr
# A URL Parser to detect App ID
LAMETRIC_APP_ID_DETECTOR_RE = re.compile(
r'(com\.lametric\.)?(?P<app_id>[0-9a-z.-]{1,64})'
r'(/(?P<app_ver>[1-9][0-9]*))?', re.I)
# Tokens are huge
LAMETRIC_IS_APP_TOKEN = re.compile(r'^[a-z0-9]{80,}==$', re.I)
class LametricMode(object):
"""
@ -295,7 +330,7 @@ class NotifyLametric(NotifyBase):
# URL used for notifying Lametric App's created in the Dev Portal
cloud_notify_url = 'https://developer.lametric.com/api/v1' \
'/dev/widget/update/com.lametric.{client_id}'
'/dev/widget/update/com.lametric.{app_id}/{app_ver}'
# URL used for local notifications directly to the device
device_notify_url = '{schema}://{host}{port}/api/v2/device/notifications'
@ -323,8 +358,9 @@ class NotifyLametric(NotifyBase):
# Define object templates
templates = (
# App Mode
'{schema}://{client_id}@{secret}',
# Cloud (App) Mode
'{schema}://{app_token}@{app_id}',
'{schema}://{app_token}@{app_id}/{app_ver}',
# Device Mode
'{schema}://{apikey}@{host}',
@ -334,11 +370,31 @@ class NotifyLametric(NotifyBase):
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
# Used for Local Device mode
'apikey': {
'name': _('Device API Key'),
'type': 'string',
'private': True,
},
# Used for Cloud mode
'app_id': {
'name': _('App ID'),
'type': 'string',
'private': True,
},
# Used for Cloud mode
'app_ver': {
'name': _('App Version'),
'type': 'string',
'regex': (r'^[1-9][0-9]*$', ''),
'default': '1',
},
# Used for Cloud mode
'app_token': {
'name': _('App Access Token'),
'type': 'string',
'regex': (r'^[A-Z0-9]{80,}==$', 'i'),
},
'host': {
'name': _('Hostname'),
'type': 'string',
@ -355,30 +411,22 @@ class NotifyLametric(NotifyBase):
'name': _('Username'),
'type': 'string',
},
'client_id': {
'name': _('Client ID'),
'type': 'string',
'private': True,
'regex': (r'^[a-z0-9-]+$', 'i'),
},
'secret': {
'name': _('Client Secret'),
'type': 'string',
'private': True,
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'oauth_id': {
'alias_of': 'client_id',
},
'oauth_secret': {
'alias_of': 'secret',
},
'apikey': {
'alias_of': 'apikey',
},
'app_id': {
'alias_of': 'app_id',
},
'app_ver': {
'alias_of': 'app_ver',
},
'app_token': {
'alias_of': 'app_token',
},
'priority': {
'name': _('Priority'),
'type': 'choice:string',
@ -414,9 +462,9 @@ class NotifyLametric(NotifyBase):
},
})
def __init__(self, apikey=None, client_id=None, secret=None, priority=None,
icon=None, icon_type=None, sound=None, mode=None,
cycles=None, **kwargs):
def __init__(self, apikey=None, app_token=None, app_id=None,
app_ver=None, priority=None, icon=None, icon_type=None,
sound=None, mode=None, cycles=None, **kwargs):
"""
Initialize LaMetric Object
"""
@ -426,41 +474,61 @@ class NotifyLametric(NotifyBase):
if isinstance(mode, six.string_types) \
else self.template_args['mode']['default']
# Default Cloud Argument
self.lametric_app_id = None
self.lametric_app_ver = None
self.lametric_app_access_token = None
# Default Device/Cloud Argument
self.lametric_apikey = None
if self.mode not in LAMETRIC_MODES:
msg = 'An invalid LaMetric Mode ({}) was specified.'.format(mode)
self.logger.warning(msg)
raise TypeError(msg)
# Default Cloud Arguments
self.secret = None
self.client_id = None
# Default Device Arguments
self.apikey = None
if self.mode == LametricMode.CLOUD:
# Client ID
self.client_id = validate_regex(
client_id, *self.template_tokens['client_id']['regex'])
if not self.client_id:
msg = 'An invalid LaMetric Client OAuth2 ID ' \
'({}) was specified.'.format(client_id)
try:
results = LAMETRIC_APP_ID_DETECTOR_RE.match(app_id)
except TypeError:
msg = 'An invalid LaMetric Application ID ' \
'({}) was specified.'.format(app_id)
self.logger.warning(msg)
raise TypeError(msg)
# Client Secret
self.secret = validate_regex(secret)
if not self.secret:
msg = 'An invalid LaMetric Client OAuth2 Secret ' \
'({}) was specified.'.format(secret)
# Detect our Access Token
self.lametric_app_access_token = validate_regex(
app_token,
*self.template_tokens['app_token']['regex'])
if not self.lametric_app_access_token:
msg = 'An invalid LaMetric Application Access Token ' \
'({}) was specified.'.format(app_token)
self.logger.warning(msg)
raise TypeError(msg)
else: # LametricMode.DEVICE
# If app_ver is specified, it over-rides all
if app_ver:
self.lametric_app_ver = validate_regex(
app_ver, *self.template_tokens['app_ver']['regex'])
if not self.lametric_app_ver:
msg = 'An invalid LaMetric Application Version ' \
'({}) was specified.'.format(app_ver)
self.logger.warning(msg)
raise TypeError(msg)
# API Key
self.apikey = validate_regex(apikey)
if not self.apikey:
else:
# If app_ver wasn't specified, we parse it from the
# Application ID
self.lametric_app_ver = results.group('app_ver') \
if results.group('app_ver') else \
self.template_tokens['app_ver']['default']
# Store our Application ID
self.lametric_app_id = results.group('app_id')
if self.mode == LametricMode.DEVICE:
self.lametric_apikey = validate_regex(apikey)
if not self.lametric_apikey:
msg = 'An invalid LaMetric Device API Key ' \
'({}) was specified.'.format(apikey)
self.logger.warning(msg)
@ -522,8 +590,7 @@ class NotifyLametric(NotifyBase):
# Update header entries
headers.update({
'X-Access-Token': self.secret,
'Cache-Control': 'no-cache',
'X-Access-Token': self.lametric_apikey,
})
if self.sound:
@ -555,12 +622,14 @@ class NotifyLametric(NotifyBase):
{
"icon": icon,
"text": body,
"index": 0,
}
]
}
# Prepare our Cloud Notify URL
notify_url = self.cloud_notify_url.format(client_id=self.client_id)
notify_url = self.cloud_notify_url.format(
app_id=self.lametric_app_id, app_ver=self.lametric_app_ver)
# Return request parameters
return (notify_url, None, payload)
@ -646,6 +715,7 @@ class NotifyLametric(NotifyBase):
'User-Agent': self.app_id,
'Content-Type': 'application/json',
'Accept': 'application/json',
'Cache-Control': 'no-cache',
}
# Depending on the mode, the payload is gathered by
@ -730,11 +800,12 @@ class NotifyLametric(NotifyBase):
if self.mode == LametricMode.CLOUD:
# Upstream/LaMetric App Return
return '{schema}://{client_id}@{secret}/?{params}'.format(
return '{schema}://{token}@{app_id}/{app_ver}/?{params}'.format(
schema=self.protocol,
client_id=self.pprint(self.client_id, privacy, safe=''),
secret=self.pprint(
self.secret, privacy, mode=PrivacyMode.Secret, safe=''),
token=self.pprint(
self.lametric_app_access_token, privacy, safe=''),
app_id=self.pprint(self.lametric_app_id, privacy, safe=''),
app_ver=NotifyLametric.quote(self.lametric_app_ver, safe=''),
params=NotifyLametric.urlencode(params))
#
@ -758,11 +829,11 @@ class NotifyLametric(NotifyBase):
if self.user and self.password:
auth = '{user}:{apikey}@'.format(
user=NotifyLametric.quote(self.user, safe=''),
apikey=self.pprint(self.apikey, privacy, safe=''),
apikey=self.pprint(self.lametric_apikey, privacy, safe=''),
)
else: # self.apikey is set
auth = '{apikey}@'.format(
apikey=self.pprint(self.apikey, privacy, safe=''),
apikey=self.pprint(self.lametric_apikey, privacy, safe=''),
)
# Local Return
@ -799,64 +870,91 @@ class NotifyLametric(NotifyBase):
results['user'] = None
# Priority Handling
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
results['priority'] = results['qsd']['priority'].strip().lower()
if 'priority' in results['qsd'] and results['qsd']['priority']:
results['priority'] = NotifyLametric.unquote(
results['qsd']['priority'].strip().lower())
# Icon Type
if 'icon' in results['qsd'] and len(results['qsd']['icon']):
results['icon'] = results['qsd']['icon'].strip().lower()
if 'icon' in results['qsd'] and results['qsd']['icon']:
results['icon'] = NotifyLametric.unquote(
results['qsd']['icon'].strip().lower())
# Icon Type
if 'icon_type' in results['qsd'] and len(results['qsd']['icon_type']):
results['icon_type'] = results['qsd']['icon_type'].strip().lower()
if 'icon_type' in results['qsd'] and results['qsd']['icon_type']:
results['icon_type'] = NotifyLametric.unquote(
results['qsd']['icon_type'].strip().lower())
# Sound
if 'sound' in results['qsd'] and len(results['qsd']['sound']):
results['sound'] = results['qsd']['sound'].strip().lower()
# We can detect the mode based on the validity of the hostname
results['mode'] = LametricMode.DEVICE \
if (is_hostname(results['host']) or
is_ipaddr(results['host'])) else LametricMode.CLOUD
# Mode override
if 'mode' in results['qsd'] and len(results['qsd']['mode']):
results['mode'] = NotifyLametric.unquote(results['qsd']['mode'])
if 'sound' in results['qsd'] and results['qsd']['sound']:
results['sound'] = NotifyLametric.unquote(
results['qsd']['sound'].strip().lower())
# API Key (Device Mode)
if 'apikey' in results['qsd'] and results['qsd']['apikey']:
# Extract API Key from an argument
results['apikey'] = \
NotifyLametric.unquote(results['qsd']['apikey'])
# App ID
if 'app' in results['qsd'] \
and results['qsd']['app']:
# Extract the App ID from an argument
results['app_id'] = \
NotifyLametric.unquote(results['qsd']['app'])
# App Version
if 'app_ver' in results['qsd'] \
and results['qsd']['app_ver']:
# Extract the App ID from an argument
results['app_ver'] = \
NotifyLametric.unquote(results['qsd']['app_ver'])
if 'token' in results['qsd'] and results['qsd']['token']:
# Extract Application Access Token from an argument
results['app_token'] = \
NotifyLametric.unquote(results['qsd']['token'])
# Mode override
if 'mode' in results['qsd'] and results['qsd']['mode']:
results['mode'] = NotifyLametric.unquote(
results['qsd']['mode'].strip().lower())
else:
# We can try to detect the mode based on the validity of the
# hostname. We can also scan the validity of the Application
# Access token
#
# This isn't a surefire way to do things though; it's best to
# specify the mode= flag
results['mode'] = LametricMode.DEVICE \
if ((is_hostname(results['host']) or
is_ipaddr(results['host'])) and
# make sure password is not an Access Token
(results['password'] and not
LAMETRIC_IS_APP_TOKEN.match(results['password'])) and
# Scan for app_ flags
next((f for f in results.keys() \
if f.startswith('app_')), None) is None) \
else LametricMode.CLOUD
# Handle defaults if not set
if results['mode'] == LametricMode.DEVICE:
if 'apikey' in results['qsd'] and len(results['qsd']['apikey']):
# Extract API Key from an argument
results['apikey'] = \
NotifyLametric.unquote(results['qsd']['apikey'])
else:
# Device Mode Defaults
if 'apikey' not in results:
results['apikey'] = \
NotifyLametric.unquote(results['password'])
elif results['mode'] == LametricMode.CLOUD:
# OAuth2 ID (Cloud Mode)
if 'oauth_id' in results['qsd'] \
and len(results['qsd']['oauth_id']):
# Extract the OAuth2 Key from an argument
results['client_id'] = \
NotifyLametric.unquote(results['qsd']['oauth_id'])
else:
results['client_id'] = \
NotifyLametric.unquote(results['password'])
# OAuth2 Secret (Cloud Mode)
if 'oauth_secret' in results['qsd'] and \
len(results['qsd']['oauth_secret']):
# Extract the API Secret from an argument
results['secret'] = \
NotifyLametric.unquote(results['qsd']['oauth_secret'])
else:
results['secret'] = \
else:
# CLOUD Mode Defaults
if 'app_id' not in results:
results['app_id'] = \
NotifyLametric.unquote(results['host'])
if 'app_token' not in results:
results['app_token'] = \
NotifyLametric.unquote(results['password'])
# Set cycles
try:
@@ -867,3 +965,38 @@ class NotifyLametric(NotifyBase):
pass
return results
@staticmethod
def parse_native_url(url):
"""
Support
https://developer.lametric.com/api/v1/dev/\
widget/update/com.lametric.{APP_ID}/1
https://developer.lametric.com/api/v1/dev/\
widget/update/com.lametric.{APP_ID}/{APP_VER}
"""
# If users do provide the Native URL they will also want to add
# ?token={APP_ACCESS_TOKEN} to the parameters at the end or the
# URL will fail to load in later stages.
result = re.match(
r'^http(?P<secure>s)?://(?P<host>[^/]+)'
r'/api/(?P<api_ver>v[1-9]*[0-9]+)'
r'/dev/widget/update/'
r'com\.lametric\.(?P<app_id>[0-9a-z.-]{1,64})'
r'(/(?P<app_ver>[1-9][0-9]*))?/?'
r'(?P<params>\?.+)?$', url, re.I)
if result:
return NotifyLametric.parse_url(
'{schema}://{app_id}{app_ver}/{params}'.format(
schema=NotifyLametric.secure_protocol
if result.group('secure') else NotifyLametric.protocol,
app_id=result.group('app_id'),
app_ver='/{}'.format(result.group('app_ver'))
if result.group('app_ver') else '',
params='' if not result.group('params')
else result.group('params')))
return None
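# Illustrative usage sketch (not part of this commit; identifiers below are
# placeholders only): assuming Apprise routes unrecognised https:// URLs
# through each plugin's parse_native_url() as usual, the developer URL form
# can be handed to Apprise directly, provided the Application Access Token is
# appended as ?token= exactly as the note above describes. A real Application
# ID and Access Token are still required for a notification to succeed.
import apprise

apobj = apprise.Apprise()
apobj.add('https://developer.lametric.com/api/v1/dev/widget/update/'
          'com.lametric.myclock/1?token=example-access-token')
apobj.notify(body='Hello from Apprise', title='LaMetric')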

View file

@@ -0,0 +1,536 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# PAHO MQTT Documentation:
# https://www.eclipse.org/paho/index.php?page=clients/python/docs/index.php
#
# Looking at the PAHO MQTT Source can help shed light on what's going on too
# as their inline documentation is pretty good!
# https://github.com/eclipse/paho.mqtt.python\
# /blob/master/src/paho/mqtt/client.py
import ssl
import re
import six
from time import sleep
from datetime import datetime
from os.path import isfile
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import parse_bool
from ..AppriseLocale import gettext_lazy as _
# Default our global support flag
NOTIFY_MQTT_SUPPORT_ENABLED = False
if six.PY2:
# handle Python v2.7 support
class ConnectionError(Exception):
pass
try:
# 3rd party modules
import paho.mqtt.client as mqtt
# We're good to go!
NOTIFY_MQTT_SUPPORT_ENABLED = True
MQTT_PROTOCOL_MAP = {
# v3.1.1
"311": mqtt.MQTTv311,
# v3.1
"31": mqtt.MQTTv31,
# v5.0
"5": mqtt.MQTTv5,
# v5.0 (alias)
"50": mqtt.MQTTv5,
}
except ImportError:
# No problem; we just simply can't support this plugin because the
# paho-mqtt package is not installed.
MQTT_PROTOCOL_MAP = {}
# A lookup map for relaying version to user
HUMAN_MQTT_PROTOCOL_MAP = {
"v3.1.1": "311",
"v3.1": "31",
"v5.0": "5",
}
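# Small sketch of the normalisation used later in __init__(): the human
# readable strings above are reduced to their digits before being looked up
# in MQTT_PROTOCOL_MAP, e.g. "v3.1.1" -> "311" and "v5.0" -> "50" (hence the
# "50" alias registered above). The mapping values are only the assumption of
# this illustration; the lookup itself happens further below.
import re

for human, digits in (("v3.1.1", "311"), ("v3.1", "31"), ("v5.0", "50")):
    # Strip every non-digit character, exactly as __init__() does
    assert re.sub(r'[^0-9]+', '', human) == digits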
class NotifyMQTT(NotifyBase):
"""
A wrapper for MQTT Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_MQTT_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'packages_required': 'paho-mqtt'
}
# The default descriptive name associated with the Notification
service_name = 'MQTT Notification'
# The default protocol
protocol = 'mqtt'
# Secure protocol
secure_protocol = 'mqtts'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mqtt'
# MQTT does not have a title
title_maxlen = 0
# The maximum length a body can be set to
body_maxlen = 268435455
# Use a throttle; but it doesn't need to be so strict since most
# MQTT server hostings can handle the small bursts of packets and are
# locally hosted anyway
request_rate_per_sec = 0.5
# Port Defaults (unless otherwise specified)
mqtt_insecure_port = 1883
# The default secure port to use (if mqtts://)
mqtt_secure_port = 8883
# The default mqtt keepalive value
mqtt_keepalive = 30
# The default mqtt transport
mqtt_transport = "tcp"
# The number of seconds to wait between checks to see whether a
# queued publish has been sent yet.
mqtt_block_time_sec = 0.2
# Set the maximum number of messages with QoS>0 that can be part way
# through their network flow at once.
mqtt_inflight_messages = 200
# Taken from https://golang.org/src/crypto/x509/root_linux.go
CA_CERTIFICATE_FILE_LOCATIONS = [
# Debian/Ubuntu/Gentoo etc.
"/etc/ssl/certs/ca-certificates.crt",
# Fedora/RHEL 6
"/etc/pki/tls/certs/ca-bundle.crt",
# OpenSUSE
"/etc/ssl/ca-bundle.pem",
# OpenELEC
"/etc/pki/tls/cacert.pem",
# CentOS/RHEL 7
"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
]
# Define object templates
templates = (
'{schema}://{user}@{host}/{topic}',
'{schema}://{user}@{host}:{port}/{topic}',
'{schema}://{user}:{password}@{host}/{topic}',
'{schema}://{user}:{password}@{host}:{port}/{topic}',
)
template_tokens = dict(NotifyBase.template_tokens, **{
'host': {
'name': _('Hostname'),
'type': 'string',
'required': True,
},
'port': {
'name': _('Port'),
'type': 'int',
'min': 1,
'max': 65535,
},
'user': {
'name': _('User Name'),
'type': 'string',
'required': True,
},
'password': {
'name': _('Password'),
'type': 'string',
'private': True,
'required': True,
},
'topic': {
'name': _('Target Queue'),
'type': 'string',
'map_to': 'targets',
},
'targets': {
'name': _('Targets'),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'to': {
'alias_of': 'targets',
},
'qos': {
'name': _('QOS'),
'type': 'int',
'default': 0,
'min': 0,
'max': 2,
},
'version': {
'name': _('Version'),
'type': 'choice:string',
'values': HUMAN_MQTT_PROTOCOL_MAP,
'default': "v3.1.1",
},
'client_id': {
'name': _('Client ID'),
'type': 'string',
},
'session': {
'name': _('Use Session'),
'type': 'bool',
'default': False,
},
})
def __init__(self, targets=None, version=None, qos=None,
client_id=None, session=None, **kwargs):
"""
Initialize MQTT Object
"""
super(NotifyMQTT, self).__init__(**kwargs)
# Initialize topics
self.topics = parse_list(targets)
if version is None:
self.version = self.template_args['version']['default']
else:
self.version = version
# Save our client id if specified
self.client_id = client_id
# Maintain our session (associated with our user id if set)
self.session = self.template_args['session']['default'] \
if session is None or not self.client_id \
else parse_bool(session)
# Set up our Quality of Service (QoS)
try:
self.qos = self.template_args['qos']['default'] \
if qos is None else int(qos)
if self.qos < self.template_args['qos']['min'] \
or self.qos > self.template_args['qos']['max']:
# Let the error be caught by the exception handler below
raise ValueError("")
except (ValueError, TypeError):
msg = 'An invalid MQTT QOS ({}) was specified.'.format(qos)
self.logger.warning(msg)
raise TypeError(msg)
if not self.port:
# Assign port (if not otherwise set)
self.port = self.mqtt_secure_port \
if self.secure else self.mqtt_insecure_port
self.ca_certs = None
if self.secure:
# verify SSL key or abort
self.ca_certs = next(
(cert for cert in self.CA_CERTIFICATE_FILE_LOCATIONS
if isfile(cert)), None)
# Set up our MQTT Publisher
try:
# Get our protocol
self.mqtt_protocol = \
MQTT_PROTOCOL_MAP[re.sub(r'[^0-9]+', '', self.version)]
except KeyError:
msg = 'An invalid MQTT Protocol version ' \
'({}) was specified.'.format(version)
self.logger.warning(msg)
raise TypeError(msg)
# Our MQTT Client Object
self.client = mqtt.Client(
client_id=self.client_id,
clean_session=not self.session, userdata=None,
protocol=self.mqtt_protocol, transport=self.mqtt_transport,
)
# Our maximum number of in-flight messages
self.client.max_inflight_messages_set(self.mqtt_inflight_messages)
# Toggled to False once our connection has been established at least
# once
self.__initial_connect = True
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform MQTT Notification
"""
if len(self.topics) == 0:
# There were no topics to notify
self.logger.warning('There were no MQTT topics to notify.')
return False
# For logging:
url = '{host}:{port}'.format(host=self.host, port=self.port)
try:
if self.__initial_connect:
# Our initial connection
if self.user:
self.client.username_pw_set(
self.user, password=self.password)
if self.secure:
if self.ca_certs is None:
self.logger.warning(
'MQTT Secure communication cannot be verified; '
'no local CA certificate file')
return False
self.client.tls_set(
ca_certs=self.ca_certs, certfile=None, keyfile=None,
cert_reqs=ssl.CERT_REQUIRED,
tls_version=ssl.PROTOCOL_TLS,
ciphers=None)
# Set our TLS Verify Flag
self.client.tls_insecure_set(self.verify_certificate)
# Establish our connection
if self.client.connect(
self.host, port=self.port,
keepalive=self.mqtt_keepalive) \
!= mqtt.MQTT_ERR_SUCCESS:
self.logger.warning(
'An MQTT connection could not be established for {}'.
format(url))
return False
# Start our client loop
self.client.loop_start()
# Throttle our start otherwise the initial handshaking doesn't
# work. I'm not sure if this is a bug or not, but with qos=0,
# and without this sleep(), the messages randomly fail to be
# delivered.
sleep(0.01)
# Toggle our flag since we never need to enter this area again
self.__initial_connect = False
# Create a copy of the topics list
topics = list(self.topics)
has_error = False
while len(topics) > 0 and not has_error:
# Retrieve our topic
topic = topics.pop()
# For logging:
url = '{host}:{port}/{topic}'.format(
host=self.host,
port=self.port,
topic=topic)
# Always call throttle before any remote server i/o is made
self.throttle()
# handle a re-connection
if not self.client.is_connected() and \
self.client.reconnect() != mqtt.MQTT_ERR_SUCCESS:
self.logger.warning(
'An MQTT connection could not be sustained for {}'.
format(url))
has_error = True
break
# Some Debug Logging
self.logger.debug('MQTT POST URL: {} (cert_verify={})'.format(
url, self.verify_certificate))
self.logger.debug('MQTT Payload: %s' % str(body))
result = self.client.publish(
topic, payload=body, qos=self.qos, retain=False)
if result.rc != mqtt.MQTT_ERR_SUCCESS:
# Toggle our status
self.logger.warning(
'An error (rc={}) occurred when sending MQTT to {}'.
format(result.rc, url))
has_error = True
break
elif not result.is_published():
self.logger.debug(
'Blocking until MQTT payload is published...')
reference = datetime.now()
while not has_error and not result.is_published():
# Throttle
sleep(self.mqtt_block_time_sec)
# Our own throttle so we can abort eventually....
elapsed = (datetime.now() - reference).total_seconds()
if elapsed >= self.socket_read_timeout:
self.logger.warning(
'The MQTT message could not be delivered')
has_error = True
# if we reach here; we're at the bottom of our loop
# we loop around and do the next topic now
except ConnectionError as e:
self.logger.warning(
'MQTT Connection Error received from {}'.format(url))
self.logger.debug('Socket Exception: %s' % str(e))
return False
except ssl.CertificateError as e:
self.logger.warning(
'MQTT SSL Certificate Error received from {}'.format(url))
self.logger.debug('Socket Exception: %s' % str(e))
return False
except ValueError as e:
# ValueErrors are thrown by the publish() call if there is a problem
self.logger.warning(
'MQTT Publishing error received from {}'.format(url))
self.logger.debug('Socket Exception: %s' % str(e))
return False
return not has_error
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'version': self.version,
'qos': str(self.qos),
'session': 'yes' if self.session else 'no',
}
if self.client_id:
# Our client id is set if specified
params['client_id'] = self.client_id
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
# Determine Authentication
auth = ''
if self.user and self.password:
auth = '{user}:{password}@'.format(
user=NotifyMQTT.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
)
elif self.user:
auth = '{user}@'.format(
user=NotifyMQTT.quote(self.user, safe=''),
)
default_port = self.mqtt_secure_port \
if self.secure else self.mqtt_insecure_port
return '{schema}://{auth}{hostname}{port}/{targets}?{params}'.format(
schema=self.secure_protocol if self.secure else self.protocol,
auth=auth,
# never encode hostname since we're expecting it to be a valid one
hostname=self.host,
port='' if self.port is None or self.port == default_port
else ':{}'.format(self.port),
targets=','.join(
[NotifyMQTT.quote(x, safe='/') for x in self.topics]),
params=NotifyMQTT.urlencode(params),
)
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url)
if not results:
# We're done early as we couldn't load the results
return results
try:
# Acquire topic(s)
results['targets'] = parse_list(
NotifyMQTT.unquote(results['fullpath'].lstrip('/')))
except AttributeError:
# No 'fullpath' specified
results['targets'] = []
# The MQTT protocol version to use
if 'version' in results['qsd'] and len(results['qsd']['version']):
results['version'] = \
NotifyMQTT.unquote(results['qsd']['version'])
# The MQTT Client ID
if 'client_id' in results['qsd'] and len(results['qsd']['client_id']):
results['client_id'] = \
NotifyMQTT.unquote(results['qsd']['client_id'])
if 'session' in results['qsd'] and len(results['qsd']['session']):
results['session'] = parse_bool(results['qsd']['session'])
# The MQTT Quality of Service to use
if 'qos' in results['qsd'] and len(results['qsd']['qos']):
results['qos'] = \
NotifyMQTT.unquote(results['qsd']['qos'])
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'].extend(
NotifyMQTT.parse_list(results['qsd']['to']))
# return results
return results
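# Usage sketch (illustrative; hostname, credentials and topic names are
# placeholders): once paho-mqtt is installed the schemas above become
# available, and a URL such as the following publishes the notification body
# to the named topic. Multiple topics may be supplied comma separated, and
# the qos= and version= arguments map onto the options parsed above.
import apprise

apobj = apprise.Apprise()
apobj.add('mqtts://user:password@broker.example.com/status/alerts'
          '?qos=1&version=v3.1.1')
apobj.notify(body='disk space is running low')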

View file

@@ -31,18 +31,15 @@
# Get details on the API used in this plugin here:
# - https://world.msg91.com/apidoc/textsms/send-sms.php
import re
import requests
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class MSG91Route(object):
"""
@@ -207,33 +204,18 @@ class NotifyMSG91(NotifyBase):
# Parse our targets
self.targets = list()
for target in parse_list(targets):
for target in parse_phone_no(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
if not self.targets:
# We have no targets to notify
msg = 'No MSG91 targets to notify.'
self.logger.warning(msg)
raise TypeError(msg)
# store valid phone number
self.targets.append(result['full'])
return
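# Quick sketch of the helper swap above (illustrative): is_phone_no() returns
# a dictionary for anything that can be normalised into a phone number (the
# 'full' key carries the digits used as the target) and False otherwise; the
# exact dictionary layout is an assumption based on its use in this diff, and
# the sample numbers are placeholders.
from apprise.utils import is_phone_no

for candidate in ('+1 555 123 4567', 'not-a-number'):
    result = is_phone_no(candidate)
    print(candidate, '->', result['full'] if result else 'dropped (invalid)')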
@@ -242,6 +224,11 @@ class NotifyMSG91(NotifyBase):
Perform MSG91 Notification
"""
if len(self.targets) == 0:
# There were no targets to notify
self.logger.warning('There were no MSG91 targets to notify.')
return False
# Prepare our headers
headers = {
'User-Agent': self.app_id,
@@ -365,6 +352,6 @@ class NotifyMSG91(NotifyBase):
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyMSG91.parse_list(results['qsd']['to'])
NotifyMSG91.parse_phone_no(results['qsd']['to'])
return results

View file

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
@@ -43,26 +43,39 @@
#
# When you've completed this, it will generate you a (webhook) URL that
# looks like:
# https://outlook.office.com/webhook/ \
# https://team-name.webhook.office.com/webhookb2/ \
# abcdefgf8-2f4b-4eca-8f61-225c83db1967@abcdefg2-5a99-4849-8efc-\
# c9e78d28e57d/IncomingWebhook/291289f63a8abd3593e834af4d79f9fe/\
# a2329f43-0ffb-46ab-948b-c9abdad9d643
#
# Yes... The URL is that big... But it looks like this (greatly simplified):
# https://TEAM-NAME.webhook.office.com/webhookb2/ABCD/IncomingWebhook/DEFG/HIJK
# ^ ^ ^ ^
# | | | |
# These are important <--------------------------^--------------------^----^
#
# The Legacy format didn't have the team name identified and reads 'outlook'
# While this still works, consider that Microsoft will be dropping support
# for this soon, so you may need to update your IncomingWebhook. Here is
# what a legacy URL looked like:
# https://outlook.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK
# ^ ^ ^
# ^ ^ ^ ^
# | | | |
# legacy team reference: 'outlook' | | |
# | | |
# These are important <--------------^--------------------^----^
#
# You'll notice that the first token is actually 2 separated by an @ symbol
# But let's just ignore that and assume it's one great big token instead.
#
# These 3 tokens is what you'll need to build your URL with:
# msteams://ABCD/DEFG/HIJK
# These 3 tokens need to be placed in the URL after the Team
# msteams://TEAM/ABCD/DEFG/HIJK
#
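# Illustrative example (every token below is a placeholder, not a working
# webhook): with the new layout the team name leads the three tokens.
# parse_native_url() further down also accepts the raw webhook URL itself,
# so either form can be handed to Apprise.
import apprise

apobj = apprise.Apprise()
apobj.add('msteams://acme/11111111-2222-3333-4444-555555555555@'
          '66666666-7777-8888-9999-000000000000/'
          'a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6/'
          '11112222-3333-4444-5555-666677778888/')
apobj.notify(body='Build finished', title='CI')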
import re
import requests
from json import dumps
import json
from .NotifyBase import NotifyBase
from ..common import NotifyImageSize
@@ -70,11 +83,17 @@ from ..common import NotifyType
from ..common import NotifyFormat
from ..utils import parse_bool
from ..utils import validate_regex
from ..utils import apply_template
from ..utils import TemplateType
from ..AppriseAttachment import AppriseAttachment
from ..AppriseLocale import gettext_lazy as _
# Used to prepare our UUID regex matching
UUID4_RE = \
r'[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}'
try:
from json.decoder import JSONDecodeError
except ImportError:
# Python v2.7 Backwards Compatibility support
JSONDecodeError = ValueError
class NotifyMSTeams(NotifyBase):
@@ -95,7 +114,12 @@ class NotifyMSTeams(NotifyBase):
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_msteams'
# MSTeams uses the http protocol with JSON requests
notify_url = 'https://outlook.office.com/webhook'
notify_url_v1 = 'https://outlook.office.com/webhook/' \
'{token_a}/IncomingWebhook/{token_b}/{token_c}'
# New MSTeams webhook (as of April 11th, 2021)
notify_url_v2 = 'https://{team}.webhook.office.com/webhookb2/' \
'{token_a}/IncomingWebhook/{token_b}/{token_c}'
# Allows the user to specify the NotifyImageSize object
image_size = NotifyImageSize.XY_72
@@ -106,13 +130,28 @@ class NotifyMSTeams(NotifyBase):
# Default Notification Format
notify_format = NotifyFormat.MARKDOWN
# There is no reason we should exceed 35KB when reading in a JSON file.
# If it is more than this, then it is not accepted
max_msteams_template_size = 35000
# Define object templates
templates = (
'{schema}://{token_a}/{token_b}{token_c}',
# New required format
'{schema}://{team}/{token_a}/{token_b}/{token_c}',
# Deprecated
'{schema}://{token_a}/{token_b}/{token_c}',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
# The Microsoft Team Name
'team': {
'name': _('Team Name'),
'type': 'string',
'required': True,
'regex': (r'^[A-Z0-9_-]+$', 'i'),
},
# Token required as part of the API request
# /AAAAAAAAA@AAAAAAAAA/........./.........
'token_a': {
@@ -120,7 +159,7 @@ class NotifyMSTeams(NotifyBase):
'type': 'string',
'private': True,
'required': True,
'regex': (r'^{}@{}$'.format(UUID4_RE, UUID4_RE), 'i'),
'regex': (r'^[A-Z0-9-]+@[A-Z0-9-]+$', 'i'),
},
# Token required as part of the API request
# /................../BBBBBBBBB/..........
@@ -129,7 +168,7 @@ class NotifyMSTeams(NotifyBase):
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[A-Za-z0-9]{32}$', 'i'),
'regex': (r'^[a-z0-9]+$', 'i'),
},
# Token required as part of the API request
# /........./........./CCCCCCCCCCCCCCCCCCCCCCCC
@@ -138,7 +177,7 @@ class NotifyMSTeams(NotifyBase):
'type': 'string',
'private': True,
'required': True,
'regex': (r'^{}$'.format(UUID4_RE), 'i'),
'regex': (r'^[a-z0-9-]+$', 'i'),
},
})
@@ -150,15 +189,67 @@ class NotifyMSTeams(NotifyBase):
'default': False,
'map_to': 'include_image',
},
'version': {
'name': _('Version'),
'type': 'choice:int',
'values': (1, 2),
'default': 2,
},
'template': {
'name': _('Template Path'),
'type': 'string',
'private': True,
},
})
def __init__(self, token_a, token_b, token_c, include_image=True,
**kwargs):
# Define our token control
template_kwargs = {
'tokens': {
'name': _('Template Tokens'),
'prefix': ':',
},
}
def __init__(self, token_a, token_b, token_c, team=None, version=None,
include_image=True, template=None, tokens=None, **kwargs):
"""
Initialize Microsoft Teams Object
You can optionally specify a template and identify the arguments you
wish to populate your template with when posting. Some reserved
template arguments that cannot be overridden are:
`body`, `title`, and `type`.
"""
super(NotifyMSTeams, self).__init__(**kwargs)
try:
self.version = int(version)
except TypeError:
# None was specified... take on default
self.version = self.template_args['version']['default']
except ValueError:
# invalid content was provided; let this get caught in the next
# validation check for the version
self.version = None
if self.version not in self.template_args['version']['values']:
msg = 'An invalid MSTeams Version ' \
'({}) was specified.'.format(version)
self.logger.warning(msg)
raise TypeError(msg)
self.team = validate_regex(team)
if not self.team:
NotifyBase.logger.deprecate(
"Apprise requires you to identify your Microsoft Team name as "
"part of the URL. e.g.: "
"msteams://TEAM-NAME/{token_a}/{token_b}/{token_c}")
# Fallback
self.team = 'outlook'
self.token_a = validate_regex(
token_a, *self.template_tokens['token_a']['regex'])
if not self.token_a:
@@ -186,8 +277,120 @@ class NotifyMSTeams(NotifyBase):
# Place a thumbnail image inline with the message body
self.include_image = include_image
# Our template object is just an AppriseAttachment object
self.template = AppriseAttachment(asset=self.asset)
if template:
# Add our definition to our template
self.template.add(template)
# Enforce maximum file size
self.template[0].max_file_size = self.max_msteams_template_size
# Template functionality
self.tokens = {}
if isinstance(tokens, dict):
self.tokens.update(tokens)
elif tokens:
msg = 'The specified MSTeams Template Tokens ' \
'({}) are not identified as a dictionary.'.format(tokens)
self.logger.warning(msg)
raise TypeError(msg)
# else: NoneType - this is okay
return
def gen_payload(self, body, title='', notify_type=NotifyType.INFO,
**kwargs):
"""
This function generates our payload whether it be the generic one
Apprise generates by default, or one provided by a specified
external template.
"""
# Acquire our to-be footer icon if configured to do so
image_url = None if not self.include_image \
else self.image_url(notify_type)
if not self.template:
# By default we use a generic working payload if there was
# no template specified
payload = {
"@type": "MessageCard",
"@context": "https://schema.org/extensions",
"summary": self.app_desc,
"themeColor": self.color(notify_type),
"sections": [
{
"activityImage": None,
"activityTitle": title,
"text": body,
},
]
}
if image_url:
payload['sections'][0]['activityImage'] = image_url
return payload
# If our code reaches here, then we generate the payload from our template
template = self.template[0]
if not template:
# We could not access the attachment
self.logger.error(
'Could not access MSTeam template {}.'.format(
template.url(privacy=True)))
return False
# Take a copy of our token dictionary
tokens = self.tokens.copy()
# Apply some defaults template values
tokens['app_body'] = body
tokens['app_title'] = title
tokens['app_type'] = notify_type
tokens['app_id'] = self.app_id
tokens['app_desc'] = self.app_desc
tokens['app_color'] = self.color(notify_type)
tokens['app_image_url'] = image_url
tokens['app_url'] = self.app_url
# Enforce Application mode
tokens['app_mode'] = TemplateType.JSON
try:
with open(template.path, 'r') as fp:
content = json.loads(apply_template(fp.read(), **tokens))
except (OSError, IOError):
self.logger.error(
'MSTeam template {} could not be read.'.format(
template.url(privacy=True)))
return None
except JSONDecodeError as e:
self.logger.error(
'MSTeam template {} contains invalid JSON.'.format(
template.url(privacy=True)))
self.logger.debug('JSONDecodeError: {}'.format(e))
return None
# Load our JSON data (if valid)
has_error = False
if '@type' not in content:
self.logger.error(
'MSTeam template {} is missing @type kwarg.'.format(
template.url(privacy=True)))
has_error = True
if '@context' not in content:
self.logger.error(
'MSTeam template {} is missing @context kwarg.'.format(
template.url(privacy=True)))
has_error = True
return content if not has_error else None
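# Minimal sketch of the templating step above (values are hypothetical, and
# it assumes apply_template() performs simple {{token}} substitution as the
# code suggests): a template is plain JSON whose placeholders are filled in
# from the reserved app_* entries plus any ':key=value' URL arguments that
# land in self.tokens.
import json
from apprise.utils import apply_template, TemplateType

raw = json.dumps({
    "@type": "MessageCard",
    "@context": "https://schema.org/extensions",
    "title": "{{app_title}}",
    "text": "{{app_body}} (environment: {{environment}})",
})
tokens = {
    'app_title': 'CI',
    'app_body': 'Build finished',
    'environment': 'staging',
    'app_mode': TemplateType.JSON,
}
# The substituted result should parse back as valid JSON
print(json.loads(apply_template(raw, **tokens)))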
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform Microsoft Teams Notification
@@ -198,37 +401,27 @@ class NotifyMSTeams(NotifyBase):
'Content-Type': 'application/json',
}
url = '%s/%s/IncomingWebhook/%s/%s' % (
self.notify_url,
self.token_a,
self.token_b,
self.token_c,
)
notify_url = self.notify_url_v2.format(
team=self.team,
token_a=self.token_a,
token_b=self.token_b,
token_c=self.token_c,
) if self.version > 1 else \
self.notify_url_v1.format(
token_a=self.token_a,
token_b=self.token_b,
token_c=self.token_c)
# Prepare our payload
payload = {
"@type": "MessageCard",
"@context": "https://schema.org/extensions",
"summary": self.app_desc,
"themeColor": self.color(notify_type),
"sections": [
{
"activityImage": None,
"activityTitle": title,
"text": body,
},
]
}
# Acquire our to-be footer icon if configured to do so
image_url = None if not self.include_image \
else self.image_url(notify_type)
if image_url:
payload['sections'][0]['activityImage'] = image_url
# Generate our payload if it's possible
payload = self.gen_payload(
body=body, title=title, notify_type=notify_type, **kwargs)
if not payload:
# No need to present a reason; that will come from the
# gen_payload() function itself
return False
self.logger.debug('MSTeams POST URL: %s (cert_verify=%r)' % (
url, self.verify_certificate,
notify_url, self.verify_certificate,
))
self.logger.debug('MSTeams Payload: %s' % str(payload))
@@ -236,8 +429,8 @@ class NotifyMSTeams(NotifyBase):
self.throttle()
try:
r = requests.post(
url,
data=dumps(payload),
notify_url,
data=json.dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
@@ -283,17 +476,38 @@ class NotifyMSTeams(NotifyBase):
'image': 'yes' if self.include_image else 'no',
}
if self.version != self.template_args['version']['default']:
params['version'] = str(self.version)
if self.template:
params['template'] = NotifyMSTeams.quote(
self.template[0].url(), safe='')
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
# Store any template entries if specified
params.update({':{}'.format(k): v for k, v in self.tokens.items()})
return '{schema}://{token_a}/{token_b}/{token_c}/'\
'?{params}'.format(
schema=self.secure_protocol,
token_a=self.pprint(self.token_a, privacy, safe=''),
token_b=self.pprint(self.token_b, privacy, safe=''),
token_c=self.pprint(self.token_c, privacy, safe=''),
params=NotifyMSTeams.urlencode(params),
)
if self.version > 1:
return '{schema}://{team}/{token_a}/{token_b}/{token_c}/'\
'?{params}'.format(
schema=self.secure_protocol,
team=NotifyMSTeams.quote(self.team, safe=''),
token_a=self.pprint(self.token_a, privacy, safe=''),
token_b=self.pprint(self.token_b, privacy, safe=''),
token_c=self.pprint(self.token_c, privacy, safe=''),
params=NotifyMSTeams.urlencode(params),
)
else: # Version 1
return '{schema}://{token_a}/{token_b}/{token_c}/'\
'?{params}'.format(
schema=self.secure_protocol,
token_a=self.pprint(self.token_a, privacy, safe='@'),
token_b=self.pprint(self.token_b, privacy, safe=''),
token_c=self.pprint(self.token_c, privacy, safe=''),
params=NotifyMSTeams.urlencode(params),
)
@staticmethod
def parse_url(url):
@@ -302,6 +516,7 @@ class NotifyMSTeams(NotifyBase):
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
@@ -310,6 +525,7 @@ class NotifyMSTeams(NotifyBase):
# Get unquoted entries
entries = NotifyMSTeams.split_path(results['fullpath'])
# Deprecated mode (backwards compatibility)
if results.get('user'):
# If a user was found, it's because it's still part of the first
# token, so we concatenate them
@@ -319,42 +535,62 @@ class NotifyMSTeams(NotifyBase):
)
else:
# The first token is stored in the hostname
results['token_a'] = NotifyMSTeams.unquote(results['host'])
# Get the Team from the hostname
results['team'] = NotifyMSTeams.unquote(results['host'])
# Now fetch the remaining tokens
try:
results['token_b'] = entries.pop(0)
# Get the token from the path
results['token_a'] = None if not entries \
else NotifyMSTeams.unquote(entries.pop(0))
except IndexError:
# We're done
results['token_b'] = None
try:
results['token_c'] = entries.pop(0)
except IndexError:
# We're done
results['token_c'] = None
results['token_b'] = None if not entries \
else NotifyMSTeams.unquote(entries.pop(0))
results['token_c'] = None if not entries \
else NotifyMSTeams.unquote(entries.pop(0))
# Get Image
results['include_image'] = \
parse_bool(results['qsd'].get('image', True))
# Get Team name if defined
if 'team' in results['qsd'] and results['qsd']['team']:
results['team'] = \
NotifyMSTeams.unquote(results['qsd']['team'])
# Template Handling
if 'template' in results['qsd'] and results['qsd']['template']:
results['template'] = \
NotifyMSTeams.unquote(results['qsd']['template'])
# Override version if defined
if 'version' in results['qsd'] and results['qsd']['version']:
results['version'] = \
NotifyMSTeams.unquote(results['qsd']['version'])
else:
# Set our version if not otherwise set
results['version'] = 1 if not results.get('team') else 2
# Store our tokens
results['tokens'] = results['qsd:']
return results
@staticmethod
def parse_native_url(url):
"""
Support:
Legacy Support:
https://outlook.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK
New Hook Support:
https://team-name.webhook.office.com/webhookb2/ABCD/IncomingWebhook/DEFG/HIJK
"""
# We don't need to do incredibly detailed token matching as the purpose
# of this is just to detect that we're dealing with an msteams url
# token parsing will occur once we initialize the function
result = re.match(
r'^https?://outlook\.office\.com/webhook/'
r'^https?://(?P<team>[^.]+)(?P<v2a>\.webhook)?\.office\.com/'
r'webhook(?P<v2b>b2)?/'
r'(?P<token_a>[A-Z0-9-]+@[A-Z0-9-]+)/'
r'IncomingWebhook/'
r'(?P<token_b>[A-Z0-9]+)/'
@@ -362,13 +598,28 @@ class NotifyMSTeams(NotifyBase):
r'(?P<params>\?.+)?$', url, re.I)
if result:
return NotifyMSTeams.parse_url(
'{schema}://{token_a}/{token_b}/{token_c}/{params}'.format(
schema=NotifyMSTeams.secure_protocol,
token_a=result.group('token_a'),
token_b=result.group('token_b'),
token_c=result.group('token_c'),
params='' if not result.group('params')
else result.group('params')))
if result.group('v2a'):
# Version 2 URL
return NotifyMSTeams.parse_url(
'{schema}://{team}/{token_a}/{token_b}/{token_c}'
'/{params}'.format(
schema=NotifyMSTeams.secure_protocol,
team=result.group('team'),
token_a=result.group('token_a'),
token_b=result.group('token_b'),
token_c=result.group('token_c'),
params='' if not result.group('params')
else result.group('params')))
else:
# Version 1 URLs
# team is also set to 'outlook' in this case
return NotifyMSTeams.parse_url(
'{schema}://{token_a}/{token_b}/{token_c}'
'/{params}'.format(
schema=NotifyMSTeams.secure_protocol,
token_a=result.group('token_a'),
token_b=result.group('token_b'),
token_c=result.group('token_c'),
params='' if not result.group('params')
else result.group('params')))
return None

View file

@@ -36,6 +36,19 @@ from ..common import NotifyType
from ..utils import parse_bool
from ..AppriseLocale import gettext_lazy as _
# Default our global support flag
NOTIFY_MACOSX_SUPPORT_ENABLED = False
if platform.system() == 'Darwin':
# Check this is Mac OS X 10.8, or higher
major, minor = platform.mac_ver()[0].split('.')[:2]
# Toggle our enabled flag if version is correct and executable
# found. This is done in such a way to provide verbosity to the
# end user so they know why it may or may not work for them.
NOTIFY_MACOSX_SUPPORT_ENABLED = \
(int(major) > 10 or (int(major) == 10 and int(minor) >= 8))
class NotifyMacOSX(NotifyBase):
"""
@@ -44,8 +57,22 @@ class NotifyMacOSX(NotifyBase):
Source: https://github.com/julienXX/terminal-notifier
"""
# Set our global enabled flag
enabled = NOTIFY_MACOSX_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'details': _(
'Only works with Mac OS X 10.8 and higher. Additionally '
'requires that /usr/local/bin/terminal-notifier is locally '
'accessible.')
}
# The default descriptive name associated with the Notification
service_name = 'MacOSX Notification'
service_name = _('MacOSX Notification')
# The services URL
service_url = 'https://github.com/julienXX/terminal-notifier'
# The default protocol
protocol = 'macosx'
@@ -100,31 +127,8 @@ class NotifyMacOSX(NotifyBase):
# or not.
self.include_image = include_image
self._enabled = False
if platform.system() == 'Darwin':
# Check this is Mac OS X 10.8, or higher
major, minor = platform.mac_ver()[0].split('.')[:2]
# Toggle our _enabled flag if verion is correct and executable
# found. This is done in such a way to provide verbosity to the
# end user so they know why it may or may not work for them.
if not (int(major) > 10 or (int(major) == 10 and int(minor) >= 8)):
self.logger.warning(
"MacOSX Notifications require your OS to be at least "
"v10.8 (detected {}.{})".format(major, minor))
elif not os.access(self.notify_path, os.X_OK):
self.logger.warning(
"MacOSX Notifications require '{}' to be in place."
.format(self.notify_path))
else:
# We're good to go
self._enabled = True
# Set sound object (no q/a for now)
self.sound = sound
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
@@ -132,9 +136,10 @@ class NotifyMacOSX(NotifyBase):
Perform MacOSX Notification
"""
if not self._enabled:
if not os.access(self.notify_path, os.X_OK):
self.logger.warning(
"MacOSX Notifications are not supported by this system.")
"MacOSX Notifications require '{}' to be in place."
.format(self.notify_path))
return False
# Start with our notification path
@@ -160,6 +165,9 @@ class NotifyMacOSX(NotifyBase):
# Always call throttle before any remote server i/o is made
self.throttle()
# Capture some output for helpful debugging later on
self.logger.debug('MacOSX CMD: {}'.format(' '.join(cmd)))
# Send our notification
output = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
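# Usage sketch (macOS only; illustrative): no meaningful host component is
# needed for this schema, so the '_' below is just a placeholder, and it is
# assumed that a sound= URL argument maps onto the sound option stored above.
# terminal-notifier still has to be present at the path checked in send()
# for the call to succeed.
import apprise

apobj = apprise.Apprise()
apobj.add('macosx://_/?sound=default')
apobj.notify(body='Backup finished', title='Apprise')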

View file

@@ -52,10 +52,12 @@
# then it will also become the 'to' address as well.
#
import requests
from email.utils import formataddr
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import parse_list
from ..common import NotifyFormat
from ..utils import parse_emails
from ..utils import parse_bool
from ..utils import is_email
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
@@ -111,9 +113,16 @@ class NotifyMailgun(NotifyBase):
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mailgun'
# Default Notify Format
notify_format = NotifyFormat.HTML
# The default region to use if one isn't otherwise specified
mailgun_default_region = MailgunRegion.US
# The maximum amount of emails that can reside within a single
# batch transfer
default_batch_size = 2000
# Define object templates
templates = (
'{schema}://{user}@{host}:{apikey}/',
@@ -161,9 +170,35 @@ class NotifyMailgun(NotifyBase):
'to': {
'alias_of': 'targets',
},
'cc': {
'name': _('Carbon Copy'),
'type': 'list:string',
},
'bcc': {
'name': _('Blind Carbon Copy'),
'type': 'list:string',
},
'batch': {
'name': _('Batch Mode'),
'type': 'bool',
'default': False,
},
})
def __init__(self, apikey, targets, from_name=None, region_name=None,
# Define any kwargs we're using
template_kwargs = {
'headers': {
'name': _('Email Header'),
'prefix': '+',
},
'tokens': {
'name': _('Template Tokens'),
'prefix': ':',
},
}
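# Illustrative URL sketch (credentials and addresses are placeholders): '+'
# prefixed arguments become extra header entries and ':' prefixed arguments
# become template tokens, matching the prefixes declared above; they are
# later folded into the payload as 'h:' and 'v:' values respectively.
import apprise

apobj = apprise.Apprise()
apobj.add('mailgun://noreply@example.com/my-api-key/'
          'user@example.com/?+X-Campaign=weekly&:firstname=Jane&batch=no')
apobj.notify(body='Weekly update', title='Newsletter')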
def __init__(self, apikey, targets, cc=None, bcc=None, from_name=None,
region_name=None, headers=None, tokens=None, batch=False,
**kwargs):
"""
Initialize Mailgun Object
@@ -184,8 +219,30 @@ class NotifyMailgun(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
# Parse our targets
self.targets = parse_list(targets)
# Acquire Email 'To'
self.targets = list()
# Acquire Carbon Copies
self.cc = set()
# Acquire Blind Carbon Copies
self.bcc = set()
# For tracking our email -> name lookups
self.names = {}
self.headers = {}
if headers:
# Store our extra headers
self.headers.update(headers)
self.tokens = {}
if tokens:
# Store our template tokens
self.tokens.update(tokens)
# Prepare Batch Mode Flag
self.batch = batch
# Store our region
try:
@@ -214,29 +271,146 @@ class NotifyMailgun(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
if targets:
# Validate recipients (to:) and drop bad ones:
for recipient in parse_emails(targets):
result = is_email(recipient)
if result:
self.targets.append(
(result['name'] if result['name'] else False,
result['full_email']))
continue
self.logger.warning(
'Dropped invalid To email '
'({}) specified.'.format(recipient),
)
else:
# If our target email list is empty we want to add ourselves to it
self.targets.append(
(self.from_name if self.from_name else False, self.from_addr))
# Validate recipients (cc:) and drop bad ones:
for recipient in parse_emails(cc):
email = is_email(recipient)
if email:
self.cc.add(email['full_email'])
# Index our name (if one exists)
self.names[email['full_email']] = \
email['name'] if email['name'] else False
continue
self.logger.warning(
'Dropped invalid Carbon Copy email '
'({}) specified.'.format(recipient),
)
# Validate recipients (bcc:) and drop bad ones:
for recipient in parse_emails(bcc):
email = is_email(recipient)
if email:
self.bcc.add(email['full_email'])
# Index our name (if one exists)
self.names[email['full_email']] = \
email['name'] if email['name'] else False
continue
self.logger.warning(
'Dropped invalid Blind Carbon Copy email '
'({}) specified.'.format(recipient),
)
def send(self, body, title='', notify_type=NotifyType.INFO, attach=None,
**kwargs):
"""
Perform Mailgun Notification
"""
if not self.targets:
# There is no one to email; we're done
self.logger.warning(
'There are no Email recipients to notify')
return False
# error tracking (used for function return)
has_error = False
# Send in batches if identified to do so
batch_size = 1 if not self.batch else self.default_batch_size
# Prepare our headers
headers = {
'User-Agent': self.app_id,
'Accept': 'application/json',
}
# Track our potential files
files = {}
if attach:
for idx, attachment in enumerate(attach):
# Perform some simple error checking
if not attachment:
# We could not access the attachment
self.logger.error(
'Could not access attachment {}.'.format(
attachment.url(privacy=True)))
return False
self.logger.debug(
'Preparing Mailgun attachment {}'.format(
attachment.url(privacy=True)))
try:
files['attachment[{}]'.format(idx)] = \
(attachment.name, open(attachment.path, 'rb'))
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while opening {}.'.format(
attachment.name if attachment
else 'attachment'))
self.logger.debug('I/O Exception: %s' % str(e))
# tidy up any open files before we make our early
# return
for entry in files.values():
self.logger.trace(
'Closing attachment {}'.format(entry[0]))
entry[1].close()
return False
try:
reply_to = formataddr(
(self.from_name if self.from_name else False,
self.from_addr), charset='utf-8')
except TypeError:
# Python v2.x Support (no charset keyword)
# Format our From address to support the Name field
reply_to = formataddr(
(self.from_name if self.from_name else False,
self.from_addr))
# Prepare our payload
payload = {
'from': '{name} <{addr}>'.format(
name=self.app_id if not self.from_name else self.from_name,
addr=self.from_addr),
# pass skip-verification switch upstream too
'o:skip-verification': not self.verify_certificate,
# Base payload options
'from': reply_to,
'subject': title,
'text': body,
}
if self.notify_format == NotifyFormat.HTML:
payload['html'] = body
else:
payload['text'] = body
# Prepare our URL as it's based on our hostname
url = '{}{}/messages'.format(
MAILGUN_API_LOOKUP[self.region_name], self.host)
@@ -244,22 +418,106 @@ class NotifyMailgun(NotifyBase):
# Create a copy of the targets list
emails = list(self.targets)
if len(emails) == 0:
# No email specified; use the from
emails.append(self.from_addr)
for index in range(0, len(emails), batch_size):
# Initialize our cc list
cc = (self.cc - self.bcc)
while len(emails):
# Get our email to notify
email = emails.pop(0)
# Initialize our bcc list
bcc = set(self.bcc)
# Prepare our user
payload['to'] = '{} <{}>'.format(email, email)
# Initialize our to list
to = list()
# Ensure we're pointed to the head of the attachment; this doesn't
# do much for the first iteration through this loop as we're
# already pointing there..., but it allows us to re-use the
# attachment over and over again without closing and then
# re-opening the same file again and again
for entry in files.values():
try:
self.logger.trace(
'Seeking to head of attachment {}'.format(entry[0]))
entry[1].seek(0)
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred seeking to head of attachment '
'{}.'.format(entry[0]))
self.logger.debug('I/O Exception: %s' % str(e))
# tidy up any open files before we make our early
# return
for entry in files.values():
self.logger.trace(
'Closing attachment {}'.format(entry[0]))
entry[1].close()
return False
for to_addr in self.targets[index:index + batch_size]:
# Strip target out of cc list if in To
cc = (cc - set([to_addr[1]]))
# Strip target out of bcc list if in To
bcc = (bcc - set([to_addr[1]]))
try:
# Prepare our to
to.append(formataddr(to_addr, charset='utf-8'))
except TypeError:
# Python v2.x Support (no charset keyword)
# Prepare our to (without the charset keyword)
to.append(formataddr(to_addr))
# Prepare our To
payload['to'] = ','.join(to)
if cc:
try:
# Format our cc addresses to support the Name field
payload['cc'] = ','.join([formataddr(
(self.names.get(addr, False), addr), charset='utf-8')
for addr in cc])
except TypeError:
# Python v2.x Support (no charset keyword)
# Format our cc addresses to support the Name field
payload['cc'] = ','.join([formataddr( # pragma: no branch
(self.names.get(addr, False), addr))
for addr in cc])
# Format our bcc addresses to support the Name field
if bcc:
payload['bcc'] = ','.join(bcc)
# Store our token entries; users can reference these as %value%
# in their email message.
if self.tokens:
payload.update(
{'v:{}'.format(k): v for k, v in self.tokens.items()})
# Store our header entries (if defined) into the payload
if self.headers:
payload.update(
{'h:{}'.format(k): v for k, v in self.headers.items()})
# Some Debug Logging
self.logger.debug('Mailgun POST URL: {} (cert_verify={})'.format(
url, self.verify_certificate))
self.logger.debug('Mailgun Payload: {}' .format(payload))
# For logging output of success and errors; we get a head count
# of our outbound details:
verbose_dest = ', '.join(
[x[1] for x in self.targets[index:index + batch_size]]) \
if len(self.targets[index:index + batch_size]) <= 3 \
else '{} recipients'.format(
len(self.targets[index:index + batch_size]))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
@@ -268,6 +526,7 @@ class NotifyMailgun(NotifyBase):
auth=("api", self.apikey),
data=payload,
headers=headers,
files=None if not files else files,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
@@ -276,12 +535,12 @@ class NotifyMailgun(NotifyBase):
# We had a problem
status_str = \
NotifyBase.http_response_code_lookup(
r.status_code, MAILGUN_API_LOOKUP)
r.status_code, MAILGUN_HTTP_ERROR_MAP)
self.logger.warning(
'Failed to send Mailgun notification to {}: '
'{}{}error={}.'.format(
email,
verbose_dest,
status_str,
', ' if status_str else '',
r.status_code))
@@ -295,12 +554,13 @@ class NotifyMailgun(NotifyBase):
else:
self.logger.info(
'Sent Mailgun notification to {}.'.format(email))
'Sent Mailgun notification to {}.'.format(
verbose_dest))
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Mailgun:%s ' % (
email) + 'notification.'
verbose_dest) + 'notification.'
)
self.logger.debug('Socket Exception: %s' % str(e))
@@ -308,6 +568,21 @@ class NotifyMailgun(NotifyBase):
has_error = True
continue
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while reading attachments')
self.logger.debug('I/O Exception: %s' % str(e))
# Mark our failure
has_error = True
continue
# Close any potential attachments that are still open
for entry in files.values():
self.logger.trace(
'Closing attachment {}'.format(entry[0]))
entry[1].close()
return not has_error
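# Sketch of the new attachment and batch handling (addresses and the file
# path are placeholders): Apprise hands each attachment to this plugin,
# which streams it as an attachment[n] form part, while batch=yes groups up
# to default_batch_size recipients into a single POST.
import apprise

apobj = apprise.Apprise()
apobj.add('mailgun://noreply@example.com/my-api-key/'
          'ops@example.com/dev@example.com/?batch=yes')
apobj.notify(body='Nightly report attached', title='Report',
             attach='/tmp/report.pdf')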
def url(self, privacy=False, *args, **kwargs):
@@ -318,8 +593,15 @@ class NotifyMailgun(NotifyBase):
# Define any URL parameters
params = {
'region': self.region_name,
'batch': 'yes' if self.batch else 'no',
}
# Append our headers into our parameters
params.update({'+{}'.format(k): v for k, v in self.headers.items()})
# Append our template tokens into our parameters
params.update({':{}'.format(k): v for k, v in self.tokens.items()})
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
@@ -327,13 +609,32 @@ class NotifyMailgun(NotifyBase):
# from_name specified; pass it back on the url
params['name'] = self.from_name
if self.cc:
# Handle our Carbon Copy Addresses
params['cc'] = ','.join(
['{}{}'.format(
'' if not self.names.get(e)
else '{}:'.format(self.names[e]), e) for e in self.cc])
if self.bcc:
# Handle our Blind Carbon Copy Addresses
params['bcc'] = ','.join(self.bcc)
# a simple boolean check as to whether we display our target emails
# or not
has_targets = \
not (len(self.targets) == 1
and self.targets[0][1] == self.from_addr)
return '{schema}://{user}@{host}/{apikey}/{targets}/?{params}'.format(
schema=self.secure_protocol,
host=self.host,
user=NotifyMailgun.quote(self.user, safe=''),
apikey=self.pprint(self.apikey, privacy, safe=''),
targets='/'.join(
[NotifyMailgun.quote(x, safe='') for x in self.targets]),
targets='' if not has_targets else '/'.join(
[NotifyMailgun.quote('{}{}'.format(
'' if not e[0] else '{}:'.format(e[0]), e[1]),
safe='') for e in self.targets]),
params=NotifyMailgun.urlencode(params))
@staticmethod
@@ -370,10 +671,30 @@ class NotifyMailgun(NotifyBase):
results['region_name'] = \
NotifyMailgun.unquote(results['qsd']['region'])
# Support the 'to' variable so that we can support targets this way too
# The 'to' makes it easier to use yaml configuration
# Handle 'to' email address
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyMailgun.parse_list(results['qsd']['to'])
results['targets'].append(results['qsd']['to'])
# Handle Carbon Copy Addresses
if 'cc' in results['qsd'] and len(results['qsd']['cc']):
results['cc'] = results['qsd']['cc']
# Handle Blind Carbon Copy Addresses
if 'bcc' in results['qsd'] and len(results['qsd']['bcc']):
results['bcc'] = results['qsd']['bcc']
# Add our Meta Headers that the user can provide with their outbound
# emails
results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y)
for x, y in results['qsd+'].items()}
# Add our template tokens (if defined)
results['tokens'] = {NotifyBase.unquote(x): NotifyBase.unquote(y)
for x, y in results['qsd:'].items()}
# Get Batch Mode Flag
results['batch'] = \
parse_bool(results['qsd'].get(
'batch', NotifyMailgun.template_args['batch']['default']))
return results

View file

@@ -30,6 +30,7 @@
import re
import six
import requests
from markdown import markdown
from json import dumps
from json import loads
from time import time
@@ -41,6 +42,7 @@ from ..common import NotifyImageSize
from ..common import NotifyFormat
from ..utils import parse_bool
from ..utils import parse_list
from ..utils import apply_template
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
@@ -65,6 +67,21 @@ IS_ROOM_ID = re.compile(
r'(?P<home_server>[a-z0-9.-]+))?\s*$', re.I)
class MatrixMessageType(object):
"""
The Matrix Message types
"""
TEXT = "text"
NOTICE = "notice"
# matrix message types are placed into this list for validation purposes
MATRIX_MESSAGE_TYPES = (
MatrixMessageType.TEXT,
MatrixMessageType.NOTICE,
)
class MatrixWebhookMode(object):
# Webhook Mode is disabled
DISABLED = "off"
@@ -79,7 +96,7 @@ class MatrixWebhookMode(object):
T2BOT = "t2bot"
# webhook modes are placed ito this list for validation purposes
# webhook modes are placed into this list for validation purposes
MATRIX_WEBHOOK_MODES = (
MatrixWebhookMode.DISABLED,
MatrixWebhookMode.MATRIX,
@@ -131,13 +148,13 @@ class NotifyMatrix(NotifyBase):
'{schema}://{token}',
'{schema}://{user}@{token}',
# All other non-t2bot setups require targets
# Disabled webhook
'{schema}://{user}:{password}@{host}/{targets}',
'{schema}://{user}:{password}@{host}:{port}/{targets}',
'{schema}://{token}:{password}@{host}/{targets}',
'{schema}://{token}:{password}@{host}:{port}/{targets}',
'{schema}://{user}:{token}:{password}@{host}/{targets}',
'{schema}://{user}:{token}:{password}@{host}:{port}/{targets}',
# Webhook mode
'{schema}://{user}:{token}@{host}/{targets}',
'{schema}://{user}:{token}@{host}:{port}/{targets}',
)
# Define our template tokens
@@ -204,12 +221,22 @@ class NotifyMatrix(NotifyBase):
'values': MATRIX_WEBHOOK_MODES,
'default': MatrixWebhookMode.DISABLED,
},
'msgtype': {
'name': _('Message Type'),
'type': 'choice:string',
'values': MATRIX_MESSAGE_TYPES,
'default': MatrixMessageType.TEXT,
},
'to': {
'alias_of': 'targets',
},
'token': {
'alias_of': 'token',
},
})
def __init__(self, targets=None, mode=None, include_image=False, **kwargs):
def __init__(self, targets=None, mode=None, msgtype=None,
include_image=False, **kwargs):
"""
Initialize Matrix Object
"""
@@ -235,20 +262,28 @@ class NotifyMatrix(NotifyBase):
self._room_cache = {}
# Setup our mode
self.mode = MatrixWebhookMode.DISABLED \
self.mode = self.template_args['mode']['default'] \
if not isinstance(mode, six.string_types) else mode.lower()
if self.mode and self.mode not in MATRIX_WEBHOOK_MODES:
msg = 'The mode specified ({}) is invalid.'.format(mode)
self.logger.warning(msg)
raise TypeError(msg)
# Setup our message type
self.msgtype = self.template_args['msgtype']['default'] \
if not isinstance(msgtype, six.string_types) else msgtype.lower()
if self.msgtype and self.msgtype not in MATRIX_MESSAGE_TYPES:
msg = 'The msgtype specified ({}) is invalid.'.format(msgtype)
self.logger.warning(msg)
raise TypeError(msg)
if self.mode == MatrixWebhookMode.T2BOT:
# t2bot configuration requires that a webhook id is specified
self.access_token = validate_regex(
self.host, r'^[a-z0-9]{64}$', 'i')
self.password, r'^[a-z0-9]{64}$', 'i')
if not self.access_token:
msg = 'An invalid T2Bot/Matrix Webhook ID ' \
'({}) was specified.'.format(self.host)
'({}) was specified.'.format(self.password)
self.logger.warning(msg)
raise TypeError(msg)
@@ -283,7 +318,7 @@ class NotifyMatrix(NotifyBase):
default_port = 443 if self.secure else 80
# Prepare our URL
url = '{schema}://{hostname}:{port}/{webhook_path}/{token}'.format(
url = '{schema}://{hostname}:{port}{webhook_path}/{token}'.format(
schema='https' if self.secure else 'http',
hostname=self.host,
port='' if self.port is None
@@ -412,20 +447,31 @@ class NotifyMatrix(NotifyBase):
payload = {
'displayName':
self.user if self.user else self.app_id,
'format': 'html',
'format': 'plain' if self.notify_format == NotifyFormat.TEXT
else 'html',
'text': '',
}
if self.notify_format == NotifyFormat.HTML:
payload['text'] = '{}{}'.format('' if not title else title, body)
# Add additional information to our content; use {{app_title}}
# to apply the title to the html body
tokens = {
'app_title': NotifyMatrix.escape_html(
title, whitespace=False),
}
payload['text'] = apply_template(body, **tokens)
else: # TEXT or MARKDOWN
elif self.notify_format == NotifyFormat.MARKDOWN:
# Add additional information to our content; use {{app_title}}
# to apply the title to the html body
tokens = {
'app_title': title,
}
payload['text'] = markdown(apply_template(body, **tokens))
# Ensure our content is escaped
title = NotifyMatrix.escape_html(title)
body = NotifyMatrix.escape_html(body)
payload['text'] = '{}{}'.format(
'' if not title else '<h4>{}</h4>'.format(title), body)
else: # NotifyFormat.TEXT
payload['text'] = \
body if not title else '{}\r\n{}'.format(title, body)
return payload
@@ -494,11 +540,6 @@ class NotifyMatrix(NotifyBase):
has_error = True
continue
# We have our data cached at this point we can freely use it
msg = '{title}{body}'.format(
title='' if not title else '{}\r\n'.format(title),
body=body)
# Acquire our image url if we're configured to do so
image_url = None if not self.include_image else \
self.image_url(notify_type)
@@ -523,10 +564,36 @@ class NotifyMatrix(NotifyBase):
# Define our payload
payload = {
'msgtype': 'm.text',
'body': msg,
'msgtype': 'm.{}'.format(self.msgtype),
'body': '{title}{body}'.format(
title='' if not title else '{}\r\n'.format(title),
body=body),
}
# Update our payload advance formatting for the services that
# support them.
if self.notify_format == NotifyFormat.HTML:
# Add additional information to our content; use {{app_title}}
# to apply the title to the html body
tokens = {
'app_title': NotifyMatrix.escape_html(
title, whitespace=False),
}
payload.update({
'format': 'org.matrix.custom.html',
'formatted_body': apply_template(body, **tokens),
})
elif self.notify_format == NotifyFormat.MARKDOWN:
tokens = {
'app_title': title,
}
payload.update({
'format': 'org.matrix.custom.html',
'formatted_body': markdown(apply_template(body, **tokens))
})
# Build our path
path = '/rooms/{}/send/m.room.message'.format(
NotifyMatrix.quote(room_id))
@@ -694,7 +761,7 @@ class NotifyMatrix(NotifyBase):
# Prepare our Join Payload
payload = {}
# Not in cache, next step is to check if it's a room id...
# Check if it's a room id...
result = IS_ROOM_ID.match(room)
if result:
# We detected ourselves the home_server
@@ -707,11 +774,23 @@ class NotifyMatrix(NotifyBase):
home_server,
)
# Check our cache for speed:
if room_id in self._room_cache:
# We're done as we've already joined the channel
return self._room_cache[room_id]['id']
# Build our URL
path = '/join/{}'.format(NotifyMatrix.quote(room_id))
# Make our query
postokay, _ = self._fetch(path, payload=payload)
if postokay:
# Cache our entry for fast access later
self._room_cache[room_id] = {
'id': room_id,
'home_server': home_server,
}
return room_id if postokay else None
# Try to see if it's an alias then...
@@ -1003,9 +1082,54 @@ class NotifyMatrix(NotifyBase):
"""
Ensure we relinquish our token
"""
if self.mode != MatrixWebhookMode.T2BOT:
if self.mode == MatrixWebhookMode.T2BOT:
# nothing to do
return
try:
self._logout()
except LookupError: # pragma: no cover
# Python v3.5 call to requests can sometimes throw the exception
# "/usr/lib64/python3.7/socket.py", line 748, in getaddrinfo
# LookupError: unknown encoding: idna
#
# This occurs every time when running unit-tests against Apprise:
# LANG=C.UTF-8 PYTHONPATH=$(pwd) py.test-3.7
#
# There has been an open issue on this since Jan 2017.
# - https://bugs.python.org/issue29288
#
# A ~similar~ issue can be identified here in the requests
# ticket system as unresolved and has provided work-arounds
# - https://github.com/kennethreitz/requests/issues/3578
pass
except ImportError: # pragma: no cover
# The actual exception is `ModuleNotFoundError` however ImportError
# grants us backwards compatibility with versions of Python older
# than v3.6
# Python code that makes early calls to sys.exit() can cause
# the __del__() code to run. However in some newer versions of
# Python, this causes the `sys` library to no longer be
# available. The stack overflow also goes on to suggest that
# it's not wise to use the __del__() as a deconstructor
# which is the case here.
# https://stackoverflow.com/questions/67218341/\
# modulenotfounderror-import-of-time-halted-none-in-sys-\
# modules-occured-when-obj?noredirect=1&lq=1
#
#
# Also see: https://stackoverflow.com/questions\
# /1481488/what-is-the-del-method-and-how-do-i-call-it
# At this time it seems clean to try to log out (if we can)
# but not throw any unnecessary exceptions (like this one) to
# the end user if we don't have to.
pass
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
@@ -1015,31 +1139,36 @@ class NotifyMatrix(NotifyBase):
params = {
'image': 'yes' if self.include_image else 'no',
'mode': self.mode,
'msgtype': self.msgtype,
}
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
# Determine Authentication
auth = ''
if self.user and self.password:
auth = '{user}:{password}@'.format(
user=NotifyMatrix.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
)
if self.mode != MatrixWebhookMode.T2BOT:
# Determine Authentication
if self.user and self.password:
auth = '{user}:{password}@'.format(
user=NotifyMatrix.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret,
safe=''),
)
elif self.user:
auth = '{user}@'.format(
user=NotifyMatrix.quote(self.user, safe=''),
)
elif self.user:
auth = '{user}@'.format(
user=NotifyMatrix.quote(self.user, safe=''),
)
default_port = 443 if self.secure else 80
return '{schema}://{auth}{hostname}{port}/{rooms}?{params}'.format(
schema=self.secure_protocol if self.secure else self.protocol,
auth=auth,
hostname=NotifyMatrix.quote(self.host, safe=''),
hostname=NotifyMatrix.quote(self.host, safe='')
if self.mode != MatrixWebhookMode.T2BOT
else self.pprint(self.access_token, privacy, safe=''),
port='' if self.port is None
or self.port == default_port else ':{}'.format(self.port),
rooms=NotifyMatrix.quote('/'.join(self.rooms)),
@@ -1086,6 +1215,20 @@ class NotifyMatrix(NotifyBase):
# Default mode to t2bot
results['mode'] = MatrixWebhookMode.T2BOT
if results['mode'] and \
results['mode'].lower() == MatrixWebhookMode.T2BOT:
# unquote our hostname and pass it in as the password/token
results['password'] = NotifyMatrix.unquote(results['host'])
# Support the message type keyword
if 'msgtype' in results['qsd'] and len(results['qsd']['msgtype']):
results['msgtype'] = \
NotifyMatrix.unquote(results['qsd']['msgtype'])
# Support the use of the token= keyword
if 'token' in results['qsd'] and len(results['qsd']['token']):
results['password'] = NotifyMatrix.unquote(results['qsd']['token'])
return results
@staticmethod

View file

@@ -23,6 +23,16 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Create an incoming webhook; the website will provide you with something like:
# http://localhost:8065/hooks/yobjmukpaw3r3urc5h6i369yima
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^
# |-- this is the webhook --|
#
# You can effectively turn the url above to read this:
# mmost://localhost:8065/yobjmukpaw3r3urc5h6i369yima
# - swap http with mmost
# - drop /hooks/ reference
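A small sketch of that transformation, built around the sample webhook from the comment above; the helper below is illustrative only and not part of Apprise (it assumes the secure variant uses the mmosts scheme):

from urllib.parse import urlparse

def mattermost_to_apprise(webhook_url):
    # e.g. http://localhost:8065/hooks/yobjmukpaw3r3urc5h6i369yima
    parts = urlparse(webhook_url)
    # keep only the trailing token, dropping the /hooks/ prefix
    token = parts.path.rstrip('/').rsplit('/', 1)[-1]
    # swap http(s) with mmost(s)
    schema = 'mmosts' if parts.scheme == 'https' else 'mmost'
    return '{}://{}/{}'.format(schema, parts.netloc, token)

print(mattermost_to_apprise('http://localhost:8065/hooks/yobjmukpaw3r3urc5h6i369yima'))
# -> mmost://localhost:8065/yobjmukpaw3r3urc5h6i369yima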
import six
import requests
from json import dumps
@@ -40,13 +50,13 @@ from ..AppriseLocale import gettext_lazy as _
# - https://docs.mattermost.com/administration/config-settings.html
class NotifyMatterMost(NotifyBase):
class NotifyMattermost(NotifyBase):
"""
A wrapper for MatterMost Notifications
A wrapper for Mattermost Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'MatterMost'
service_name = 'Mattermost'
# The services URL
service_url = 'https://mattermost.com/'
@@ -74,14 +84,14 @@ class NotifyMatterMost(NotifyBase):
# Define object templates
templates = (
'{schema}://{host}/{authtoken}',
'{schema}://{host}/{authtoken}:{port}',
'{schema}://{botname}@{host}/{authtoken}',
'{schema}://{botname}@{host}:{port}/{authtoken}',
'{schema}://{host}/{fullpath}/{authtoken}',
'{schema}://{host}/{fullpath}{authtoken}:{port}',
'{schema}://{botname}@{host}/{fullpath}/{authtoken}',
'{schema}://{botname}@{host}:{port}/{fullpath}/{authtoken}',
'{schema}://{host}/{token}',
'{schema}://{host}/{token}:{port}',
'{schema}://{botname}@{host}/{token}',
'{schema}://{botname}@{host}:{port}/{token}',
'{schema}://{host}/{fullpath}/{token}',
'{schema}://{host}/{fullpath}{token}:{port}',
'{schema}://{botname}@{host}/{fullpath}/{token}',
'{schema}://{botname}@{host}:{port}/{fullpath}/{token}',
)
# Define our template tokens
@@ -91,10 +101,9 @@ class NotifyMatterMost(NotifyBase):
'type': 'string',
'required': True,
},
'authtoken': {
'name': _('Access Key'),
'token': {
'name': _('Webhook Token'),
'type': 'string',
'regex': (r'^[a-z0-9]{24,32}$', 'i'),
'private': True,
'required': True,
},
@@ -132,12 +141,12 @@ class NotifyMatterMost(NotifyBase):
},
})
def __init__(self, authtoken, fullpath=None, channels=None,
def __init__(self, token, fullpath=None, channels=None,
include_image=False, **kwargs):
"""
Initialize MatterMost Object
Initialize Mattermost Object
"""
super(NotifyMatterMost, self).__init__(**kwargs)
super(NotifyMattermost, self).__init__(**kwargs)
if self.secure:
self.schema = 'https'
@@ -150,16 +159,15 @@ class NotifyMatterMost(NotifyBase):
fullpath, six.string_types) else fullpath.strip()
# Authorization Token (associated with project)
self.authtoken = validate_regex(
authtoken, *self.template_tokens['authtoken']['regex'])
if not self.authtoken:
msg = 'An invalid MatterMost Authorization Token ' \
'({}) was specified.'.format(authtoken)
self.token = validate_regex(token)
if not self.token:
msg = 'An invalid Mattermost Authorization Token ' \
'({}) was specified.'.format(token)
self.logger.warning(msg)
raise TypeError(msg)
# Optional Channels
self.channels = parse_list(channels)
# Optional Channels (strip off any channel prefix entries if present)
self.channels = [x.lstrip('#') for x in parse_list(channels)]
if not self.port:
self.port = self.default_port
@@ -171,7 +179,7 @@ class NotifyMatterMost(NotifyBase):
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform MatterMost Notification
Perform Mattermost Notification
"""
# Create a copy of our channels, otherwise place a dummy entry
@@ -211,12 +219,12 @@ class NotifyMatterMost(NotifyBase):
url = '{}://{}:{}{}/hooks/{}'.format(
self.schema, self.host, self.port, self.fullpath,
self.authtoken)
self.token)
self.logger.debug('MatterMost POST URL: %s (cert_verify=%r)' % (
self.logger.debug('Mattermost POST URL: %s (cert_verify=%r)' % (
url, self.verify_certificate,
))
self.logger.debug('MatterMost Payload: %s' % str(payload))
self.logger.debug('Mattermost Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
@@ -233,11 +241,11 @@ class NotifyMatterMost(NotifyBase):
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyMatterMost.http_response_code_lookup(
NotifyMattermost.http_response_code_lookup(
r.status_code)
self.logger.warning(
'Failed to send MatterMost notification{}: '
'Failed to send Mattermost notification{}: '
'{}{}error={}.'.format(
'' if not channel
else ' to channel {}'.format(channel),
@@ -254,13 +262,13 @@ class NotifyMatterMost(NotifyBase):
else:
self.logger.info(
'Sent MatterMost notification{}.'.format(
'Sent Mattermost notification{}.'.format(
'' if not channel
else ' to channel {}'.format(channel)))
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending MatterMost '
'A Connection error occurred sending Mattermost '
'notification{}.'.format(
'' if not channel
else ' to channel {}'.format(channel)))
@@ -290,7 +298,8 @@ class NotifyMatterMost(NotifyBase):
# historically the value only accepted one channel and is
# therefore identified as 'channel'. Channels have always been
# optional, so that is why this setting is nested in an if block
params['channel'] = ','.join(self.channels)
params['channel'] = ','.join(
[NotifyMattermost.quote(x, safe='') for x in self.channels])
default_port = 443 if self.secure else self.default_port
default_schema = self.secure_protocol if self.secure else self.protocol
@@ -299,11 +308,11 @@ class NotifyMatterMost(NotifyBase):
botname = ''
if self.user:
botname = '{botname}@'.format(
botname=NotifyMatterMost.quote(self.user, safe=''),
botname=NotifyMattermost.quote(self.user, safe=''),
)
return \
'{schema}://{botname}{hostname}{port}{fullpath}{authtoken}' \
'{schema}://{botname}{hostname}{port}{fullpath}{token}' \
'/?{params}'.format(
schema=default_schema,
botname=botname,
@@ -313,9 +322,9 @@ class NotifyMatterMost(NotifyBase):
port='' if not self.port or self.port == default_port
else ':{}'.format(self.port),
fullpath='/' if not self.fullpath else '{}/'.format(
NotifyMatterMost.quote(self.fullpath, safe='/')),
authtoken=self.pprint(self.authtoken, privacy, safe=''),
params=NotifyMatterMost.urlencode(params),
NotifyMattermost.quote(self.fullpath, safe='/')),
token=self.pprint(self.token, privacy, safe=''),
params=NotifyMattermost.urlencode(params),
)
@staticmethod
@@ -330,11 +339,11 @@ class NotifyMatterMost(NotifyBase):
# We're done early as we couldn't load the results
return results
# Acquire our tokens; the last one will always be our authtoken
# Acquire our tokens; the last one will always be our token
# all entries before it will be our path
tokens = NotifyMatterMost.split_path(results['fullpath'])
tokens = NotifyMattermost.split_path(results['fullpath'])
results['authtoken'] = None if not tokens else tokens.pop()
results['token'] = None if not tokens else tokens.pop()
# Store our path
results['fullpath'] = '' if not tokens \
@@ -347,12 +356,12 @@ class NotifyMatterMost(NotifyBase):
if 'to' in results['qsd'] and len(results['qsd']['to']):
# Allow the user to specify the channel to post to
results['channels'].append(
NotifyMatterMost.parse_list(results['qsd']['to']))
NotifyMattermost.parse_list(results['qsd']['to']))
if 'channel' in results['qsd'] and len(results['qsd']['channel']):
# Allow the user to specify the channel to post to
results['channels'].append(
NotifyMatterMost.parse_list(results['qsd']['channel']))
NotifyMattermost.parse_list(results['qsd']['channel']))
# Image manipulation
results['include_image'] = \

View file

@@ -29,18 +29,15 @@
# - https://dashboard.messagebird.com/en/user/index
#
import re
import requests
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class NotifyMessageBird(NotifyBase):
"""
@@ -129,28 +126,20 @@ class NotifyMessageBird(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
result = IS_PHONE_NO.match(source)
result = is_phone_no(source)
if not result:
msg = 'The MessageBird source specified ({}) is invalid.'\
.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Further check our phone # for it's digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
msg = 'The MessageBird source # specified ({}) is invalid.'\
.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Store our source
self.source = result
self.source = result['full']
# Parse our targets
self.targets = list()
targets = parse_list(targets)
targets = parse_phone_no(targets)
if not targets:
# No sources specified, use our own phone no
self.targets.append(self.source)
@@ -159,31 +148,16 @@ class NotifyMessageBird(NotifyBase):
# otherwise, store all of our target numbers
for target in targets:
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
if not self.targets:
# We have a bot token and no target(s) to message
msg = 'No MessageBird targets to notify.'
self.logger.warning(msg)
raise TypeError(msg)
# store valid phone number
self.targets.append(result['full'])
return
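This plugin (and NotifyNexmo further below) now leans on the shared phone-number helpers instead of a local IS_PHONE_NO regex. A rough sketch of how the helper appears to behave, judging from the assignments above: a falsy result for bad input, otherwise a dictionary whose 'full' key carries the normalized number. The import path mirrors the relative import above and the sample values are placeholders:

from apprise.utils import is_phone_no

for candidate in ('+1 (555) 123-4567', 'not-a-number'):
    result = is_phone_no(candidate)
    if not result:
        # mirrors the "Dropped invalid phone #" branch above
        print('dropped:', candidate)
        continue
    print('keeping:', result['full'])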
@@ -192,6 +166,11 @@
Perform MessageBird Notification
"""
if len(self.targets) == 0:
# There were no services to notify
self.logger.warning('There were no MessageBird targets to notify.')
return False
# error tracking (used for function return)
has_error = False
@@ -345,6 +324,7 @@ class NotifyMessageBird(NotifyBase):
try:
# The first path entry is the source/originator
results['source'] = results['targets'].pop(0)
except IndexError:
# No path specified... this URL is potentially un-parseable; we can
# hope for a from= entry
@@ -357,7 +337,7 @@ class NotifyMessageBird(NotifyBase):
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyMessageBird.parse_list(results['qsd']['to'])
NotifyMessageBird.parse_phone_no(results['qsd']['to'])
if 'from' in results['qsd'] and len(results['qsd']['from']):
results['source'] = \

View file

@@ -28,20 +28,16 @@
# Get your (api) key and secret here:
# - https://dashboard.nexmo.com/getting-started-guide
#
import re
import requests
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class NotifyNexmo(NotifyBase):
"""
@@ -185,44 +181,31 @@ class NotifyNexmo(NotifyBase):
# The Source Phone #
self.source = source
if not IS_PHONE_NO.match(self.source):
result = is_phone_no(source)
if not result:
msg = 'The Account (From) Phone # specified ' \
'({}) is invalid.'.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Tidy source
self.source = re.sub(r'[^\d]+', '', self.source)
if len(self.source) < 11 or len(self.source) > 14:
msg = 'The Account (From) Phone # specified ' \
'({}) contains an invalid digit count.'.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Store our parsed value
self.source = result['full']
# Parse our targets
self.targets = list()
for target in parse_list(targets):
for target in parse_phone_no(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
# store valid phone number
self.targets.append(result['full'])
return
@@ -393,10 +376,10 @@ class NotifyNexmo(NotifyBase):
results['ttl'] = \
NotifyNexmo.unquote(results['qsd']['ttl'])
# Support the 'to' variable so that we can support targets this way too
# Support the 'to' variable so that we can support rooms this way too
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyNexmo.parse_list(results['qsd']['to'])
NotifyNexmo.parse_phone_no(results['qsd']['to'])
return results

View file

@@ -51,11 +51,7 @@ class NotifyNextcloud(NotifyBase):
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_nextcloud'
# Nextcloud URL
notify_url = '{schema}://{host}/ocs/v2.php/apps/admin_notifications/' \
'api/v1/notifications/{target}'
# Nextcloud does not support a title
# Nextcloud title length
title_maxlen = 255
# Defines the maximum allowable characters per message.
@@ -101,6 +97,22 @@ class NotifyNextcloud(NotifyBase):
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
# Nextcloud uses different API end points depending on the version
# being used however the (API) payload remains the same. Allow users
# to specify the version they are using:
'version': {
'name': _('Version'),
'type': 'int',
'min': 1,
'default': 21,
},
'to': {
'alias_of': 'targets',
},
})
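As the hunk further below shows, this version value selects between the legacy admin_notifications endpoint and the newer v2 endpoint. A rough sketch of that selection; the hostname and target are placeholders:

def nextcloud_notify_url(version, host='cloud.example.com', target='admin'):
    # mirrors the endpoint selection performed further below
    if version < 21:
        path = '/ocs/v2.php/apps/admin_notifications/api/v1/notifications/{}'
    else:
        path = '/ocs/v2.php/apps/notifications/api/v2/admin_notifications/{}'
    return 'https://{}{}'.format(host, path.format(target))

print(nextcloud_notify_url(20))  # pre-21 servers
print(nextcloud_notify_url(21))  # default (21 and later)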
# Define any kwargs we're using
template_kwargs = {
'headers': {
@@ -109,7 +121,7 @@ class NotifyNextcloud(NotifyBase):
},
}
def __init__(self, targets=None, headers=None, **kwargs):
def __init__(self, targets=None, version=None, headers=None, **kwargs):
"""
Initialize Nextcloud Object
"""
@@ -121,6 +133,20 @@ class NotifyNextcloud(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
self.version = self.template_args['version']['default']
if version is not None:
try:
self.version = int(version)
if self.version < self.template_args['version']['min']:
# Let upper exception handle this
raise ValueError()
except (ValueError, TypeError):
msg = 'An invalid Nextcloud version ({}) was specified.'\
.format(version)
self.logger.warning(msg)
raise TypeError(msg)
self.headers = {}
if headers:
# Store our extra headers
@@ -163,17 +189,28 @@ class NotifyNextcloud(NotifyBase):
if self.user:
auth = (self.user, self.password)
notify_url = self.notify_url.format(
# Nextcloud URL based on version used
notify_url = '{schema}://{host}/ocs/v2.php/'\
'apps/admin_notifications/' \
'api/v1/notifications/{target}' \
if self.version < 21 else \
'{schema}://{host}/ocs/v2.php/'\
'apps/notifications/'\
'api/v2/admin_notifications/{target}'
notify_url = notify_url.format(
schema='https' if self.secure else 'http',
host=self.host if not isinstance(self.port, int)
else '{}:{}'.format(self.host, self.port),
target=target,
)
self.logger.debug('Nextcloud POST URL: %s (cert_verify=%r)' % (
notify_url, self.verify_certificate,
))
self.logger.debug('Nextcloud Payload: %s' % str(payload))
self.logger.debug(
'Nextcloud v%d POST URL: %s (cert_verify=%r)',
self.version, notify_url, self.verify_certificate)
self.logger.debug(
'Nextcloud v%d Payload: %s',
self.version, str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
@@ -194,8 +231,9 @@ class NotifyNextcloud(NotifyBase):
r.status_code)
self.logger.warning(
'Failed to send Nextcloud notification:'
'Failed to send Nextcloud v{} notification:'
'{}{}error={}.'.format(
self.version,
status_str,
', ' if status_str else '',
r.status_code))
@@ -207,13 +245,13 @@ class NotifyNextcloud(NotifyBase):
continue
else:
self.logger.info('Sent Nextcloud notification.')
self.logger.info(
'Sent Nextcloud v%d notification.', self.version)
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Nextcloud '
'notification.',
)
'A Connection error occurred sending Nextcloud v%d '
'notification.', self.version)
self.logger.debug('Socket Exception: %s' % str(e))
# track our failure
@@ -230,8 +268,11 @@ class NotifyNextcloud(NotifyBase):
# Create URL parameters from our headers
params = {'+{}'.format(k): v for k, v in self.headers.items()}
# Our URL parameters
params = self.url_parameters(privacy=privacy, *args, **kwargs)
# Set our version
params['version'] = str(self.version)
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
# Determine Authentication
auth = ''
@@ -285,9 +326,18 @@ class NotifyNextcloud(NotifyBase):
results['targets'] += \
NotifyNextcloud.parse_list(results['qsd']['to'])
# Allow users to over-ride the Nextcloud version being used
if 'version' in results['qsd'] and len(results['qsd']['version']):
results['version'] = \
NotifyNextcloud.unquote(results['qsd']['version'])
# Add our headers that the user can potentially over-ride if they
# wish to to our returned result set
results['headers'] = results['qsd-']
results['headers'].update(results['qsd+'])
results['headers'] = results['qsd+']
if results['qsd-']:
results['headers'].update(results['qsd-'])
NotifyBase.logger.deprecate(
"minus (-) based Nextcloud header tokens are being "
" removed; use the plus (+) symbol instead.")
return results

View file

@@ -365,8 +365,12 @@ class NotifyNotica(NotifyBase):
# Add our headers that the user can potentially over-ride if they
# wish to to our returned result set
results['headers'] = results['qsd-']
results['headers'].update(results['qsd+'])
results['headers'] = results['qsd+']
if results['qsd-']:
results['headers'].update(results['qsd-'])
NotifyBase.logger.deprecate(
"minus (-) based Notica header tokens are being "
" removed; use the plus (+) symbol instead.")
return results

View file

@@ -0,0 +1,495 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# One Signal requires that you've signed up with the service and
# generated yourself an API Key and APP ID.
# Sources:
# - https://documentation.onesignal.com/docs/accounts-and-keys
# - https://documentation.onesignal.com/reference/create-notification
import requests
from json import dumps
from itertools import chain
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..common import NotifyImageSize
from ..utils import validate_regex
from ..utils import parse_list
from ..utils import parse_bool
from ..utils import is_email
from ..AppriseLocale import gettext_lazy as _
class OneSignalCategory(NotifyBase):
"""
We define the different category types that we can notify via OneSignal
"""
PLAYER = 'include_player_ids'
EMAIL = 'include_email_tokens'
USER = 'include_external_user_ids'
SEGMENT = 'included_segments'
ONESIGNAL_CATEGORIES = (
OneSignalCategory.PLAYER,
OneSignalCategory.EMAIL,
OneSignalCategory.USER,
OneSignalCategory.SEGMENT,
)
class NotifyOneSignal(NotifyBase):
"""
A wrapper for OneSignal Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'OneSignal'
# The services URL
service_url = 'https://onesignal.com'
# The default protocol
secure_protocol = 'onesignal'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_onesignal'
# Notification
notify_url = "https://onesignal.com/api/v1/notifications"
# Allows the user to specify the NotifyImageSize object
image_size = NotifyImageSize.XY_72
# The maximum allowable batch sizes per message
maximum_batch_size = 2000
# Define object templates
templates = (
'{schema}://{app}@{apikey}/{targets}',
'{schema}://{template}:{app}@{apikey}/{targets}',
)
# Define our template
template_tokens = dict(NotifyBase.template_tokens, **{
# The App_ID is a UUID
# such as: 8250eaf6-1a58-489e-b136-7c74a864b434
'app': {
'name': _('App ID'),
'type': 'string',
'private': True,
'required': True,
},
'template': {
'name': _('Template'),
'type': 'string',
'private': True,
},
'apikey': {
'name': _('API Key'),
'type': 'string',
'private': True,
'required': True,
},
'target_device': {
'name': _('Target Player ID'),
'type': 'string',
'map_to': 'targets',
},
'target_email': {
'name': _('Target Email'),
'type': 'string',
'map_to': 'targets',
},
'target_user': {
'name': _('Target User'),
'type': 'string',
'prefix': '@',
'map_to': 'targets',
},
'target_segment': {
'name': _('Include Segment'),
'type': 'string',
'prefix': '#',
'map_to': 'targets',
},
'targets': {
'name': _('Targets'),
'type': 'list:string',
},
})
template_args = dict(NotifyBase.template_args, **{
'to': {
'alias_of': 'targets',
},
'image': {
'name': _('Include Image'),
'type': 'bool',
'default': True,
'map_to': 'include_image',
},
'batch': {
'name': _('Batch Mode'),
'type': 'bool',
'default': False,
},
'template': {
'alias_of': 'template',
},
'subtitle': {
'name': _('Subtitle'),
'type': 'string',
},
'language': {
'name': _('Language'),
'type': 'string',
'default': 'en',
},
})
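Putting the tokens and arguments above together, a hedged usage sketch; the application id, API key and targets below are placeholders:

import apprise

apobj = apprise.Apprise()

# The App ID sits in the user position and the API key in the host
# position. Targets may be player ids, e-mail addresses,
# @external-user-ids or #segments, per the target_* tokens above;
# batch=yes groups them into larger requests.
apobj.add('onesignal://8250eaf6-1a58-489e-b136-7c74a864b434@YOUR_API_KEY/'
          '@external-user-id/#Subscribed-Users?batch=yes')

apobj.notify(title='Deploy complete', body='Version 1.2.3 is now live.')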
def __init__(self, app, apikey, targets=None, include_image=True,
template=None, subtitle=None, language=None, batch=False,
**kwargs):
"""
Initialize OneSignal
"""
super(NotifyOneSignal, self).__init__(**kwargs)
# The apikey associated with the account
self.apikey = validate_regex(apikey)
if not self.apikey:
msg = 'An invalid OneSignal API key ' \
'({}) was specified.'.format(apikey)
self.logger.warning(msg)
raise TypeError(msg)
# The App ID associated with the account
self.app = validate_regex(app)
if not self.app:
msg = 'An invalid OneSignal Application ID ' \
'({}) was specified.'.format(app)
self.logger.warning(msg)
raise TypeError(msg)
# Prepare Batch Mode Flag
self.batch_size = self.maximum_batch_size if batch else 1
# Place a thumbnail image inline with the message body
self.include_image = include_image
# Our Assorted Types of Targets
self.targets = {
OneSignalCategory.PLAYER: [],
OneSignalCategory.EMAIL: [],
OneSignalCategory.USER: [],
OneSignalCategory.SEGMENT: [],
}
# Assign our template (if defined)
self.template_id = template
# Assign our subtitle (if defined)
self.subtitle = subtitle
# Our Language
self.language = language.strip().lower()[0:2]\
if language \
else NotifyOneSignal.template_args['language']['default']
if not self.language or len(self.language) != 2:
msg = 'An invalid OneSignal Language ({}) was specified.'.format(
language)
self.logger.warning(msg)
raise TypeError(msg)
# Sort our targets
for _target in parse_list(targets):
target = _target.strip()
if len(target) < 2:
self.logger.debug('Ignoring OneSignal Entry: %s' % target)
continue
if target.startswith(
NotifyOneSignal.template_tokens
['target_user']['prefix']):
self.targets[OneSignalCategory.USER].append(target)
self.logger.debug(
'Detected OneSignal UserID: %s' %
self.targets[OneSignalCategory.USER][-1])
continue
if target.startswith(
NotifyOneSignal.template_tokens
['target_segment']['prefix']):
self.targets[OneSignalCategory.SEGMENT].append(target)
self.logger.debug(
'Detected OneSignal Include Segment: %s' %
self.targets[OneSignalCategory.SEGMENT][-1])
continue
result = is_email(target)
if result:
self.targets[OneSignalCategory.EMAIL]\
.append(result['full_email'])
self.logger.debug(
'Detected OneSignal Email: %s' %
self.targets[OneSignalCategory.EMAIL][-1])
else:
# Add element as Player ID
self.targets[OneSignalCategory.PLAYER].append(target)
self.logger.debug(
'Detected OneSignal Player ID: %s' %
self.targets[OneSignalCategory.PLAYER][-1])
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform OneSignal Notification
"""
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json; charset=utf-8',
"Authorization": "Basic {}".format(self.apikey),
}
has_error = False
sent_count = 0
payload = {
'app_id': self.app,
'headings': {
self.language: title if title else self.app_desc,
},
'contents': {
self.language: body,
},
# Sending true wakes your app from background to run custom native
# code (Apple interprets this as content-available=1).
# Note: Not applicable if the app is in the "force-quit" state
# (i.e. app was swiped away). Omit the contents field to
# prevent displaying a visible notification.
'content_available': True,
}
if self.subtitle:
payload.update({
'subtitle': {
self.language: self.subtitle,
},
})
if self.template_id:
payload['template_id'] = self.template_id
# Acquire our large_icon image URL (if set)
image_url = None if not self.include_image \
else self.image_url(notify_type)
if image_url:
payload['large_icon'] = image_url
# Acquire our small_icon image URL (if set)
image_url = None if not self.include_image \
else self.image_url(notify_type, image_size=NotifyImageSize.XY_32)
if image_url:
payload['small_icon'] = image_url
for category in ONESIGNAL_CATEGORIES:
# Create a pointer to our list of targets for specified category
targets = self.targets[category]
for index in range(0, len(targets), self.batch_size):
payload[category] = targets[index:index + self.batch_size]
# Track our sent count
sent_count += len(payload[category])
self.logger.debug('OneSignal POST URL: %s (cert_verify=%r)' % (
self.notify_url, self.verify_certificate,
))
self.logger.debug('OneSignal Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
self.notify_url,
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code not in (
requests.codes.ok, requests.codes.no_content):
# We had a problem
status_str = \
NotifyOneSignal.http_response_code_lookup(
r.status_code)
self.logger.warning(
'Failed to send OneSignal notification: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n%s', r.content)
has_error = True
else:
self.logger.info('Sent OneSignal notification.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending OneSignal '
'notification.'
)
self.logger.debug('Socket Exception: %s', str(e))
has_error = True
if not sent_count:
# There is no one to notify; we need to capture this and not
# return a valid result
self.logger.warning('There are no OneSignal targets to notify')
return False
return not has_error
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'image': 'yes' if self.include_image else 'no',
'batch': 'yes' if self.batch_size > 1 else 'no',
}
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
return '{schema}://{tp_id}{app}@{apikey}/{targets}?{params}'.format(
schema=self.secure_protocol,
tp_id='{}:'.format(
self.pprint(self.template_id, privacy, safe=''))
if self.template_id else '',
app=self.pprint(self.app, privacy, safe=''),
apikey=self.pprint(self.apikey, privacy, safe=''),
targets='/'.join(chain(
[NotifyOneSignal.quote(x)
for x in self.targets[OneSignalCategory.PLAYER]],
[NotifyOneSignal.quote(x)
for x in self.targets[OneSignalCategory.EMAIL]],
[NotifyOneSignal.quote('{}{}'.format(
NotifyOneSignal.template_tokens
['target_user']['prefix'], x), safe='')
for x in self.targets[OneSignalCategory.USER]],
[NotifyOneSignal.quote('{}{}'.format(
NotifyOneSignal.template_tokens
['target_segment']['prefix'], x), safe='')
for x in self.targets[OneSignalCategory.SEGMENT]])),
params=NotifyOneSignal.urlencode(params),
)
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
if not results.get('password'):
# The APP ID identifier associated with the account
results['app'] = NotifyOneSignal.unquote(results['user'])
else:
# The APP ID identifier associated with the account
results['app'] = NotifyOneSignal.unquote(results['password'])
# The Template ID
results['template'] = NotifyOneSignal.unquote(results['user'])
# Get Image Boolean (if set)
results['include_image'] = \
parse_bool(
results['qsd'].get(
'image',
NotifyOneSignal.template_args['image']['default']))
# Get Batch Boolean (if set)
results['batch'] = \
parse_bool(
results['qsd'].get(
'batch',
NotifyOneSignal.template_args['batch']['default']))
# The API Key is stored in the hostname
results['apikey'] = NotifyOneSignal.unquote(results['host'])
# Get our Targets
results['targets'] = NotifyOneSignal.split_path(results['fullpath'])
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyOneSignal.parse_list(results['qsd']['to'])
if 'app' in results['qsd'] and len(results['qsd']['app']):
results['app'] = \
NotifyOneSignal.unquote(results['qsd']['app'])
if 'apikey' in results['qsd'] and len(results['qsd']['apikey']):
results['apikey'] = \
NotifyOneSignal.unquote(results['qsd']['apikey'])
if 'template' in results['qsd'] and len(results['qsd']['template']):
results['template'] = \
NotifyOneSignal.unquote(results['qsd']['template'])
if 'subtitle' in results['qsd'] and len(results['qsd']['subtitle']):
results['subtitle'] = \
NotifyOneSignal.unquote(results['qsd']['subtitle'])
if 'lang' in results['qsd'] and len(results['qsd']['lang']):
results['language'] = \
NotifyOneSignal.unquote(results['qsd']['lang'])
return results

View file

@@ -0,0 +1,601 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Signup @ https://www.opsgenie.com
#
# Generate your Integration API Key
# https://app.opsgenie.com/settings/integration/add/API/
# Knowing this, you can build your Opsgenie URL as follows:
# opsgenie://{apikey}/
# opsgenie://{apikey}/@{user}
# opsgenie://{apikey}/*{schedule}
# opsgenie://{apikey}/^{escalation}
# opsgenie://{apikey}/#{team}
#
# You can mix and match what you want to notify freely
# opsgenie://{apikey}/@{user}/#{team}/*{schedule}/^{escalation}
#
# If no target prefix is specified, then it is assumed to be a user.
#
# API Documentation: https://docs.opsgenie.com/docs/alert-api
# API Integration Docs: https://docs.opsgenie.com/docs/api-integration
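Building on the URL forms above, a hedged usage sketch; the API key and targets are placeholders, and the priority, region and tags arguments correspond to the parsing handled further below:

import apprise

apobj = apprise.Apprise()

# @user and #team follow the prefixes documented above; priority accepts
# p1..p5 (or low/moderate/normal/high/emergency) and region accepts us
# (the default) or eu.
apobj.add('opsgenie://YOUR_API_KEY/@oncall-user/#platform-team'
          '?priority=p5&region=eu&tags=deploy,urgent')

apobj.notify(title='Checkout service degraded', body='5xx rate is above threshold.')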
import requests
from json import dumps
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import validate_regex
from ..utils import is_uuid
from ..utils import parse_list
from ..utils import parse_bool
from ..AppriseLocale import gettext_lazy as _
class OpsgenieCategory(NotifyBase):
"""
We define the different category types that we can notify
"""
USER = 'user'
SCHEDULE = 'schedule'
ESCALATION = 'escalation'
TEAM = 'team'
OPSGENIE_CATEGORIES = (
OpsgenieCategory.USER,
OpsgenieCategory.SCHEDULE,
OpsgenieCategory.ESCALATION,
OpsgenieCategory.TEAM,
)
# Regions
class OpsgenieRegion(object):
US = 'us'
EU = 'eu'
# Opsgenie APIs
OPSGENIE_API_LOOKUP = {
OpsgenieRegion.US: 'https://api.opsgenie.com/v2/alerts',
OpsgenieRegion.EU: 'https://api.eu.opsgenie.com/v2/alerts',
}
# A List of our regions we can use for verification
OPSGENIE_REGIONS = (
OpsgenieRegion.US,
OpsgenieRegion.EU,
)
# Priorities
class OpsgeniePriority(object):
LOW = 1
MODERATE = 2
NORMAL = 3
HIGH = 4
EMERGENCY = 5
OPSGENIE_PRIORITIES = (
OpsgeniePriority.LOW,
OpsgeniePriority.MODERATE,
OpsgeniePriority.NORMAL,
OpsgeniePriority.HIGH,
OpsgeniePriority.EMERGENCY,
)
class NotifyOpsgenie(NotifyBase):
"""
A wrapper for Opsgenie Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'Opsgenie'
# The services URL
service_url = 'https://opsgenie.com/'
# All notification requests are secure
secure_protocol = 'opsgenie'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_opsgenie'
# The maximum length of the body
body_maxlen = 15000
# If we don't have the specified min length, then we don't bother using
# the body directive
opsgenie_body_minlen = 130
# The default region to use if one isn't otherwise specified
opsgenie_default_region = OpsgenieRegion.US
# The maximum allowable targets within a notification
maximum_batch_size = 50
# Define object templates
templates = (
'{schema}://{apikey}',
'{schema}://{apikey}/{targets}',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'apikey': {
'name': _('API Key'),
'type': 'string',
'private': True,
'required': True,
},
'target_escalation': {
'name': _('Target Escalation'),
'prefix': '^',
'type': 'string',
'map_to': 'targets',
},
'target_schedule': {
'name': _('Target Schedule'),
'type': 'string',
'prefix': '*',
'map_to': 'targets',
},
'target_user': {
'name': _('Target User'),
'type': 'string',
'prefix': '@',
'map_to': 'targets',
},
'target_team': {
'name': _('Target Team'),
'type': 'string',
'prefix': '#',
'map_to': 'targets',
},
'targets': {
'name': _('Targets '),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'region': {
'name': _('Region Name'),
'type': 'choice:string',
'values': OPSGENIE_REGIONS,
'default': OpsgenieRegion.US,
'map_to': 'region_name',
},
'batch': {
'name': _('Batch Mode'),
'type': 'bool',
'default': False,
},
'priority': {
'name': _('Priority'),
'type': 'choice:int',
'values': OPSGENIE_PRIORITIES,
'default': OpsgeniePriority.NORMAL,
},
'entity': {
'name': _('Entity'),
'type': 'string',
},
'alias': {
'name': _('Alias'),
'type': 'string',
},
'tags': {
'name': _('Tags'),
'type': 'string',
},
'to': {
'alias_of': 'targets',
},
})
# Map of key-value pairs to use as custom properties of the alert.
template_kwargs = {
'details': {
'name': _('Details'),
'prefix': '+',
},
}
def __init__(self, apikey, targets, region_name=None, details=None,
priority=None, alias=None, entity=None, batch=False,
tags=None, **kwargs):
"""
Initialize Opsgenie Object
"""
super(NotifyOpsgenie, self).__init__(**kwargs)
# API Key (associated with project)
self.apikey = validate_regex(apikey)
if not self.apikey:
msg = 'An invalid Opsgenie API Key ' \
'({}) was specified.'.format(apikey)
self.logger.warning(msg)
raise TypeError(msg)
# The Priority of the message
if priority not in OPSGENIE_PRIORITIES:
self.priority = OpsgeniePriority.NORMAL
else:
self.priority = priority
# Store our region
try:
self.region_name = self.opsgenie_default_region \
if region_name is None else region_name.lower()
if self.region_name not in OPSGENIE_REGIONS:
# allow the outer except to handle this common response
raise
except:
# Invalid region specified
msg = 'The Opsgenie region specified ({}) is invalid.' \
.format(region_name)
self.logger.warning(msg)
raise TypeError(msg)
self.details = {}
if details:
# Store our extra details
self.details.update(details)
# Prepare Batch Mode Flag
self.batch_size = self.maximum_batch_size if batch else 1
# Assign our tags (if defined)
self.__tags = parse_list(tags)
# Assign our entity (if defined)
self.entity = entity
# Assign our alias (if defined)
self.alias = alias
# Initialize our Targets
self.targets = []
# Sort our targets
for _target in parse_list(targets):
target = _target.strip()
if len(target) < 2:
self.logger.debug('Ignoring Opsgenie Entry: %s' % target)
continue
if target.startswith(NotifyOpsgenie.template_tokens
['target_team']['prefix']):
self.targets.append(
{'type': OpsgenieCategory.TEAM, 'id': target[1:]}
if is_uuid(target[1:]) else
{'type': OpsgenieCategory.TEAM, 'name': target[1:]})
elif target.startswith(NotifyOpsgenie.template_tokens
['target_schedule']['prefix']):
self.targets.append(
{'type': OpsgenieCategory.SCHEDULE, 'id': target[1:]}
if is_uuid(target[1:]) else
{'type': OpsgenieCategory.SCHEDULE, 'name': target[1:]})
elif target.startswith(NotifyOpsgenie.template_tokens
['target_escalation']['prefix']):
self.targets.append(
{'type': OpsgenieCategory.ESCALATION, 'id': target[1:]}
if is_uuid(target[1:]) else
{'type': OpsgenieCategory.ESCALATION, 'name': target[1:]})
elif target.startswith(NotifyOpsgenie.template_tokens
['target_user']['prefix']):
self.targets.append(
{'type': OpsgenieCategory.USER, 'id': target[1:]}
if is_uuid(target[1:]) else
{'type': OpsgenieCategory.USER, 'username': target[1:]})
else:
# Ambiguous entry; treat it as a user but not before
# displaying a warning to the end user first:
self.logger.debug(
'Treating ambiguous Opsgenie target %s as a user', target)
self.targets.append(
{'type': OpsgenieCategory.USER, 'id': target}
if is_uuid(target) else
{'type': OpsgenieCategory.USER, 'username': target})
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform Opsgenie Notification
"""
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json',
'Authorization': 'GenieKey {}'.format(self.apikey),
}
# Prepare our URL as it's based on our hostname
notify_url = OPSGENIE_API_LOOKUP[self.region_name]
# Initialize our has_error flag
has_error = False
# We want to manually set the title onto the body if specified
title_body = body if not title else '{}: {}'.format(title, body)
# Create a copy ouf our details object
details = self.details.copy()
if 'type' not in details:
details['type'] = notify_type
# Prepare our payload
payload = {
'source': self.app_desc,
'message': title_body,
'description': body,
'details': details,
'priority': 'P{}'.format(self.priority),
}
# Use our body directive if we exceed the minimum message
# limitation
if len(payload['message']) > self.opsgenie_body_minlen:
payload['message'] = '{}...'.format(
body[:self.opsgenie_body_minlen - 3])
if self.__tags:
payload['tags'] = self.__tags
if self.entity:
payload['entity'] = self.entity
if self.alias:
payload['alias'] = self.alias
length = len(self.targets) if self.targets else 1
for index in range(0, length, self.batch_size):
if self.targets:
# If there were no targets identified, then we simply
# just iterate once without the responders set
payload['responders'] = \
self.targets[index:index + self.batch_size]
# Some Debug Logging
self.logger.debug(
'Opsgenie POST URL: {} (cert_verify={})'.format(
notify_url, self.verify_certificate))
self.logger.debug('Opsgenie Payload: {}' .format(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
notify_url,
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code not in (
requests.codes.accepted, requests.codes.ok):
status_str = \
NotifyBase.http_response_code_lookup(
r.status_code)
self.logger.warning(
'Failed to send Opsgenie notification:'
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
# Mark our failure
has_error = True
continue
# If we reach here; the message was sent
self.logger.info('Sent Opsgenie notification')
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Opsgenie '
'notification.')
self.logger.debug('Socket Exception: %s' % str(e))
# Mark our failure
has_error = True
return not has_error
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
_map = {
OpsgeniePriority.LOW: 'low',
OpsgeniePriority.MODERATE: 'moderate',
OpsgeniePriority.NORMAL: 'normal',
OpsgeniePriority.HIGH: 'high',
OpsgeniePriority.EMERGENCY: 'emergency',
}
# Define any URL parameters
params = {
'region': self.region_name,
'priority':
_map[OpsgeniePriority.NORMAL] if self.priority not in _map
else _map[self.priority],
'batch': 'yes' if self.batch_size > 1 else 'no',
}
# Assign our entity value (if defined)
if self.entity:
params['entity'] = self.entity
# Assign our alias value (if defined)
if self.alias:
params['alias'] = self.alias
# Assign our tags (if specified)
if self.__tags:
params['tags'] = ','.join(self.__tags)
# Append our details into our parameters
params.update({'+{}'.format(k): v for k, v in self.details.items()})
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
# A map allows us to map our target types so they can be correctly
# placed back into your URL below. Hence map the 'user' -> '@'
__map = {
OpsgenieCategory.USER:
NotifyOpsgenie.template_tokens['target_user']['prefix'],
OpsgenieCategory.SCHEDULE:
NotifyOpsgenie.template_tokens['target_schedule']['prefix'],
OpsgenieCategory.ESCALATION:
NotifyOpsgenie.template_tokens['target_escalation']['prefix'],
OpsgenieCategory.TEAM:
NotifyOpsgenie.template_tokens['target_team']['prefix'],
}
return '{schema}://{apikey}/{targets}/?{params}'.format(
schema=self.secure_protocol,
apikey=self.pprint(self.apikey, privacy, safe=''),
targets='/'.join(
[NotifyOpsgenie.quote('{}{}'.format(
__map[x['type']],
x.get('id', x.get('name', x.get('username')))))
for x in self.targets]),
params=NotifyOpsgenie.urlencode(params))
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# The API Key is stored in the hostname
results['apikey'] = NotifyOpsgenie.unquote(results['host'])
# Get our Targets
results['targets'] = NotifyOpsgenie.split_path(results['fullpath'])
# Add our Meta Detail keys
results['details'] = {NotifyBase.unquote(x): NotifyBase.unquote(y)
for x, y in results['qsd+'].items()}
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
_map = {
# Letter Assignments
'l': OpsgeniePriority.LOW,
'm': OpsgeniePriority.MODERATE,
'n': OpsgeniePriority.NORMAL,
'h': OpsgeniePriority.HIGH,
'e': OpsgeniePriority.EMERGENCY,
'lo': OpsgeniePriority.LOW,
'me': OpsgeniePriority.MODERATE,
'no': OpsgeniePriority.NORMAL,
'hi': OpsgeniePriority.HIGH,
'em': OpsgeniePriority.EMERGENCY,
# Support 3rd Party API Documented Scale
'1': OpsgeniePriority.LOW,
'2': OpsgeniePriority.MODERATE,
'3': OpsgeniePriority.NORMAL,
'4': OpsgeniePriority.HIGH,
'5': OpsgeniePriority.EMERGENCY,
'p1': OpsgeniePriority.LOW,
'p2': OpsgeniePriority.MODERATE,
'p3': OpsgeniePriority.NORMAL,
'p4': OpsgeniePriority.HIGH,
'p5': OpsgeniePriority.EMERGENCY,
}
try:
results['priority'] = \
_map[results['qsd']['priority'][0:2].lower()]
except KeyError:
# No priority was set
pass
# Get Batch Boolean (if set)
results['batch'] = \
parse_bool(
results['qsd'].get(
'batch',
NotifyOpsgenie.template_args['batch']['default']))
if 'apikey' in results['qsd'] and len(results['qsd']['apikey']):
results['apikey'] = \
NotifyOpsgenie.unquote(results['qsd']['apikey'])
if 'tags' in results['qsd'] and len(results['qsd']['tags']):
# Extract our tags
results['tags'] = \
parse_list(NotifyOpsgenie.unquote(results['qsd']['tags']))
if 'region' in results['qsd'] and len(results['qsd']['region']):
# Extract our region
results['region_name'] = \
NotifyOpsgenie.unquote(results['qsd']['region'])
if 'entity' in results['qsd'] and len(results['qsd']['entity']):
# Extract optional entity field
results['entity'] = \
NotifyOpsgenie.unquote(results['qsd']['entity'])
if 'alias' in results['qsd'] and len(results['qsd']['alias']):
# Extract optional alias field
results['alias'] = \
NotifyOpsgenie.unquote(results['qsd']['alias'])
# Handle 'to' email address
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'].append(results['qsd']['to'])
return results

View file

@@ -0,0 +1,320 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Official API reference: https://developer.gitter.im/docs/user-resource
import re
import six
import requests
from json import dumps
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Used to break path apart into list of targets
TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
# Platform Devices
class ParsePlatformDevice(object):
# All Devices
ALL = 'all'
# Apple IOS (APNS)
IOS = 'ios'
# Android/Firebase (FCM)
ANDROID = 'android'
PARSE_PLATFORM_DEVICES = (
ParsePlatformDevice.ALL,
ParsePlatformDevice.IOS,
ParsePlatformDevice.ANDROID,
)
class NotifyParsePlatform(NotifyBase):
"""
A wrapper for Parse Platform Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'Parse Platform'
# The services URL
service_url = 'https://parseplatform.org/'
# insecure notifications (using http)
protocol = 'parsep'
# Secure notifications (using https)
secure_protocol = 'parseps'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_parseplatform'
# Define object templates
templates = (
'{schema}://{app_id}:{master_key}@{host}',
'{schema}://{app_id}:{master_key}@{host}:{port}',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'host': {
'name': _('Hostname'),
'type': 'string',
'required': True,
},
'port': {
'name': _('Port'),
'type': 'int',
'min': 1,
'max': 65535,
},
'app_id': {
'name': _('App ID'),
'type': 'string',
'private': True,
'required': True,
},
'master_key': {
'name': _('Master Key'),
'type': 'string',
'private': True,
'required': True,
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'device': {
'name': _('Device'),
'type': 'choice:string',
'values': PARSE_PLATFORM_DEVICES,
'default': ParsePlatformDevice.ALL,
},
'app_id': {
'alias_of': 'app_id',
},
'master_key': {
'alias_of': 'master_key',
},
})
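A hedged usage sketch combining the tokens and the device argument above; the application id, master key and hostname are placeholders:

import apprise

apobj = apprise.Apprise()

# device may be all (the default), ios or android, per the choices above
apobj.add('parseps://MyAppId:MyMasterKey@parse.example.com?device=ios')

apobj.notify(title='New release', body='Parse Platform push test.')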
def __init__(self, app_id, master_key, device=None, **kwargs):
"""
Initialize Parse Platform Object
"""
super(NotifyParsePlatform, self).__init__(**kwargs)
self.fullpath = kwargs.get('fullpath')
if not isinstance(self.fullpath, six.string_types):
self.fullpath = '/'
# Application ID
self.application_id = validate_regex(app_id)
if not self.application_id:
msg = 'An invalid Parse Platform Application ID ' \
'({}) was specified.'.format(app_id)
self.logger.warning(msg)
raise TypeError(msg)
# Master Key
self.master_key = validate_regex(master_key)
if not self.master_key:
msg = 'An invalid Parse Platform Master Key ' \
'({}) was specified.'.format(master_key)
self.logger.warning(msg)
raise TypeError(msg)
# Initialize Devices Array
self.devices = []
if device:
self.device = device.lower()
if self.device not in PARSE_PLATFORM_DEVICES:
msg = 'An invalid Parse Platform device ' \
'({}) was specified.'.format(device)
self.logger.warning(msg)
raise TypeError(msg)
else:
self.device = self.template_args['device']['default']
if self.device == ParsePlatformDevice.ALL:
self.devices = [d for d in PARSE_PLATFORM_DEVICES
if d != ParsePlatformDevice.ALL]
else:
# Store our device
self.devices.append(self.device)
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform Parse Platform Notification
"""
# Prepare our headers:
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json',
'X-Parse-Application-Id': self.application_id,
'X-Parse-Master-Key': self.master_key,
}
# prepare our payload
payload = {
'where': {
'deviceType': {
'$in': self.devices,
}
},
'data': {
'title': title,
'alert': body,
}
}
# Set our schema
schema = 'https' if self.secure else 'http'
# Our Notification URL
url = '%s://%s' % (schema, self.host)
if isinstance(self.port, int):
url += ':%d' % self.port
url += self.fullpath.rstrip('/') + '/parse/push/'
self.logger.debug('Parse Platform POST URL: %s (cert_verify=%r)' % (
url, self.verify_certificate,
))
self.logger.debug('Parse Platform Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
url,
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = NotifyParsePlatform.\
http_response_code_lookup(r.status_code)
self.logger.warning(
'Failed to send Parse Platform notification: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug('Response Details:\r\n{}'.format(r.content))
# Return; we're done
return False
else:
self.logger.info('Sent Parse Platform notification.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Parse Platform '
'notification to %s.' % self.host)
self.logger.debug('Socket Exception: %s' % str(e))
# Return; we're done
return False
return True
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any arguments set
params = {
'device': self.device,
}
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
default_port = 443 if self.secure else 80
return \
'{schema}://{app_id}:{master_key}@' \
'{hostname}{port}{fullpath}/?{params}'.format(
schema=self.secure_protocol if self.secure else self.protocol,
app_id=self.pprint(self.application_id, privacy, safe=''),
master_key=self.pprint(self.master_key, privacy, safe=''),
hostname=NotifyParsePlatform.quote(self.host, safe=''),
port='' if self.port is None or self.port == default_port
else ':{}'.format(self.port),
fullpath=NotifyParsePlatform.quote(self.fullpath, safe='/'),
params=NotifyParsePlatform.urlencode(params))
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to instantiate this object.
"""
results = NotifyBase.parse_url(url)
if not results:
# We're done early as we couldn't load the results
return results
# App ID is retrieved from the user
results['app_id'] = NotifyParsePlatform.unquote(results['user'])
# Master Key is retrieved from the password
results['master_key'] = \
NotifyParsePlatform.unquote(results['password'])
# Device support override
if 'device' in results['qsd'] and len(results['qsd']['device']):
results['device'] = results['qsd']['device']
# Allow app_id attribute over-ride
if 'app_id' in results['qsd'] and len(results['qsd']['app_id']):
results['app_id'] = results['qsd']['app_id']
# Allow master_key attribute over-ride
if 'master_key' in results['qsd'] \
and len(results['qsd']['master_key']):
results['master_key'] = results['qsd']['master_key']
return results

View file

@ -23,20 +23,17 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import re
import requests
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import is_email
from ..utils import is_phone_no
from ..utils import parse_list
from ..utils import parse_bool
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class NotifyPopcornNotify(NotifyBase):
"""
@ -127,19 +124,10 @@ class NotifyPopcornNotify(NotifyBase):
for target in parse_list(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
result = is_phone_no(target)
if result:
# Further check our phone # for it's digit count
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append(result)
self.targets.append(result['full'])
continue
result = is_email(target)

View file

@ -278,15 +278,27 @@ class NotifyProwl(NotifyBase):
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
_map = {
# Letter Assignments
'l': ProwlPriority.LOW,
'm': ProwlPriority.MODERATE,
'n': ProwlPriority.NORMAL,
'h': ProwlPriority.HIGH,
'e': ProwlPriority.EMERGENCY,
'lo': ProwlPriority.LOW,
'me': ProwlPriority.MODERATE,
'no': ProwlPriority.NORMAL,
'hi': ProwlPriority.HIGH,
'em': ProwlPriority.EMERGENCY,
# Support 3rd Party Documented Scale
'-2': ProwlPriority.LOW,
'-1': ProwlPriority.MODERATE,
'0': ProwlPriority.NORMAL,
'1': ProwlPriority.HIGH,
'2': ProwlPriority.EMERGENCY,
}
try:
results['priority'] = \
_map[results['qsd']['priority'][0].lower()]
_map[results['qsd']['priority'][0:2].lower()]
except KeyError:
# No priority was set

View file

@ -367,8 +367,9 @@ class NotifyPushBullet(NotifyBase):
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while reading {}.'.format(
payload.name if payload else 'attachment'))
'An I/O error occurred while handling {}.'.format(
payload.name if isinstance(payload, AttachBase)
else payload))
self.logger.debug('I/O Exception: %s' % str(e))
return False, response

View file

@ -29,6 +29,7 @@ import requests
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..common import NotifyFormat
from ..utils import parse_list
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
@ -162,14 +163,12 @@ class NotifyPushover(NotifyBase):
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[a-z0-9]{30}$', 'i'),
},
'token': {
'name': _('Access Token'),
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[a-z0-9]{30}$', 'i'),
},
'target_device': {
'name': _('Target Device'),
@ -197,6 +196,16 @@ class NotifyPushover(NotifyBase):
'regex': (r'^[a-z]{1,12}$', 'i'),
'default': PushoverSound.PUSHOVER,
},
'url': {
'name': _('URL'),
'map_to': 'supplemental_url',
'type': 'string',
},
'url_title': {
'name': _('URL Title'),
'map_to': 'supplemental_url_title',
'type': 'string'
},
'retry': {
'name': _('Retry'),
'type': 'int',
@ -216,15 +225,15 @@ class NotifyPushover(NotifyBase):
})
def __init__(self, user_key, token, targets=None, priority=None,
sound=None, retry=None, expire=None, **kwargs):
sound=None, retry=None, expire=None, supplemental_url=None,
supplemental_url_title=None, **kwargs):
"""
Initialize Pushover Object
"""
super(NotifyPushover, self).__init__(**kwargs)
# Access Token (associated with project)
self.token = validate_regex(
token, *self.template_tokens['token']['regex'])
self.token = validate_regex(token)
if not self.token:
msg = 'An invalid Pushover Access Token ' \
'({}) was specified.'.format(token)
@ -232,8 +241,7 @@ class NotifyPushover(NotifyBase):
raise TypeError(msg)
# User Key (associated with project)
self.user_key = validate_regex(
user_key, *self.template_tokens['user_key']['regex'])
self.user_key = validate_regex(user_key)
if not self.user_key:
msg = 'An invalid Pushover User Key ' \
'({}) was specified.'.format(user_key)
@ -244,6 +252,10 @@ class NotifyPushover(NotifyBase):
if len(self.targets) == 0:
self.targets = (PUSHOVER_SEND_TO_ALL, )
# Setup supplemental url
self.supplemental_url = supplemental_url
self.supplemental_url_title = supplemental_url_title
# Setup our sound
self.sound = NotifyPushover.default_pushover_sound \
if not isinstance(sound, six.string_types) else sound.lower()
@ -324,6 +336,15 @@ class NotifyPushover(NotifyBase):
'sound': self.sound,
}
if self.supplemental_url:
payload['url'] = self.supplemental_url
if self.supplemental_url_title:
payload['url_title'] = self.supplemental_url_title
if self.notify_format == NotifyFormat.HTML:
# https://pushover.net/api#html
payload['html'] = 1
if self.priority == PushoverPriority.EMERGENCY:
payload.update({'retry': self.retry, 'expire': self.expire})
@ -568,6 +589,14 @@ class NotifyPushover(NotifyBase):
results['sound'] = \
NotifyPushover.unquote(results['qsd']['sound'])
# Get the supplementary url
if 'url' in results['qsd'] and len(results['qsd']['url']):
results['supplemental_url'] = NotifyPushover.unquote(
results['qsd']['url']
)
if 'url_title' in results['qsd'] and len(results['qsd']['url_title']):
results['supplemental_url_title'] = results['qsd']['url_title']
# Get expire and retry
if 'expire' in results['qsd'] and len(results['qsd']['expire']):
results['expire'] = results['qsd']['expire']

View file

@ -0,0 +1,750 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# 1. Visit https://www.reddit.com/prefs/apps and scroll to the bottom
# 2. Click on the button that reads 'are you a developer? create an app...'
# 3. Set the mode to `script`,
# 4. Provide a `name`, `description`, `redirect uri` and save it.
# 5. Once the bot is saved, you'll be given an ID (next to the bot name)
# and a Secret.
# The App ID will look something like this: YWARPXajkk645m
# The App Secret will look something like this: YZGKc5YNjq3BsC-bf7oBKalBMeb1xA
# The App will also have a location where you can identify the users
# who have access (identified as Developers) to the app itself. You will
# additionally need these credentials to authenticate with.
# With this information you'll be able to form the URL:
# reddit://{user}:{password}@{app_id}/{app_secret}
# All of the documentation needed to work with the Reddit API can be found
# here:
# - https://www.reddit.com/dev/api/
# - https://www.reddit.com/dev/api/#POST_api_submit
# - https://github.com/reddit-archive/reddit/wiki/API
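#
# As a rough usage sketch (the user, password, subreddit and credential
# values below are placeholders based on the examples above, not working
# secrets), a URL assembled this way is loaded through the standard Apprise
# object like any other notification URL:
#
#   import apprise
#
#   apobj = apprise.Apprise()
#   apobj.add('reddit://botuser:botpass@YWARPXajkk645m/'
#             'YZGKc5YNjq3BsC-bf7oBKalBMeb1xA/apprise')
#   apobj.notify(title='Hello', body='World')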
import six
import requests
from json import loads
from datetime import timedelta
from datetime import datetime
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyFormat
from ..common import NotifyType
from ..utils import parse_list
from ..utils import parse_bool
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
from .. import __title__, __version__
# Extend HTTP Error Messages
REDDIT_HTTP_ERROR_MAP = {
401: 'Unauthorized - Invalid Token',
}
class RedditMessageKind(object):
"""
Define the kinds of messages supported
"""
# Attempt to auto-detect the type prior to passing along the message to
# Reddit
AUTO = 'auto'
# A common message
SELF = 'self'
# A Hyperlink
LINK = 'link'
REDDIT_MESSAGE_KINDS = (
RedditMessageKind.AUTO,
RedditMessageKind.SELF,
RedditMessageKind.LINK,
)
class NotifyReddit(NotifyBase):
"""
A wrapper for Notify Reddit Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'Reddit'
# The services URL
service_url = 'https://reddit.com'
# The default secure protocol
secure_protocol = 'reddit'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_reddit'
# The maximum size of the message
body_maxlen = 6000
# Maximum title length as defined by the Reddit API
title_maxlen = 300
# Default to markdown
notify_format = NotifyFormat.MARKDOWN
# The default Notification URL to use
auth_url = 'https://www.reddit.com/api/v1/access_token'
submit_url = 'https://oauth.reddit.com/api/submit'
# Reddit is kind enough to return how many more requests we're allowed to
# continue to make within its header response as:
# X-RateLimit-Reset: The epoch time (in seconds) we can expect our
# rate-limit to be reset.
# X-RateLimit-Remaining: an integer identifying how many requests we're
# still allowed to make.
request_rate_per_sec = 0
# For Tracking Purposes
ratelimit_reset = datetime.utcnow()
# Default to 1.0
ratelimit_remaining = 1.0
# Taken right from google.auth.helpers:
clock_skew = timedelta(seconds=10)
# 1 hour in seconds (the lifetime of our token)
access_token_lifetime_sec = timedelta(seconds=3600)
# Define object templates
templates = (
'{schema}://{user}:{password}@{app_id}/{app_secret}/{targets}',
)
# Define our template arguments
template_tokens = dict(NotifyBase.template_tokens, **{
'user': {
'name': _('User Name'),
'type': 'string',
'required': True,
},
'password': {
'name': _('Password'),
'type': 'string',
'private': True,
'required': True,
},
'app_id': {
'name': _('Application ID'),
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[a-z0-9-]+$', 'i'),
},
'app_secret': {
'name': _('Application Secret'),
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[a-z0-9-]+$', 'i'),
},
'target_subreddit': {
'name': _('Target Subreddit'),
'type': 'string',
'map_to': 'targets',
},
'targets': {
'name': _('Targets'),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'to': {
'alias_of': 'targets',
},
'kind': {
'name': _('Kind'),
'type': 'choice:string',
'values': REDDIT_MESSAGE_KINDS,
'default': RedditMessageKind.AUTO,
},
'flair_id': {
'name': _('Flair ID'),
'type': 'string',
'map_to': 'flair_id',
},
'flair_text': {
'name': _('Flair Text'),
'type': 'string',
'map_to': 'flair_text',
},
'nsfw': {
'name': _('NSFW'),
'type': 'bool',
'default': False,
'map_to': 'nsfw',
},
'ad': {
'name': _('Is Ad?'),
'type': 'bool',
'default': False,
'map_to': 'advertisement',
},
'replies': {
'name': _('Send Replies'),
'type': 'bool',
'default': True,
'map_to': 'sendreplies',
},
'spoiler': {
'name': _('Is Spoiler'),
'type': 'bool',
'default': False,
'map_to': 'spoiler',
},
'resubmit': {
'name': _('Resubmit Flag'),
'type': 'bool',
'default': False,
'map_to': 'resubmit',
},
})
def __init__(self, app_id=None, app_secret=None, targets=None,
kind=None, nsfw=False, sendreplies=True, resubmit=False,
spoiler=False, advertisement=False,
flair_id=None, flair_text=None, **kwargs):
"""
Initialize Notify Reddit Object
"""
super(NotifyReddit, self).__init__(**kwargs)
# Initialize subreddit list
self.subreddits = set()
# Not Safe For Work Flag
self.nsfw = nsfw
# Send Replies Flag
self.sendreplies = sendreplies
# Is Spoiler Flag
self.spoiler = spoiler
# Resubmit Flag
self.resubmit = resubmit
# Is Ad?
self.advertisement = advertisement
# Flair details
self.flair_id = flair_id
self.flair_text = flair_text
# Our keys we build using the provided content
self.__refresh_token = None
self.__access_token = None
self.__access_token_expiry = datetime.utcnow()
self.kind = kind.strip().lower() \
if isinstance(kind, six.string_types) \
else self.template_args['kind']['default']
if self.kind not in REDDIT_MESSAGE_KINDS:
msg = 'An invalid Reddit message kind ({}) was specified'.format(
kind)
self.logger.warning(msg)
raise TypeError(msg)
self.user = validate_regex(self.user)
if not self.user:
msg = 'An invalid Reddit User ID ' \
'({}) was specified'.format(self.user)
self.logger.warning(msg)
raise TypeError(msg)
self.password = validate_regex(self.password)
if not self.password:
msg = 'An invalid Reddit Password ' \
'({}) was specified'.format(self.password)
self.logger.warning(msg)
raise TypeError(msg)
self.client_id = validate_regex(
app_id, *self.template_tokens['app_id']['regex'])
if not self.client_id:
msg = 'An invalid Reddit App ID ' \
'({}) was specified'.format(app_id)
self.logger.warning(msg)
raise TypeError(msg)
self.client_secret = validate_regex(
app_secret, *self.template_tokens['app_secret']['regex'])
if not self.client_secret:
msg = 'An invalid Reddit App Secret ' \
'({}) was specified'.format(app_secret)
self.logger.warning(msg)
raise TypeError(msg)
# Build list of subreddits
self.subreddits = [
sr.lstrip('#') for sr in parse_list(targets) if sr.lstrip('#')]
if not self.subreddits:
self.logger.warning(
'No subreddits were identified to be notified')
return
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'kind': self.kind,
'ad': 'yes' if self.advertisement else 'no',
'nsfw': 'yes' if self.nsfw else 'no',
'resubmit': 'yes' if self.resubmit else 'no',
'replies': 'yes' if self.sendreplies else 'no',
'spoiler': 'yes' if self.spoiler else 'no',
}
# Flair support
if self.flair_id:
params['flair_id'] = self.flair_id
if self.flair_text:
params['flair_text'] = self.flair_text
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
return '{schema}://{user}:{password}@{app_id}/{app_secret}' \
'/{targets}/?{params}'.format(
schema=self.secure_protocol,
user=NotifyReddit.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
app_id=self.pprint(
self.client_id, privacy, mode=PrivacyMode.Secret, safe=''),
app_secret=self.pprint(
self.client_secret, privacy, mode=PrivacyMode.Secret,
safe=''),
targets='/'.join(
[NotifyReddit.quote(x, safe='') for x in self.subreddits]),
params=NotifyReddit.urlencode(params),
)
def login(self):
"""
A simple wrapper to authenticate with the Reddit Server
"""
# Prepare our payload
payload = {
'grant_type': 'password',
'username': self.user,
'password': self.password,
}
# Enforce a False flag setting before calling _fetch()
self.__access_token = False
# Send Login Information
postokay, response = self._fetch(
self.auth_url,
payload=payload,
)
if not postokay or not response:
# Setting this variable to False as a way of letting us know
# we failed to authenticate on our last attempt
self.__access_token = False
return False
# Our response object looks like this (content has been altered for
# presentation purposes):
# {
# "access_token": Your access token,
# "token_type": "bearer",
# "expires_in": Unix Epoch Seconds,
# "scope": A scope string,
# "refresh_token": Your refresh token
# }
# Acquire our token
self.__access_token = response.get('access_token')
# Handle other optional arguments we can use
if 'expires_in' in response:
delta = timedelta(seconds=int(response['expires_in']))
self.__access_token_expiry = \
delta + datetime.utcnow() - self.clock_skew
else:
self.__access_token_expiry = self.access_token_lifetime_sec + \
datetime.utcnow() - self.clock_skew
# The Refresh Token
self.__refresh_token = response.get(
'refresh_token', self.__refresh_token)
if self.__access_token:
self.logger.info('Authenticated to Reddit as {}'.format(self.user))
return True
self.logger.warning(
'Failed to authenticate to Reddit as {}'.format(self.user))
# Mark our failure
return False
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform Reddit Notification
"""
# error tracking (used for function return)
has_error = False
if not self.__access_token and not self.login():
# We failed to authenticate - we're done
return False
if not len(self.subreddits):
# We have nothing to notify; we're done
self.logger.warning('There are no Reddit targets to notify')
return False
# Prepare our Message Type/Kind
if self.kind == RedditMessageKind.AUTO:
parsed = NotifyBase.parse_url(body)
# Detect a link
if parsed and parsed.get('schema', '').startswith('http') \
and parsed.get('host'):
kind = RedditMessageKind.LINK
else:
kind = RedditMessageKind.SELF
else:
kind = self.kind
# Create a copy of the subreddits list
subreddits = list(self.subreddits)
while len(subreddits) > 0:
# Retrieve our subreddit
subreddit = subreddits.pop()
# Prepare our payload
payload = {
'ad': True if self.advertisement else False,
'api_type': 'json',
'extension': 'json',
'sr': subreddit,
'title': title,
'kind': kind,
'nsfw': True if self.nsfw else False,
'resubmit': True if self.resubmit else False,
'sendreplies': True if self.sendreplies else False,
'spoiler': True if self.spoiler else False,
}
if self.flair_id:
payload['flair_id'] = self.flair_id
if self.flair_text:
payload['flair_text'] = self.flair_text
if kind == RedditMessageKind.LINK:
payload.update({
'url': body,
})
else:
payload.update({
'text': body,
})
postokay, response = self._fetch(self.submit_url, payload=payload)
# only toggle has_error flag if we had an error
if not postokay:
# Mark our failure
has_error = True
continue
# If we reach here, we were successful
self.logger.info(
'Sent Reddit notification to {}'.format(
subreddit))
return not has_error
def _fetch(self, url, payload=None):
"""
Wrapper to Reddit API requests object
"""
# use what was specified, otherwise build headers dynamically
headers = {
'User-Agent': '{} v{}'.format(__title__, __version__)
}
if self.__access_token:
# Set our token
headers['Authorization'] = 'Bearer {}'.format(self.__access_token)
# Prepare our url
url = self.submit_url if self.__access_token else self.auth_url
# Some Debug Logging
self.logger.debug('Reddit POST URL: {} (cert_verify={})'.format(
url, self.verify_certificate))
self.logger.debug('Reddit Payload: %s' % str(payload))
# By default set wait to None
wait = None
if self.ratelimit_remaining <= 0.0:
# Determine how long we should wait for or if we should wait at
# all. This isn't fool-proof because we can't be sure the client
# time (calling this script) is completely synced up with the
# Reddit server. One would hope we're on NTP and our clocks are
# the same allowing this to roll smoothly:
now = datetime.utcnow()
if now < self.ratelimit_reset:
# We need to throttle for the difference in seconds
wait = abs(
(self.ratelimit_reset - now + self.clock_skew)
.total_seconds())
# Always call throttle before any remote server i/o is made;
self.throttle(wait=wait)
# Initialize a default value for our content value
content = {}
# acquire our request mode
try:
r = requests.post(
url,
data=payload,
auth=None if self.__access_token
else (self.client_id, self.client_secret),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
# We attempt to login again and retry the original request
# if we aren't in the process of handling a login already
if r.status_code != requests.codes.ok \
and self.__access_token and url != self.auth_url:
# We had a problem
status_str = \
NotifyReddit.http_response_code_lookup(
r.status_code, REDDIT_HTTP_ERROR_MAP)
self.logger.debug(
'Taking countermeasures after failing to send to Reddit '
'{}: {}{}error={}'.format(
url,
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
# We failed to authenticate with our token; login one more
# time and retry this original request
if not self.login():
return (False, {})
# Try again
r = requests.post(
url,
data=payload,
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout
)
# Get our JSON content if it's possible
try:
content = loads(r.content)
except (TypeError, ValueError, AttributeError):
# TypeError = r.content is not a String
# ValueError = r.content is Unparsable
# AttributeError = r.content is None
# We had a problem
status_str = \
NotifyReddit.http_response_code_lookup(
r.status_code, REDDIT_HTTP_ERROR_MAP)
# Reddit always returns a JSON response
self.logger.warning(
'Failed to send to Reddit after countermeasures {}: '
'{}{}error={}'.format(
url,
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
return (False, {})
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyReddit.http_response_code_lookup(
r.status_code, REDDIT_HTTP_ERROR_MAP)
self.logger.warning(
'Failed to send to Reddit {}: '
'{}{}error={}'.format(
url,
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
# Mark our failure
return (False, content)
errors = [] if not content else \
content.get('json', {}).get('errors', [])
if errors:
self.logger.warning(
'Failed to send to Reddit {}: '
'{}'.format(
url,
str(errors)))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
# Mark our failure
return (False, content)
try:
# Store our rate limiting (if provided)
self.ratelimit_remaining = \
float(r.headers.get(
'X-RateLimit-Remaining'))
self.ratelimit_reset = datetime.utcfromtimestamp(
int(r.headers.get('X-RateLimit-Reset')))
except (TypeError, ValueError):
# This is returned if we could not retrieve this information
# gracefully accept this state and move on
pass
except requests.RequestException as e:
self.logger.warning(
'Exception received when sending Reddit notification to {}'.
format(url))
self.logger.debug('Socket Exception: %s' % str(e))
# Mark our failure
return (False, content)
return (True, content)
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# Acquire our targets
results['targets'] = NotifyReddit.split_path(results['fullpath'])
# Kind override
if 'kind' in results['qsd'] and results['qsd']['kind']:
results['kind'] = NotifyReddit.unquote(
results['qsd']['kind'].strip().lower())
else:
results['kind'] = RedditMessageKind.AUTO
# Is an Ad?
results['ad'] = \
parse_bool(results['qsd'].get('ad', False))
# Get Not Safe For Work (NSFW) Flag
results['nsfw'] = \
parse_bool(results['qsd'].get('nsfw', False))
# Send Replies Flag
results['replies'] = \
parse_bool(results['qsd'].get('replies', True))
# Resubmit Flag
results['resubmit'] = \
parse_bool(results['qsd'].get('resubmit', False))
# Is Spoiler Flag
results['spoiler'] = \
parse_bool(results['qsd'].get('spoiler', False))
if 'flair_text' in results['qsd']:
results['flair_text'] = \
NotifyReddit.unquote(results['qsd']['flair_text'])
if 'flair_id' in results['qsd']:
results['flair_id'] = \
NotifyReddit.unquote(results['qsd']['flair_id'])
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyReddit.parse_list(results['qsd']['to'])
if 'app_id' in results['qsd']:
results['app_id'] = \
NotifyReddit.unquote(results['qsd']['app_id'])
else:
# The App/Bot ID is the hostname
results['app_id'] = NotifyReddit.unquote(results['host'])
if 'app_secret' in results['qsd']:
results['app_secret'] = \
NotifyReddit.unquote(results['qsd']['app_secret'])
else:
# The first target identified is the App secret
results['app_secret'] = \
None if not results['targets'] else results['targets'].pop(0)
return results

View file

@ -174,14 +174,17 @@ class NotifyRocketChat(NotifyBase):
'avatar': {
'name': _('Use Avatar'),
'type': 'bool',
'default': True,
'default': False,
},
'webhook': {
'alias_of': 'webhook',
},
'to': {
'alias_of': 'targets',
},
})
def __init__(self, webhook=None, targets=None, mode=None, avatar=True,
def __init__(self, webhook=None, targets=None, mode=None, avatar=None,
**kwargs):
"""
Initialize Notify Rocket.Chat Object
@ -209,9 +212,6 @@ class NotifyRocketChat(NotifyBase):
# Assign our webhook (if defined)
self.webhook = webhook
# Place an avatar image to associate with our content
self.avatar = avatar
# Used to track token headers upon authentication (if successful)
# This is only used if not on webhook mode
self.headers = {}
@ -278,6 +278,22 @@ class NotifyRocketChat(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
# Prepare our avatar setting
# - if specified; that trumps all
# - if not specified and we're dealing with a basic setup, the Avatar
# is disabled by default. This is because if the account doesn't
# have the bot flag set on it it won't work as documented here:
# https://developer.rocket.chat/api/rest-api/endpoints\
# /team-collaboration-endpoints/chat/postmessage
# - Otherwise if we're a webhook, we enable the avatar by default
# (if not otherwise specified) since it will work nicely.
# Place an avatar image to associate with our content
if self.mode == RocketChatAuthMode.BASIC:
self.avatar = False if avatar is None else avatar
else: # self.mode == RocketChatAuthMode.WEBHOOK:
self.avatar = True if avatar is None else avatar
return
def url(self, privacy=False, *args, **kwargs):
@ -367,11 +383,6 @@ class NotifyRocketChat(NotifyBase):
# Initialize our error tracking
has_error = False
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/json',
}
while len(targets):
# Retrieve our target
target = targets.pop(0)
@ -380,8 +391,7 @@ class NotifyRocketChat(NotifyBase):
payload['channel'] = target
if not self._send(
dumps(payload), notify_type=notify_type, path=path,
headers=headers, **kwargs):
payload, notify_type=notify_type, path=path, **kwargs):
# toggle flag
has_error = True
@ -400,21 +410,24 @@ class NotifyRocketChat(NotifyBase):
return False
# prepare JSON Object
payload = self._payload(body, title, notify_type)
_payload = self._payload(body, title, notify_type)
# Initialize our error tracking
has_error = False
# Build our list of channels/rooms/users (if any identified)
channels = ['@{}'.format(u) for u in self.users]
channels.extend(['#{}'.format(c) for c in self.channels])
# Create a copy of our channels to notify against
channels = list(self.channels)
_payload = payload.copy()
payload = _payload.copy()
while len(channels) > 0:
# Get Channel
channel = channels.pop(0)
_payload['channel'] = channel
payload['channel'] = channel
if not self._send(
_payload, notify_type=notify_type, headers=self.headers,
payload, notify_type=notify_type, headers=self.headers,
**kwargs):
# toggle flag
@ -422,11 +435,11 @@ class NotifyRocketChat(NotifyBase):
# Create a copy of our room id's to notify against
rooms = list(self.rooms)
_payload = payload.copy()
payload = _payload.copy()
while len(rooms):
# Get Room
room = rooms.pop(0)
_payload['roomId'] = room
payload['roomId'] = room
if not self._send(
payload, notify_type=notify_type, headers=self.headers,
@ -451,13 +464,13 @@ class NotifyRocketChat(NotifyBase):
# apply our images if they're set to be displayed
image_url = self.image_url(notify_type)
if self.avatar:
if self.avatar and image_url:
payload['avatar'] = image_url
return payload
def _send(self, payload, notify_type, path='api/v1/chat.postMessage',
headers=None, **kwargs):
headers={}, **kwargs):
"""
Perform Notify Rocket.Chat Notification
"""
@ -468,13 +481,19 @@ class NotifyRocketChat(NotifyBase):
api_url, self.verify_certificate))
self.logger.debug('Rocket.Chat Payload: %s' % str(payload))
# Apply minimum headers
headers.update({
'User-Agent': self.app_id,
'Content-Type': 'application/json',
})
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
api_url,
data=payload,
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
@ -691,8 +710,8 @@ class NotifyRocketChat(NotifyBase):
NotifyRocketChat.unquote(results['qsd']['mode'])
# avatar icon
results['avatar'] = \
parse_bool(results['qsd'].get('avatar', True))
if 'avatar' in results['qsd'] and len(results['qsd']['avatar']):
results['avatar'] = parse_bool(results['qsd'].get('avatar', True))
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):

View file

@ -0,0 +1,584 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Signup @ https://smtp2go.com (free accounts available)
#
# From your dashboard, you can generate an API Key if you haven't already
# at https://app.smtp2go.com/settings/apikeys/
# The API Key from here will look something like:
# api-60F0DD0AB5BA11ABA421F23C91C88EF4
#
# Knowing this, you can build your smtp2go url as follows:
# smtp2go://{user}@{domain}/{apikey}
# smtp2go://{user}@{domain}/{apikey}/{email}
#
# You can email as many addresses as you want as:
# smtp2go://{user}@{domain}/{apikey}/{email1}/{email2}/{emailN}
#
# The {user}@{domain} effectively assembles the 'from' email address
# the email will be transmitted from. If no email address is specified
# then it will also become the 'to' address as well.
#
import base64
import requests
from json import dumps
from email.utils import formataddr
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..common import NotifyFormat
from ..utils import parse_emails
from ..utils import parse_bool
from ..utils import is_email
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
SMTP2GO_HTTP_ERROR_MAP = {
429: 'Too many requests.',
}
class NotifySMTP2Go(NotifyBase):
"""
A wrapper for SMTP2Go Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'SMTP2Go'
# The services URL
service_url = 'https://www.smtp2go.com/'
# All notification requests are secure
secure_protocol = 'smtp2go'
# SMTP2Go advertises they allow 300 requests per minute.
# 60/300 = 0.2
request_rate_per_sec = 0.20
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_smtp2go'
# Notify URL
notify_url = 'https://api.smtp2go.com/v3/email/send'
# Default Notify Format
notify_format = NotifyFormat.HTML
# The maximum amount of emails that can reside within a single
# batch transfer
default_batch_size = 100
# Define object templates
templates = (
'{schema}://{user}@{host}:{apikey}/',
'{schema}://{user}@{host}:{apikey}/{targets}',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'user': {
'name': _('User Name'),
'type': 'string',
'required': True,
},
'host': {
'name': _('Domain'),
'type': 'string',
'required': True,
},
'apikey': {
'name': _('API Key'),
'type': 'string',
'private': True,
'required': True,
},
'targets': {
'name': _('Target Emails'),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'name': {
'name': _('From Name'),
'type': 'string',
'map_to': 'from_name',
},
'to': {
'alias_of': 'targets',
},
'cc': {
'name': _('Carbon Copy'),
'type': 'list:string',
},
'bcc': {
'name': _('Blind Carbon Copy'),
'type': 'list:string',
},
'batch': {
'name': _('Batch Mode'),
'type': 'bool',
'default': False,
},
})
# Define any kwargs we're using
template_kwargs = {
'headers': {
'name': _('Email Header'),
'prefix': '+',
},
}
def __init__(self, apikey, targets, cc=None, bcc=None, from_name=None,
headers=None, batch=False, **kwargs):
"""
Initialize SMTP2Go Object
"""
super(NotifySMTP2Go, self).__init__(**kwargs)
# API Key (associated with project)
self.apikey = validate_regex(apikey)
if not self.apikey:
msg = 'An invalid SMTP2Go API Key ' \
'({}) was specified.'.format(apikey)
self.logger.warning(msg)
raise TypeError(msg)
# Validate our username
if not self.user:
msg = 'No SMTP2Go username was specified.'
self.logger.warning(msg)
raise TypeError(msg)
# Acquire Email 'To'
self.targets = list()
# Acquire Carbon Copies
self.cc = set()
# Acquire Blind Carbon Copies
self.bcc = set()
# For tracking our email -> name lookups
self.names = {}
self.headers = {}
if headers:
# Store our extra headers
self.headers.update(headers)
# Prepare Batch Mode Flag
self.batch = batch
# Get our From username (if specified)
self.from_name = from_name
# Get our from email address
self.from_addr = '{user}@{host}'.format(user=self.user, host=self.host)
if not is_email(self.from_addr):
# Parse Source domain based on from_addr
msg = 'Invalid ~From~ email format: {}'.format(self.from_addr)
self.logger.warning(msg)
raise TypeError(msg)
if targets:
# Validate recipients (to:) and drop bad ones:
for recipient in parse_emails(targets):
result = is_email(recipient)
if result:
self.targets.append(
(result['name'] if result['name'] else False,
result['full_email']))
continue
self.logger.warning(
'Dropped invalid To email '
'({}) specified.'.format(recipient),
)
else:
# If our target email list is empty we want to add ourselves to it
self.targets.append(
(self.from_name if self.from_name else False, self.from_addr))
# Validate recipients (cc:) and drop bad ones:
for recipient in parse_emails(cc):
email = is_email(recipient)
if email:
self.cc.add(email['full_email'])
# Index our name (if one exists)
self.names[email['full_email']] = \
email['name'] if email['name'] else False
continue
self.logger.warning(
'Dropped invalid Carbon Copy email '
'({}) specified.'.format(recipient),
)
# Validate recipients (bcc:) and drop bad ones:
for recipient in parse_emails(bcc):
email = is_email(recipient)
if email:
self.bcc.add(email['full_email'])
# Index our name (if one exists)
self.names[email['full_email']] = \
email['name'] if email['name'] else False
continue
self.logger.warning(
'Dropped invalid Blind Carbon Copy email '
'({}) specified.'.format(recipient),
)
def send(self, body, title='', notify_type=NotifyType.INFO, attach=None,
**kwargs):
"""
Perform SMTP2Go Notification
"""
if not self.targets:
# There is no one to email; we're done
self.logger.warning(
'There are no Email recipients to notify')
return False
# error tracking (used for function return)
has_error = False
# Send in batches if identified to do so
batch_size = 1 if not self.batch else self.default_batch_size
# Prepare our headers
headers = {
'User-Agent': self.app_id,
'Accept': 'application/json',
'Content-Type': 'application/json',
}
# Track our potential attachments
attachments = []
if attach:
for idx, attachment in enumerate(attach):
# Perform some simple error checking
if not attachment:
# We could not access the attachment
self.logger.error(
'Could not access attachment {}.'.format(
attachment.url(privacy=True)))
return False
try:
with open(attachment.path, 'rb') as f:
# Attachment content must be base64 encoded and
# embedded directly within the JSON payload:
attachments.append({
'filename': attachment.name,
'fileblob': base64.b64encode(f.read())
.decode('utf-8'),
'mimetype': attachment.mimetype,
})
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while reading {}.'.format(
attachment.name if attachment else 'attachment'))
self.logger.debug('I/O Exception: %s' % str(e))
return False
try:
sender = formataddr(
(self.from_name if self.from_name else False,
self.from_addr), charset='utf-8')
except TypeError:
# Python v2.x Support (no charset keyword)
# Format our cc addresses to support the Name field
sender = formataddr(
(self.from_name if self.from_name else False,
self.from_addr))
# Prepare our payload
payload = {
# API Key
'api_key': self.apikey,
# Base payload options
'sender': sender,
'subject': title,
# our To array
'to': [],
}
if attachments:
payload['attachments'] = attachments
if self.notify_format == NotifyFormat.HTML:
payload['html_body'] = body
else:
payload['text_body'] = body
# Create a copy of the targets list
emails = list(self.targets)
for index in range(0, len(emails), batch_size):
# Initialize our cc list
cc = (self.cc - self.bcc)
# Initialize our bcc list
bcc = set(self.bcc)
# Initialize our to list
to = list()
for to_addr in self.targets[index:index + batch_size]:
# Strip target out of cc list if in To
cc = (cc - set([to_addr[1]]))
# Strip target out of bcc list if in To
bcc = (bcc - set([to_addr[1]]))
try:
# Prepare our to
to.append(formataddr(to_addr, charset='utf-8'))
except TypeError:
# Python v2.x Support (no charset keyword)
# Format our cc addresses to support the Name field
# Prepare our to
to.append(formataddr(to_addr))
# Prepare our To
payload['to'] = to
if cc:
try:
# Format our cc addresses to support the Name field
payload['cc'] = [formataddr(
(self.names.get(addr, False), addr), charset='utf-8')
for addr in cc]
except TypeError:
# Python v2.x Support (no charset keyword)
# Format our cc addresses to support the Name field
payload['cc'] = [formataddr( # pragma: no branch
(self.names.get(addr, False), addr))
for addr in cc]
# Format our bcc addresses to support the Name field
if bcc:
# set our bcc variable (convert to list first so it's
# JSON serializable)
payload['bcc'] = list(bcc)
# Store our header entries (if defined) into the payload
if self.headers:
payload['custom_headers'] = \
[{'header': k, 'value': v}
for k, v in self.headers.items()]
# Some Debug Logging
self.logger.debug('SMTP2Go POST URL: {} (cert_verify={})'.format(
self.notify_url, self.verify_certificate))
self.logger.debug('SMTP2Go Payload: {}' .format(payload))
# For logging output of success and errors; we get a head count
# of our outbound details:
verbose_dest = ', '.join(
[x[1] for x in self.targets[index:index + batch_size]]) \
if len(self.targets[index:index + batch_size]) <= 3 \
else '{} recipients'.format(
len(self.targets[index:index + batch_size]))
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.post(
self.notify_url,
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyBase.http_response_code_lookup(
r.status_code, SMTP2GO_HTTP_ERROR_MAP)
self.logger.warning(
'Failed to send SMTP2Go notification to {}: '
'{}{}error={}.'.format(
verbose_dest,
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
# Mark our failure
has_error = True
continue
else:
self.logger.info(
'Sent SMTP2Go notification to {}.'.format(
verbose_dest))
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending SMTP2Go '
'notification to %s.' % verbose_dest
)
self.logger.debug('Socket Exception: %s' % str(e))
# Mark our failure
has_error = True
continue
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while reading attachments')
self.logger.debug('I/O Exception: %s' % str(e))
# Mark our failure
has_error = True
continue
return not has_error
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'batch': 'yes' if self.batch else 'no',
}
# Append our headers into our parameters
params.update({'+{}'.format(k): v for k, v in self.headers.items()})
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
if self.from_name is not None:
# from_name specified; pass it back on the url
params['name'] = self.from_name
if self.cc:
# Handle our Carbon Copy Addresses
params['cc'] = ','.join(
['{}{}'.format(
'' if not self.names.get(e)
else '{}:'.format(self.names[e]), e) for e in self.cc])
if self.bcc:
# Handle our Blind Carbon Copy Addresses
params['bcc'] = ','.join(self.bcc)
# a simple boolean check as to whether we display our target emails
# or not
has_targets = \
not (len(self.targets) == 1
and self.targets[0][1] == self.from_addr)
return '{schema}://{user}@{host}/{apikey}/{targets}?{params}'.format(
schema=self.secure_protocol,
host=self.host,
user=NotifySMTP2Go.quote(self.user, safe=''),
apikey=self.pprint(self.apikey, privacy, safe=''),
targets='' if not has_targets else '/'.join(
[NotifySMTP2Go.quote('{}{}'.format(
'' if not e[0] else '{}:'.format(e[0]), e[1]),
safe='') for e in self.targets]),
params=NotifySMTP2Go.urlencode(params))
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# Get our entries; split_path() looks after unquoting content for us
# by default
results['targets'] = NotifySMTP2Go.split_path(results['fullpath'])
# Our very first entry is reserved for our api key
try:
results['apikey'] = results['targets'].pop(0)
except IndexError:
# We're done - no API Key found
results['apikey'] = None
if 'name' in results['qsd'] and len(results['qsd']['name']):
# Extract from name to associate with from address
results['from_name'] = \
NotifySMTP2Go.unquote(results['qsd']['name'])
# Handle 'to' email address
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'].append(results['qsd']['to'])
# Handle Carbon Copy Addresses
if 'cc' in results['qsd'] and len(results['qsd']['cc']):
results['cc'] = results['qsd']['cc']
# Handle Blind Carbon Copy Addresses
if 'bcc' in results['qsd'] and len(results['qsd']['bcc']):
results['bcc'] = results['qsd']['bcc']
# Add our Meta Headers that the user can provide with their outbound
# emails
results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y)
for x, y in results['qsd+'].items()}
# Get Batch Mode Flag
results['batch'] = \
parse_bool(results['qsd'].get(
'batch', NotifySMTP2Go.template_args['batch']['default']))
return results

View file

@ -35,13 +35,11 @@ from itertools import chain
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import is_phone_no
from ..utils import parse_list
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
# Topic Detection
# Summary: 256 Characters max, only alpha/numeric plus underscore (_) and
# dash (-) additionally allowed.
@ -198,24 +196,10 @@ class NotifySNS(NotifyBase):
self.aws_auth_algorithm = 'AWS4-HMAC-SHA256'
self.aws_auth_request = 'aws4_request'
# Get our targets
targets = parse_list(targets)
# Validate targets and drop bad ones:
for target in targets:
result = IS_PHONE_NO.match(target)
for target in parse_list(targets):
result = is_phone_no(target)
if result:
# Further check our phone # for it's digit count
# if it's less than 10, then we can assume it's
# a poorly specified phone no and spit a warning
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'(%s) specified.' % target,
)
continue
# store valid phone number
self.phone.append('+{}'.format(result))
continue
@ -231,12 +215,6 @@ class NotifySNS(NotifyBase):
'(%s) specified.' % target,
)
if len(self.phone) == 0 and len(self.topics) == 0:
# We have a bot token and no target(s) to message
msg = 'No AWS targets to notify.'
self.logger.warning(msg)
raise TypeError(msg)
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
@ -244,6 +222,11 @@ class NotifySNS(NotifyBase):
wrapper to send_notification since we can alert more than one channel
"""
if len(self.phone) == 0 and len(self.topics) == 0:
# We have a bot token and no target(s) to message
self.logger.warning('No AWS targets to notify.')
return False
# Initialize our error tracking
error_count = 0
@ -361,7 +344,7 @@ class NotifySNS(NotifyBase):
self.logger.debug('Response Details:\r\n{}'.format(r.content))
return (False, NotifySNS.aws_response_to_dict(r.content))
return (False, NotifySNS.aws_response_to_dict(r.text))
else:
self.logger.info(
@ -375,7 +358,7 @@ class NotifySNS(NotifyBase):
self.logger.debug('Socket Exception: %s' % str(e))
return (False, NotifySNS.aws_response_to_dict(None))
return (True, NotifySNS.aws_response_to_dict(r.content))
return (True, NotifySNS.aws_response_to_dict(r.text))
def aws_prepare_request(self, payload, reference=None):
"""

View file

@ -32,8 +32,8 @@ from ..common import NotifyType
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Default our global support flag
CRYPTOGRAPHY_AVAILABLE = False
from base64 import urlsafe_b64encode
import hashlib
try:
from cryptography.hazmat.primitives import padding
@ -41,15 +41,13 @@ try:
from cryptography.hazmat.primitives.ciphers import algorithms
from cryptography.hazmat.primitives.ciphers import modes
from cryptography.hazmat.backends import default_backend
from base64 import urlsafe_b64encode
import hashlib
CRYPTOGRAPHY_AVAILABLE = True
# We're good to go!
NOTIFY_SIMPLEPUSH_ENABLED = True
except ImportError:
# no problem; this just means the added encryption functionality isn't
# available. You can still send a SimplePush message
pass
# cryptography is required in order for this package to work
NOTIFY_SIMPLEPUSH_ENABLED = False
class NotifySimplePush(NotifyBase):
@ -57,6 +55,14 @@ class NotifySimplePush(NotifyBase):
A wrapper for SimplePush Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_SIMPLEPUSH_ENABLED
requirements = {
# Define our required packaging in order to work
'packages_required': 'cryptography'
}
# The default descriptive name associated with the Notification
service_name = 'SimplePush'
@ -181,15 +187,6 @@ class NotifySimplePush(NotifyBase):
Perform SimplePush Notification
"""
# Encrypt Message (providing support is available)
if self.password and self.user and not CRYPTOGRAPHY_AVAILABLE:
# Provide the end user at least some notification that they're
# not getting what they asked for
self.logger.warning(
"Authenticated SimplePush Notifications are not supported by "
"this system; `pip install cryptography`.")
return False
headers = {
'User-Agent': self.app_id,
'Content-type': "application/x-www-form-urlencoded",
@ -200,7 +197,7 @@ class NotifySimplePush(NotifyBase):
'key': self.apikey,
}
if self.password and self.user and CRYPTOGRAPHY_AVAILABLE:
if self.password and self.user:
body = self._encrypt(body)
title = self._encrypt(title)
payload.update({

View file

@ -33,7 +33,6 @@
# from). Activated phone numbers can be found on your dashboard here:
# - https://dashboard.sinch.com/numbers/your-numbers/numbers
#
import re
import six
import requests
import json
@ -41,15 +40,12 @@ import json
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class SinchRegion(object):
"""
Defines the Sinch Server Regions
@ -194,15 +190,6 @@ class NotifySinch(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
# The Source Phone # and/or short-code
self.source = source
if not IS_PHONE_NO.match(self.source):
msg = 'The Account (From) Phone # or Short-code specified ' \
'({}) is invalid.'.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Setup our region
self.region = self.template_args['region']['default'] \
if not isinstance(region, six.string_types) else region.lower()
@ -211,8 +198,16 @@ class NotifySinch(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
# The Source Phone # and/or short-code
result = is_phone_no(source, min_len=5)
if not result:
msg = 'The Account (From) Phone # or Short-code specified ' \
'({}) is invalid.'.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Tidy source
self.source = re.sub(r'[^\d]+', '', self.source)
self.source = result['full']
if len(self.source) < 11 or len(self.source) > 14:
# A short code is a special 5 or 6 digit telephone number
@ -233,37 +228,18 @@ class NotifySinch(NotifyBase):
# Parse our targets
self.targets = list()
for target in parse_list(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
# if it's less than 10, then we can assume it's
# a poorly specified phone no and spit a warning
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append('+{}'.format(result))
for target in parse_phone_no(targets):
# Parse each phone number we found
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
if not self.targets:
if len(self.source) in (5, 6):
# raise a warning since we're a short-code. We need
# a number to message
msg = 'There are no valid Sinch targets to notify.'
self.logger.warning(msg)
raise TypeError(msg)
# store valid phone number
self.targets.append('+{}'.format(result['full']))
return
@ -272,6 +248,14 @@ class NotifySinch(NotifyBase):
Perform Sinch Notification
"""
if not self.targets:
if len(self.source) in (5, 6):
# Generate a warning since we're a short-code. We need
# a number to message at minimum
self.logger.warning(
'There are no valid Sinch targets to notify.')
return False
# error tracking (used for function return)
has_error = False
@ -459,6 +443,7 @@ class NotifySinch(NotifyBase):
if 'from' in results['qsd'] and len(results['qsd']['from']):
results['source'] = \
NotifySinch.unquote(results['qsd']['from'])
if 'source' in results['qsd'] and len(results['qsd']['source']):
results['source'] = \
NotifySinch.unquote(results['qsd']['source'])
@ -472,6 +457,6 @@ class NotifySinch(NotifyBase):
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifySinch.parse_list(results['qsd']['to'])
NotifySinch.parse_phone_no(results['qsd']['to'])
return results

View file

@ -43,7 +43,7 @@
# to add a 'Bot User'. Give it a name and choose 'Add Bot User'.
# 4. Now you can choose 'Install App' to which you can choose 'Install App
# to Workspace'.
# 5. You will need to authorize the app which you get promopted to do.
# 5. You will need to authorize the app which you get prompted to do.
# 6. Finally you'll get some important information providing you your
# 'OAuth Access Token' and 'Bot User OAuth Access Token' such as:
# slack://{Oauth Access Token}
@ -53,6 +53,21 @@
# ... or:
# slack://xoxb-1234-1234-4ddbc191d40ee098cbaae6f3523ada2d
#
# You must at least give your bot the following access for it to
# be useful:
# - chat:write - MUST be set otherwise you can not post into
# a channel
# - users:read.email - Required if you want to be able to lookup
# users by their email address.
#
# The easiest way to bring a bot into a channel (so that it can send
# a message to it) is to invite it. At this time Apprise does not support
# an auto-join functionality. To do this:
# - In the 'Details' section of your channel
# - Click on the 'More' [...] (ellipsis icon)
# - Click 'Add apps'
# - You will be able to select the Bot App you previously created
# - Your bot will join your channel.
import re
import requests
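Once the chat:write scope above is granted (plus users:read.email if email targets are wanted) and the bot has been invited to the channel, a bot-mode notification only needs the token and a target. A hedged example re-using the placeholder token from the comments above:

import apprise

apobj = apprise.Apprise()
# Post into #general using the Bot User OAuth Access Token
apobj.add('slack://xoxb-1234-1234-4ddbc191d40ee098cbaae6f3523ada2d/#general')
apobj.notify(title='Nightly job', body='Backup completed without errors.')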
@@ -64,6 +79,7 @@ from .NotifyBase import NotifyBase
from ..common import NotifyImageSize
from ..common import NotifyType
from ..common import NotifyFormat
from ..utils import is_email
from ..utils import parse_bool
from ..utils import parse_list
from ..utils import validate_regex
@@ -202,6 +218,11 @@ class NotifySlack(NotifyBase):
'prefix': '+',
'map_to': 'targets',
},
'target_email': {
'name': _('Target Email'),
'type': 'string',
'map_to': 'targets',
},
'target_user': {
'name': _('Target User'),
'type': 'string',
@@ -234,14 +255,26 @@ class NotifySlack(NotifyBase):
'default': True,
'map_to': 'include_footer',
},
# Use Payload in Blocks (vs legacy way):
# See: https://api.slack.com/reference/messaging/payload
'blocks': {
'name': _('Use Blocks'),
'type': 'bool',
'default': False,
'map_to': 'use_blocks',
},
'to': {
'alias_of': 'targets',
},
'token': {
'name': _('Token'),
'alias_of': ('access_token', 'token_a', 'token_b', 'token_c'),
},
})
def __init__(self, access_token=None, token_a=None, token_b=None,
token_c=None, targets=None, include_image=True,
include_footer=True, **kwargs):
include_footer=True, use_blocks=None, **kwargs):
"""
Initialize Slack Object
"""
@@ -287,6 +320,16 @@ class NotifySlack(NotifyBase):
self.logger.warning(
'No user was specified; using "%s".' % self.app_id)
# Look the users up by their email address and map them back to their
# id here for future queries (if needed). This allows people to
# specify a full email as a recipient via slack
self._lookup_users = {}
self.use_blocks = parse_bool(
use_blocks, self.template_args['blocks']['default']) \
if use_blocks is not None \
else self.template_args['blocks']['default']
# Build list of channels
self.channels = parse_list(targets)
if len(self.channels) == 0:
@@ -330,29 +373,117 @@ class NotifySlack(NotifyBase):
# error tracking (used for function return)
has_error = False
# Perform Formatting
title = self._re_formatting_rules.sub( # pragma: no branch
lambda x: self._re_formatting_map[x.group()], title,
)
body = self._re_formatting_rules.sub( # pragma: no branch
lambda x: self._re_formatting_map[x.group()], body,
)
#
# Prepare JSON Object (applicable to both WEBHOOK and BOT mode)
payload = {
'username': self.user if self.user else self.app_id,
# Use Markdown language
'mrkdwn': (self.notify_format == NotifyFormat.MARKDOWN),
'attachments': [{
'title': title,
'text': body,
'color': self.color(notify_type),
# Time
'ts': time(),
}],
}
#
if self.use_blocks:
# Our slack format
_slack_format = 'mrkdwn' \
if self.notify_format == NotifyFormat.MARKDOWN \
else 'plain_text'
# Prepare our URL (depends on mode)
payload = {
'username': self.user if self.user else self.app_id,
'attachments': [{
'blocks': [{
'type': 'section',
'text': {
'type': _slack_format,
'text': body
}
}],
'color': self.color(notify_type),
}]
}
# Slack only accepts non-empty header sections
if title:
payload['attachments'][0]['blocks'].insert(0, {
'type': 'header',
'text': {
'type': 'plain_text',
'text': title,
'emoji': True
}
})
# Include the footer only if specified to do so
if self.include_footer:
# Acquire our to-be footer icon if configured to do so
image_url = None if not self.include_image \
else self.image_url(notify_type)
# Prepare our footer based on the block structure
_footer = {
'type': 'context',
'elements': [{
'type': _slack_format,
'text': self.app_id
}]
}
if image_url:
payload['icon_url'] = image_url
_footer['elements'].insert(0, {
'type': 'image',
'image_url': image_url,
'alt_text': notify_type
})
payload['attachments'][0]['blocks'].append(_footer)
else:
#
# Legacy API Formatting
#
if self.notify_format == NotifyFormat.MARKDOWN:
body = self._re_formatting_rules.sub( # pragma: no branch
lambda x: self._re_formatting_map[x.group()], body,
)
# Perform Formatting on title here; this is not needed for block
# mode above
title = self._re_formatting_rules.sub( # pragma: no branch
lambda x: self._re_formatting_map[x.group()], title,
)
# Prepare JSON Object (applicable to both WEBHOOK and BOT mode)
payload = {
'username': self.user if self.user else self.app_id,
# Use Markdown language
'mrkdwn': (self.notify_format == NotifyFormat.MARKDOWN),
'attachments': [{
'title': title,
'text': body,
'color': self.color(notify_type),
# Time
'ts': time(),
}],
}
# Acquire our to-be footer icon if configured to do so
image_url = None if not self.include_image \
else self.image_url(notify_type)
if image_url:
payload['icon_url'] = image_url
# Include the footer only if specified to do so
if self.include_footer:
if image_url:
payload['attachments'][0]['footer_icon'] = image_url
# Include the footer only if specified to do so
payload['attachments'][0]['footer'] = self.app_id
if attach and self.mode is SlackMode.WEBHOOK:
# Be friendly; let the user know why they can't send their
# attachments if using the Webhook mode
self.logger.warning(
'Slack Webhooks do not support attachments.')
# Prepare our Slack URL (depends on mode)
if self.mode is SlackMode.WEBHOOK:
url = '{}/{}/{}/{}'.format(
self.webhook_url,
@@ -364,16 +495,6 @@ class NotifySlack(NotifyBase):
else: # SlackMode.BOT
url = self.api_url.format('chat.postMessage')
if self.include_footer:
# Include the footer only if specified to do so
payload['attachments'][0]['footer'] = self.app_id
if attach and self.mode is SlackMode.WEBHOOK:
# Be friendly; let the user know why they can't send their
# attachments if using the Webhook mode
self.logger.warning(
'Slack Webhooks do not support attachments.')
# Create a copy of the channel list
channels = list(self.channels)
@@ -382,45 +503,47 @@ class NotifySlack(NotifyBase):
channel = channels.pop(0)
if channel is not None:
_channel = validate_regex(
channel, r'[+#@]?(?P<value>[A-Z0-9_]{1,32})')
if not _channel:
channel = validate_regex(channel, r'[+#@]?[A-Z0-9_]{1,32}')
if not channel:
# Channel over-ride was specified
self.logger.warning(
"The specified target {} is invalid;"
"skipping.".format(_channel))
"skipping.".format(channel))
# Mark our failure
has_error = True
continue
if len(_channel) > 1 and _channel[0] == '+':
if channel[0] == '+':
# Treat as encoded id if prefixed with a +
payload['channel'] = _channel[1:]
payload['channel'] = channel[1:]
elif len(_channel) > 1 and _channel[0] == '@':
elif channel[0] == '@':
# Treat @ value 'as is'
payload['channel'] = _channel
payload['channel'] = channel
else:
# Prefix with channel hash tag
payload['channel'] = '#{}'.format(_channel)
# We'll perform a user lookup if we detect an email
email = is_email(channel)
if email:
payload['channel'] = \
self.lookup_userid(email['full_email'])
if not payload['channel']:
# Move along; any notifications/logging would have
# come from lookup_userid()
has_error = True
continue
else:
# Prefix with channel hash tag (if not already)
payload['channel'] = \
channel if channel[0] == '#' \
else '#{}'.format(channel)
# Store the valid and massaged payload that is recognizable by
# slack. This list is used for sending attachments later.
attach_channel_list.append(payload['channel'])
# Acquire our to-be footer icon if configured to do so
image_url = None if not self.include_image \
else self.image_url(notify_type)
if image_url:
payload['icon_url'] = image_url
if self.include_footer:
payload['attachments'][0]['footer_icon'] = image_url
response = self._send(url, payload)
if not response:
# Handle any error
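With blocks=yes, the branch above assembles a Block Kit style payload instead of the legacy attachment fields. For a markdown notification it comes out roughly as below; the username, colour and image URL are illustrative placeholders, and the channel key is filled in per target just before each post:

payload = {
    'username': 'Apprise',                       # placeholder bot name
    'icon_url': 'https://example.com/info.png',  # placeholder status icon
    'attachments': [{
        'color': '#3AA3E3',                      # placeholder colour
        'blocks': [
            # header block; only inserted when a title was supplied
            {'type': 'header',
             'text': {'type': 'plain_text', 'text': 'Nightly job',
                      'emoji': True}},
            {'type': 'section',
             'text': {'type': 'mrkdwn',
                      'text': 'Backup completed without errors.'}},
            # context footer; appended when include_footer is enabled
            {'type': 'context',
             'elements': [
                 {'type': 'image',
                  'image_url': 'https://example.com/info.png',
                  'alt_text': 'info'},
                 {'type': 'mrkdwn', 'text': 'Apprise'},
             ]},
        ],
    }],
}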
@@ -465,6 +588,162 @@ class NotifySlack(NotifyBase):
return not has_error
def lookup_userid(self, email):
"""
Takes an email address and attempts to resolve/acquire its user
id for notification purposes.
"""
if email in self._lookup_users:
# We're done as entry has already been retrieved
return self._lookup_users[email]
if self.mode is not SlackMode.BOT:
# You can not look up users by email outside of bot mode
self.logger.warning(
'Emails can not be resolved to Slack User IDs unless you '
'have a bot configured.')
return None
lookup_url = self.api_url.format('users.lookupByEmail')
headers = {
'User-Agent': self.app_id,
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': 'Bearer {}'.format(self.access_token),
}
# we pass in our email address as the argument
params = {
'email': email,
}
self.logger.debug('Slack User Lookup POST URL: %s (cert_verify=%r)' % (
lookup_url, self.verify_certificate,
))
self.logger.debug('Slack User Lookup Parameters: %s' % str(params))
# Initialize our HTTP JSON response
response = {'ok': False}
# Initialize our detected user id (also the response to this function)
user_id = None
# Always call throttle before any remote server i/o is made
self.throttle()
try:
r = requests.get(
lookup_url,
headers=headers,
params=params,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
# Attachment posts return a JSON string
try:
response = loads(r.content)
except (AttributeError, TypeError, ValueError):
# ValueError = r.content is Unparsable
# TypeError = r.content is None
# AttributeError = r is None
pass
# We can get a 200 response, but still fail. A failure message
# might look like this (missing bot permissions):
# {
# 'ok': False,
# 'error': 'missing_scope',
# 'needed': 'users:read.email',
# 'provided': 'calls:write,chat:write'
# }
if r.status_code != requests.codes.ok \
or not (response and response.get('ok', False)):
# We had a problem
status_str = \
NotifySlack.http_response_code_lookup(
r.status_code, SLACK_HTTP_ERROR_MAP)
self.logger.warning(
'Failed to send Slack User Lookup:'
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug('Response Details:\r\n{}'.format(r.content))
# Return; we're done
return False
# If we reach here, then we were successful in looking up
# the user. A response generally looks like this:
# {
# 'ok': True,
# 'user': {
# 'id': 'J1ZQB9T9Y',
# 'team_id': 'K1WR6TML2',
# 'name': 'l2g',
# 'deleted': False,
# 'color': '9f69e7',
# 'real_name': 'Chris C',
# 'tz': 'America/New_York',
# 'tz_label': 'Eastern Standard Time',
# 'tz_offset': -18000,
# 'profile': {
# 'title': '',
# 'phone': '',
# 'skype': '',
# 'real_name': 'Chris C',
# 'real_name_normalized':
# 'Chris C',
# 'display_name': 'l2g',
# 'display_name_normalized': 'l2g',
# 'fields': None,
# 'status_text': '',
# 'status_emoji': '',
# 'status_expiration': 0,
# 'avatar_hash': 'g785e9c0ddf6',
# 'email': 'lead2gold@gmail.com',
# 'first_name': 'Chris',
# 'last_name': 'C',
# 'image_24': 'https://secure.gravatar.com/...',
# 'image_32': 'https://secure.gravatar.com/...',
# 'image_48': 'https://secure.gravatar.com/...',
# 'image_72': 'https://secure.gravatar.com/...',
# 'image_192': 'https://secure.gravatar.com/...',
# 'image_512': 'https://secure.gravatar.com/...',
# 'status_text_canonical': '',
# 'team': 'K1WR6TML2'
# },
# 'is_admin': True,
# 'is_owner': True,
# 'is_primary_owner': True,
# 'is_restricted': False,
# 'is_ultra_restricted': False,
# 'is_bot': False,
# 'is_app_user': False,
# 'updated': 1603904274
# }
# }
# We're only interested in the id
user_id = response['user']['id']
# Cache it for future
self._lookup_users[email] = user_id
self.logger.info(
'Email %s resolves to the Slack User ID: %s.', email, user_id)
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred looking up Slack User.',
)
self.logger.debug('Socket Exception: %s' % str(e))
# Return; we're done
return None
return user_id
def _send(self, url, payload, attach=None, **kwargs):
"""
Wrapper to the requests (post) object
@@ -477,6 +756,7 @@ class NotifySlack(NotifyBase):
headers = {
'User-Agent': self.app_id,
'Accept': 'application/json',
}
if not attach:
@@ -486,7 +766,7 @@ class NotifySlack(NotifyBase):
headers['Authorization'] = 'Bearer {}'.format(self.access_token)
# Our response object
response = None
response = {'ok': False}
# Always call throttle before any remote server i/o is made
self.throttle()
@@ -508,7 +788,28 @@ class NotifySlack(NotifyBase):
timeout=self.request_timeout,
)
if r.status_code != requests.codes.ok:
# Posts return a JSON string
try:
response = loads(r.content)
except (AttributeError, TypeError, ValueError):
# ValueError = r.content is Unparsable
# TypeError = r.content is None
# AttributeError = r is None
pass
# Another response type is:
# {
# 'ok': False,
# 'error': 'not_in_channel',
# }
#
# The text 'ok' is returned if this is a Webhook request
# So the below captures that as well.
status_okay = (response and response.get('ok', False)) \
if self.mode is SlackMode.BOT else r.text == 'ok'
if r.status_code != requests.codes.ok or not status_okay:
# We had a problem
status_str = \
NotifySlack.http_response_code_lookup(
@@ -526,30 +827,6 @@ class NotifySlack(NotifyBase):
'Response Details:\r\n{}'.format(r.content))
return False
elif attach:
# Attachment posts return a JSON string
try:
response = loads(r.content)
except (AttributeError, TypeError, ValueError):
# ValueError = r.content is Unparsable
# TypeError = r.content is None
# AttributeError = r is None
pass
if not (response and response.get('ok', True)):
# Bare minimum requirements not met
self.logger.warning(
'Failed to send {}to Slack: error={}.'.format(
attach.name if attach else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
return False
else:
response = r.content
# Message Post Response looks like this:
# {
# "attachments": [
@@ -653,19 +930,20 @@ class NotifySlack(NotifyBase):
params = {
'image': 'yes' if self.include_image else 'no',
'footer': 'yes' if self.include_footer else 'no',
'blocks': 'yes' if self.use_blocks else 'no',
}
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
if self.mode == SlackMode.WEBHOOK:
# Determine if there is a botname present
botname = ''
if self.user:
botname = '{botname}@'.format(
botname=NotifySlack.quote(self.user, safe=''),
)
# Determine if there is a botname present
botname = ''
if self.user:
botname = '{botname}@'.format(
botname=NotifySlack.quote(self.user, safe=''),
)
if self.mode == SlackMode.WEBHOOK:
return '{schema}://{botname}{token_a}/{token_b}/{token_c}/'\
'{targets}/?{params}'.format(
schema=self.secure_protocol,
@@ -679,9 +957,10 @@ class NotifySlack(NotifyBase):
params=NotifySlack.urlencode(params),
)
# else -> self.mode == SlackMode.BOT:
return '{schema}://{access_token}/{targets}/'\
return '{schema}://{botname}{access_token}/{targets}/'\
'?{params}'.format(
schema=self.secure_protocol,
botname=botname,
access_token=self.pprint(self.access_token, privacy, safe=''),
targets='/'.join(
[NotifySlack.quote(x, safe='') for x in self.channels]),
@@ -714,25 +993,36 @@ class NotifySlack(NotifyBase):
else:
# We're dealing with a webhook
results['token_a'] = token
# Now fetch the remaining tokens
try:
results['token_b'] = entries.pop(0)
except IndexError:
# We're done
results['token_b'] = None
try:
results['token_c'] = entries.pop(0)
except IndexError:
# We're done
results['token_c'] = None
results['token_b'] = entries.pop(0) if entries else None
results['token_c'] = entries.pop(0) if entries else None
# assign remaining entries to the channels we wish to notify
results['targets'] = entries
# Support the token flag where you can set it to the bot token
# or the webhook token (with slash delimiters)
if 'token' in results['qsd'] and len(results['qsd']['token']):
# Break our entries up into a list; we can use the Channel
# list delimiter above since it doesn't contain any characters
# we don't otherwise accept anyway in our token
entries = [x for x in filter(
bool, CHANNEL_LIST_DELIM.split(
NotifySlack.unquote(results['qsd']['token'])))]
# check to see if we're dealing with a bot/user token
if entries and entries[0].startswith('xo'):
# We're dealing with a bot
results['access_token'] = entries[0]
results['token_a'] = None
results['token_b'] = None
results['token_c'] = None
else: # Webhook
results['access_token'] = None
results['token_a'] = entries.pop(0) if entries else None
results['token_b'] = entries.pop(0) if entries else None
results['token_c'] = entries.pop(0) if entries else None
# Support the 'to' variable so that we can support rooms this way too
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
@@ -744,6 +1034,10 @@ class NotifySlack(NotifyBase):
results['include_image'] = \
parse_bool(results['qsd'].get('image', True))
# Get Payload structure (use blocks?)
if 'blocks' in results['qsd'] and len(results['qsd']['blocks']):
results['use_blocks'] = parse_bool(results['qsd']['blocks'])
# Get Footer Flag
results['include_footer'] = \
parse_bool(results['qsd'].get('footer', True))
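Taken together, the Slack changes in this file let a bot-mode URL carry an optional bot name, email recipients (resolved through users.lookupByEmail) and the Block Kit flag, while the new token= argument accepts either a bot token or the three webhook tokens. A hedged example; the token, channel and address are placeholders:

import apprise

apobj = apprise.Apprise()
apobj.add('slack://bot@xoxb-1234-1234-abc124/#general/user@example.com'
          '?blocks=yes')
apobj.notify(title='Deploy finished', body='Release 1.2.3 is live.')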

View file

@@ -0,0 +1,784 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Signup @ https://www.sparkpost.com
#
# Ensure you've added a Senders Domain and have generated yourself an
# API Key at:
# https://app.sparkpost.com/dashboard
# Note: For SMTP Access, your API key must have at least been granted the
# 'Send via SMTP' privileges.
# From here you can click on the domain you're interested in. You can acquire
# the API Key from here which will look something like:
# 1e1d479fcf1a87527e9411e083c700689fa1acdc
#
# Knowing this, you can build your sparkpost url as follows:
# sparkpost://{user}@{domain}/{apikey}
# sparkpost://{user}@{domain}/{apikey}/{email}
#
# You can email as many addresses as you want as:
# sparkpost://{user}@{domain}/{apikey}/{email1}/{email2}/{emailN}
#
# The {user}@{domain} effectively assembles the 'from' email address
# the email will be transmitted from. If no email address is specified
# then it will also become the 'to' address as well.
#
# The {domain} must cross reference a domain you've set up with Spark Post
#
# API Documentation: https://developers.sparkpost.com/api/
# Specifically: https://developers.sparkpost.com/api/transmissions/
import requests
import base64
from json import loads
from json import dumps
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..common import NotifyFormat
from ..utils import is_email
from email.utils import formataddr
from ..utils import validate_regex
from ..utils import parse_emails
from ..utils import parse_bool
from ..AppriseLocale import gettext_lazy as _
# Provide some known codes SparkPost uses and what they translate to:
# Based on https://www.sparkpost.com/docs/tech-resources/extended-error-codes/
SPARKPOST_HTTP_ERROR_MAP = {
400: 'A bad request was made to the server',
401: 'Invalid User ID and/or Unauthorized User',
403: 'Permission Denied; the provided API Key was not valid',
404: 'There is a problem with the server query URI.',
405: 'Invalid HTTP method',
420: 'Sending limit reached.',
422: 'Invalid data/format/type/length',
429: 'Too many requests per sec; rate limit',
}
# Priorities
class SparkPostRegion(object):
US = 'us'
EU = 'eu'
# SparkPost APIs
SPARKPOST_API_LOOKUP = {
SparkPostRegion.US: 'https://api.sparkpost.com/api/v1',
SparkPostRegion.EU: 'https://api.eu.sparkpost.com/api/v1',
}
# A List of our regions we can use for verification
SPARKPOST_REGIONS = (
SparkPostRegion.US,
SparkPostRegion.EU,
)
class NotifySparkPost(NotifyBase):
"""
A wrapper for SparkPost Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'SparkPost'
# The services URL
service_url = 'https://sparkpost.com/'
# All notification requests are secure
secure_protocol = 'sparkpost'
# SparkPost advertises they allow 300 requests per minute.
# 60/300 = 0.2
request_rate_per_sec = 0.20
# Words straight from their website:
# https://developers.sparkpost.com/api/#header-rate-limiting
# These limits are dynamic, but as a general rule, wait 1 to 5 seconds
# after receiving a 429 response before requesting again.
# As a simple work around, this is what we will do... Wait X seconds
# (defined below) before trying again when we get a 429 error
sparkpost_retry_wait_sec = 5
# The maximum number of times we'll retry to send our message when we've
# reached a throttling situation before giving up
sparkpost_retry_attempts = 3
# The maximum amount of emails that can reside within a single
# batch transfer based on:
# https://www.sparkpost.com/docs/tech-resources/\
# smtp-rest-api-performance/#sending-via-the-transmission-rest-api
default_batch_size = 2000
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sparkpost'
# Default Notify Format
notify_format = NotifyFormat.HTML
# The default region to use if one isn't otherwise specified
sparkpost_default_region = SparkPostRegion.US
# Define object templates
templates = (
'{schema}://{user}@{host}:{apikey}/',
'{schema}://{user}@{host}:{apikey}/{targets}',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'user': {
'name': _('User Name'),
'type': 'string',
'required': True,
},
'host': {
'name': _('Domain'),
'type': 'string',
'required': True,
},
'apikey': {
'name': _('API Key'),
'type': 'string',
'private': True,
'required': True,
},
'targets': {
'name': _('Target Emails'),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'name': {
'name': _('From Name'),
'type': 'string',
'map_to': 'from_name',
},
'region': {
'name': _('Region Name'),
'type': 'choice:string',
'values': SPARKPOST_REGIONS,
'default': SparkPostRegion.US,
'map_to': 'region_name',
},
'to': {
'alias_of': 'targets',
},
'cc': {
'name': _('Carbon Copy'),
'type': 'list:string',
},
'bcc': {
'name': _('Blind Carbon Copy'),
'type': 'list:string',
},
'batch': {
'name': _('Batch Mode'),
'type': 'bool',
'default': False,
},
})
# Define any kwargs we're using
template_kwargs = {
'headers': {
'name': _('Email Header'),
'prefix': '+',
},
'tokens': {
'name': _('Template Tokens'),
'prefix': ':',
},
}
def __init__(self, apikey, targets, cc=None, bcc=None, from_name=None,
region_name=None, headers=None, tokens=None, batch=False,
**kwargs):
"""
Initialize SparkPost Object
"""
super(NotifySparkPost, self).__init__(**kwargs)
# API Key (associated with project)
self.apikey = validate_regex(apikey)
if not self.apikey:
msg = 'An invalid SparkPost API Key ' \
'({}) was specified.'.format(apikey)
self.logger.warning(msg)
raise TypeError(msg)
# Validate our username
if not self.user:
msg = 'No SparkPost username was specified.'
self.logger.warning(msg)
raise TypeError(msg)
# Acquire Email 'To'
self.targets = list()
# Acquire Carbon Copies
self.cc = set()
# Acquire Blind Carbon Copies
self.bcc = set()
# For tracking our email -> name lookups
self.names = {}
# Store our region
try:
self.region_name = self.sparkpost_default_region \
if region_name is None else region_name.lower()
if self.region_name not in SPARKPOST_REGIONS:
# allow the outer except to handle this common response
raise
except:
# Invalid region specified
msg = 'The SparkPost region specified ({}) is invalid.' \
.format(region_name)
self.logger.warning(msg)
raise TypeError(msg)
# Get our From username (if specified)
self.from_name = from_name
# Get our from email address
self.from_addr = '{user}@{host}'.format(user=self.user, host=self.host)
if not is_email(self.from_addr):
# Parse Source domain based on from_addr
msg = 'Invalid ~From~ email format: {}'.format(self.from_addr)
self.logger.warning(msg)
raise TypeError(msg)
self.headers = {}
if headers:
# Store our extra headers
self.headers.update(headers)
self.tokens = {}
if tokens:
# Store our template tokens
self.tokens.update(tokens)
# Prepare Batch Mode Flag
self.batch = batch
if targets:
# Validate recipients (to:) and drop bad ones:
for recipient in parse_emails(targets):
result = is_email(recipient)
if result:
self.targets.append(
(result['name'] if result['name'] else False,
result['full_email']))
continue
self.logger.warning(
'Dropped invalid To email '
'({}) specified.'.format(recipient),
)
else:
# If our target email list is empty we want to add ourselves to it
self.targets.append(
(self.from_name if self.from_name else False, self.from_addr))
# Validate recipients (cc:) and drop bad ones:
for recipient in parse_emails(cc):
email = is_email(recipient)
if email:
self.cc.add(email['full_email'])
# Index our name (if one exists)
self.names[email['full_email']] = \
email['name'] if email['name'] else False
continue
self.logger.warning(
'Dropped invalid Carbon Copy email '
'({}) specified.'.format(recipient),
)
# Validate recipients (bcc:) and drop bad ones:
for recipient in parse_emails(bcc):
email = is_email(recipient)
if email:
self.bcc.add(email['full_email'])
# Index our name (if one exists)
self.names[email['full_email']] = \
email['name'] if email['name'] else False
continue
self.logger.warning(
'Dropped invalid Blind Carbon Copy email '
'({}) specified.'.format(recipient),
)
def __post(self, payload, retry):
"""
Performs the actual post and returns the response
"""
# Prepare our headers
headers = {
'User-Agent': self.app_id,
'Accept': 'application/json',
'Content-Type': 'application/json',
'Authorization': self.apikey,
}
# Prepare our URL as it's based on our hostname
url = '{}/transmissions/'.format(
SPARKPOST_API_LOOKUP[self.region_name])
# Some Debug Logging
self.logger.debug('SparkPost POST URL: {} (cert_verify={})'.format(
url, self.verify_certificate))
if 'attachments' in payload['content']:
# Since we print our payload; attachments make it a bit too noisy
# we just strip out the data block to accommodate it
log_payload = \
{k: v for k, v in payload.items() if k != "content"}
log_payload['content'] = \
{k: v for k, v in payload['content'].items()
if k != "attachments"}
log_payload['content']['attachments'] = \
[{k: v for k, v in x.items() if k != "data"}
for x in payload['content']['attachments']]
else:
# No tidying is needed
log_payload = payload
self.logger.debug('SparkPost Payload: {}' .format(log_payload))
wait = None
# For logging output of success and errors; we get a head count
# of our outbound details:
verbose_dest = ', '.join(
[x['address']['email'] for x in payload['recipients']]) \
if len(payload['recipients']) <= 3 \
else '{} recipients'.format(len(payload['recipients']))
# Initialize our response object
json_response = {}
# Set ourselves a status code
status_code = -1
while 1: # pragma: no branch
# Always call throttle before any remote server i/o is made
self.throttle(wait=wait)
try:
r = requests.post(
url,
data=dumps(payload),
headers=headers,
verify=self.verify_certificate,
timeout=self.request_timeout,
)
# A Good response (200) looks like this:
# "results": {
# "total_rejected_recipients": 0,
# "total_accepted_recipients": 1,
# "id": "11668787484950529"
# }
# }
#
# A Bad response looks like this:
# {
# "errors": [
# {
# "description":
# "Unconfigured or unverified sending domain.",
# "code": "7001",
# "message": "Invalid domain"
# }
# ]
# }
#
try:
# Update our status response if we can
json_response = loads(r.content)
except (AttributeError, TypeError, ValueError):
# ValueError = r.content is Unparsable
# TypeError = r.content is None
# AttributeError = r is None
# We could not parse JSON response.
# We will just use the status we already have.
pass
status_code = r.status_code
payload['recipients'] = list()
if status_code == requests.codes.ok:
self.logger.info(
'Sent SparkPost notification to {}.'.format(
verbose_dest))
return status_code, json_response
# We had a problem if we get here
status_str = \
NotifyBase.http_response_code_lookup(
status_code, SPARKPOST_HTTP_ERROR_MAP)
self.logger.warning(
'Failed to send SparkPost notification to {}: '
'{}{}error={}.'.format(
verbose_dest,
status_str,
', ' if status_str else '',
status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
if status_code == requests.codes.too_many_requests and retry:
retry = retry - 1
if retry > 0:
wait = self.sparkpost_retry_wait_sec
continue
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending SparkPost '
'notification')
self.logger.debug('Socket Exception: %s' % str(e))
# Anything else and we're done
return status_code, json_response
# Our code will never reach here (outside of infinite while loop above)
def send(self, body, title='', notify_type=NotifyType.INFO, attach=None,
**kwargs):
"""
Perform SparkPost Notification
"""
if not self.targets:
# There is no one to email; we're done
self.logger.warning(
'There are no Email recipients to notify')
return False
# Initialize our has_error flag
has_error = False
# Send in batches if identified to do so
batch_size = 1 if not self.batch else self.default_batch_size
try:
reply_to = formataddr((self.from_name if self.from_name else False,
self.from_addr), charset='utf-8')
except TypeError:
# Python v2.x Support (no charset keyword)
# Format our cc addresses to support the Name field
reply_to = formataddr((self.from_name if self.from_name else False,
self.from_addr))
payload = {
"options": {
# When set to True, an image is included with the email which
# is used to detect if the user looked at the image or not.
'open_tracking': False,
# Track if links were clicked that were found within email
'click_tracking': False,
},
"content": {
"from": {
"name": self.from_name
if self.from_name else self.app_desc,
"email": self.from_addr,
},
# SparkPost does not allow empty subject lines or lines that
# only contain whitespace; Since Apprise allows an empty title
# parameter we swap empty title entries with the period
"subject": title if title.strip() else '.',
"reply_to": reply_to,
}
}
if self.notify_format == NotifyFormat.HTML:
payload['content']['html'] = body
else:
payload['content']['text'] = body
if attach:
# Prepare ourselves an attachment object
payload['content']['attachments'] = []
for attachment in attach:
# Perform some simple error checking
if not attachment:
# We could not access the attachment
self.logger.error(
'Could not access attachment {}.'.format(
attachment.url(privacy=True)))
return False
self.logger.debug(
'Preparing SparkPost attachment {}'.format(
attachment.url(privacy=True)))
try:
with open(attachment.path, 'rb') as fp:
# Prepare API Upload Payload
payload['content']['attachments'].append({
'name': attachment.name,
'type': attachment.mimetype,
'data': base64.b64encode(fp.read()).decode("ascii")
})
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while reading {}.'.format(
attachment.name if attachment else 'attachment'))
self.logger.debug('I/O Exception: %s' % str(e))
return False
# Take a copy of our token dictionary
tokens = self.tokens.copy()
# Apply some defaults template values
tokens['app_body'] = body
tokens['app_title'] = title
tokens['app_type'] = notify_type
tokens['app_id'] = self.app_id
tokens['app_desc'] = self.app_desc
tokens['app_color'] = self.color(notify_type)
tokens['app_url'] = self.app_url
# Store our tokens if they're identified
payload['substitution_data'] = tokens
# Create a copy of the targets list
emails = list(self.targets)
for index in range(0, len(emails), batch_size):
# Generate our email listing
payload['recipients'] = list()
# Initialize our cc list
cc = (self.cc - self.bcc)
# Initialize our bcc list
bcc = set(self.bcc)
# Initialize our headers
headers = self.headers.copy()
for addr in self.targets[index:index + batch_size]:
entry = {
'address': {
'email': addr[1],
}
}
# Strip target out of cc list if in To
cc = (cc - set([addr[1]]))
# Strip target out of bcc list if in To
bcc = (bcc - set([addr[1]]))
if addr[0]:
entry['address']['name'] = addr[0]
# Add our recipient to our list
payload['recipients'].append(entry)
if cc:
# Handle our cc List
for addr in cc:
entry = {
'address': {
'email': addr,
'header_to':
# Take the first email in the To
self.targets[index:index + batch_size][0][1],
},
}
if self.names.get(addr):
entry['address']['name'] = self.names[addr]
# Add our recipient to our list
payload['recipients'].append(entry)
headers['CC'] = ','.join(cc)
# Handle our bcc
for addr in bcc:
# Add our recipient to our list
payload['recipients'].append({
'address': {
'email': addr,
'header_to':
# Take the first email in the To
self.targets[index:index + batch_size][0][1],
},
})
if headers:
payload['content']['headers'] = headers
# Send our message
status_code, response = \
self.__post(payload, self.sparkpost_retry_attempts)
# Failed
if status_code != requests.codes.ok:
has_error = True
return not has_error
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'region': self.region_name,
'batch': 'yes' if self.batch else 'no',
}
# Append our headers into our parameters
params.update({'+{}'.format(k): v for k, v in self.headers.items()})
# Append our template tokens into our parameters
params.update({':{}'.format(k): v for k, v in self.tokens.items()})
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
if self.from_name is not None:
# from_name specified; pass it back on the url
params['name'] = self.from_name
if self.cc:
# Handle our Carbon Copy Addresses
params['cc'] = ','.join(
['{}{}'.format(
'' if e not in self.names
else '{}:'.format(self.names[e]), e) for e in self.cc])
if self.bcc:
# Handle our Blind Carbon Copy Addresses
params['bcc'] = ','.join(self.bcc)
# a simple boolean check as to whether we display our target emails
# or not
has_targets = \
not (len(self.targets) == 1
and self.targets[0][1] == self.from_addr)
return '{schema}://{user}@{host}/{apikey}/{targets}/?{params}'.format(
schema=self.secure_protocol,
host=self.host,
user=NotifySparkPost.quote(self.user, safe=''),
apikey=self.pprint(self.apikey, privacy, safe=''),
targets='' if not has_targets else '/'.join(
[NotifySparkPost.quote('{}{}'.format(
'' if not e[0] else '{}:'.format(e[0]), e[1]),
safe='') for e in self.targets]),
params=NotifySparkPost.urlencode(params))
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# Get our entries; split_path() looks after unquoting content for us
# by default
results['targets'] = NotifySparkPost.split_path(results['fullpath'])
# Our very first entry is reserved for our api key
try:
results['apikey'] = results['targets'].pop(0)
except IndexError:
# We're done - no API Key found
results['apikey'] = None
if 'name' in results['qsd'] and len(results['qsd']['name']):
# Extract from name to associate with from address
results['from_name'] = \
NotifySparkPost.unquote(results['qsd']['name'])
if 'region' in results['qsd'] and len(results['qsd']['region']):
# Extract from name to associate with from address
results['region_name'] = \
NotifySparkPost.unquote(results['qsd']['region'])
# Handle 'to' email address
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'].append(results['qsd']['to'])
# Handle Carbon Copy Addresses
if 'cc' in results['qsd'] and len(results['qsd']['cc']):
results['cc'] = results['qsd']['cc']
# Handle Blind Carbon Copy Addresses
if 'bcc' in results['qsd'] and len(results['qsd']['bcc']):
results['bcc'] = results['qsd']['bcc']
# Add our Meta Headers that the user can provide with their outbound
# emails
results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y)
for x, y in results['qsd+'].items()}
# Add our template tokens (if defined)
results['tokens'] = {NotifyBase.unquote(x): NotifyBase.unquote(y)
for x, y in results['qsd:'].items()}
# Get Batch Mode Flag
results['batch'] = \
parse_bool(results['qsd'].get(
'batch', NotifySparkPost.template_args['batch']['default']))
return results
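Putting the new plugin together, a SparkPost notification is configured entirely from the URL; the domain and addresses below are placeholders, and the API key is the sample value from the header comments:

import apprise

apobj = apprise.Apprise()
apobj.add(
    'sparkpost://noreply@example.com/1e1d479fcf1a87527e9411e083c700689fa1acdc/'
    'alice@example.org/bob@example.org'
    '?batch=yes&cc=ops@example.com&region=eu&:app_env=staging'
)
apobj.notify(title='Disk space warning', body='Volume /data is 90% full.')

With batch=yes, recipients are grouped into transmissions of up to default_batch_size (2000) addresses, and a 429 response is retried up to sparkpost_retry_attempts times with a sparkpost_retry_wait_sec pause between attempts.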

View file

@@ -365,7 +365,8 @@ class NotifySpontit(NotifyBase):
# Support MacOS subtitle option
if 'subtitle' in results['qsd'] and len(results['qsd']['subtitle']):
results['subtitle'] = results['qsd']['subtitle']
results['subtitle'] = \
NotifySpontit.unquote(results['qsd']['subtitle'])
# Support the 'to' variable so that we can support targets this way too
# The 'to' makes it easier to use yaml configuration

View file

@@ -0,0 +1,467 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 <example@example.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# For this to work correctly you need to register an app
# and generate an access token
#
#
# This plugin will simply work using the url of:
# streamlabs://access_token/
#
# API Documentation on Webhooks:
# - https://dev.streamlabs.com/
#
import requests
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# calls
class StrmlabsCall(object):
ALERT = 'ALERTS'
DONATION = 'DONATIONS'
# A List of calls we can use for verification
STRMLABS_CALLS = (
StrmlabsCall.ALERT,
StrmlabsCall.DONATION,
)
# alerts
class StrmlabsAlert(object):
FOLLOW = 'follow'
SUBSCRIPTION = 'subscription'
DONATION = 'donation'
HOST = 'host'
# A List of calls we can use for verification
STRMLABS_ALERTS = (
StrmlabsAlert.FOLLOW,
StrmlabsAlert.SUBSCRIPTION,
StrmlabsAlert.DONATION,
StrmlabsAlert.HOST,
)
class NotifyStreamlabs(NotifyBase):
"""
A wrapper to Streamlabs Donation Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'Streamlabs'
# The services URL
service_url = 'https://streamlabs.com/'
# The default secure protocol
secure_protocol = 'strmlabs'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_streamlabs'
# Streamlabs Api endpoint
notify_url = 'https://streamlabs.com/api/v1.0/'
# The maximum allowable characters allowed in the body per message
body_maxlen = 255
# Define object templates
templates = (
'{schema}://{access_token}/',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'access_token': {
'name': _('Access Token'),
'private': True,
'required': True,
'type': 'string',
'regex': (r'^[a-z0-9]{40}$', 'i')
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'call': {
'name': _('Call'),
'type': 'choice:string',
'values': STRMLABS_CALLS,
'default': StrmlabsCall.ALERT,
},
'alert_type': {
'name': _('Alert Type'),
'type': 'choice:string',
'values': STRMLABS_ALERTS,
'default': StrmlabsAlert.DONATION,
},
'image_href': {
'name': _('Image Link'),
'type': 'string',
'default': '',
},
'sound_href': {
'name': _('Sound Link'),
'type': 'string',
'default': '',
},
'duration': {
'name': _('Duration'),
'type': 'int',
'default': 1000,
'min': 0
},
'special_text_color': {
'name': _('Special Text Color'),
'type': 'string',
'default': '',
'regex': (r'^[A-Z]$', 'i'),
},
'amount': {
'name': _('Amount'),
'type': 'int',
'default': 0,
'min': 0
},
'currency': {
'name': _('Currency'),
'type': 'string',
'default': 'USD',
'regex': (r'^[A-Z]{3}$', 'i'),
},
'name': {
'name': _('Name'),
'type': 'string',
'default': 'Anon',
'regex': (r'^[^\s].{1,24}$', 'i')
},
'identifier': {
'name': _('Identifier'),
'type': 'string',
'default': 'Apprise',
},
})
def __init__(self, access_token,
call=StrmlabsCall.ALERT,
alert_type=StrmlabsAlert.DONATION,
image_href='', sound_href='', duration=1000,
special_text_color='',
amount=0, currency='USD', name='Anon',
identifier='Apprise',
**kwargs):
"""
Initialize Streamlabs Object
"""
super(NotifyStreamlabs, self).__init__(**kwargs)
# access token is generated by user
# using https://streamlabs.com/api/v1.0/token
# Tokens for Streamlabs never need to be refreshed.
self.access_token = validate_regex(
access_token,
*self.template_tokens['access_token']['regex']
)
if not self.access_token:
msg = 'An invalid Streamlabs access token was specified.'
self.logger.warning(msg)
raise TypeError(msg)
# Store the call
try:
if call not in STRMLABS_CALLS:
# allow the outer except to handle this common response
raise
else:
self.call = call
except Exception as e:
# Invalid call specified
msg = 'The streamlabs call specified ({}) is invalid.' \
.format(call)
self.logger.warning(msg)
self.logger.debug('Socket Exception: %s' % str(e))
raise TypeError(msg)
# Store the alert_type
# only applicable when calling /alerts
try:
if alert_type not in STRMLABS_ALERTS:
# allow the outer except to handle this common response
raise
else:
self.alert_type = alert_type
except Exception as e:
# Invalid alert type specified
msg = 'The streamlabs alert type specified ({}) is invalid.' \
.format(alert_type)
self.logger.warning(msg)
self.logger.debug('Socket Exception: %s' % str(e))
raise TypeError(msg)
# params only applicable when calling /alerts
self.image_href = image_href
self.sound_href = sound_href
self.duration = duration
self.special_text_color = special_text_color
# only applicable when calling /donations
# The amount of this donation.
self.amount = amount
# only applicable when calling /donations
# The 3 letter currency code for this donation.
# Must be one of the supported currency codes.
self.currency = validate_regex(
currency,
*self.template_args['currency']['regex']
)
# only applicable when calling /donations
if not self.currency:
msg = 'An invalid Streamlabs currency was specified.'
self.logger.warning(msg)
raise TypeError(msg)
# only applicable when calling /donations
# The name of the donor
self.name = validate_regex(
name,
*self.template_args['name']['regex']
)
if not self.name:
msg = 'An invalid Streamlabs donor was specified.'
self.logger.warning(msg)
raise TypeError(msg)
# An identifier for this donor,
# which is used to group donations with the same donor.
# only applicable when calling /donations
self.identifier = identifier
return
def send(self, body, title='', notify_type=NotifyType.INFO, attach=None,
**kwargs):
"""
Perform Streamlabs notification call (either donation or alert)
"""
headers = {
'User-Agent': self.app_id,
}
if self.call == StrmlabsCall.ALERT:
data = {
'access_token': self.access_token,
'type': self.alert_type.lower(),
'image_href': self.image_href,
'sound_href': self.sound_href,
'message': title,
'user_massage': body,
'duration': self.duration,
'special_text_color': self.special_text_color,
}
try:
r = requests.post(
self.notify_url + self.call.lower(),
headers=headers,
data=data,
verify=self.verify_certificate,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyStreamlabs.http_response_code_lookup(
r.status_code)
self.logger.warning(
'Failed to send Streamlabs alert: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
return False
else:
self.logger.info('Sent Streamlabs alert.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Streamlabs '
'alert.'
)
self.logger.debug('Socket Exception: %s' % str(e))
return False
if self.call == StrmlabsCall.DONATION:
data = {
'name': self.name,
'identifier': self.identifier,
'amount': self.amount,
'currency': self.currency,
'access_token': self.access_token,
'message': body,
}
try:
r = requests.post(
self.notify_url + self.call.lower(),
headers=headers,
data=data,
verify=self.verify_certificate,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyStreamlabs.http_response_code_lookup(
r.status_code)
self.logger.warning(
'Failed to send Streamlabs donation: '
'{}{}error={}.'.format(
status_str,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
return False
else:
self.logger.info('Sent Streamlabs donation.')
except requests.RequestException as e:
self.logger.warning(
'A Connection error occurred sending Streamlabs '
'donation.'
)
self.logger.debug('Socket Exception: %s' % str(e))
return False
return True
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any URL parameters
params = {
'call': self.call,
# donation
'name': self.name,
'identifier': self.identifier,
'amount': self.amount,
'currency': self.currency,
# alert
'alert_type': self.alert_type,
'image_href': self.image_href,
'sound_href': self.sound_href,
'duration': self.duration,
'special_text_color': self.special_text_color,
}
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
return '{schema}://{access_token}/?{params}'.format(
schema=self.secure_protocol,
access_token=self.pprint(self.access_token, privacy, safe=''),
params=NotifyStreamlabs.urlencode(params),
)
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to re-instantiate this object.
Syntax:
strmlabs://access_token
"""
results = NotifyBase.parse_url(url, verify_host=False)
if not results:
# We're done early as we couldn't load the results
return results
# Store our access code
access_token = NotifyStreamlabs.unquote(results['host'])
results['access_token'] = access_token
# call
if 'call' in results['qsd'] and results['qsd']['call']:
results['call'] = NotifyStreamlabs.unquote(
results['qsd']['call'].strip().upper())
# donation - amount
if 'amount' in results['qsd'] and results['qsd']['amount']:
results['amount'] = NotifyStreamlabs.unquote(
results['qsd']['amount'])
# donation - currency
if 'currency' in results['qsd'] and results['qsd']['currency']:
results['currency'] = NotifyStreamlabs.unquote(
results['qsd']['currency'].strip().upper())
# donation - name
if 'name' in results['qsd'] and results['qsd']['name']:
results['name'] = NotifyStreamlabs.unquote(
results['qsd']['name'].strip().upper())
# donation - identifier
if 'identifier' in results['qsd'] and results['qsd']['identifier']:
results['identifier'] = NotifyStreamlabs.unquote(
results['qsd']['identifier'].strip().upper())
# alert - alert_type
if 'alert_type' in results['qsd'] and results['qsd']['alert_type']:
results['alert_type'] = NotifyStreamlabs.unquote(
results['qsd']['alert_type'])
# alert - image_href
if 'image_href' in results['qsd'] and results['qsd']['image_href']:
results['image_href'] = NotifyStreamlabs.unquote(
results['qsd']['image_href'])
# alert - sound_href
if 'sound_href' in results['qsd'] and results['qsd']['sound_href']:
results['sound_href'] = NotifyStreamlabs.unquote(
results['qsd']['sound_href'].strip().upper())
# alert - duration
if 'duration' in results['qsd'] and results['qsd']['duration']:
results['duration'] = NotifyStreamlabs.unquote(
results['qsd']['duration'].strip().upper())
# alert - special_text_color
if 'special_text_color' in results['qsd'] \
and results['qsd']['special_text_color']:
results['special_text_color'] = NotifyStreamlabs.unquote(
results['qsd']['special_text_color'].strip().upper())
return results
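The new Streamlabs plugin drives either the /alerts or /donations endpoint purely from the URL. A hedged example; the 40-character access token is a placeholder:

import apprise

apobj = apprise.Apprise()
# Raise an on-screen alert (the default ALERTS call, donation alert type)
apobj.add('strmlabs://0123456789abcdef0123456789abcdef01234567/?duration=3000')
# Or record a donation event instead
apobj.add('strmlabs://0123456789abcdef0123456789abcdef01234567/'
          '?call=donations&name=Apprise&amount=5&currency=usd')
apobj.notify(title='New follower', body='Thanks for the support!')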

View file

@@ -22,12 +22,15 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import six
import syslog
import socket
from .NotifyBase import NotifyBase
from ..common import NotifyType
from ..utils import parse_bool
from ..utils import is_hostname
from ..AppriseLocale import gettext_lazy as _
@@ -98,6 +101,21 @@ SYSLOG_FACILITY_RMAP = {
}
class SyslogMode(object):
# A local query
LOCAL = "local"
# A remote query
REMOTE = "remote"
# syslog modes are placed into this list for validation purposes
SYSLOG_MODES = (
SyslogMode.LOCAL,
SyslogMode.REMOTE,
)
class NotifySyslog(NotifyBase):
"""
A wrapper for Syslog Notifications
@@ -119,13 +137,14 @@ class NotifySyslog(NotifyBase):
# local anyway
request_rate_per_sec = 0
# Title to be added to body if present
title_maxlen = 0
# Define object templates
templates = (
'{schema}://',
'{schema}://{facility}',
'{schema}://{host}',
'{schema}://{host}:{port}',
'{schema}://{host}/{facility}',
'{schema}://{host}:{port}/{facility}',
)
# Define our template tokens
@@ -136,6 +155,18 @@ class NotifySyslog(NotifyBase):
'values': [k for k in SYSLOG_FACILITY_MAP.keys()],
'default': SyslogFacility.USER,
},
'host': {
'name': _('Hostname'),
'type': 'string',
'required': True,
},
'port': {
'name': _('Port'),
'type': 'int',
'min': 1,
'max': 65535,
'default': 514,
},
})
# Define our template arguments
@@ -144,6 +175,12 @@ class NotifySyslog(NotifyBase):
# We map back to the same element defined in template_tokens
'alias_of': 'facility',
},
'mode': {
'name': _('Syslog Mode'),
'type': 'choice:string',
'values': SYSLOG_MODES,
'default': SyslogMode.LOCAL,
},
'logpid': {
'name': _('Log PID'),
'type': 'bool',
@@ -158,8 +195,8 @@ class NotifySyslog(NotifyBase):
},
})
def __init__(self, facility=None, log_pid=True, log_perror=False,
**kwargs):
def __init__(self, facility=None, mode=None, log_pid=True,
log_perror=False, **kwargs):
"""
Initialize Syslog Object
"""
@@ -179,6 +216,14 @@ class NotifySyslog(NotifyBase):
SYSLOG_FACILITY_MAP[
self.template_tokens['facility']['default']]
self.mode = self.template_args['mode']['default'] \
if not isinstance(mode, six.string_types) else mode.lower()
if self.mode not in SYSLOG_MODES:
msg = 'The mode specified ({}) is invalid.'.format(mode)
self.logger.warning(msg)
raise TypeError(msg)
# Logging Options
self.logoptions = 0
@@ -214,17 +259,76 @@ class NotifySyslog(NotifyBase):
NotifyType.WARNING: syslog.LOG_WARNING,
}
if title:
# Format title
body = '{}: {}'.format(title, body)
# Always call throttle before any remote server i/o is made
self.throttle()
try:
syslog.syslog(_pmap[notify_type], body)
if self.mode == SyslogMode.LOCAL:
try:
syslog.syslog(_pmap[notify_type], body)
except KeyError:
# An invalid notification type was specified
self.logger.warning(
'An invalid notification type '
'({}) was specified.'.format(notify_type))
return False
except KeyError:
# An invalid notification type was specified
self.logger.warning(
'An invalid notification type '
'({}) was specified.'.format(notify_type))
return False
else: # SyslogMode.REMOTE
host = self.host
port = self.port if self.port \
else self.template_tokens['port']['default']
if self.log_pid:
payload = '<%d>- %d - %s' % (
_pmap[notify_type] + self.facility * 8, os.getpid(), body)
else:
payload = '<%d>- %s' % (
_pmap[notify_type] + self.facility * 8, body)
# send UDP packet to upstream server
self.logger.debug(
'Syslog Host: %s:%d/%s',
host, port, SYSLOG_FACILITY_RMAP[self.facility])
self.logger.debug('Syslog Payload: %s' % str(payload))
# our sent bytes
sent = 0
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(self.socket_connect_timeout)
sent = sock.sendto(payload.encode('utf-8'), (host, port))
sock.close()
except socket.gaierror as e:
self.logger.warning(
'A connection error occurred sending Syslog '
'notification to %s:%d/%s', host, port,
SYSLOG_FACILITY_RMAP[self.facility]
)
self.logger.debug('Socket Exception: %s' % str(e))
return False
except socket.timeout as e:
self.logger.warning(
'A connection timeout occurred sending Syslog '
'notification to %s:%d/%s', host, port,
SYSLOG_FACILITY_RMAP[self.facility]
)
self.logger.debug('Socket Exception: %s' % str(e))
return False
if sent < len(payload):
self.logger.warning(
'Syslog sent %d byte(s) but intended to send %d byte(s)',
sent, len(payload))
return False
self.logger.info('Sent Syslog (%s) notification.', self.mode)
return True
@@ -237,16 +341,31 @@ class NotifySyslog(NotifyBase):
params = {
'logperror': 'yes' if self.log_perror else 'no',
'logpid': 'yes' if self.log_pid else 'no',
'mode': self.mode,
}
# Extend our parameters
params.update(self.url_parameters(privacy=privacy, *args, **kwargs))
return '{schema}://{facility}/?{params}'.format(
if self.mode == SyslogMode.LOCAL:
return '{schema}://{facility}/?{params}'.format(
facility=self.template_tokens['facility']['default']
if self.facility not in SYSLOG_FACILITY_RMAP
else SYSLOG_FACILITY_RMAP[self.facility],
schema=self.secure_protocol,
params=NotifySyslog.urlencode(params),
)
# Remote mode:
return '{schema}://{hostname}{port}/{facility}/?{params}'.format(
schema=self.secure_protocol,
hostname=NotifySyslog.quote(self.host, safe=''),
port='' if self.port is None
or self.port == self.template_tokens['port']['default']
else ':{}'.format(self.port),
facility=self.template_tokens['facility']['default']
if self.facility not in SYSLOG_FACILITY_RMAP
else SYSLOG_FACILITY_RMAP[self.facility],
schema=self.secure_protocol,
params=NotifySyslog.urlencode(params),
)
@@ -262,9 +381,28 @@ class NotifySyslog(NotifyBase):
# We're done early as we couldn't load the results
return results
# if specified; save hostname into facility
facility = None if not results['host'] \
else NotifySyslog.unquote(results['host'])
tokens = []
if results['host']:
tokens.append(NotifySyslog.unquote(results['host']))
# Get our path values
tokens.extend(NotifySyslog.split_path(results['fullpath']))
facility = None
if len(tokens) > 1 and is_hostname(tokens[0]):
# syslog://hostname/facility
results['mode'] = SyslogMode.REMOTE
# Store our facility as the first path entry
facility = tokens[-1]
elif tokens:
# This is a bit ambiguous... it could be either:
# syslog://facility -or- syslog://hostname
# First let's test it as a facility; we'll correct this
# later on if necessary
facility = tokens[-1]
# However if specified on the URL, that will over-ride what was
# identified
@@ -280,15 +418,34 @@ class NotifySyslog(NotifyBase):
facility = next((f for f in SYSLOG_FACILITY_MAP.keys()
if f.startswith(facility)), facility)
# Save facility
results['facility'] = facility
# Attempt to solve our ambiguity
if len(tokens) == 1 and is_hostname(tokens[0]) and (
results['port'] or facility not in SYSLOG_FACILITY_MAP):
# facility is likely hostname; update our guessed mode
results['mode'] = SyslogMode.REMOTE
# Reset our facility value
facility = None
# Set mode if not otherwise set
if 'mode' in results['qsd'] and len(results['qsd']['mode']):
results['mode'] = NotifySyslog.unquote(results['qsd']['mode'])
# Save facility if set
if facility:
results['facility'] = facility
# Include PID as part of the message logged
results['log_pid'] = \
parse_bool(results['qsd'].get('logpid', True))
results['log_pid'] = parse_bool(
results['qsd'].get(
'logpid',
NotifySyslog.template_args['logpid']['default']))
# Print to stderr as well.
results['log_perror'] = \
parse_bool(results['qsd'].get('logperror', False))
results['log_perror'] = parse_bool(
results['qsd'].get(
'logperror',
NotifySyslog.template_args['logperror']['default']))
return results
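With the remote mode added above, the same plugin now covers both the local syslog(3) interface and a UDP collector, and the host, port and facility can all be spelled out in the URL; the hostnames below are placeholders:

import apprise

apobj = apprise.Apprise()
# Local syslog using the daemon facility (existing behaviour)
apobj.add('syslog://daemon')
# Remote UDP syslog on the default port 514, user facility
apobj.add('syslog://logs.example.com/user')
# Remote collector on a non-standard port and facility
apobj.add('syslog://logs.example.com:10514/local5')
apobj.notify(title='disk-monitor', body='Volume /data is 90% full.')

A single-token URL such as syslog://daemon stays in local mode because the token matches a known facility; a port, a second token or an explicit ?mode=remote forces the remote path, as the parse_url() disambiguation above shows.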

View file

@@ -205,13 +205,23 @@ class NotifyTelegram(NotifyBase):
'default': True,
'map_to': 'detect_owner',
},
'silent': {
'name': _('Silent Notification'),
'type': 'bool',
'default': False,
},
'preview': {
'name': _('Web Page Preview'),
'type': 'bool',
'default': False,
},
'to': {
'alias_of': 'targets',
},
})
def __init__(self, bot_token, targets, detect_owner=True,
include_image=False, **kwargs):
include_image=False, silent=None, preview=None, **kwargs):
"""
Initialize Telegram Object
"""
@ -229,6 +239,14 @@ class NotifyTelegram(NotifyBase):
# Parse our list
self.targets = parse_list(targets)
# Define whether or not we should make audible alarms
self.silent = self.template_args['silent']['default'] \
if silent is None else bool(silent)
# Define whether or not we should display a web page preview
self.preview = self.template_args['preview']['default'] \
if preview is None else bool(preview)
# if detect_owner is set to True, we will attempt to determine who
# the bot owner is based on the first person who messaged it. This
# is not a fool proof way of doing things as over time Telegram removes
@ -513,7 +531,12 @@ class NotifyTelegram(NotifyBase):
'sendMessage'
)
payload = {}
payload = {
# Notification Audible Control
'disable_notification': self.silent,
# Display Web Page Preview (if possible)
'disable_web_page_preview': not self.preview,
}
# Prepare Email Message
if self.notify_format == NotifyFormat.MARKDOWN:
@ -524,35 +547,73 @@ class NotifyTelegram(NotifyBase):
body,
)
elif self.notify_format == NotifyFormat.HTML:
else: # HTML or TEXT
# Use Telegram's HTML mode
payload['parse_mode'] = 'HTML'
# HTML Spaces (&nbsp;) and tabs (&emsp;) aren't supported
# See https://core.telegram.org/bots/api#html-style
body = re.sub('&nbsp;?', ' ', body, re.I)
# Tabs become 3 spaces
body = re.sub('&emsp;?', ' ', body, re.I)
if title:
# Telegram's HTML support doesn't like having HTML escaped
# characters passed into it. To handle this situation, we need to
# search the body for these sequences and convert them to the
# output the user expected
telegram_escape_html_dict = {
# HTML Spaces (&nbsp;) and tabs (&emsp;) aren't supported
# See https://core.telegram.org/bots/api#html-style
title = re.sub('&nbsp;?', ' ', title, re.I)
r'nbsp': ' ',
# Tabs become 3 spaces
title = re.sub('&emsp;?', ' ', title, re.I)
r'emsp': '   ',
payload['text'] = '{}{}'.format(
'<b>{}</b>\r\n'.format(title) if title else '',
body,
)
# Some characters get re-escaped by the Telegram upstream
# service so we need to convert these back,
r'apos': '\'',
r'quot': '"',
}
else: # TEXT
payload['parse_mode'] = 'HTML'
# Create a regular expression from the dictionary keys
html_regex = re.compile("&(%s);?" % "|".join(
map(re.escape, telegram_escape_html_dict.keys())).lower(),
re.I)
# Escape content
title = NotifyTelegram.escape_html(title, whitespace=False)
body = NotifyTelegram.escape_html(body, whitespace=False)
# For each match, look-up corresponding value in dictionary
# we look +1 to ignore the & that does not appear in the index;
# we only look at the first 4 characters because we don't want to
# fail on &apos; as it's accepted (along with &apos - no
# semi-colon)
body = html_regex.sub( # pragma: no branch
lambda mo: telegram_escape_html_dict[
mo.string[mo.start():mo.end()][1:5]], body)
if title:
# For each match, look-up corresponding value in dictionary
# Indexing is explained above (for how the body is parsed)
title = html_regex.sub( # pragma: no branch
lambda mo: telegram_escape_html_dict[
mo.string[mo.start():mo.end()][1:5]], title)
if self.notify_format == NotifyFormat.TEXT:
telegram_escape_text_dict = {
# We need to escape characters that conflict with html
# entity blocks (< and >) when displaying text
r'>': '&gt;',
r'<': '&lt;',
}
# Create a regular expression from the dictionary keys
text_regex = re.compile("(%s)" % "|".join(
map(re.escape, telegram_escape_text_dict.keys())).lower(),
re.I)
# For each match, look-up corresponding value in dictionary
body = text_regex.sub( # pragma: no branch
lambda mo: telegram_escape_text_dict[
mo.string[mo.start():mo.end()]], body)
if title:
# For each match, look-up corresponding value in dictionary
title = text_regex.sub( # pragma: no branch
lambda mo: telegram_escape_text_dict[
mo.string[mo.start():mo.end()]], title)
payload['text'] = '{}{}'.format(
'<b>{}</b>\r\n'.format(title) if title else '',
@ -679,6 +740,8 @@ class NotifyTelegram(NotifyBase):
params = {
'image': self.include_image,
'detect': 'yes' if self.detect_owner else 'no',
'silent': 'yes' if self.silent else 'no',
'preview': 'yes' if self.preview else 'no',
}
# Extend our parameters
@ -762,6 +825,15 @@ class NotifyTelegram(NotifyBase):
# Store our bot token
results['bot_token'] = bot_token
# Silent (Sends the message Silently); users will receive
# notification with no sound.
results['silent'] = \
parse_bool(results['qsd'].get('silent', False))
# Show Web Page Preview
results['preview'] = \
parse_bool(results['qsd'].get('preview', False))
# Include images with our message
results['include_image'] = \
parse_bool(results['qsd'].get('image', False))
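# A minimal usage sketch (illustration only, not part of the change itself)
# of the new Telegram "silent" and "preview" options parsed above, assuming
# apprise is installed; the bot token and chat id are placeholders.
import apprise

a = apprise.Apprise()
a.add('tgram://123456789:abcdefghijklmnopqrstuvwxyz/987654321/'
      '?silent=yes&preview=no')
a.notify(title='Deploy', body='Build 1.2.3 shipped')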

View file

@ -40,22 +40,18 @@
# or consider purchasing a short-code from here:
# https://www.twilio.com/docs/glossary/what-is-a-short-code
#
import re
import requests
from json import loads
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
from ..common import NotifyType
from ..utils import parse_list
from ..utils import is_phone_no
from ..utils import parse_phone_no
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# Some Phone Number Detection
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
class NotifyTwilio(NotifyBase):
"""
A wrapper for Twilio Notifications
@ -112,7 +108,7 @@ class NotifyTwilio(NotifyBase):
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[a-f0-9]+$', 'i'),
'regex': (r'^[a-z0-9]+$', 'i'),
},
'from_phone': {
'name': _('From Phone No'),
@ -154,10 +150,16 @@ class NotifyTwilio(NotifyBase):
'token': {
'alias_of': 'auth_token',
},
'apikey': {
'name': _('API Key'),
'type': 'string',
'private': True,
'regex': (r'^SK[a-f0-9]+$', 'i'),
},
})
def __init__(self, account_sid, auth_token, source, targets=None,
**kwargs):
apikey=None, **kwargs):
"""
Initialize Twilio Object
"""
@ -181,17 +183,19 @@ class NotifyTwilio(NotifyBase):
self.logger.warning(msg)
raise TypeError(msg)
# The Source Phone # and/or short-code
self.source = source
# The API Key associated with the account (optional)
self.apikey = validate_regex(
apikey, *self.template_args['apikey']['regex'])
if not IS_PHONE_NO.match(self.source):
result = is_phone_no(source, min_len=5)
if not result:
msg = 'The Account (From) Phone # or Short-code specified ' \
'({}) is invalid.'.format(source)
self.logger.warning(msg)
raise TypeError(msg)
# Tidy source
self.source = re.sub(r'[^\d]+', '', self.source)
# Store The Source Phone # and/or short-code
self.source = result['full']
if len(self.source) < 11 or len(self.source) > 14:
# https://www.twilio.com/docs/glossary/what-is-a-short-code
@ -213,37 +217,18 @@ class NotifyTwilio(NotifyBase):
# Parse our targets
self.targets = list()
for target in parse_list(targets):
for target in parse_phone_no(targets):
# Validate targets and drop bad ones:
result = IS_PHONE_NO.match(target)
if result:
# Further check our phone # for it's digit count
# if it's less than 10, then we can assume it's
# a poorly specified phone no and spit a warning
result = ''.join(re.findall(r'\d+', result.group('phone')))
if len(result) < 11 or len(result) > 14:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
# store valid phone number
self.targets.append('+{}'.format(result))
result = is_phone_no(target)
if not result:
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
continue
self.logger.warning(
'Dropped invalid phone # '
'({}) specified.'.format(target),
)
if not self.targets:
if len(self.source) in (5, 6):
# raise a warning since we're a short-code. We need
# a number to message
msg = 'There are no valid Twilio targets to notify.'
self.logger.warning(msg)
raise TypeError(msg)
# store valid phone number
self.targets.append('+{}'.format(result['full']))
return
@ -252,6 +237,14 @@ class NotifyTwilio(NotifyBase):
Perform Twilio Notification
"""
if not self.targets:
if len(self.source) in (5, 6):
# Generate a warning since we're a short-code. We need
# a number to message at minimum
self.logger.warning(
'There are no valid Twilio targets to notify.')
return False
# error tracking (used for function return)
has_error = False
@ -276,8 +269,8 @@ class NotifyTwilio(NotifyBase):
# Create a copy of the targets list
targets = list(self.targets)
# Set up our authentication
auth = (self.account_sid, self.auth_token)
# Set up our authentication. Prefer the API Key if provided.
auth = (self.apikey or self.account_sid, self.auth_token)
if len(targets) == 0:
# No sources specified, use our own phone no
@ -371,6 +364,10 @@ class NotifyTwilio(NotifyBase):
# Our URL parameters
params = self.url_parameters(privacy=privacy, *args, **kwargs)
if self.apikey is not None:
# apikey specified; pass it back on the url
params['apikey'] = self.apikey
return '{schema}://{sid}:{token}@{source}/{targets}/?{params}'.format(
schema=self.secure_protocol,
sid=self.pprint(
@ -417,6 +414,10 @@ class NotifyTwilio(NotifyBase):
results['account_sid'] = \
NotifyTwilio.unquote(results['qsd']['sid'])
# API Key
if 'apikey' in results['qsd'] and len(results['qsd']['apikey']):
results['apikey'] = results['qsd']['apikey']
# Support the 'from' and 'source' variable so that we can support
# targets this way too.
# The 'from' makes it easier to use yaml configuration
@ -431,6 +432,6 @@ class NotifyTwilio(NotifyBase):
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += \
NotifyTwilio.parse_list(results['qsd']['to'])
NotifyTwilio.parse_phone_no(results['qsd']['to'])
return results

View file

@ -562,6 +562,7 @@ class NotifyTwist(NotifyBase):
if not len(self.channel_ids):
# We have nothing to notify
self.logger.warning('There are no Twist targets to notify')
return False
# Notify all of our identified channels
@ -789,7 +790,7 @@ class NotifyTwist(NotifyBase):
try:
self.logout()
except LookupError:
except LookupError: # pragma: no cover
# Python v3.5 call to requests can sometimes throw the exception
# "/usr/lib64/python3.7/socket.py", line 748, in getaddrinfo
# LookupError: unknown encoding: idna
@ -804,3 +805,28 @@ class NotifyTwist(NotifyBase):
# ticket system as unresolved and has provided work-arounds
# - https://github.com/kennethreitz/requests/issues/3578
pass
except ImportError: # pragma: no cover
# The actual exception is `ModuleNotFoundError`; however, ImportError
# grants us backwards compatibility with versions of Python older
# than v3.6
# Python code that makes early calls to sys.exit() can cause
# the __del__() code to run. However, in some newer versions of
# Python, this causes the `sys` library to no longer be
# available. The Stack Overflow thread also goes on to suggest
# that it's not wise to use __del__() as a destructor,
# which is the case here.
# https://stackoverflow.com/questions/67218341/\
# modulenotfounderror-import-of-time-halted-none-in-sys-\
# modules-occured-when-obj?noredirect=1&lq=1
#
#
# Also see: https://stackoverflow.com/questions\
# /1481488/what-is-the-del-method-and-how-do-i-call-it
# At this time it seems clean to try to log out (if we can)
# but not throw any unnecessary exceptions (like this one) to
# the end user if we don't have to.
pass

View file

@ -56,6 +56,13 @@ class NotifyWindows(NotifyBase):
"""
A wrapper for local Windows Notifications
"""
# Set our global enabled flag
enabled = NOTIFY_WINDOWS_SUPPORT_ENABLED
requirements = {
# Define our required packaging in order to work
'details': _('A local Microsoft Windows environment is required.')
}
# The default descriptive name associated with the Notification
service_name = 'Windows Notification'
@ -80,15 +87,6 @@ class NotifyWindows(NotifyBase):
# The number of seconds to display the popup for
default_popup_duration_sec = 12
# This entry is a bit hacky, but it allows us to unit-test this library
# in an environment that simply doesn't have the windows packages
# available to us. It also allows us to handle situations where the
# packages actually are present but we need to test that they aren't.
# If anyone is seeing this and knows a better way of testing this
# outside of what is defined in test/test_windows_plugin.py, please
# let me know! :)
_enabled = NOTIFY_WINDOWS_SUPPORT_ENABLED
# Define object templates
templates = (
'{schema}://',
@ -144,12 +142,6 @@ class NotifyWindows(NotifyBase):
Perform Windows Notification
"""
if not self._enabled:
self.logger.warning(
"Windows Notifications are not supported by this system; "
"`pip install pywin32`.")
return False
# Always call throttle before any remote server i/o is made
self.throttle()

View file

@ -26,6 +26,7 @@
import re
import six
import requests
import base64
from .NotifyBase import NotifyBase
from ..URLBase import PrivacyMode
@ -58,6 +59,11 @@ class NotifyXML(NotifyBase):
# local anyway
request_rate_per_sec = 0
# XSD Information
xsd_ver = '1.1'
xsd_url = 'https://raw.githubusercontent.com/caronc/apprise/master' \
'/apprise/assets/NotifyXML-{version}.xsd'
# Define object templates
templates = (
'{schema}://{host}',
@ -118,11 +124,12 @@ class NotifyXML(NotifyBase):
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<soapenv:Body>
<Notification xmlns:xsi="http://nuxref.com/apprise/NotifyXML-1.0.xsd">
<Version>1.0</Version>
<Notification xmlns:xsi="{XSD_URL}">
<Version>{XSD_VER}</Version>
<Subject>{SUBJECT}</Subject>
<MessageType>{MESSAGE_TYPE}</MessageType>
<Message>{MESSAGE}</Message>
{ATTACHMENTS}
</Notification>
</soapenv:Body>
</soapenv:Envelope>"""
@ -175,7 +182,8 @@ class NotifyXML(NotifyBase):
params=NotifyXML.urlencode(params),
)
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
def send(self, body, title='', notify_type=NotifyType.INFO, attach=None,
**kwargs):
"""
Perform XML Notification
"""
@ -189,11 +197,55 @@ class NotifyXML(NotifyBase):
# Apply any/all header over-rides defined
headers.update(self.headers)
# Our XML Attachment substitution
xml_attachments = ''
# Track our potential attachments
attachments = []
if attach:
for attachment in attach:
# Perform some simple error checking
if not attachment:
# We could not access the attachment
self.logger.error(
'Could not access attachment {}.'.format(
attachment.url(privacy=True)))
return False
try:
with open(attachment.path, 'rb') as f:
# Attachment content is base64 encoded and embedded
# within an <Attachment> element:
entry = \
'<Attachment filename="{}" mimetype="{}">'.format(
NotifyXML.escape_html(
attachment.name, whitespace=False),
NotifyXML.escape_html(
attachment.mimetype, whitespace=False))
entry += base64.b64encode(f.read()).decode('utf-8')
entry += '</Attachment>'
attachments.append(entry)
except (OSError, IOError) as e:
self.logger.warning(
'An I/O error occurred while reading {}.'.format(
attachment.name if attachment else 'attachment'))
self.logger.debug('I/O Exception: %s' % str(e))
return False
# Update our xml_attachments record:
xml_attachments = \
'<Attachments format="base64">' + \
''.join(attachments) + '</Attachments>'
re_map = {
'{XSD_VER}': self.xsd_ver,
'{XSD_URL}': self.xsd_url.format(version=self.xsd_ver),
'{MESSAGE_TYPE}': NotifyXML.escape_html(
notify_type, whitespace=False),
'{SUBJECT}': NotifyXML.escape_html(title, whitespace=False),
'{MESSAGE}': NotifyXML.escape_html(body, whitespace=False),
'{ATTACHMENTS}': xml_attachments,
}
# Iterate over above list and store content accordingly
@ -219,6 +271,7 @@ class NotifyXML(NotifyBase):
self.logger.debug('XML POST URL: %s (cert_verify=%r)' % (
url, self.verify_certificate,
))
self.logger.debug('XML Payload: %s' % str(payload))
# Always call throttle before any remote server i/o is made
@ -278,8 +331,12 @@ class NotifyXML(NotifyBase):
# Add our headers that the user can potentially over-ride if they wish
# to to our returned result set
results['headers'] = results['qsd-']
results['headers'].update(results['qsd+'])
results['headers'] = results['qsd+']
if results['qsd-']:
results['headers'].update(results['qsd-'])
NotifyBase.logger.deprecate(
"minus (-) based XML header tokens are being "
"removed; use the plus (+) symbol instead.")
# Tidy our header entries by unquoting them
results['headers'] = {NotifyXML.unquote(x): NotifyXML.unquote(y)
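# A minimal usage sketch, for illustration only, of the attachment support
# added above for the custom XML endpoint, assuming apprise is installed; the
# endpoint, header token and file path below are placeholders.
import apprise

a = apprise.Apprise()
a.add('xmls://webhook.example.com/notify/?+X-Token=abc123')
a.notify(body='nightly report attached', attach='/tmp/report.pdf')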

View file

@ -6,23 +6,24 @@ import logging
# Default our global support flag
SLEEKXMPP_SUPPORT_AVAILABLE = False
SLIXMPP_SUPPORT_AVAILABLE = False
try:
# Import sleekxmpp if available
import sleekxmpp
# Import slixmpp if available
import slixmpp
import asyncio
SLEEKXMPP_SUPPORT_AVAILABLE = True
SLIXMPP_SUPPORT_AVAILABLE = True
except ImportError:
# No problem; we just simply can't support this plugin because we're
# either using Linux, or simply do not have sleekxmpp installed.
# either using Linux, or simply do not have slixmpp installed.
pass
class SleekXmppAdapter(object):
class SliXmppAdapter(object):
"""
Wrapper to sleekxmpp
Wrapper to slixmpp
"""
@ -38,12 +39,6 @@ class SleekXmppAdapter(object):
# The default secure protocol
secure_protocol = 'xmpps'
# The default XMPP port
default_unsecure_port = 5222
# The default XMPP secure port
default_secure_port = 5223
# Taken from https://golang.org/src/crypto/x509/root_linux.go
CA_CERTIFICATE_FILE_LOCATIONS = [
# Debian/Ubuntu/Gentoo etc.
@ -59,19 +54,20 @@ class SleekXmppAdapter(object):
]
# This entry is a bit hacky, but it allows us to unit-test this library
# in an environment that simply doesn't have the sleekxmpp package
# in an environment that simply doesn't have the slixmpp package
# available to us.
#
# If anyone is seeing this and knows a better way of testing this
# outside of what is defined in test/test_xmpp_plugin.py, please
# let me know! :)
_enabled = SLEEKXMPP_SUPPORT_AVAILABLE
_enabled = SLIXMPP_SUPPORT_AVAILABLE
def __init__(self, host=None, port=None, secure=False,
verify_certificate=True, xep=None, jid=None, password=None,
body=None, targets=None, before_message=None, logger=None):
body=None, subject=None, targets=None, before_message=None,
logger=None):
"""
Initialize our SleekXmppAdapter object
Initialize our SliXmppAdapter object
"""
self.host = host
@ -84,25 +80,35 @@ class SleekXmppAdapter(object):
self.password = password
self.body = body
self.subject = subject
self.targets = targets
self.before_message = before_message
self.logger = logger or logging.getLogger(__name__)
# Use the Apprise log handlers for configuring the sleekxmpp logger.
# Use the Apprise log handlers for configuring the slixmpp logger.
apprise_logger = logging.getLogger('apprise')
sleek_logger = logging.getLogger('sleekxmpp')
sli_logger = logging.getLogger('slixmpp')
for handler in apprise_logger.handlers:
sleek_logger.addHandler(handler)
sleek_logger.setLevel(apprise_logger.level)
sli_logger.addHandler(handler)
sli_logger.setLevel(apprise_logger.level)
if not self.load():
raise ValueError("Invalid XMPP Configuration")
def load(self):
try:
asyncio.get_event_loop()
except RuntimeError:
# slixmpp can not handle not having an event_loop
# see: https://lab.louiz.org/poezio/slixmpp/-/issues/3456
# This is a work-around to this problem
asyncio.set_event_loop(asyncio.new_event_loop())
# Prepare our object
self.xmpp = sleekxmpp.ClientXMPP(self.jid, self.password)
self.xmpp = slixmpp.ClientXMPP(self.jid, self.password)
# Register our session
self.xmpp.add_event_handler("session_start", self.session_start)
@ -112,7 +118,7 @@ class SleekXmppAdapter(object):
try:
self.xmpp.register_plugin('xep_{0:04d}'.format(xep))
except sleekxmpp.plugins.base.PluginNotFound:
except slixmpp.plugins.base.PluginNotFound:
self.logger.warning(
'Could not register plugin {}'.format(
'xep_{0:04d}'.format(xep)))
@ -141,6 +147,11 @@ class SleekXmppAdapter(object):
'no local CA certificate file')
return False
# If the user specified a port, skip SRV resolving, otherwise it is a
# lot easier to let slixmpp handle DNS instead of the user.
self.override_connection = \
None if not self.port else (self.host, self.port)
# We're good
return True
@ -150,32 +161,14 @@ class SleekXmppAdapter(object):
"""
# Establish connection to XMPP server.
# To speed up sending messages, don't use the "reattempt" feature,
# it will add a nasty delay even before connecting to XMPP server.
if not self.xmpp.connect((self.host, self.port),
use_ssl=self.secure, reattempt=False):
default_port = self.default_secure_port \
if self.secure else self.default_unsecure_port
default_schema = self.secure_protocol \
if self.secure else self.protocol
# Log connection issue
self.logger.warning(
'Failed to authenticate {jid} with: {schema}://{host}{port}'
.format(
jid=self.jid,
schema=default_schema,
host=self.host,
port='' if not self.port or self.port == default_port
else ':{}'.format(self.port),
))
# Instruct slixmpp to connect to the XMPP service.
if not self.xmpp.connect(
self.override_connection, use_ssl=self.secure):
return False
# Process XMPP communication.
self.xmpp.process(block=True)
# Run the asyncio event loop, and return once disconnected,
# for any reason.
self.xmpp.process(forever=False)
return self.success
@ -198,7 +191,9 @@ class SleekXmppAdapter(object):
self.before_message()
# The message we wish to send, and the JID that will receive it.
self.xmpp.send_message(mto=target, mbody=self.body, mtype='chat')
self.xmpp.send_message(
mto=target, msubject=self.subject,
mbody=self.body, mtype='chat')
# Using wait=True ensures that the send queue will be
# emptied before ending the session.

View file

@ -30,7 +30,7 @@ from ...URLBase import PrivacyMode
from ...common import NotifyType
from ...utils import parse_list
from ...AppriseLocale import gettext_lazy as _
from .SleekXmppAdapter import SleekXmppAdapter
from .SliXmppAdapter import SliXmppAdapter
# xep string parser
XEP_PARSE_RE = re.compile('^[^1-9]*(?P<xep>[1-9][0-9]{0,3})$')
@ -40,10 +40,22 @@ class NotifyXMPP(NotifyBase):
"""
A wrapper for XMPP Notifications
"""
# Set our global enabled flag
enabled = SliXmppAdapter._enabled
requirements = {
# Define our required packaging in order to work
'packages_required': [
"slixmpp; python_version >= '3.7'",
]
}
# The default descriptive name associated with the Notification
service_name = 'XMPP'
# The services URL
service_url = 'https://xmpp.org/'
# The default protocol
protocol = 'xmpp'
@ -56,34 +68,13 @@ class NotifyXMPP(NotifyBase):
# Lower throttle rate for XMPP
request_rate_per_sec = 0.5
# The default XMPP port
default_unsecure_port = 5222
# The default XMPP secure port
default_secure_port = 5223
# XMPP does not support a title
title_maxlen = 0
# This entry is a bit hacky, but it allows us to unit-test this library
# in an environment that simply doesn't have the sleekxmpp package
# available to us.
#
# If anyone is seeing this and knows a better way of testing this
# outside of what is defined in test/test_xmpp_plugin.py, please
# let me know! :)
_enabled = SleekXmppAdapter._enabled
# Our XMPP Adapter we use to communicate through
_adapter = SliXmppAdapter if SliXmppAdapter._enabled else None
# Define object templates
templates = (
'{schema}://{host}',
'{schema}://{password}@{host}',
'{schema}://{password}@{host}:{port}',
'{schema}://{user}:{password}@{host}',
'{schema}://{user}:{password}@{host}:{port}',
'{schema}://{host}/{targets}',
'{schema}://{password}@{host}/{targets}',
'{schema}://{password}@{host}:{port}/{targets}',
'{schema}://{user}:{password}@{host}/{targets}',
'{schema}://{user}:{password}@{host}:{port}/{targets}',
)
@ -104,6 +95,7 @@ class NotifyXMPP(NotifyBase):
'user': {
'name': _('Username'),
'type': 'string',
'required': True,
},
'password': {
'name': _('Password'),
@ -214,6 +206,7 @@ class NotifyXMPP(NotifyBase):
# By default we send ourselves a message
if targets:
self.targets = parse_list(targets)
self.targets[0] = self.targets[0][1:]
else:
self.targets = list()
@ -223,40 +216,20 @@ class NotifyXMPP(NotifyBase):
Perform XMPP Notification
"""
if not self._enabled:
self.logger.warning(
'XMPP Notifications are not supported by this system '
'- install sleekxmpp.')
return False
# Detect our JID if it isn't otherwise specified
jid = self.jid
password = self.password
if not jid:
if self.user and self.password:
# xmpp://user:password@hostname
jid = '{}@{}'.format(self.user, self.host)
else:
# xmpp://password@hostname
jid = self.host
password = self.password if self.password else self.user
# Compute port number
if not self.port:
port = self.default_secure_port \
if self.secure else self.default_unsecure_port
else:
port = self.port
jid = '{}@{}'.format(self.user, self.host)
try:
# Communicate with XMPP.
xmpp_adapter = SleekXmppAdapter(
host=self.host, port=port, secure=self.secure,
xmpp_adapter = self._adapter(
host=self.host, port=self.port, secure=self.secure,
verify_certificate=self.verify_certificate, xep=self.xep,
jid=jid, password=password, body=body, targets=self.targets,
before_message=self.throttle, logger=self.logger)
jid=jid, password=password, body=body, subject=title,
targets=self.targets, before_message=self.throttle,
logger=self.logger)
except ValueError:
# We failed
@ -287,28 +260,19 @@ class NotifyXMPP(NotifyBase):
# and/or space as a delimiters - %20 = space
jids = '%20'.join([NotifyXMPP.quote(x, safe='') for x in self.targets])
default_port = self.default_secure_port \
if self.secure else self.default_unsecure_port
default_schema = self.secure_protocol if self.secure else self.protocol
if self.user and self.password:
auth = '{user}:{password}'.format(
user=NotifyXMPP.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''))
else:
auth = self.pprint(
self.password if self.password else self.user, privacy,
mode=PrivacyMode.Secret, safe='')
auth = '{user}:{password}'.format(
user=NotifyXMPP.quote(self.user, safe=''),
password=self.pprint(
self.password, privacy, mode=PrivacyMode.Secret, safe=''))
return '{schema}://{auth}@{hostname}{port}/{jids}?{params}'.format(
auth=auth,
schema=default_schema,
# never encode hostname since we're expecting it to be a valid one
hostname=self.host,
port='' if not self.port or self.port == default_port
port='' if not self.port
else ':{}'.format(self.port),
jids=jids,
params=NotifyXMPP.urlencode(params),

View file

@ -77,12 +77,12 @@ ZULIP_HTTP_ERROR_MAP = {
401: 'Unauthorized - Invalid Token.',
}
# Used to break path apart into list of channels
# Used to break path apart into list of streams
TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
# Used to detect a channel
# Used to detect a stream
IS_VALID_TARGET_RE = re.compile(
r'#?(?P<channel>[A-Z0-9_]{1,32})', re.I)
r'#?(?P<stream>[A-Z0-9_]{1,32})', re.I)
class NotifyZulip(NotifyBase):
@ -142,8 +142,8 @@ class NotifyZulip(NotifyBase):
'type': 'string',
'map_to': 'targets',
},
'target_channel': {
'name': _('Target Channel'),
'target_stream': {
'name': _('Target Stream'),
'type': 'string',
'map_to': 'targets',
},
@ -164,8 +164,8 @@ class NotifyZulip(NotifyBase):
# if one isn't defined in the apprise url
default_hostname = 'zulipchat.com'
# The default channel to notify if no targets are specified
default_notification_channel = 'general'
# The default stream to notify if no targets are specified
default_notification_stream = 'general'
def __init__(self, botname, organization, token, targets=None, **kwargs):
"""
@ -218,8 +218,8 @@ class NotifyZulip(NotifyBase):
self.targets = parse_list(targets)
if len(self.targets) == 0:
# No channels identified, use default
self.targets.append(self.default_notification_channel)
# No streams identified, use default
self.targets.append(self.default_notification_stream)
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""

View file

@ -33,7 +33,7 @@ from os.path import abspath
# Used for testing
from . import NotifyEmail as NotifyEmailBase
from .NotifyXMPP import SleekXmppAdapter
from .NotifyXMPP import SliXmppAdapter
# NotifyBase object is passed in as a module not class
from . import NotifyBase
@ -43,6 +43,7 @@ from ..common import NOTIFY_IMAGE_SIZES
from ..common import NotifyType
from ..common import NOTIFY_TYPES
from ..utils import parse_list
from ..utils import cwe312_url
from ..utils import GET_SCHEMA_RE
from ..logger import logger
from ..AppriseLocale import gettext_lazy as _
@ -62,8 +63,8 @@ __all__ = [
# Tokenizer
'url_to_dict',
# sleekxmpp access points (used for NotifyXMPP Testing)
'SleekXmppAdapter',
# slixmpp access points (used for NotifyXMPP Testing)
'SliXmppAdapter',
]
# we mirror our base purely for the ability to reset everything; this
@ -438,7 +439,93 @@ def details(plugin):
}
def url_to_dict(url):
def requirements(plugin):
"""
Provides a list of packages and its requirement details
"""
requirements = {
# Use the description to provide a human interpretable description of
# what is required to make the plugin work. This is only necessary
# if there are package dependencies
'details': '',
# Define any required packages needed for the plugin to run. This is
# an array of strings that simply look like lines in the
# `requirements.txt` file...
#
# A single string is perfectly acceptable:
#    'packages_required': 'cryptography'
#
# Multiple entries should look like the following:
#    'packages_required': [
#        'cryptography < 3.4',
#    ]
#
'packages_required': [],
# Recommended packages identify packages that are not required to make
# your plugin work, but would improve its use or grant it access to
# full functionality (that might otherwise be limited).
# Similar to `packages_required`, you would identify each entry in
# the array as you would in a `requirements.txt` file.
#
# - Do not re-provide entries already in the `packages_required`
'packages_recommended': [],
}
# Populate our template differently if we don't find anything above
if not (hasattr(plugin, 'requirements')
and isinstance(plugin.requirements, dict)):
# We're done early
return requirements
# Get our required packages
_req_packages = plugin.requirements.get('packages_required')
if isinstance(_req_packages, six.string_types):
# Convert to list
_req_packages = [_req_packages]
elif not isinstance(_req_packages, (set, list, tuple)):
# Allow one to set the required packages to None (as an example)
_req_packages = []
requirements['packages_required'] = [str(p) for p in _req_packages]
# Get our recommended packages
_opt_packages = plugin.requirements.get('packages_recommended')
if isinstance(_opt_packages, six.string_types):
# Convert to list
_opt_packages = [_opt_packages]
elif not isinstance(_opt_packages, (set, list, tuple)):
# Allow one to set the recommended packages to None (as an example)
_opt_packages = []
requirements['packages_recommended'] = [str(p) for p in _opt_packages]
# Get our package details
_req_details = plugin.requirements.get('details')
if not _req_details:
if not (_req_packages or _opt_packages):
_req_details = _('No dependencies.')
elif _req_packages:
_req_details = _('Packages are required to function.')
else: # opt_packages
_req_details = \
_('Packages are recommended to improve functionality.')
else:
# Store our details if defined
requirements['details'] = _req_details
# Return our compiled package requirements
return requirements
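# A small sketch of what the requirements() helper above would return for a
# hypothetical plugin class; the class name and package pins below are
# illustrative only.
class NotifyExample(object):
    requirements = {
        'details': 'An optional crypto layer is supported.',
        'packages_required': 'requests',
        'packages_recommended': ['cryptography < 3.4'],
    }

print(requirements(NotifyExample))
# -> {'details': 'An optional crypto layer is supported.',
#     'packages_required': ['requests'],
#     'packages_recommended': ['cryptography < 3.4']}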
def url_to_dict(url, secure_logging=True):
"""
Takes an apprise URL and returns the tokens associated with it
if they can be acquired based on the plugins available.
@ -453,13 +540,16 @@ def url_to_dict(url):
# swap hash (#) tag values with their html version
_url = url.replace('/#', '/%23')
# CWE-312 (Secure Logging) Handling
loggable_url = url if not secure_logging else cwe312_url(url)
# Attempt to acquire the schema at the very least to allow our plugins to
# determine if they can make a better interpretation of a URL geared for
# them.
schema = GET_SCHEMA_RE.match(_url)
if schema is None:
# Not a valid URL; take an early exit
logger.error('Unsupported URL: {}'.format(url))
logger.error('Unsupported URL: {}'.format(loggable_url))
return None
# Ensure our schema is always in lower case
@ -476,7 +566,7 @@ def url_to_dict(url):
None)
if not results:
logger.error('Unparseable URL {}'.format(url))
logger.error('Unparseable URL {}'.format(loggable_url))
return None
logger.trace('URL {} unpacked as:{}{}'.format(
@ -489,7 +579,7 @@ def url_to_dict(url):
results = SCHEMA_MAP[schema].parse_url(_url)
if not results:
logger.error('Unparseable {} URL {}'.format(
SCHEMA_MAP[schema].service_name, url))
SCHEMA_MAP[schema].service_name, loggable_url))
return None
logger.trace('{} URL {} unpacked as:{}{}'.format(

0
libs/apprise/py.typed Normal file
View file

View file

@ -25,6 +25,7 @@
import sys
import asyncio
from functools import partial
from ..URLBase import URLBase
from ..logger import logger
@ -35,60 +36,61 @@ ASYNCIO_RUN_SUPPORT = \
(sys.version_info.major == 3 and sys.version_info.minor >= 7)
def notify(coroutines, debug=False):
# async reference produces a SyntaxError (E999) in Python v2.7
# For this reason we turn on the noqa flag
async def notify(coroutines): # noqa: E999
"""
A Wrapper to the AsyncNotifyBase.async_notify() calls allowing us
An async wrapper to the AsyncNotifyBase.async_notify() calls allowing us
to call gather() and collect the responses
"""
# Create log entry
logger.info(
'Notifying {} service(s) asynchronous.'.format(len(coroutines)))
'Notifying {} service(s) asynchronously.'.format(len(coroutines)))
if ASYNCIO_RUN_SUPPORT:
# async reference produces a SyntaxError (E999) in Python v2.7
# For this reason we turn on the noqa flag
async def main(results, coroutines): # noqa: E999
"""
Task: Notify all servers specified and return our result set
through a mutable object.
"""
# send our notifications and store our result set into
# our results dictionary
results['response'] = \
await asyncio.gather(*coroutines, return_exceptions=True)
# Initialize a mutable object we can populate with our notification
# responses
results = {}
# Send our notifications
asyncio.run(main(results, coroutines), debug=debug)
# Acquire our return status
status = next((s for s in results['response'] if s is False), True)
else:
#
# The deprecated way
#
# acquire access to our event loop
loop = asyncio.get_event_loop()
if debug:
# Enable debug mode
loop.set_debug(1)
# Send our notifications and acquire our status
results = loop.run_until_complete(asyncio.gather(*coroutines))
# Acquire our return status
status = next((r for r in results if r is False), True)
results = await asyncio.gather(*coroutines, return_exceptions=True)
# Returns True if all notifications succeeded, otherwise False is
# returned.
return status
failed = any(not status or isinstance(status, Exception)
for status in results)
return not failed
def tosync(cor, debug=False):
"""
Await a coroutine from non-async code.
"""
if ASYNCIO_RUN_SUPPORT:
return asyncio.run(cor, debug=debug)
else:
# The Deprecated Way (<= Python v3.6)
try:
# acquire access to our event loop
loop = asyncio.get_event_loop()
except RuntimeError:
# This happens if we're inside a thread of another application
# where there is no running event_loop(). Python v3.7 and
# higher automatically take care of this case for us. But for
# the lower versions we need to do the following:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# Enable debug mode
loop.set_debug(debug)
return loop.run_until_complete(cor)
async def toasyncwrap(v): # noqa: E999
"""
Create a coroutine that, when run, returns the provided value.
"""
return v
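# A small, self-contained sketch of the helpers above: wrap plain values into
# coroutines with toasyncwrap(), gather them through the async notify()
# wrapper, and drive it all from synchronous code with tosync().
print(tosync(notify([toasyncwrap(True), toasyncwrap(True)])))   # -> True
print(tosync(notify([toasyncwrap(True), toasyncwrap(False)])))  # -> False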
class AsyncNotifyBase(URLBase):
@ -100,8 +102,12 @@ class AsyncNotifyBase(URLBase):
"""
Async Notification Wrapper
"""
loop = asyncio.get_event_loop()
try:
return self.notify(*args, **kwargs)
return await loop.run_in_executor(
None, partial(self.notify, *args, **kwargs))
except TypeError:
# These our our internally thrown notifications

View file

@ -25,6 +25,7 @@
import re
import six
import json
import contextlib
import os
from os.path import expanduser
@ -95,9 +96,10 @@ TIDY_NUX_TRIM_RE = re.compile(
# The handling of custom arguments passed in the URL; we treat any
# argument (which would otherwise appear in the qsd area of our parse_url()
# function differently if they start with a + or - value
# function differently if they start with a +, - or : value
NOTIFY_CUSTOM_ADD_TOKENS = re.compile(r'^( |\+)(?P<key>.*)\s*')
NOTIFY_CUSTOM_DEL_TOKENS = re.compile(r'^-(?P<key>.*)\s*')
NOTIFY_CUSTOM_COLON_TOKENS = re.compile(r'^:(?P<key>.*)\s*')
# Used for attempting to acquire the schema if the URL can't be parsed.
GET_SCHEMA_RE = re.compile(r'\s*(?P<schema>[a-z0-9]{2,9})://.*$', re.I)
@ -113,18 +115,23 @@ GET_SCHEMA_RE = re.compile(r'\s*(?P<schema>[a-z0-9]{2,9})://.*$', re.I)
# - user@example.com
# - label+user@example.com
GET_EMAIL_RE = re.compile(
r'((?P<name>[^:<]+)?[:<\s]+)?'
r'(([\s"\']+)?(?P<name>[^:<"\']+)?[:<\s"\']+)?'
r'(?P<full_email>((?P<label>[^+]+)\+)?'
r'(?P<email>(?P<userid>[a-z0-9$%=_~-]+'
r'(?:\.[a-z0-9$%+=_~-]+)'
r'*)@(?P<domain>('
r'(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+'
r'[a-z0-9](?:[a-z0-9-]*[a-z0-9]))|'
r'[a-z0-9][a-z0-9-]{5,})))'
r'(?:[a-z0-9](?:[a-z0-9_-]*[a-z0-9])?\.)+'
r'[a-z0-9](?:[a-z0-9_-]*[a-z0-9]))|'
r'[a-z0-9][a-z0-9_-]{5,})))'
r'\s*>?', re.IGNORECASE)
# Regular expression used to extract a phone number
GET_PHONE_NO_RE = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
# A simple verification check to make sure the content specified
# roughly conforms to a phone number before we parse it further
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
# Regular expression used to distinguish between multiple phone numbers
PHONE_NO_DETECTION_RE = re.compile(
r'\s*([+(\s]*[0-9][0-9()\s-]+[0-9])(?=$|[\s,+(]+[0-9])', re.I)
# Regular expression used to distinguish between multiple URLs
URL_DETECTION_RE = re.compile(
@ -136,11 +143,29 @@ EMAIL_DETECTION_RE = re.compile(
r'[^@\s,]+@[^\s,]+)',
re.IGNORECASE)
# Used to prepare our UUID regex matching
UUID4_RE = re.compile(
r'[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}',
re.IGNORECASE)
# validate_regex() utilizes this mapping to track and re-use pre-complied
# regular expressions
REGEX_VALIDATE_LOOKUP = {}
class TemplateType(object):
"""
Defines the different template types we can perform parsing on
"""
# RAW does nothing at all to the content being parsed
# data is taken at it's absolute value
RAW = 'raw'
# Data is presumed to be of type JSON and is therefore escaped
# if required to do so (such as single quotes)
JSON = 'json'
def is_ipaddr(addr, ipv4=True, ipv6=True):
"""
Validates against IPV4 and IPV6 IP Addresses
@ -191,7 +216,7 @@ def is_ipaddr(addr, ipv4=True, ipv6=True):
return False
def is_hostname(hostname, ipv4=True, ipv6=True):
def is_hostname(hostname, ipv4=True, ipv6=True, underscore=True):
"""
Validate hostname
"""
@ -200,7 +225,7 @@ def is_hostname(hostname, ipv4=True, ipv6=True):
if len(hostname) > 253 or len(hostname) == 0:
return False
# Strip trailling period on hostname (if one exists)
# Strip trailing period on hostname (if one exists)
if hostname[-1] == ".":
hostname = hostname[:-1]
@ -217,9 +242,14 @@ def is_hostname(hostname, ipv4=True, ipv6=True):
# - Hostnames can only be comprised of alpha-numeric characters and the
# hyphen (-) character.
# - Hostnames can not start with the hyphen (-) character.
# - as a workaround for https://github.com/docker/compose/issues/229 to
# being able to address services in other stacks, we also allow
# underscores in hostnames (if flag is set accordingly)
# - labels can not exceed 63 characters
# - allow single character alpha characters
allowed = re.compile(
r'(?!-)[a-z0-9][a-z0-9-]{1,62}(?<!-)$',
r'^([a-z0-9][a-z0-9_-]{1,62}|[a-z_-])(?<![_-])$' if underscore else
r'^([a-z0-9][a-z0-9-]{1,62}|[a-z-])(?<!-)$',
re.IGNORECASE,
)
@ -229,6 +259,119 @@ def is_hostname(hostname, ipv4=True, ipv6=True):
return hostname
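# A small illustrative sketch of the underscore handling added above;
# hostnames containing underscores now validate by default, but can still be
# rejected by passing underscore=False (the hostname is an example only).
print(is_hostname('my_service.local'))                    # 'my_service.local'
print(is_hostname('my_service.local', underscore=False))  # False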
def is_uuid(uuid):
"""Determine if the specified entry is uuid v4 string
Args:
address (str): The string you want to check.
Returns:
bool: Returns False if the specified element is not a uuid otherwise
it returns True
"""
try:
match = UUID4_RE.match(uuid)
except TypeError:
# not parseable content
return False
return True if match else False
def is_phone_no(phone, min_len=11):
"""Determine if the specified entry is a phone number
Args:
phone (str): The string you want to check.
min_len (int): Defines the smallest expected length of the phone
number before it's considered invalid. By default
the phone number can't be any larger than 14 digits.
Returns:
bool: Returns False if the address specified is not a phone number
and a dictionary of the parsed phone number if it is as:
{
'country': '1',
'area': '800',
'line': '1234567',
'full': '18001234567',
'pretty': '+1 800-123-4567',
}
Non-conventional numbers such as 411 would look like the following,
provided that `min_len` is set to at least 3:
{
'country': '',
'area': '',
'line': '411',
'full': '411',
'pretty': '411',
}
"""
try:
if not IS_PHONE_NO.match(phone):
# not parseable content as it does not even conform closely to a
# phone number
return False
except TypeError:
return False
# Tidy phone number up first
phone = re.sub(r'[^\d]+', '', phone)
if len(phone) > 14 or len(phone) < min_len:
# Invalid phone number
return False
# Full phone number without any markup is as is now
full = phone
# Break apart our phone number
line = phone[-7:]
phone = phone[:len(phone) - 7] if len(phone) > 7 else ''
# the area code (if present)
area = phone[-3:] if phone else ''
# The country code is the leftovers
country = phone[:len(phone) - 3] if len(phone) > 3 else ''
# Prepare a nicely (consistently) formatted phone number
pretty = ''
if country:
# The leftover is the country code
pretty += '+{} '.format(country)
if area:
pretty += '{}-'.format(area)
if len(line) >= 7:
pretty += '{}-{}'.format(line[:3], line[3:])
else:
pretty += line
return {
# The line code (last 7 digits)
'line': line,
# Area code
'area': area,
# The country code (if identified)
'country': country,
# A nicely formatted phone no
'pretty': pretty,
# All digits in-line
'full': full,
}
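# A small illustrative sketch of the new is_phone_no() helper; the numbers
# below are examples only.
print(is_phone_no('+1 (800) 123-4567'))
# -> {'line': '1234567', 'area': '800', 'country': '1',
#     'pretty': '+1 800-123-4567', 'full': '18001234567'}
print(is_phone_no('411', min_len=3))
# -> {'line': '411', 'area': '', 'country': '', 'pretty': '411', 'full': '411'}
print(is_phone_no('not-a-number'))
# -> False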
def is_email(address):
"""Determine if the specified entry is an email address
@ -236,8 +379,17 @@ def is_email(address):
address (str): The string you want to check.
Returns:
bool: Returns True if the address specified is an email address
and False if it isn't.
bool: Returns False if the address specified is not an email address
and a dictionary of the parsed email if it is as:
{
'name': 'Parsed Name',
'email': 'user@domain.com',
'full_email': 'label+user@domain.com',
'label': 'label',
'user': 'user',
'domain': 'domain.com'
}
"""
try:
@ -318,10 +470,11 @@ def parse_qsd(qs):
'qsd': {},
# Detected Entries that start with +, - or : are additionally stored in
# these values (un-touched). The +/- however are stripped from their
# these values (un-touched). The :,+,- however are stripped from their
# name before they are stored here.
'qsd+': {},
'qsd-': {},
'qsd:': {},
}
pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
@ -361,6 +514,12 @@ def parse_qsd(qs):
# Store content 'as-is'
result['qsd-'][k.group('key')] = val
# Check for tokens that start with a colon symbol (:)
k = NOTIFY_CUSTOM_COLON_TOKENS.match(key)
if k is not None:
# Store content 'as-is'
result['qsd:'][k.group('key')] = val
return result
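# A small illustrative sketch of the colon-prefixed token support added
# above; query keys beginning with '+', '-' or ':' are additionally exposed,
# with the prefix stripped, under 'qsd+', 'qsd-' and 'qsd:'.
result = parse_qsd('+X-Header=value&-X-Drop=gone&:path=status')
print(result['qsd+'])  # e.g. {'X-Header': 'value'}
print(result['qsd-'])  # e.g. {'X-Drop': 'gone'}
print(result['qsd:'])  # e.g. {'path': 'status'}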
@ -418,11 +577,12 @@ def parse_url(url, default_schema='http', verify_host=True):
# qsd = Query String Dictionary
'qsd': {},
# Detected Entries that start with + or - are additionally stored in
# these values (un-touched). The +/- however are stripped from their
# name before they are stored here.
# Detected Entries that start with +, - or : are additionally stored in
# these values (un-touched). The +, -, and : however are stripped
# from their name before they are stored here.
'qsd+': {},
'qsd-': {},
'qsd:': {},
}
qsdata = ''
@ -534,10 +694,7 @@ def parse_url(url, default_schema='http', verify_host=True):
def parse_bool(arg, default=False):
"""
NZBGet uses 'yes' and 'no' as well as other strings such as 'on' or
'off' etch to handle boolean operations from it's control interface.
This method can just simplify checks to these variables.
Support string based boolean settings.
If the content could not be parsed, then the default is returned.
"""
@ -572,9 +729,46 @@ def parse_bool(arg, default=False):
return bool(arg)
def parse_phone_no(*args, **kwargs):
"""
Takes a string containing phone numbers separated by comma's and/or spaces
and returns a list.
"""
# for Python 2.7 support, store_unparseable is not in the signature above
# as just parse_phone_no(*args, store_unparseable=True) since it is
# an invalid syntax. This is the workaround to be backwards compatible:
store_unparseable = kwargs.get('store_unparseable', True)
result = []
for arg in args:
if isinstance(arg, six.string_types) and arg:
_result = PHONE_NO_DETECTION_RE.findall(arg)
if _result:
result += _result
elif not _result and store_unparseable:
# we had content passed into us that was lost because it was
# so poorly formatted that it didn't even come close to
# meeting the regular expression we defined. We intentionally
# keep it as part of our result set so that parsing done
# at a higher level can at least report this to the end user
# and hopefully give them some indication as to what they
# may have done wrong.
result += \
[x for x in filter(bool, re.split(STRING_DELIMITERS, arg))]
elif isinstance(arg, (set, list, tuple)):
# Use recursion to handle the list of phone numbers
result += parse_phone_no(
*arg, store_unparseable=store_unparseable)
return result
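# A small illustrative sketch of parse_phone_no() splitting a comma separated
# string of phone numbers into a list; the numbers are examples only.
print(parse_phone_no('+1 800-123-4567, (416) 555-0123'))
# -> ['+1 800-123-4567', '(416) 555-0123']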
def parse_emails(*args, **kwargs):
"""
Takes a string containing URLs separated by comma's and/or spaces and
Takes a string containing emails separated by comma's and/or spaces and
returns a list.
"""
@ -821,6 +1015,174 @@ def validate_regex(value, regex=r'[^\s]+', flags=re.I, strip=True, fmt=None):
return value.strip() if strip else value
def cwe312_word(word, force=False, advanced=True, threshold=5):
"""
This function was written to help mask secure/private information that may
or may not be found within Apprise. The idea is to provide a presentable
word response that the user who prepared it would understand, yet not
reveal any private information to any potential intruder.
For more detail see CWE-312 @
https://cwe.mitre.org/data/definitions/312.html
The `force` is an optional argument used to keep the string formatting
consistent and in one place. If set, the content passed in is presumed
to contain secret information and will be updated accordingly.
If advanced is set to `True` then content is additionally checked for
upper/lower/ascii/numerical variances. If an obscurity threshold is
reached, then the content is considered secret.
"""
class Variance(object):
"""
A Simple List of Possible Character Variances
"""
# An Upper Case Character (ABCDEF... etc)
ALPHA_UPPER = '+'
# A Lower Case Character (abcdef... etc)
ALPHA_LOWER = '-'
# A Special Character ($%^;... etc)
SPECIAL = 's'
# A Numerical Character (1234... etc)
NUMERIC = 'n'
if not (isinstance(word, six.string_types) and word.strip()):
# not a password if it's not something we even support
return word
# Formatting
word = word.strip()
if force:
# We're forcing the representation to be a secret
# We do this for consistency
return '{}...{}'.format(word[0:1], word[-1:])
elif len(word) > 1 and \
not is_hostname(word, ipv4=True, ipv6=True, underscore=False):
# Verify if it is a hostname or not
return '{}...{}'.format(word[0:1], word[-1:])
elif len(word) >= 16:
# an IP will be 15 characters so we don't want to use a smaller
# value than 16 (e.g. 101.102.103.104)
# we can assume very long words are passwords otherwise
return '{}...{}'.format(word[0:1], word[-1:])
if advanced:
#
# Mark the word a secret based on its obscurity
#
# Our variances will increase depending on these variables:
last_variance = None
obscurity = 0
for c in word:
# Detect our variance
if c.isdigit():
variance = Variance.NUMERIC
elif c.isalpha() and c.isupper():
variance = Variance.ALPHA_UPPER
elif c.isalpha() and c.islower():
variance = Variance.ALPHA_LOWER
else:
variance = Variance.SPECIAL
if last_variance != variance or variance == Variance.SPECIAL:
obscurity += 1
if obscurity >= threshold:
return '{}...{}'.format(word[0:1], word[-1:])
last_variance = variance
# Otherwise we're good; return our word
return word
def cwe312_url(url):
"""
This function was written to help mask secure/private information that may
or may not be found on an Apprise URL. The idea is to not disrupt the
structure of the original URL too much, yet still protect the user's
private information from being logged directly to screen.
For more detail see CWE-312 @
https://cwe.mitre.org/data/definitions/312.html
For example, consider the URL: http://user:password@localhost/
When passed into this function, the return value would be:
http://user:****@localhost/
Since apprise allows you to put private information everywhere in its
custom URLs, it uses this function to manipulate the content before
handing it off to any kind of logger.
The idea is that the URL can still be interpreted by the person who
constructed it, but not by an intruder.
"""
# Parse our URL
results = parse_url(url)
if not results:
# Nothing was returned (invalid data was fed in); return our
# information as it was fed to us (without changing it)
return url
# Update our URL with values
results['password'] = cwe312_word(results['password'], force=True)
if not results['schema'].startswith('http'):
results['user'] = cwe312_word(results['user'])
results['host'] = cwe312_word(results['host'])
else:
results['host'] = cwe312_word(results['host'], advanced=False)
results['user'] = cwe312_word(results['user'], advanced=False)
# Apply our full path scan in all cases
results['fullpath'] = '/' + \
'/'.join([cwe312_word(x)
for x in re.split(
r'[\\/]+',
results['fullpath'].lstrip('/'))]) \
if results['fullpath'] else ''
#
# Now re-assemble our URL for display purposes
#
# Determine Authentication
auth = ''
if results['user'] and results['password']:
auth = '{user}:{password}@'.format(
user=results['user'],
password=results['password'],
)
elif results['user']:
auth = '{user}@'.format(
user=results['user'],
)
params = ''
if results['qsd']:
params = '?{}'.format(
"&".join(["{}={}".format(k, cwe312_word(v, force=(
k in ('password', 'secret', 'pass', 'token', 'key',
'id', 'apikey', 'to'))))
for k, v in results['qsd'].items()]))
return '{schema}://{auth}{hostname}{port}{fullpath}{params}'.format(
schema=results['schema'],
auth=auth,
# never encode hostname since we're expecting it to be a valid one
hostname=results['host'],
port='' if not results['port'] else ':{}'.format(results['port']),
fullpath=results['fullpath'] if results['fullpath'] else '',
params=params,
)
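# A small illustrative sketch of cwe312_url() masking credentials before a
# URL is handed to the logger; the URL below is an example only.
print(cwe312_url('mailto://myuser:mysecretpassword@gmail.com'))
# -> 'mailto://myuser:m...d@gmail.com'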
@contextlib.contextmanager
def environ(*remove, **update):
"""
@ -845,3 +1207,45 @@ def environ(*remove, **update):
finally:
# Restore our snapshot
os.environ = env_orig.copy()
def apply_template(template, app_mode=TemplateType.RAW, **kwargs):
"""
Takes a template in a str format and applies all of the keywords
and their values to it.
The app_mode is used to dictate any pre-processing that needs to take place
on the string prior to it being placed. The idea here is that elements
destined for a JSON response, for example, should be escaped early in
their string format.
The template must contain keywords wrapped in double
curly braces like {{keyword}}. These are matched to the respective
kwargs passed into this function.
If there is no match found, content is not swapped.
"""
def _escape_raw(content):
# No escaping necessary
return content
def _escape_json(content):
# remove surrounding quotes
return json.dumps(content)[1:-1]
# Our escape function
fn = _escape_json if app_mode == TemplateType.JSON else _escape_raw
lookup = [re.escape(x) for x in kwargs.keys()]
# Compile this into a list
mask_r = re.compile(
re.escape('{{') + r'\s*(' + '|'.join(lookup) + r')\s*'
+ re.escape('}}'), re.IGNORECASE)
# we index 2 characters off the head and 2 characters from the tail
# to drop the '{{' and '}}' surrounding our match so that we can
# re-index it back into our list
return mask_r.sub(lambda x: fn(kwargs[x.group()[2:-2].strip()]), template)
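# A small illustrative sketch of apply_template() substituting {{keyword}}
# tokens; with the JSON template type, values are escaped so they can be
# embedded safely in a JSON payload.
template = '{"title": "{{title}}", "body": "{{body}}"}'
print(apply_template(
    template, app_mode=TemplateType.JSON,
    title='Hello', body='He said "hi"'))
# -> {"title": "Hello", "body": "He said \"hi\""}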

View file

@ -1,4 +1,4 @@
apprise=0.8.8
apprise=0.9.6
apscheduler=3.8.0
babelfish=0.6.0
backports.functools-lru-cache=1.5