Mirror of https://github.com/morpheus65535/bazarr.git
Synced 2025-04-24 14:47:16 -04:00

Merge branch 'development' into morpheus

Commit 7f1f46a506

90 changed files with 21254 additions and 3210 deletions
@@ -39,7 +39,6 @@ if __name__ == '__main__':
            pass

    def daemon():
        threading.Timer(1.0, daemon).start()
        if os.path.exists(stopfile):
            try:
                os.remove(stopfile)

@@ -57,12 +56,9 @@ if __name__ == '__main__':
    else:
        start_bazarr()

    daemon()

    start_bazarr()

    # Keep the script running forever.
    while True:
        time.sleep(0.001)
        daemon()
        time.sleep(1)
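For context, the loop above polls for a stop file roughly once per second. A minimal, self-contained sketch of that polling pattern (the stopfile path and the SystemExit shutdown are illustrative stand-ins, not the actual Bazarr code):

import os
import time

stopfile = '/tmp/bazarr.stop'  # hypothetical path for illustration


def daemon():
    # If a stop file appeared, remove it and shut the loop down.
    if os.path.exists(stopfile):
        try:
            os.remove(stopfile)
        except OSError:
            pass
        raise SystemExit(0)


while True:
    daemon()
    time.sleep(1)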
@@ -10,7 +10,7 @@ from get_args import args
 from config import settings, bazarr_url
 from queueconfig import notifications

-if not args.no_update:
+if not args.no_update and not args.release_update:
     import git

     current_working_directory = os.path.dirname(os.path.dirname(__file__))
@@ -33,6 +33,7 @@ defaults = {
     'page_size': '25',
     'minimum_score_movie': '70',
     'use_embedded_subs': 'True',
+    'utf8_encode': 'True',
     'ignore_pgs_subs': 'False',
     'adaptive_searching': 'False',
     'enabled_providers': '',
@@ -104,6 +104,11 @@ def download_subtitle(path, language, hi, forced, providers, providers_auth, sce
     # fixme: supply all missing languages, not only one, to hit providers only once who support multiple languages in
     # one query

+    if settings.general.getboolean('utf8_encode'):
+        os.environ["SZ_KEEP_ENCODING"] = ""
+    else:
+        os.environ["SZ_KEEP_ENCODING"] = True
+
     logging.debug('BAZARR Searching subtitles for this file: ' + path)
     if hi == "True":
         hi = "force HI"
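One caveat about the block above: os.environ values must be strings, so the else branch (os.environ["SZ_KEEP_ENCODING"] = True) raises a TypeError when it runs. A hedged sketch of the presumably intended toggle, keeping both branches as strings:

if settings.general.getboolean('utf8_encode'):
    # Falsy (empty) value: do not preserve the original encoding.
    os.environ["SZ_KEEP_ENCODING"] = ""
else:
    # Environment values must be str, so use "True" rather than the bool True.
    os.environ["SZ_KEEP_ENCODING"] = "True"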
@@ -356,7 +361,12 @@ def manual_search(path, language, hi, forced, providers, providers_auth, sceneNa

 def manual_download_subtitle(path, language, hi, forced, subtitle, provider, providers_auth, sceneName, title, media_type):
     logging.debug('BAZARR Manually downloading subtitles for this file: ' + path)

+    if settings.general.getboolean('utf8_encode'):
+        os.environ["SZ_KEEP_ENCODING"] = ""
+    else:
+        os.environ["SZ_KEEP_ENCODING"] = True
+
     subtitle = pickle.loads(codecs.decode(subtitle.encode(), "base64"))
     use_scenename = settings.general.getboolean('use_scenename')
     use_postprocessing = settings.general.getboolean('use_postprocessing')
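The subtitle argument above arrives as a base64-wrapped pickle. Purely for illustration (this round trip is standard-library behaviour, not part of the diff), a matching encode/decode pair looks like this:

import codecs
import pickle

payload = {'provider': 'opensubtitles', 'id': '12345'}  # hypothetical subtitle descriptor

# Producer side: pickle the object, then base64-wrap it so it survives form transport.
encoded = codecs.encode(pickle.dumps(payload), "base64").decode()

# Consumer side: the mirror image of the line used in manual_download_subtitle().
decoded = pickle.loads(codecs.decode(encoded.encode(), "base64"))
assert decoded == payload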
@@ -1,6 +1,6 @@
 # coding=utf-8

-bazarr_version = '0.7.5'
+bazarr_version = '0.7.5.1'

 import gc
 import sys
@@ -79,6 +79,10 @@ os.environ["BAZARR_VERSION"] = bazarr_version

 configure_logging(settings.general.getboolean('debug') or args.debug)

+# Check and install update on startup when running on Windows from installer
+if args.release_update:
+    check_and_apply_update()
+
 if settings.proxy.type != 'None':
     if settings.proxy.username != '' and settings.proxy.password != '':
         proxy = settings.proxy.type + '://' + settings.proxy.username + ':' + settings.proxy.password + '@' + \
@@ -1250,6 +1254,11 @@ def save_settings():
         settings_general_embedded = 'False'
     else:
         settings_general_embedded = 'True'
+    settings_general_utf8_encode = request.forms.get('settings_general_utf8_encode')
+    if settings_general_utf8_encode is None:
+        settings_general_utf8_encode = 'False'
+    else:
+        settings_general_utf8_encode = 'True'
     settings_general_ignore_pgs = request.forms.get('settings_general_ignore_pgs')
     if settings_general_ignore_pgs is None:
         settings_general_ignore_pgs = 'False'
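The None checks above implement the usual HTML checkbox convention: browsers omit unchecked checkboxes from the submitted form, so a missing key means 'False' and any submitted value means 'True'. As a purely illustrative refactor (the helper name is hypothetical, not part of Bazarr), the repeated pattern could be expressed once:

def checkbox_to_str(form, name):
    # Unchecked checkboxes are absent from the submitted form entirely.
    return 'False' if form.get(name) is None else 'True'

# e.g. settings_general_utf8_encode = checkbox_to_str(request.forms, 'settings_general_utf8_encode')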
@@ -1354,6 +1363,7 @@ def save_settings():

     settings.general.minimum_score_movie = text_type(settings_general_minimum_score_movies)
     settings.general.use_embedded_subs = text_type(settings_general_embedded)
+    settings.general.utf8_encode = text_type(settings_general_utf8_encode)
     settings.general.ignore_pgs_subs = text_type(settings_general_ignore_pgs)
     settings.general.adaptive_searching = text_type(settings_general_adaptive_searching)
     settings.general.multithreading = text_type(settings_general_multithreading)
@@ -1839,8 +1849,7 @@ def get_subtitle():
 @custom_auth_basic(check_credentials)
 def manual_search_json():
     authorize()
     ref = request.environ['HTTP_REFERER']

     episodePath = request.forms.get('episodePath')
     sceneName = request.forms.get('sceneName')
     language = request.forms.get('language')
@@ -1,92 +1,50 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Apprise Core
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with apprise. If not, see <http://www.gnu.org/licenses/>.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
from markdown import markdown
|
||||
|
||||
from itertools import chain
|
||||
from .common import NotifyType
|
||||
from .common import NotifyFormat
|
||||
from .utils import is_exclusive_match
|
||||
from .utils import parse_list
|
||||
from .utils import compat_is_basestring
|
||||
from .utils import split_urls
|
||||
from .utils import GET_SCHEMA_RE
|
||||
from .logger import logger
|
||||
|
||||
from .AppriseAsset import AppriseAsset
|
||||
from .AppriseConfig import AppriseConfig
|
||||
from .AppriseLocale import AppriseLocale
|
||||
from .config.ConfigBase import ConfigBase
|
||||
from .plugins.NotifyBase import NotifyBase
|
||||
|
||||
from . import NotifyBase
|
||||
from . import plugins
|
||||
from . import __version__
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Build a list of supported plugins
|
||||
SCHEMA_MAP = {}
|
||||
|
||||
# Used for attempting to acquire the schema if the URL can't be parsed.
|
||||
GET_SCHEMA_RE = re.compile(r'\s*(?P<schema>[a-z0-9]{3,9})://.*$', re.I)
|
||||
|
||||
|
||||
# Load our Lookup Matrix
|
||||
def __load_matrix():
|
||||
"""
|
||||
Dynamically load our schema map; this allows us to gracefully
|
||||
skip over plugins we simply don't have the dependencies for.
|
||||
|
||||
"""
|
||||
# to add it's mapping to our hash table
|
||||
for entry in dir(plugins):
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(plugins, entry)
|
||||
if not hasattr(plugin, 'app_id'): # pragma: no branch
|
||||
# Filter out non-notification modules
|
||||
continue
|
||||
|
||||
# Load protocol(s) if defined
|
||||
proto = getattr(plugin, 'protocol', None)
|
||||
if compat_is_basestring(proto):
|
||||
if proto not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[proto] = plugin
|
||||
|
||||
elif isinstance(proto, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in proto:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
# Load secure protocol(s) if defined
|
||||
protos = getattr(plugin, 'secure_protocol', None)
|
||||
if compat_is_basestring(protos):
|
||||
if protos not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[protos] = plugin
|
||||
|
||||
if isinstance(protos, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in protos:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
|
||||
# Dynamically build our module
|
||||
__load_matrix()
|
||||
|
||||
|
||||
class Apprise(object):
|
||||
"""
|
||||
|
@@ -110,63 +68,124 @@ class Apprise(object):
|
|||
# directory images can be found in. It can also identify remote
|
||||
# URL paths that contain the images you want to present to the end
|
||||
# user. If no asset is specified, then the default one is used.
|
||||
self.asset = asset
|
||||
if asset is None:
|
||||
# Load our default configuration
|
||||
self.asset = AppriseAsset()
|
||||
self.asset = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if servers:
|
||||
self.add(servers)
|
||||
|
||||
# Initialize our locale object
|
||||
self.locale = AppriseLocale()
|
||||
|
||||
@staticmethod
|
||||
def instantiate(url, asset=None, suppress_exceptions=True):
|
||||
def instantiate(url, asset=None, tag=None, suppress_exceptions=True):
|
||||
"""
|
||||
Returns the instance of an instantiated plugin based on the provided
|
||||
Server URL. If the url fails to be parsed, then None is returned.
|
||||
|
||||
The specified url can be either a string (the URL itself) or a
|
||||
dictionary containing all of the components needed to instantiate
|
||||
the notification service. If identifying a dictionary, at the bare
|
||||
minimum, one must specify the schema.
|
||||
|
||||
An example of a url dictionary object might look like:
|
||||
{
|
||||
schema: 'mailto',
|
||||
host: 'google.com',
|
||||
user: 'myuser',
|
||||
password: 'mypassword',
|
||||
}
|
||||
|
||||
Alternatively the string is much easier to specify:
|
||||
mailto://user:mypassword@google.com
|
||||
|
||||
The dictionary works well for people who are calling details() to
|
||||
extract the components they need to build the URL manually.
|
||||
"""
|
||||
# swap hash (#) tag values with their html version
|
||||
# This is useful for accepting channels (as arguments to pushbullet)
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our plugins
|
||||
# to determine if they can make a better interpretation of a URL
|
||||
# geared for them anyway.
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
logger.error('%s is an unparseable server url.' % url)
|
||||
# Initialize our result set
|
||||
results = None
|
||||
|
||||
if isinstance(url, six.string_types):
|
||||
# swap hash (#) tag values with their html version
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our
|
||||
# plugins to determine if they can make a better interpretation of
|
||||
# a URL geared for them
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
logger.error(
|
||||
'Unparseable schema:// found in URL {}.'.format(url))
|
||||
return None
|
||||
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
# Give the user the benefit of the doubt that the user may be
|
||||
# using one of the URLs provided to them by their notification
|
||||
# service. Before we fail for good, just scan all the plugins
|
||||
# that support the native_url() parse function
|
||||
results = \
|
||||
next((r['plugin'].parse_native_url(_url)
|
||||
for r in plugins.MODULE_MAP.values()
|
||||
if r['plugin'].parse_native_url(_url) is not None),
|
||||
None)
|
||||
|
||||
else:
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
|
||||
if results is None:
|
||||
# Failed to parse the server URL
|
||||
logger.error('Unparseable URL {}.'.format(url))
|
||||
return None
|
||||
|
||||
logger.trace('URL {} unpacked as:{}{}'.format(
|
||||
url, os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v) for k, v in results.items()])))
|
||||
|
||||
elif isinstance(url, dict):
|
||||
# We already have our result set
|
||||
results = url
|
||||
|
||||
if results.get('schema') not in plugins.SCHEMA_MAP:
|
||||
# schema is a mandatory dictionary item as it is the only way
|
||||
# we can index into our loaded plugins
|
||||
logger.error('Dictionary does not include a "schema" entry.')
|
||||
logger.trace('Invalid dictionary unpacked as:{}{}'.format(
|
||||
os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v) for k, v in results.items()])))
|
||||
return None
|
||||
|
||||
logger.trace('Dictionary unpacked as:{}{}'.format(
|
||||
os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v) for k, v in results.items()])))
|
||||
|
||||
else:
|
||||
logger.error('Invalid URL specified: {}'.format(url))
|
||||
return None
|
||||
|
||||
# Update the schema
|
||||
schema = schema.group('schema').lower()
|
||||
# Build a list of tags to associate with the newly added notifications
|
||||
results['tag'] = set(parse_list(tag))
|
||||
|
||||
# Some basic validation
|
||||
if schema not in SCHEMA_MAP:
|
||||
logger.error(
|
||||
'{0} is not a supported server type (url={1}).'.format(
|
||||
schema,
|
||||
_url,
|
||||
)
|
||||
)
|
||||
return None
|
||||
|
||||
# Parse our url details
|
||||
# the server object is a dictionary containing all of the information
|
||||
# parsed from our URL
|
||||
results = SCHEMA_MAP[schema].parse_url(_url)
|
||||
|
||||
if not results:
|
||||
# Failed to parse the server URL
|
||||
logger.error('Could not parse URL: %s' % url)
|
||||
return None
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if suppress_exceptions:
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information
|
||||
plugin = SCHEMA_MAP[results['schema']](**results)
|
||||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
except:
|
||||
# Create log entry of loaded URL
|
||||
logger.debug('Loaded URL: {}'.format(plugin.url()))
|
||||
|
||||
except Exception:
|
||||
# the arguments are invalid or can not be used.
|
||||
logger.error('Could not load URL: %s' % url)
|
||||
return None
|
||||
|
@@ -174,43 +193,71 @@ class Apprise(object):
|
|||
else:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information but don't wrap it in a try catch
|
||||
plugin = SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
# Save our asset
|
||||
if asset:
|
||||
plugin.asset = asset
|
||||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
return plugin
|
||||
|
||||
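To make the string-versus-dictionary distinction described in instantiate() concrete, here is a hedged usage sketch (the mailto credentials are placeholders):

from apprise import Apprise

# String form: the whole URL in one go.
plugin = Apprise.instantiate('mailto://myuser:mypassword@google.com')

# Dictionary form: the same service from pre-parsed components; 'schema' is mandatory.
plugin = Apprise.instantiate({
    'schema': 'mailto',
    'host': 'google.com',
    'user': 'myuser',
    'password': 'mypassword',
})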
def add(self, servers, asset=None):
|
||||
def add(self, servers, asset=None, tag=None):
|
||||
"""
|
||||
Adds one or more server URLs into our list.
|
||||
|
||||
You can override the global asset if you wish by including it with the
|
||||
server(s) that you add.
|
||||
|
||||
The tag allows you to associate 1 or more tag values to the server(s)
|
||||
being added. tagging a service allows you to exclusively access them
|
||||
when calling the notify() function.
|
||||
"""
|
||||
|
||||
# Initialize our return status
|
||||
return_status = True
|
||||
|
||||
if asset is None:
|
||||
if isinstance(asset, AppriseAsset):
|
||||
# prepare default asset
|
||||
asset = self.asset
|
||||
|
||||
if isinstance(servers, NotifyBase):
|
||||
if isinstance(servers, six.string_types):
|
||||
# build our server list
|
||||
servers = split_urls(servers)
|
||||
if len(servers) == 0:
|
||||
return False
|
||||
|
||||
elif isinstance(servers, dict):
|
||||
# no problem, we support kwargs, convert it to a list
|
||||
servers = [servers]
|
||||
|
||||
elif isinstance(servers, (ConfigBase, NotifyBase, AppriseConfig)):
|
||||
# Go ahead and just add our plugin into our list
|
||||
self.servers.append(servers)
|
||||
return True
|
||||
|
||||
servers = parse_list(servers)
|
||||
elif not isinstance(servers, (tuple, set, list)):
|
||||
logger.error(
|
||||
"An invalid notification (type={}) was specified.".format(
|
||||
type(servers)))
|
||||
return False
|
||||
|
||||
for _server in servers:
|
||||
|
||||
if isinstance(_server, (ConfigBase, NotifyBase, AppriseConfig)):
|
||||
# Go ahead and just add our plugin into our list
|
||||
self.servers.append(_server)
|
||||
continue
|
||||
|
||||
elif not isinstance(_server, (six.string_types, dict)):
|
||||
logger.error(
|
||||
"An invalid notification (type={}) was specified.".format(
|
||||
type(_server)))
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
# Instantiate ourselves an object, this function throws or
|
||||
# returns None if it fails
|
||||
instance = Apprise.instantiate(_server, asset=asset)
|
||||
if not instance:
|
||||
instance = Apprise.instantiate(_server, asset=asset, tag=tag)
|
||||
if not isinstance(instance, NotifyBase):
|
||||
# No logging is required as instantiate() handles failure
|
||||
# and/or success reasons for us
|
||||
return_status = False
|
||||
logging.error(
|
||||
"Failed to load notification url: {}".format(_server),
|
||||
)
|
||||
continue
|
||||
|
||||
# Add our initialized plugin to our server listings
|
||||
|
@@ -226,18 +273,23 @@ class Apprise(object):
|
|||
"""
|
||||
self.servers[:] = []
|
||||
|
||||
def notify(self, title, body, notify_type=NotifyType.INFO,
|
||||
body_format=None):
|
||||
def notify(self, body, title='', notify_type=NotifyType.INFO,
|
||||
body_format=None, tag=None):
|
||||
"""
|
||||
Send a notification to all of the plugins previously loaded.
|
||||
|
||||
If the body_format specified is NotifyFormat.MARKDOWN, it will
|
||||
be converted to HTML if the Notification type expects this.
|
||||
|
||||
if the tag is specified (either a string or a set/list/tuple
|
||||
of strings), then only the notifications flagged with that
|
||||
tagged value are notified. By default all added services
|
||||
are notified (tag=None)
|
||||
|
||||
"""
|
||||
|
||||
# Initialize our return result
|
||||
status = len(self.servers) > 0
|
||||
status = len(self) > 0
|
||||
|
||||
if not (title or body):
|
||||
return False
|
||||
|
@@ -245,43 +297,104 @@ class Apprise(object):
|
|||
# Tracks conversions
|
||||
conversion_map = dict()
|
||||
|
||||
# Build our tag setup
|
||||
# - top level entries are treated as an 'or'
|
||||
# - second level (or more) entries are treated as 'and'
|
||||
#
|
||||
# examples:
|
||||
# tag="tagA, tagB" = tagA or tagB
|
||||
# tag=['tagA', 'tagB'] = tagA or tagB
|
||||
# tag=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB
|
||||
# tag=[('tagB', 'tagC')] = tagB and tagC
|
||||
|
||||
# Iterate over our loaded plugins
|
||||
for server in self.servers:
|
||||
if server.notify_format not in conversion_map:
|
||||
if body_format == NotifyFormat.MARKDOWN and \
|
||||
server.notify_format == NotifyFormat.HTML:
|
||||
for entry in self.servers:
|
||||
|
||||
# Apply Markdown
|
||||
conversion_map[server.notify_format] = markdown(body)
|
||||
if isinstance(entry, (ConfigBase, AppriseConfig)):
|
||||
# load our servers
|
||||
servers = entry.servers()
|
||||
|
||||
else:
|
||||
# Store entry directly
|
||||
conversion_map[server.notify_format] = body
|
||||
else:
|
||||
servers = [entry, ]
|
||||
|
||||
try:
|
||||
# Send notification
|
||||
if not server.notify(
|
||||
title=title,
|
||||
body=conversion_map[server.notify_format],
|
||||
notify_type=notify_type):
|
||||
for server in servers:
|
||||
# Apply our tag matching based on our defined logic
|
||||
if tag is not None and not is_exclusive_match(
|
||||
logic=tag, data=server.tags):
|
||||
continue
|
||||
|
||||
# Toggle our return status flag
|
||||
# If our code reaches here, we either did not define a tag (it
|
||||
# was set to None), or we did define a tag and the logic above
|
||||
# determined we need to notify the service it's associated with
|
||||
if server.notify_format not in conversion_map:
|
||||
if body_format == NotifyFormat.MARKDOWN and \
|
||||
server.notify_format == NotifyFormat.HTML:
|
||||
|
||||
# Apply Markdown
|
||||
conversion_map[server.notify_format] = markdown(body)
|
||||
|
||||
                elif body_format == NotifyFormat.TEXT and \
                        server.notify_format == NotifyFormat.HTML:

                    # Basic TEXT to HTML format map; supports keys only
                    re_map = {
                        # Support Ampersand
                        r'&': '&amp;',

                        # Spaces to &nbsp; for formatting purposes since
                        # multiple spaces are treated as one and this may
                        # not be the caller's intention
                        r' ': '&nbsp;',

                        # Tab support
                        r'\t': '&emsp;',

                        # Greater than and Less than Characters
                        r'>': '&gt;',
                        r'<': '&lt;',
                    }

                    # Compile our map
                    re_table = re.compile(
                        r'(' + '|'.join(
                            map(re.escape, re_map.keys())) + r')',
                        re.IGNORECASE,
                    )

                    # Execute our map against our body in addition to
                    # swapping out new lines and replacing them with <br/>
                    conversion_map[server.notify_format] = \
                        re.sub(r'\r*\n', '<br/>\r\n',
                               re_table.sub(
                                   lambda x: re_map[x.group()], body))

                else:
                    # Store entry directly
                    conversion_map[server.notify_format] = body
|
||||
try:
|
||||
# Send notification
|
||||
if not server.notify(
|
||||
body=conversion_map[server.notify_format],
|
||||
title=title,
|
||||
notify_type=notify_type):
|
||||
|
||||
# Toggle our return status flag
|
||||
status = False
|
||||
|
||||
except TypeError:
|
||||
# These are our internally thrown notifications
|
||||
status = False
|
||||
|
||||
except TypeError:
|
||||
# These are our internally thrown notifications
|
||||
# TODO: Change this to a custom one such as AppriseNotifyError
|
||||
status = False
|
||||
|
||||
except Exception:
|
||||
# A catch all so we don't have to abort early
|
||||
# just because one of our plugins has a bug in it.
|
||||
logging.exception("Notification Exception")
|
||||
status = False
|
||||
except Exception:
|
||||
# A catch all so we don't have to abort early
|
||||
# just because one of our plugins has a bug in it.
|
||||
logger.exception("Notification Exception")
|
||||
status = False
|
||||
|
||||
return status
|
||||
|
||||
def details(self):
|
||||
def details(self, lang=None):
|
||||
"""
|
||||
Returns the details associated with the Apprise object
|
||||
|
||||
|
@@ -298,24 +411,26 @@ class Apprise(object):
|
|||
}
|
||||
|
||||
# to add it's mapping to our hash table
|
||||
for entry in sorted(dir(plugins)):
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(plugins, entry)
|
||||
if not hasattr(plugin, 'app_id'): # pragma: no branch
|
||||
# Filter out non-notification modules
|
||||
continue
|
||||
for plugin in set(plugins.SCHEMA_MAP.values()):
|
||||
|
||||
# Standard protocol(s) should be None or a tuple
|
||||
protocols = getattr(plugin, 'protocol', None)
|
||||
if compat_is_basestring(protocols):
|
||||
if isinstance(protocols, six.string_types):
|
||||
protocols = (protocols, )
|
||||
|
||||
# Secure protocol(s) should be None or a tuple
|
||||
secure_protocols = getattr(plugin, 'secure_protocol', None)
|
||||
if compat_is_basestring(secure_protocols):
|
||||
if isinstance(secure_protocols, six.string_types):
|
||||
secure_protocols = (secure_protocols, )
|
||||
|
||||
if not lang:
|
||||
# Simply return our results
|
||||
details = plugins.details(plugin)
|
||||
else:
|
||||
# Emulate the specified language when returning our results
|
||||
with self.locale.lang_at(lang):
|
||||
details = plugins.details(plugin)
|
||||
|
||||
# Build our response object
|
||||
response['schemas'].append({
|
||||
'service_name': getattr(plugin, 'service_name', None),
|
||||
|
@@ -323,12 +438,100 @@ class Apprise(object):
|
|||
'setup_url': getattr(plugin, 'setup_url', None),
|
||||
'protocols': protocols,
|
||||
'secure_protocols': secure_protocols,
|
||||
'details': details,
|
||||
})
|
||||
|
||||
return response
|
||||
|
||||
def urls(self):
|
||||
"""
|
||||
Returns all of the loaded URLs defined in this apprise object.
|
||||
"""
|
||||
return [x.url() for x in self.servers]
|
||||
|
||||
def pop(self, index):
|
||||
"""
|
||||
Removes an indexed Notification Service from the stack and returns it.
|
||||
|
||||
The thing is we can never pop AppriseConfig() entries, only what was
|
||||
loaded within them. So pop needs to carefully iterate over our list
|
||||
and only track actual entries.
|
||||
"""
|
||||
|
||||
# Tracking variables
|
||||
prev_offset = -1
|
||||
offset = prev_offset
|
||||
|
||||
for idx, s in enumerate(self.servers):
|
||||
if isinstance(s, (ConfigBase, AppriseConfig)):
|
||||
servers = s.servers()
|
||||
if len(servers) > 0:
|
||||
# Acquire a new maximum offset to work with
|
||||
offset = prev_offset + len(servers)
|
||||
|
||||
if offset >= index:
|
||||
# we can pop an element from our config stack
|
||||
fn = s.pop if isinstance(s, ConfigBase) \
|
||||
else s.server_pop
|
||||
|
||||
return fn(index if prev_offset == -1
|
||||
else (index - prev_offset - 1))
|
||||
|
||||
else:
|
||||
offset = prev_offset + 1
|
||||
if offset == index:
|
||||
return self.servers.pop(idx)
|
||||
|
||||
# Update our old offset
|
||||
prev_offset = offset
|
||||
|
||||
# If we reach here, then we indexed out of range
|
||||
raise IndexError('list index out of range')
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the indexed server entry of a loaded notification server
|
||||
"""
|
||||
# Tracking variables
|
||||
prev_offset = -1
|
||||
offset = prev_offset
|
||||
|
||||
for idx, s in enumerate(self.servers):
|
||||
if isinstance(s, (ConfigBase, AppriseConfig)):
|
||||
# Get our list of servers associate with our config object
|
||||
servers = s.servers()
|
||||
if len(servers) > 0:
|
||||
# Acquire a new maximum offset to work with
|
||||
offset = prev_offset + len(servers)
|
||||
|
||||
if offset >= index:
|
||||
return servers[index if prev_offset == -1
|
||||
else (index - prev_offset - 1)]
|
||||
|
||||
else:
|
||||
offset = prev_offset + 1
|
||||
if offset == index:
|
||||
return self.servers[idx]
|
||||
|
||||
# Update our old offset
|
||||
prev_offset = offset
|
||||
|
||||
# If we reach here, then we indexed out of range
|
||||
raise IndexError('list index out of range')
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Returns an iterator to each of our servers loaded. This includes those
|
||||
found inside configuration.
|
||||
"""
|
||||
return chain(*[[s] if not isinstance(s, (ConfigBase, AppriseConfig))
|
||||
else iter(s.servers()) for s in self.servers])
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of servers loaded
|
||||
Returns the number of servers loaded; this includes those found within
|
||||
loaded configuration. This function never actually counts the
|
||||
Config entry themselves (if they exist), only what they contain.
|
||||
"""
|
||||
return len(self.servers)
|
||||
return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig))
|
||||
else len(s.servers()) for s in self.servers])
|
||||
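A brief, hedged illustration of the new __iter__/__len__ semantics (the config path and URL, and the assumed three entries inside the file, are placeholders):

# apobj = Apprise()
# apobj.add(AppriseConfig('/etc/apprise.cfg'))  # say the file defines 3 URLs
# apobj.add('json://localhost/webhook')
#
# len(apobj)            # -> 4: the config entry itself is never counted,
#                       #    only the 3 servers it yields plus the json:// one
# for server in apobj:  # iterates over those same 4 NotifyBase instances
#     print(server.url())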
|
|
|
@@ -1,20 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Apprise Asset
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
|
||||
|
@@ -227,7 +234,7 @@ class AppriseAsset(object):
             'app_desc': self.app_desc,
             'default_extension': self.default_extension,
             'theme': self.theme,
-            'image_path_mask': self.image_url_mask,
+            'image_path_mask': self.image_path_mask,
             'image_url_mask': self.image_url_mask,
             'image_url_logo': self.image_url_logo,
         }

libs/apprise/AppriseConfig.py (new file, 289 lines)
|
@@ -0,0 +1,289 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
|
||||
from . import config
|
||||
from . import ConfigBase
|
||||
from . import URLBase
|
||||
from .AppriseAsset import AppriseAsset
|
||||
|
||||
from .utils import GET_SCHEMA_RE
|
||||
from .utils import parse_list
|
||||
from .utils import is_exclusive_match
|
||||
from .logger import logger
|
||||
|
||||
|
||||
class AppriseConfig(object):
|
||||
"""
|
||||
Our Apprise Configuration File Manager
|
||||
|
||||
- Supports a list of URLs defined one after another (text format)
|
||||
- Supports a distinct YAML configuration format
|
||||
|
||||
"""
|
||||
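A hedged illustration of the simpler text format mentioned above: a configuration file is just one Apprise URL per line (the path and URLs are placeholders, and treating '#' lines as comments is an assumption about the text parser):

#   contents of /etc/apprise.cfg (hypothetical path):
#       mailto://user:pass@example.com
#       json://localhost/webhook
#
#   loading it:
#       apobj = Apprise()
#       apobj.add(AppriseConfig('/etc/apprise.cfg'))
#       apobj.notify(body='sent to every URL defined in the file')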
|
||||
def __init__(self, paths=None, asset=None, cache=True, **kwargs):
|
||||
"""
|
||||
Loads all of the paths specified (if any).
|
||||
|
||||
The path can either be a single string identifying one explicit
|
||||
location, otherwise you can pass in a series of locations to scan
|
||||
via a list.
|
||||
|
||||
If no path is specified then a default list is used.
|
||||
|
||||
If cache is set to True, then after the data is loaded, it's cached
|
||||
within this object so it isn't retrieved again later.
|
||||
"""
|
||||
|
||||
# Initialize a server list of URLs
|
||||
self.configs = list()
|
||||
|
||||
# Prepare our Asset Object
|
||||
self.asset = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if paths is not None:
|
||||
# Store our path(s)
|
||||
self.add(paths)
|
||||
|
||||
return
|
||||
|
||||
def add(self, configs, asset=None, tag=None):
|
||||
"""
|
||||
Adds one or more config URLs into our list.
|
||||
|
||||
You can override the global asset if you wish by including it with the
|
||||
config(s) that you add.
|
||||
|
||||
"""
|
||||
|
||||
# Initialize our return status
|
||||
return_status = True
|
||||
|
||||
if isinstance(asset, AppriseAsset):
|
||||
# prepare default asset
|
||||
asset = self.asset
|
||||
|
||||
if isinstance(configs, ConfigBase):
|
||||
# Go ahead and just add our configuration into our list
|
||||
self.configs.append(configs)
|
||||
return True
|
||||
|
||||
elif isinstance(configs, six.string_types):
|
||||
# Save our path
|
||||
configs = (configs, )
|
||||
|
||||
elif not isinstance(configs, (tuple, set, list)):
|
||||
logger.error(
|
||||
'An invalid configuration path (type={}) was '
|
||||
'specified.'.format(type(configs)))
|
||||
return False
|
||||
|
||||
# Iterate over our
|
||||
for _config in configs:
|
||||
|
||||
if isinstance(_config, ConfigBase):
|
||||
# Go ahead and just add our configuration into our list
|
||||
self.configs.append(_config)
|
||||
continue
|
||||
|
||||
elif not isinstance(_config, six.string_types):
|
||||
logger.warning(
|
||||
"An invalid configuration (type={}) was specified.".format(
|
||||
type(_config)))
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
logger.debug("Loading configuration: {}".format(_config))
|
||||
|
||||
# Instantiate ourselves an object, this function throws or
|
||||
# returns None if it fails
|
||||
instance = AppriseConfig.instantiate(_config, asset=asset, tag=tag)
|
||||
if not isinstance(instance, ConfigBase):
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
# Add our initialized plugin to our server listings
|
||||
self.configs.append(instance)
|
||||
|
||||
# Return our status
|
||||
return return_status
|
||||
|
||||
def servers(self, tag=None, cache=True):
|
||||
"""
|
||||
Returns all of our servers dynamically build based on parsed
|
||||
configuration.
|
||||
|
||||
If a tag is specified, it applies to the configuration sources
|
||||
themselves and not the notification services inside them.
|
||||
|
||||
This is for filtering the configuration files polled for
|
||||
results.
|
||||
|
||||
"""
|
||||
# Build our tag setup
|
||||
# - top level entries are treated as an 'or'
|
||||
# - second level (or more) entries are treated as 'and'
|
||||
#
|
||||
# examples:
|
||||
# tag="tagA, tagB" = tagA or tagB
|
||||
# tag=['tagA', 'tagB'] = tagA or tagB
|
||||
# tag=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB
|
||||
# tag=[('tagB', 'tagC')] = tagB and tagC
|
||||
|
||||
response = list()
|
||||
|
||||
for entry in self.configs:
|
||||
|
||||
# Apply our tag matching based on our defined logic
|
||||
if tag is not None and not is_exclusive_match(
|
||||
logic=tag, data=entry.tags):
|
||||
continue
|
||||
|
||||
# Build ourselves a list of services dynamically and return them
|
||||
# as a list
|
||||
response.extend(entry.servers(cache=cache))
|
||||
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def instantiate(url, asset=None, tag=None, suppress_exceptions=True):
|
||||
"""
|
||||
Returns the instance of an instantiated configuration plugin based on
|
||||
the provided Server URL. If the url fails to be parsed, then None
|
||||
is returned.
|
||||
|
||||
"""
|
||||
# Attempt to acquire the schema at the very least to allow our
|
||||
# configuration based urls.
|
||||
schema = GET_SCHEMA_RE.match(url)
|
||||
if schema is None:
|
||||
# Plan B is to assume we're dealing with a file
|
||||
schema = config.ConfigFile.protocol
|
||||
url = '{}://{}'.format(schema, URLBase.quote(url))
|
||||
|
||||
else:
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in config.SCHEMA_MAP:
|
||||
logger.warning('Unsupported schema {}.'.format(schema))
|
||||
return None
|
||||
|
||||
# Parse our url details of the server object as dictionary containing
|
||||
# all of the information parsed from our URL
|
||||
results = config.SCHEMA_MAP[schema].parse_url(url)
|
||||
|
||||
if not results:
|
||||
# Failed to parse the server URL
|
||||
logger.warning('Unparseable URL {}.'.format(url))
|
||||
return None
|
||||
|
||||
# Build a list of tags to associate with the newly added notifications
|
||||
results['tag'] = set(parse_list(tag))
|
||||
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if suppress_exceptions:
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information
|
||||
cfg_plugin = config.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
except Exception:
|
||||
# the arguments are invalid or can not be used.
|
||||
logger.warning('Could not load URL: %s' % url)
|
||||
return None
|
||||
|
||||
else:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information but don't wrap it in a try catch
|
||||
cfg_plugin = config.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
return cfg_plugin
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Empties our configuration list
|
||||
|
||||
"""
|
||||
self.configs[:] = []
|
||||
|
||||
def server_pop(self, index):
|
||||
"""
|
||||
Removes an indexed Apprise Notification from the servers
|
||||
"""
|
||||
|
||||
# Tracking variables
|
||||
prev_offset = -1
|
||||
offset = prev_offset
|
||||
|
||||
for entry in self.configs:
|
||||
servers = entry.servers(cache=True)
|
||||
if len(servers) > 0:
|
||||
# Acquire a new maximum offset to work with
|
||||
offset = prev_offset + len(servers)
|
||||
|
||||
if offset >= index:
|
||||
# we can pop a notification from our config stack
|
||||
return entry.pop(index if prev_offset == -1
|
||||
else (index - prev_offset - 1))
|
||||
|
||||
# Update our old offset
|
||||
prev_offset = offset
|
||||
|
||||
# If we reach here, then we indexed out of range
|
||||
raise IndexError('list index out of range')
|
||||
|
||||
def pop(self, index):
|
||||
"""
|
||||
Removes an indexed Apprise Configuration from the stack and
|
||||
returns it.
|
||||
"""
|
||||
# Remove our entry
|
||||
return self.configs.pop(index)
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the indexed config entry of a loaded apprise configuration
|
||||
"""
|
||||
return self.configs[index]
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Returns an iterator to our config list
|
||||
"""
|
||||
return iter(self.configs)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of config entries loaded
|
||||
"""
|
||||
return len(self.configs)
|
libs/apprise/AppriseLocale.py (new file, 215 lines)
|
@@ -0,0 +1,215 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import ctypes
|
||||
import locale
|
||||
import contextlib
|
||||
from os.path import join
|
||||
from os.path import dirname
|
||||
from os.path import abspath
|
||||
|
||||
# Define our translation domain
|
||||
DOMAIN = 'apprise'
|
||||
LOCALE_DIR = abspath(join(dirname(__file__), 'i18n'))
|
||||
|
||||
# This gets toggled to True if we succeed
|
||||
GETTEXT_LOADED = False
|
||||
|
||||
try:
|
||||
# Initialize gettext
|
||||
import gettext
|
||||
|
||||
# install() creates a _() in our builtins
|
||||
gettext.install(DOMAIN, localedir=LOCALE_DIR)
|
||||
|
||||
# Toggle our flag
|
||||
GETTEXT_LOADED = True
|
||||
|
||||
except ImportError:
|
||||
# gettext isn't available; no problem, just fall back to using
|
||||
# the library features without multi-language support.
|
||||
try:
|
||||
# Python v2.7
|
||||
import __builtin__
|
||||
__builtin__.__dict__['_'] = lambda x: x # pragma: no branch
|
||||
|
||||
except ImportError:
|
||||
# Python v3.4+
|
||||
import builtins
|
||||
builtins.__dict__['_'] = lambda x: x # pragma: no branch
|
||||
|
||||
|
||||
class LazyTranslation(object):
|
||||
"""
|
||||
Doesn't translate anything until str() or unicode() references
|
||||
are made.
|
||||
|
||||
"""
|
||||
def __init__(self, text, *args, **kwargs):
|
||||
"""
|
||||
Store our text
|
||||
"""
|
||||
self.text = text
|
||||
|
||||
super(LazyTranslation, self).__init__(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return gettext.gettext(self.text)
|
||||
|
||||
|
||||
# Lazy translation handling
|
||||
def gettext_lazy(text):
|
||||
"""
|
||||
A dummy function that can be referenced
|
||||
"""
|
||||
return LazyTranslation(text=text)
|
||||
|
||||
|
||||
class AppriseLocale(object):
|
||||
"""
|
||||
A wrapper class to gettext so that we can manipulate multiple languages
|
||||
on the fly if required.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, language=None):
|
||||
"""
|
||||
Initializes our object, if a language is specified, then we
|
||||
initialize ourselves to that, otherwise we use whatever we detect
|
||||
from the local operating system. If all else fails, we resort to the
|
||||
defined default_language.
|
||||
|
||||
"""
|
||||
|
||||
# Cache previously loaded translations
|
||||
self._gtobjs = {}
|
||||
|
||||
# Get our language
|
||||
self.lang = AppriseLocale.detect_language(language)
|
||||
|
||||
if GETTEXT_LOADED is False:
|
||||
# We're done
|
||||
return
|
||||
|
||||
if self.lang:
|
||||
# Load our gettext object and install our language
|
||||
try:
|
||||
self._gtobjs[self.lang] = gettext.translation(
|
||||
DOMAIN, localedir=LOCALE_DIR, languages=[self.lang])
|
||||
|
||||
# Install our language
|
||||
self._gtobjs[self.lang].install()
|
||||
|
||||
except IOError:
|
||||
# This occurs if we can't access/load our translations
|
||||
pass
|
||||
|
||||
@contextlib.contextmanager
|
||||
def lang_at(self, lang):
|
||||
"""
|
||||
The syntax works as:
|
||||
with at.lang_at('fr'):
|
||||
# apprise works as though the french language has been
|
||||
# defined. afterwards, the language falls back to whatever
|
||||
# it was.
|
||||
"""
|
||||
|
||||
if GETTEXT_LOADED is False:
|
||||
# yield
|
||||
yield
|
||||
|
||||
# we're done
|
||||
return
|
||||
|
||||
# Tidy the language
|
||||
lang = AppriseLocale.detect_language(lang, detect_fallback=False)
|
||||
|
||||
# Now attempt to load it
|
||||
try:
|
||||
if lang in self._gtobjs:
|
||||
if lang != self.lang:
|
||||
# Install our language only if we aren't using it
|
||||
# already
|
||||
self._gtobjs[lang].install()
|
||||
|
||||
else:
|
||||
self._gtobjs[lang] = gettext.translation(
|
||||
DOMAIN, localedir=LOCALE_DIR, languages=[self.lang])
|
||||
|
||||
# Install our language
|
||||
self._gtobjs[lang].install()
|
||||
|
||||
# Yield
|
||||
yield
|
||||
|
||||
except (IOError, KeyError):
|
||||
# This occurs if we can't access/load our translations
|
||||
# Yield regardless
|
||||
yield
|
||||
|
||||
finally:
|
||||
# Fall back to our previous language
|
||||
if lang != self.lang and lang in self._gtobjs:
|
||||
# Install our language
|
||||
self._gtobjs[self.lang].install()
|
||||
|
||||
return
|
||||
|
||||
@staticmethod
|
||||
def detect_language(lang=None, detect_fallback=True):
|
||||
"""
|
||||
returns the language (if it's retrievable)
|
||||
"""
|
||||
# We want to only use the 2 character version of this language
|
||||
# hence en_CA becomes en, en_US becomes en.
|
||||
if not isinstance(lang, six.string_types):
|
||||
if detect_fallback is False:
|
||||
# no detection enabled; we're done
|
||||
return None
|
||||
|
||||
if hasattr(ctypes, 'windll'):
|
||||
windll = ctypes.windll.kernel32
|
||||
try:
|
||||
lang = locale.windows_locale[
|
||||
windll.GetUserDefaultUILanguage()]
|
||||
|
||||
# Our detected windows language
|
||||
return lang[0:2].lower()
|
||||
|
||||
except (TypeError, KeyError):
|
||||
# Fallback to posix detection
|
||||
pass
|
||||
|
||||
try:
|
||||
# Detect language
|
||||
lang = locale.getdefaultlocale()[0]
|
||||
|
||||
except TypeError:
|
||||
# None is returned if the default can't be determined
|
||||
# we're done in this case
|
||||
return None
|
||||
|
||||
return None if not lang else lang[0:2].lower()
|
libs/apprise/URLBase.py (new file, 477 lines)
|
@@ -0,0 +1,477 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import logging
|
||||
from time import sleep
|
||||
from datetime import datetime
|
||||
from xml.sax.saxutils import escape as sax_escape
|
||||
|
||||
try:
|
||||
# Python 2.7
|
||||
from urllib import unquote as _unquote
|
||||
from urllib import quote as _quote
|
||||
from urllib import urlencode as _urlencode
|
||||
|
||||
except ImportError:
|
||||
# Python 3.x
|
||||
from urllib.parse import unquote as _unquote
|
||||
from urllib.parse import quote as _quote
|
||||
from urllib.parse import urlencode as _urlencode
|
||||
|
||||
from .AppriseAsset import AppriseAsset
|
||||
from .utils import parse_url
|
||||
from .utils import parse_bool
|
||||
from .utils import parse_list
|
||||
|
||||
# Used to break a path list into parts
|
||||
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Define the HTML Lookup Table
|
||||
HTML_LOOKUP = {
|
||||
400: 'Bad Request - Unsupported Parameters.',
|
||||
401: 'Verification Failed.',
|
||||
404: 'Page not found.',
|
||||
405: 'Method not allowed.',
|
||||
500: 'Internal server error.',
|
||||
503: 'Servers are overloaded.',
|
||||
}
|
||||
|
||||
|
||||
class URLBase(object):
|
||||
"""
|
||||
This is the base class for all URL Manipulation
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the URL
|
||||
service_name = None
|
||||
|
||||
# The default simple (insecure) protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocol:// (in this example they would specify 'protocol')
|
||||
protocol = None
|
||||
|
||||
# The default secure protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocols:// (in this example they would specify 'protocols')
|
||||
# This value can be the same as the defined protocol.
|
||||
secure_protocol = None
|
||||
|
||||
# Throttle
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Maintain a set of tags to associate with this specific notification
|
||||
tags = set()
|
||||
|
||||
# Secure sites should be verified against a Certificate Authority
|
||||
verify_certificate = True
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self, asset=None, **kwargs):
|
||||
"""
|
||||
Initialize some general logging and common server arguments that will
|
||||
keep things consistent when working with the children that
|
||||
inherit this class.
|
||||
|
||||
"""
|
||||
# Prepare our Asset Object
|
||||
self.asset = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
# Certificate Verification (for SSL calls); default to being enabled
|
||||
self.verify_certificate = kwargs.get('verify', True)
|
||||
|
||||
# Secure Mode
|
||||
self.secure = kwargs.get('secure', False)
|
||||
|
||||
self.host = URLBase.unquote(kwargs.get('host'))
|
||||
self.port = kwargs.get('port')
|
||||
if self.port:
|
||||
try:
|
||||
self.port = int(self.port)
|
||||
|
||||
except (TypeError, ValueError):
|
||||
self.port = None
|
||||
|
||||
self.user = kwargs.get('user')
|
||||
if self.user:
|
||||
# Always unquote user if it exists
|
||||
self.user = URLBase.unquote(self.user)
|
||||
|
||||
self.password = kwargs.get('password')
|
||||
if self.password:
|
||||
# Always unquote the password if it exists
|
||||
self.password = URLBase.unquote(self.password)
|
||||
|
||||
if 'tag' in kwargs:
|
||||
# We want to associate some tags with our notification service.
|
||||
# the code below gets the 'tag' argument if defined, otherwise
|
||||
# it just falls back to whatever was already defined globally
|
||||
self.tags = set(parse_list(kwargs.get('tag'), self.tags))
|
||||
|
||||
# Tracks the time any i/o was made to the remote server. This value
|
||||
# is automatically set and controlled through the throttle() call.
|
||||
self._last_io_datetime = None
|
||||
|
||||
def throttle(self, last_io=None, wait=None):
|
||||
"""
|
||||
A common throttle control
|
||||
|
||||
if a wait is specified, then it will force a sleep of the
|
||||
specified time if it is larger then the calculated throttle
|
||||
time.
|
||||
"""
|
||||
|
||||
if last_io is not None:
|
||||
# Assume specified last_io
|
||||
self._last_io_datetime = last_io
|
||||
|
||||
# Get ourselves a reference time of 'now'
|
||||
reference = datetime.now()
|
||||
|
||||
if self._last_io_datetime is None:
|
||||
# Set time to 'now' and no need to throttle
|
||||
self._last_io_datetime = reference
|
||||
return
|
||||
|
||||
if self.request_rate_per_sec <= 0.0:
|
||||
# We're done if there is no throttle limit set
|
||||
return
|
||||
|
||||
# If we reach here, we need to do additional logic.
|
||||
# If the difference between the reference time and 'now' is less than
|
||||
# the defined request_rate_per_sec then we need to throttle for the
|
||||
# remaining balance of this time.
|
||||
|
||||
elapsed = (reference - self._last_io_datetime).total_seconds()
|
||||
|
||||
if wait is not None:
|
||||
self.logger.debug('Throttling forced for {}s...'.format(wait))
|
||||
sleep(wait)
|
||||
|
||||
elif elapsed < self.request_rate_per_sec:
|
||||
self.logger.debug('Throttling for {}s...'.format(
|
||||
self.request_rate_per_sec - elapsed))
|
||||
sleep(self.request_rate_per_sec - elapsed)
|
||||
|
||||
# Update our timestamp before we leave
|
||||
self._last_io_datetime = datetime.now()
|
||||
return
|
||||
|
||||
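A hedged sketch of how a subclass would normally lean on this throttle (the plugin name and rate are illustrative, not a real Apprise plugin):

# class NotifyExample(URLBase):
#     # despite the name, this acts as the minimum number of seconds
#     # between requests; here: at most one request every 2 seconds
#     request_rate_per_sec = 2
#
#     def send(self, payload):
#         self.throttle()   # sleeps just long enough to honour the rate
#         ...               # perform the actual remote call here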
def url(self):
|
||||
"""
|
||||
Assembles the URL associated with the notification based on the
|
||||
arguments provided.
|
||||
|
||||
"""
|
||||
raise NotImplementedError("url() is implemented by the child class.")
|
||||
|
||||
def __contains__(self, tags):
|
||||
"""
|
||||
Returns true if the tag specified is associated with this notification.
|
||||
|
||||
tag can also be a tuple, set, and/or list
|
||||
|
||||
"""
|
||||
if isinstance(tags, (tuple, set, list)):
|
||||
return bool(set(tags) & self.tags)
|
||||
|
||||
# return any match
|
||||
return tags in self.tags
|
||||
|
||||
@staticmethod
|
||||
def escape_html(html, convert_new_lines=False, whitespace=True):
|
||||
"""
|
||||
Takes html text as input and escapes it so that it won't
|
||||
conflict with any xml/html wrapping characters.
|
||||
|
||||
Args:
|
||||
html (str): The HTML code to escape
|
||||
convert_new_lines (:obj:`bool`, optional): escape new lines (\n)
|
||||
whitespace (:obj:`bool`, optional): escape whitespace
|
||||
|
||||
Returns:
|
||||
str: The escaped html
|
||||
"""
|
||||
if not isinstance(html, six.string_types) or not html:
|
||||
return ''
|
||||
|
||||
        # Escape HTML
        escaped = sax_escape(html, {"'": "&apos;", "\"": "&quot;"})

        if whitespace:
            # Tidy up whitespace too
            escaped = escaped\
                .replace(u'\t', u'&emsp;')\
                .replace(u' ', u'&nbsp;')
|
||||
|
||||
if convert_new_lines:
|
||||
return escaped.replace(u'\n', u'<br/>')
|
||||
|
||||
return escaped
|
||||
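For instance (illustration only, using the entity mappings restored above):

# URLBase.escape_html("<b>a&b</b>", whitespace=False)
#   -> '&lt;b&gt;a&amp;b&lt;/b&gt;'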
|
||||
    @staticmethod
    def unquote(content, encoding='utf-8', errors='replace'):
        """
        Replace %xx escapes by their single-character equivalent. The optional
        encoding and errors parameters specify how to decode percent-encoded
        sequences.

        Wrapper to Python's unquote while remaining compatible with both
        Python 2 & 3 since the reference to this function changed between
        versions.

        Note: errors set to 'replace' means that invalid sequences are
              replaced by a placeholder character.

        Args:
            content (str): The quoted URI string you wish to unquote
            encoding (:obj:`str`, optional): encoding type
            errors (:obj:`str`, errors): how to handle invalid character found
                in encoded string (defined by encoding)

        Returns:
            str: The unquoted URI string
        """
        if not content:
            return ''

        try:
            # Python v3.x
            return _unquote(content, encoding=encoding, errors=errors)

        except TypeError:
            # Python v2.7
            return _unquote(content)

    @staticmethod
    def quote(content, safe='/', encoding=None, errors=None):
        """ Replaces single character non-ascii characters and URI specific
        ones by their %xx code.

        Wrapper to Python's quote while remaining compatible with both
        Python 2 & 3 since the reference to this function changed between
        versions.

        Args:
            content (str): The URI string you wish to quote
            safe (str): non-ascii characters and URI specific ones that you
                do not wish to escape (if detected). Setting this
                string to an empty one causes everything to be
                escaped.
            encoding (:obj:`str`, optional): encoding type
            errors (:obj:`str`, errors): how to handle invalid character found
                in encoded string (defined by encoding)

        Returns:
            str: The quoted URI string
        """
        if not content:
            return ''

        try:
            # Python v3.x
            return _quote(content, safe=safe, encoding=encoding, errors=errors)

        except TypeError:
            # Python v2.7
            return _quote(content, safe=safe)

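# Illustrative only (not part of the diff): a round trip through the standard
# library calls the two helpers above fall back to.
try:
    # Python 3
    from urllib.parse import quote as _quote, unquote as _unquote
except ImportError:
    # Python 2
    from urllib import quote as _quote, unquote as _unquote

assert _quote('path with spaces/#fragment', safe='/') == 'path%20with%20spaces/%23fragment'
assert _unquote('path%20with%20spaces/%23fragment') == 'path with spaces/#fragment'
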
    @staticmethod
    def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
        """Convert a mapping object or a sequence of two-element tuples

        Wrapper to Python's urlencode while remaining compatible with both
        Python 2 & 3 since the reference to this function changed between
        versions.

        The resulting string is a series of key=value pairs separated by '&'
        characters, where both key and value are quoted using the quote()
        function.

        Note: If the dictionary entry contains an entry that is set to None
              it is not included in the final result set. If you want to
              pass in an empty variable, set it to an empty string.

        Args:
            query (dict): The dictionary to encode
            doseq (:obj:`bool`, optional): Handle sequences
            safe (:obj:`str`): non-ascii characters and URI specific ones that
                you do not wish to escape (if detected). Setting this string
                to an empty one causes everything to be escaped.
            encoding (:obj:`str`, optional): encoding type
            errors (:obj:`str`, errors): how to handle invalid character found
                in encoded string (defined by encoding)

        Returns:
            str: The escaped parameters returned as a string
        """
        # Tidy query by eliminating any records set to None
        _query = {k: v for (k, v) in query.items() if v is not None}
        try:
            # Python v3.x
            return _urlencode(
                _query, doseq=doseq, safe=safe, encoding=encoding,
                errors=errors)

        except TypeError:
            # Python v2.7
            return _urlencode(_query)

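# Illustrative only: the None-filtering step above drops unset entries entirely
# while empty strings are preserved, for example:
try:
    from urllib.parse import urlencode as _urlencode   # Python 3
except ImportError:
    from urllib import urlencode as _urlencode         # Python 2

query = {'verify': 'yes', 'format': None, 'encoding': ''}
_query = {k: v for (k, v) in query.items() if v is not None}
print(_urlencode(_query))   # -> verify=yes&encoding=  (the None entry is gone)
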
    @staticmethod
    def split_path(path, unquote=True):
        """Splits a URL up into a list object.

        Parses a specified URL and breaks it into a list.

        Args:
            path (str): The path to split up into a list.
            unquote (:obj:`bool`, optional): call unquote on each element
                added to the returned list.

        Returns:
            list: A list containing all of the elements in the path
        """

        try:
            paths = PATHSPLIT_LIST_DELIM.split(path.lstrip('/'))
            if unquote:
                paths = \
                    [URLBase.unquote(x) for x in filter(bool, paths)]

        except AttributeError:
            # path is not usable, we still want to gracefully return an
            # empty list
            paths = []

        return paths

    @staticmethod
    def parse_list(content, unquote=True):
        """A wrapper to utils.parse_list() with unquoting support

        Parses a specified set of data and breaks it into a list.

        Args:
            content (str): The path to split up into a list. If a list is
                provided, then its individual entries are processed.

            unquote (:obj:`bool`, optional): call unquote on each element
                added to the returned list.

        Returns:
            list: A unique list containing all of the elements in the path
        """

        content = parse_list(content)
        if unquote:
            content = \
                [URLBase.unquote(x) for x in filter(bool, content)]

        return content

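# A hedged usage sketch of the two helpers above; the outputs noted in the
# comments are the expected behaviour (empty elements are filtered out and
# each element is unquoted by default), not verified against this exact build.
from apprise import URLBase

print(URLBase.split_path('/a/b%20c//d'))        # expected: ['a', 'b c', 'd']
print(URLBase.parse_list('tagA, tagB tagA'))    # expected: unique entries such as ['tagA', 'tagB']
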
    @property
    def app_id(self):
        return self.asset.app_id

    @property
    def app_desc(self):
        return self.asset.app_desc

    @property
    def app_url(self):
        return self.asset.app_url

    @staticmethod
    def parse_url(url, verify_host=True):
        """Parses the URL and returns it broken apart into a dictionary.

        This is very specific and customized for Apprise.

        Args:
            url (str): The URL you want to fully parse.
            verify_host (:obj:`bool`, optional): a flag kept with the parsed
                URL which some child classes will later use to verify SSL
                keys (if SSL transactions take place). Unless under very
                specific circumstances, it is strongly recommended that
                you leave this default value set to True.

        Returns:
            A dictionary is returned containing the URL fully parsed if
            successful, otherwise None is returned.
        """

        results = parse_url(
            url, default_schema='unknown', verify_host=verify_host)

        if not results:
            # We're done; we failed to parse our url
            return results

        # if our URL ends with an 's', then assume our secure flag is set.
        results['secure'] = (results['schema'][-1] == 's')

        # Support SSL Certificate 'verify' keyword. Default to being enabled
        results['verify'] = True

        if 'verify' in results['qsd']:
            results['verify'] = parse_bool(
                results['qsd'].get('verify', True))

        # Password overrides
        if 'pass' in results['qsd']:
            results['password'] = results['qsd']['pass']

        # User overrides
        if 'user' in results['qsd']:
            results['user'] = results['qsd']['user']

        return results

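# Illustrative only (example.com is a placeholder): the query-string overrides
# handled above map onto the parsed result like so.
from apprise import URLBase

results = URLBase.parse_url(
    'https://example.com:8080/path?verify=no&user=admin&pass=secret')
# results['secure']   -> True    (the schema ends with an 's')
# results['verify']   -> False   (overridden by ?verify=no)
# results['user']     -> 'admin'
# results['password'] -> 'secret'
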
    @staticmethod
    def http_response_code_lookup(code, response_mask=None):
        """Parses the integer response code returned by a remote call from
        a web request into its human readable string version.

        You can over-ride codes or add new ones by providing your own
        response_mask that contains a dictionary of integer -> string mapped
        variables
        """
        if isinstance(response_mask, dict):
            # Apply any/all response code over-rides defined
            HTML_LOOKUP.update(response_mask)

        # Look up our response
        try:
            response = HTML_LOOKUP[code]

        except KeyError:
            response = ''

        return response

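# Illustrative only: a response_mask can supply or override the human readable
# text for a status code (599 and its label here are arbitrary examples).
from apprise import URLBase

print(URLBase.http_response_code_lookup(599))                         # '' for codes not in the lookup
print(URLBase.http_response_code_lookup(599, {599: 'Edge Timeout'}))  # 'Edge Timeout'
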
@@ -1,26 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# base class for easier library inclusion
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
__title__ = 'apprise'
|
||||
__version__ = '0.5.0'
|
||||
__author__ = 'Chris Caron <lead2gold@gmail.com>'
|
||||
__license__ = 'GPLv3'
|
||||
__copywrite__ = 'Copyright 2017-2018 Chris Caron <lead2gold@gmail.com>'
|
||||
__version__ = '0.7.8'
|
||||
__author__ = 'Chris Caron'
|
||||
__license__ = 'MIT'
|
||||
__copywrite__ = 'Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>'
|
||||
__email__ = 'lead2gold@gmail.com'
|
||||
__status__ = 'Production'
|
||||
|
||||
from .common import NotifyType
|
||||
from .common import NOTIFY_TYPES
|
||||
|
@@ -28,10 +37,18 @@ from .common import NotifyImageSize
|
|||
from .common import NOTIFY_IMAGE_SIZES
|
||||
from .common import NotifyFormat
|
||||
from .common import NOTIFY_FORMATS
|
||||
from .common import OverflowMode
|
||||
from .common import OVERFLOW_MODES
|
||||
from .common import ConfigFormat
|
||||
from .common import CONFIG_FORMATS
|
||||
|
||||
from .URLBase import URLBase
|
||||
from .plugins.NotifyBase import NotifyBase
|
||||
from .config.ConfigBase import ConfigBase
|
||||
|
||||
from .Apprise import Apprise
|
||||
from .AppriseAsset import AppriseAsset
|
||||
from .AppriseConfig import AppriseConfig
|
||||
|
||||
# Set default logging handler to avoid "No handler found" warnings.
|
||||
import logging
|
||||
|
@@ -40,9 +57,11 @@ logging.getLogger(__name__).addHandler(NullHandler())
|
|||
|
||||
__all__ = [
|
||||
# Core
|
||||
'Apprise', 'AppriseAsset', 'NotifyBase',
|
||||
'Apprise', 'AppriseAsset', 'AppriseConfig', 'URLBase', 'NotifyBase',
|
||||
'ConfigBase',
|
||||
|
||||
# Reference
|
||||
'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'NOTIFY_TYPES',
|
||||
'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS',
|
||||
'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode',
|
||||
'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES',
|
||||
'ConfigFormat', 'CONFIG_FORMATS',
|
||||
]
|
||||
|
|
|
@@ -1,40 +1,71 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Apprise CLI Tool
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with apprise. If not, see <http://www.gnu.org/licenses/>.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import click
|
||||
import logging
|
||||
import platform
|
||||
import sys
|
||||
from os.path import isfile
|
||||
from os.path import expanduser
|
||||
from os.path import expandvars
|
||||
|
||||
from . import NotifyType
|
||||
from . import Apprise
|
||||
from . import AppriseAsset
|
||||
from . import AppriseConfig
|
||||
from .utils import parse_list
|
||||
from .common import NOTIFY_TYPES
|
||||
from .logger import logger
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger('apprise.plugins.NotifyBase')
|
||||
from . import __title__
|
||||
from . import __version__
|
||||
from . import __license__
|
||||
from . import __copywrite__
|
||||
|
||||
# Defines our click context settings adding -h to the additional options that
|
||||
# can be specified to get the help menu to come up
|
||||
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
|
||||
|
||||
# Define our default configuration we use if nothing is otherwise specified
|
||||
DEFAULT_SEARCH_PATHS = (
|
||||
'~/.apprise',
|
||||
'~/.apprise.yml',
|
||||
'~/.config/apprise',
|
||||
'~/.config/apprise.yml',
|
||||
)
|
||||
|
||||
# Detect Windows
|
||||
if platform.system() == 'Windows':
|
||||
# Default Search Path for Windows Users
|
||||
DEFAULT_SEARCH_PATHS = (
|
||||
expandvars('%APPDATA%/Apprise/apprise'),
|
||||
expandvars('%APPDATA%/Apprise/apprise.yml'),
|
||||
expandvars('%LOCALAPPDATA%/Apprise/apprise'),
|
||||
expandvars('%LOCALAPPDATA%/Apprise/apprise.yml'),
|
||||
)
|
||||
|
||||
|
||||
def print_help_msg(command):
|
||||
"""
|
||||
|
@@ -45,23 +76,54 @@ def print_help_msg(command):
|
|||
click.echo(command.get_help(ctx))
|
||||
|
||||
|
||||
def print_version_msg():
|
||||
"""
|
||||
Prints version message when -V or --version is specified.
|
||||
|
||||
"""
|
||||
result = list()
|
||||
result.append('{} v{}'.format(__title__, __version__))
|
||||
result.append(__copywrite__)
|
||||
result.append(
|
||||
'This code is licensed under the {} License.'.format(__license__))
|
||||
click.echo('\n'.join(result))
|
||||
|
||||
|
||||
@click.command(context_settings=CONTEXT_SETTINGS)
|
||||
@click.option('--title', '-t', default=None, type=str,
|
||||
help='Specify the message title.')
|
||||
@click.option('--body', '-b', default=None, type=str,
|
||||
help='Specify the message body.')
|
||||
help='Specify the message body. If no body is specified then '
|
||||
'content is read from <stdin>.')
|
||||
@click.option('--title', '-t', default=None, type=str,
|
||||
help='Specify the message title. This field is completely '
|
||||
'optional.')
|
||||
@click.option('--config', '-c', default=None, type=str, multiple=True,
|
||||
metavar='CONFIG_URL',
|
||||
help='Specify one or more configuration locations.')
|
||||
@click.option('--notification-type', '-n', default=NotifyType.INFO, type=str,
|
||||
metavar='TYPE', help='Specify the message type (default=info).')
|
||||
@click.option('--theme', '-T', default='default', type=str,
|
||||
metavar='TYPE',
|
||||
help='Specify the message type (default=info). Possible values'
|
||||
' are "{}", and "{}".'.format(
|
||||
'", "'.join(NOTIFY_TYPES[:-1]), NOTIFY_TYPES[-1]))
|
||||
@click.option('--theme', '-T', default='default', type=str, metavar='THEME',
|
||||
help='Specify the default theme.')
|
||||
@click.option('--tag', '-g', default=None, type=str, multiple=True,
|
||||
metavar='TAG', help='Specify one or more tags to filter '
|
||||
'which services to notify. Use multiple --tag (-g) entries to '
|
||||
'"OR" the tags together and comma separated to "AND" them. '
|
||||
'If no tags are specified then all services are notified.')
|
||||
@click.option('-v', '--verbose', count=True)
|
||||
@click.option('-V', '--version', is_flag=True,
|
||||
help='Display the apprise version and exit.')
|
||||
@click.argument('urls', nargs=-1,
|
||||
metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',)
|
||||
def main(title, body, urls, notification_type, theme, verbose):
|
||||
def main(body, title, config, urls, notification_type, theme, tag, verbose,
|
||||
version):
|
||||
"""
|
||||
Send a notification to all of the specified servers identified by their
|
||||
URLs the content provided within the title, body and notification-type.
|
||||
|
||||
For a list of all of the supported services and information on how to
|
||||
use them, check out https://github.com/caronc/apprise
|
||||
"""
|
||||
# Note: Click ignores the return values of functions it wraps, If you
|
||||
# want to return a specific error code, you must call sys.exit()
|
||||
|
@@ -69,23 +131,35 @@ def main(title, body, urls, notification_type, theme, verbose):
|
|||
|
||||
# Logging
|
||||
ch = logging.StreamHandler(sys.stdout)
|
||||
if verbose > 2:
|
||||
if verbose > 3:
|
||||
# -vvvv: Most Verbose Debug Logging
|
||||
logger.setLevel(logging.TRACE)
|
||||
|
||||
elif verbose > 2:
|
||||
# -vvv: Debug Logging
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
elif verbose == 1:
|
||||
elif verbose > 1:
|
||||
# -vv: INFO Messages
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
elif verbose > 0:
|
||||
# -v: WARNING Messages
|
||||
logger.setLevel(logging.WARNING)
|
||||
|
||||
else:
|
||||
# No verbosity means we display ERRORS only AND any deprecation
|
||||
# warnings
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
# Format our logger
|
||||
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
if not urls:
|
||||
logger.error('You must specify at least one server URL.')
|
||||
print_help_msg(main)
|
||||
sys.exit(1)
|
||||
if version:
|
||||
print_version_msg()
|
||||
sys.exit(0)
|
||||
|
||||
# Prepare our asset
|
||||
asset = AppriseAsset(theme=theme)
|
||||
|
@@ -93,15 +167,33 @@ def main(title, body, urls, notification_type, theme, verbose):
|
|||
# Create our object
|
||||
a = Apprise(asset=asset)
|
||||
|
||||
# Load our configuration if no URLs or specified configuration was
|
||||
# identified on the command line
|
||||
a.add(AppriseConfig(
|
||||
paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))]
|
||||
if not (config or urls) else config), asset=asset)
|
||||
|
||||
# Load our inventory up
|
||||
for url in urls:
|
||||
a.add(url)
|
||||
|
||||
if len(a) == 0:
|
||||
logger.error(
|
||||
'You must specify at least one server URL or populated '
|
||||
'configuration file.')
|
||||
print_help_msg(main)
|
||||
sys.exit(1)
|
||||
|
||||
if body is None:
|
||||
# if no body was specified, then read from STDIN
|
||||
body = click.get_text_stream('stdin').read()
|
||||
|
||||
# each --tag entry consists of a comma-separated 'and' list
|
||||
# we 'or' together each of the --tag sets specified.
|
||||
tags = None if not tag else [parse_list(t) for t in tag]
|
||||
|
||||
# now print it out
|
||||
if a.notify(title=title, body=body, notify_type=notification_type):
|
||||
if a.notify(
|
||||
body=body, title=title, notify_type=notification_type, tag=tags):
|
||||
sys.exit(0)
|
||||
sys.exit(1)
|
||||
|
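# Illustrative library-side equivalent of the CLI flow above (not part of this
# changeset); the configuration path and notification URL are placeholders.
from apprise import Apprise, AppriseAsset, AppriseConfig

a = Apprise(asset=AppriseAsset(theme='default'))
a.add(AppriseConfig(paths=['~/.apprise']))   # optional configuration source
a.add('json://localhost')                    # placeholder notification URL

# Two --tag entries such as "devops" and "admin,zabbix" translate to:
a.notify(body='test message', title='test title',
         tag=[['devops'], ['admin', 'zabbix']])
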
|
|
@@ -1,20 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Base Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
|
||||
class NotifyType(object):
|
||||
|
@ -70,3 +77,54 @@ NOTIFY_FORMATS = (
|
|||
NotifyFormat.HTML,
|
||||
NotifyFormat.MARKDOWN,
|
||||
)
|
||||
|
||||
|
||||
class OverflowMode(object):
|
||||
"""
|
||||
A list of pre-defined modes of how to handle the text when it exceeds the
|
||||
defined maximum message size.
|
||||
"""
|
||||
|
||||
# Send the data as is; untouched. Let the upstream server decide how the
|
||||
# content is handled. Some upstream services might gracefully handle this
|
||||
# with expected intentions; others might not.
|
||||
UPSTREAM = 'upstream'
|
||||
|
||||
# Always truncate the text when it exceeds the maximum message size and
|
||||
# send it anyway
|
||||
TRUNCATE = 'truncate'
|
||||
|
||||
# Split the message into multiple smaller messages that fit within the
|
||||
# limits of what is expected. The smaller messages are sent
|
||||
SPLIT = 'split'
|
||||
|
||||
|
||||
# Define our modes so we can verify if we need to
|
||||
OVERFLOW_MODES = (
|
||||
OverflowMode.UPSTREAM,
|
||||
OverflowMode.TRUNCATE,
|
||||
OverflowMode.SPLIT,
|
||||
)
|
||||
|
||||
|
||||
class ConfigFormat(object):
|
||||
"""
|
||||
A list of pre-defined config formats that can be passed via the
|
||||
apprise library.
|
||||
"""
|
||||
|
||||
# A text based configuration. This consists of a list of URLs delimited by
|
||||
# a new line. pound/hashtag (#) or semi-colon (;) can be used as comment
|
||||
# characters.
|
||||
TEXT = 'text'
|
||||
|
||||
# YAML files allow for a richer experience when setting up your
|
||||
# apprise configuration files.
|
||||
YAML = 'yaml'
|
||||
|
||||
|
||||
# Define our configuration formats mostly used for verification
|
||||
CONFIG_FORMATS = (
|
||||
ConfigFormat.TEXT,
|
||||
ConfigFormat.YAML,
|
||||
)
|
||||
|
|
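# Illustrative only: the two configuration formats enumerated above, fed
# through the parsers added in ConfigBase.py below. The URLs are placeholders
# and the loaded counts are the expected outcome, not a verified result.
from apprise.config.ConfigBase import ConfigBase

text_cfg = """
# <tag(s)>=<url>, or just a bare <url>
devops=json://localhost
mailto://user:password@example.com
"""

yaml_cfg = """
version: 1
tag: devops
urls:
  - json://localhost
  - mailto://user:password@example.com
"""

print(len(ConfigBase.config_parse_text(text_cfg)))   # expected: 2 loaded services
print(len(ConfigBase.config_parse_yaml(yaml_cfg)))   # expected: 2 loaded services
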
libs/apprise/config/ConfigBase.py (new file, 610 lines)
@@ -0,0 +1,610 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import os
|
||||
import re
|
||||
import six
|
||||
import yaml
|
||||
|
||||
from .. import plugins
|
||||
from ..AppriseAsset import AppriseAsset
|
||||
from ..URLBase import URLBase
|
||||
from ..common import ConfigFormat
|
||||
from ..common import CONFIG_FORMATS
|
||||
from ..utils import GET_SCHEMA_RE
|
||||
from ..utils import parse_list
|
||||
|
||||
|
||||
class ConfigBase(URLBase):
|
||||
"""
|
||||
This is the base class for all supported configuration sources
|
||||
"""
|
||||
|
||||
# The Default Encoding to use if not otherwise detected
|
||||
encoding = 'utf-8'
|
||||
|
||||
# The default expected configuration format unless otherwise
|
||||
# detected by the sub-modules
|
||||
default_config_format = ConfigFormat.TEXT
|
||||
|
||||
# This is only set if the user overrides the config format on the URL
|
||||
# this should always initialize itself as None
|
||||
config_format = None
|
||||
|
||||
# Don't read any more of this amount of data into memory as there is no
|
||||
# reason we should be reading in more. This is more of a safeguard than
|
||||
# anything else. 128KB (131072B)
|
||||
max_buffer_size = 131072
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize some general logging and common server arguments that will
|
||||
keep things consistent when working with the configurations that
|
||||
inherit this class.
|
||||
|
||||
"""
|
||||
|
||||
super(ConfigBase, self).__init__(**kwargs)
|
||||
|
||||
# Tracks previously loaded content for speed
|
||||
self._cached_servers = None
|
||||
|
||||
if 'encoding' in kwargs:
|
||||
# Store the encoding
|
||||
self.encoding = kwargs.get('encoding')
|
||||
|
||||
if 'format' in kwargs:
|
||||
# Store the enforced config format
|
||||
self.config_format = kwargs.get('format').lower()
|
||||
|
||||
if self.config_format not in CONFIG_FORMATS:
|
||||
# Simple error checking
|
||||
err = 'An invalid config format ({}) was specified.'.format(
|
||||
self.config_format)
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
return
|
||||
|
||||
def servers(self, asset=None, cache=True, **kwargs):
|
||||
"""
|
||||
Reads the loaded configuration and returns all of the services
|
||||
that could be parsed and loaded.
|
||||
|
||||
"""
|
||||
|
||||
if cache is True and isinstance(self._cached_servers, list):
|
||||
# We already have cached results to return; use them
|
||||
return self._cached_servers
|
||||
|
||||
# Our response object
|
||||
self._cached_servers = list()
|
||||
|
||||
# read() causes the child class to do whatever it takes for the
|
||||
# config plugin to load the data source and return unparsed content
|
||||
# None is returned if there was an error or simply no data
|
||||
content = self.read(**kwargs)
|
||||
if not isinstance(content, six.string_types):
|
||||
# Nothing more to do
|
||||
return list()
|
||||
|
||||
# Our configuration format falls back to the default if one wasn't detected
|
||||
# or enforced.
|
||||
config_format = \
|
||||
self.default_config_format \
|
||||
if self.config_format is None else self.config_format
|
||||
|
||||
# Dynamically load our parse_ function based on our config format
|
||||
fn = getattr(ConfigBase, 'config_parse_{}'.format(config_format))
|
||||
|
||||
# Execute our config parse function which always returns a list
|
||||
self._cached_servers.extend(fn(content=content, asset=asset))
|
||||
|
||||
if len(self._cached_servers):
|
||||
self.logger.info('Loaded {} entries from {}'.format(
|
||||
len(self._cached_servers), self.url()))
|
||||
else:
|
||||
self.logger.warning('Failed to load configuration from {}'.format(
|
||||
self.url()))
|
||||
|
||||
return self._cached_servers
|
||||
|
||||
def read(self):
|
||||
"""
|
||||
This method should be implemented by the child classes
|
||||
|
||||
"""
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url, verify_host=True):
|
||||
"""Parses the URL and returns it broken apart into a dictionary.
|
||||
|
||||
This is very specific and customized for Apprise.
|
||||
|
||||
|
||||
Args:
|
||||
url (str): The URL you want to fully parse.
|
||||
verify_host (:obj:`bool`, optional): a flag kept with the parsed
|
||||
URL which some child classes will later use to verify SSL
|
||||
keys (if SSL transactions take place). Unless under very
|
||||
specific circumstances, it is strongly recommended that
|
||||
you leave this default value set to True.
|
||||
|
||||
Returns:
|
||||
A dictionary is returned containing the URL fully parsed if
|
||||
successful, otherwise None is returned.
|
||||
"""
|
||||
|
||||
results = URLBase.parse_url(url, verify_host=verify_host)
|
||||
|
||||
if not results:
|
||||
# We're done; we failed to parse our url
|
||||
return results
|
||||
|
||||
# Allow overriding the default config format
|
||||
if 'format' in results['qsd']:
|
||||
results['format'] = results['qsd'].get('format')
|
||||
if results['format'] not in CONFIG_FORMATS:
|
||||
URLBase.logger.warning(
|
||||
'Unsupported format specified {}'.format(
|
||||
results['format']))
|
||||
del results['format']
|
||||
|
||||
# Defines the encoding of the payload
|
||||
if 'encoding' in results['qsd']:
|
||||
results['encoding'] = results['qsd'].get('encoding')
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def config_parse_text(content, asset=None):
|
||||
"""
|
||||
Parse the specified content as though it were a simple text file only
|
||||
containing a list of URLs. Return a list of loaded notification plugins
|
||||
|
||||
Optionally associate an asset with the notification.
|
||||
|
||||
The file syntax is:
|
||||
|
||||
#
|
||||
# pound/hashtag allow for line comments
|
||||
#
|
||||
# One or more tags can be identified using commas (,) to separate
|
||||
# them.
|
||||
<Tag(s)>=<URL>
|
||||
|
||||
# Or you can use this format (no tags associated)
|
||||
<URL>
|
||||
|
||||
"""
|
||||
# For logging, track the line number
|
||||
line = 0
|
||||
|
||||
response = list()
|
||||
|
||||
# Define what a valid line should look like
|
||||
valid_line_re = re.compile(
|
||||
r'^\s*(?P<line>([;#]+(?P<comment>.*))|'
|
||||
r'(\s*(?P<tags>[^=]+)=|=)?\s*'
|
||||
r'(?P<url>[a-z0-9]{2,9}://.*))?$', re.I)
|
||||
|
||||
try:
|
||||
# split our content up to read line by line
|
||||
content = re.split(r'\r*\n', content)
|
||||
|
||||
except TypeError:
|
||||
# content was not expected string type
|
||||
ConfigBase.logger.error('Invalid apprise text data specified')
|
||||
return list()
|
||||
|
||||
for entry in content:
|
||||
# Increment our line count
|
||||
line += 1
|
||||
|
||||
result = valid_line_re.match(entry)
|
||||
if not result:
|
||||
# Invalid syntax
|
||||
ConfigBase.logger.error(
|
||||
'Invalid apprise text format found '
|
||||
'{} on line {}.'.format(entry, line))
|
||||
|
||||
# Assume this is a file we shouldn't be parsing. Its owner
|
||||
# can read the error printed to screen and take action
|
||||
# otherwise.
|
||||
return list()
|
||||
|
||||
if result.group('comment') or not result.group('line'):
|
||||
# Comment/empty line; do nothing
|
||||
continue
|
||||
|
||||
# Store our url read in
|
||||
url = result.group('url')
|
||||
|
||||
# swap hash (#) tag values with their html version
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our
|
||||
# plugins to determine if they can make a better
|
||||
# interpretation of a URL geared for them
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
ConfigBase.logger.warning(
|
||||
'Unsupported schema {} on line {}.'.format(
|
||||
schema, line))
|
||||
continue
|
||||
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
|
||||
if results is None:
|
||||
# Failed to parse the server URL
|
||||
ConfigBase.logger.warning(
|
||||
'Unparseable URL {} on line {}.'.format(url, line))
|
||||
continue
|
||||
|
||||
# Build a list of tags to associate with the newly added
|
||||
# notifications if any were set
|
||||
results['tag'] = set(parse_list(result.group('tags')))
|
||||
|
||||
ConfigBase.logger.trace(
|
||||
'URL {} unpacked as:{}{}'.format(
|
||||
url, os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v) for k, v in results.items()])))
|
||||
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the
|
||||
# parsed URL information
|
||||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
# Create log entry of loaded URL
|
||||
ConfigBase.logger.debug('Loaded URL: {}'.format(plugin.url()))
|
||||
|
||||
except Exception as e:
|
||||
# the arguments are invalid or can not be used.
|
||||
ConfigBase.logger.warning(
|
||||
'Could not load URL {} on line {}.'.format(
|
||||
url, line))
|
||||
ConfigBase.logger.debug('Loading Exception: %s' % str(e))
|
||||
continue
|
||||
|
||||
# if we reach here, we successfully loaded our data
|
||||
response.append(plugin)
|
||||
|
||||
# Return what was loaded
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def config_parse_yaml(content, asset=None):
|
||||
"""
|
||||
Parse the specified content as though it were a yaml file
|
||||
specifically formatted for apprise. Return a list of loaded
|
||||
notification plugins.
|
||||
|
||||
Optionally associate an asset with the notification.
|
||||
|
||||
"""
|
||||
response = list()
|
||||
|
||||
try:
|
||||
# Load our data (safely)
|
||||
result = yaml.load(content, Loader=yaml.SafeLoader)
|
||||
|
||||
except (AttributeError, yaml.error.MarkedYAMLError) as e:
|
||||
# Invalid content
|
||||
ConfigBase.logger.error(
|
||||
'Invalid apprise yaml data specified.')
|
||||
ConfigBase.logger.debug(
|
||||
'YAML Exception:{}{}'.format(os.linesep, e))
|
||||
return list()
|
||||
|
||||
if not isinstance(result, dict):
|
||||
# Invalid content
|
||||
ConfigBase.logger.error('Invalid apprise yaml structure specified')
|
||||
return list()
|
||||
|
||||
# YAML Version
|
||||
version = result.get('version', 1)
|
||||
if version != 1:
|
||||
# Invalid syntax
|
||||
ConfigBase.logger.error(
|
||||
'Invalid apprise yaml version specified {}.'.format(version))
|
||||
return list()
|
||||
|
||||
#
|
||||
# global asset object
|
||||
#
|
||||
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
tokens = result.get('asset', None)
|
||||
if tokens and isinstance(tokens, dict):
|
||||
for k, v in tokens.items():
|
||||
|
||||
if k.startswith('_') or k.endswith('_'):
|
||||
# Entries are considered reserved if they start or end
|
||||
# with an underscore
|
||||
ConfigBase.logger.warning(
|
||||
'Ignored asset key "{}".'.format(k))
|
||||
continue
|
||||
|
||||
if not (hasattr(asset, k) and
|
||||
isinstance(getattr(asset, k), six.string_types)):
|
||||
# We can't set a function or non-string set value
|
||||
ConfigBase.logger.warning(
|
||||
'Invalid asset key "{}".'.format(k))
|
||||
continue
|
||||
|
||||
if v is None:
|
||||
# Convert to an empty string
|
||||
v = ''
|
||||
|
||||
if not isinstance(v, six.string_types):
|
||||
# we must set strings with a string
|
||||
ConfigBase.logger.warning(
|
||||
'Invalid asset value to "{}".'.format(k))
|
||||
continue
|
||||
|
||||
# Set our asset object with the new value
|
||||
setattr(asset, k, v.strip())
|
||||
|
||||
#
|
||||
# global tag root directive
|
||||
#
|
||||
global_tags = set()
|
||||
|
||||
tags = result.get('tag', None)
|
||||
if tags and isinstance(tags, (list, tuple, six.string_types)):
|
||||
# Store any preset tags
|
||||
global_tags = set(parse_list(tags))
|
||||
|
||||
#
|
||||
# urls root directive
|
||||
#
|
||||
urls = result.get('urls', None)
|
||||
if not isinstance(urls, (list, tuple)):
|
||||
# Unsupported
|
||||
ConfigBase.logger.error(
|
||||
'Missing "urls" directive in apprise yaml.')
|
||||
return list()
|
||||
|
||||
# Iterate over each URL
|
||||
for no, url in enumerate(urls):
|
||||
|
||||
# Our results object is what we use to instantiate our object if
|
||||
# we can. Reset it to None on each iteration
|
||||
results = list()
|
||||
|
||||
if isinstance(url, six.string_types):
|
||||
# We're just a simple URL string
|
||||
|
||||
# swap hash (#) tag values with their html version
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our
|
||||
# plugins to determine if they can make a better
|
||||
# interpretation of a URL geared for them
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
ConfigBase.logger.warning(
|
||||
'Unsupported schema in urls entry #{}'.format(no + 1))
|
||||
continue
|
||||
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
ConfigBase.logger.warning(
|
||||
'Unsupported schema {} in urls entry #{}'.format(
|
||||
schema, no + 1))
|
||||
continue
|
||||
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
_results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
if _results is None:
|
||||
ConfigBase.logger.warning(
|
||||
'Unparseable {} based url; entry #{}'.format(
|
||||
schema, no + 1))
|
||||
continue
|
||||
|
||||
# add our results to our global set
|
||||
results.append(_results)
|
||||
|
||||
elif isinstance(url, dict):
|
||||
# We are a url string with additional unescaped options
|
||||
if six.PY2:
|
||||
_url, tokens = next(url.iteritems())
|
||||
else: # six.PY3
|
||||
_url, tokens = next(iter(url.items()))
|
||||
|
||||
# swap hash (#) tag values with their html version
|
||||
_url = _url.replace('/#', '/%23')
|
||||
|
||||
# Get our schema
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
ConfigBase.logger.warning(
|
||||
'Unsupported schema in urls entry #{}'.format(no + 1))
|
||||
continue
|
||||
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
ConfigBase.logger.warning(
|
||||
'Unsupported schema {} in urls entry #{}'.format(
|
||||
schema, no + 1))
|
||||
continue
|
||||
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
_results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
if _results is None:
|
||||
# Setup dictionary
|
||||
_results = {
|
||||
# Minimum requirements
|
||||
'schema': schema,
|
||||
}
|
||||
|
||||
if tokens is not None:
|
||||
# populate and/or override any results populated by
|
||||
# parse_url()
|
||||
for entries in tokens:
|
||||
# Copy ourselves a template of our parsed URL as a base
|
||||
# to work with
|
||||
r = _results.copy()
|
||||
|
||||
# We are a url string with additional unescaped options
|
||||
if isinstance(entries, dict):
|
||||
if six.PY2:
|
||||
_url, tokens = next(url.iteritems())
|
||||
else: # six.PY3
|
||||
_url, tokens = next(iter(url.items()))
|
||||
|
||||
# Tags you just can't over-ride
|
||||
if 'schema' in entries:
|
||||
del entries['schema']
|
||||
|
||||
# Extend our dictionary with our new entries
|
||||
r.update(entries)
|
||||
|
||||
# add our results to our global set
|
||||
results.append(r)
|
||||
|
||||
else:
|
||||
# add our results to our global set
|
||||
results.append(_results)
|
||||
|
||||
else:
|
||||
# Unsupported
|
||||
ConfigBase.logger.warning(
|
||||
'Unsupported apprise yaml entry #{}'.format(no + 1))
|
||||
continue
|
||||
|
||||
# Track our entries
|
||||
entry = 0
|
||||
|
||||
while len(results):
|
||||
# Increment our entry count
|
||||
entry += 1
|
||||
|
||||
# Grab our first item
|
||||
_results = results.pop(0)
|
||||
|
||||
# tag is a special keyword that is managed by apprise object.
|
||||
# The below ensures our tags are set correctly
|
||||
if 'tag' in _results:
|
||||
# Tidy our list up
|
||||
_results['tag'] = \
|
||||
set(parse_list(_results['tag'])) | global_tags
|
||||
|
||||
else:
|
||||
# Just use the global settings
|
||||
_results['tag'] = global_tags
|
||||
|
||||
ConfigBase.logger.trace(
|
||||
'URL #{}: {} unpacked as:{}{}'
|
||||
.format(no + 1, url, os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, a)
|
||||
for k, a in _results.items()])))
|
||||
|
||||
# Prepare our Asset Object
|
||||
_results['asset'] = asset
|
||||
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the
|
||||
# parsed URL information
|
||||
plugin = plugins.SCHEMA_MAP[_results['schema']](**_results)
|
||||
|
||||
# Create log entry of loaded URL
|
||||
ConfigBase.logger.debug(
|
||||
'Loaded URL: {}'.format(plugin.url()))
|
||||
|
||||
except Exception:
|
||||
# the arguments are invalid or can not be used.
|
||||
ConfigBase.logger.warning(
|
||||
'Could not load apprise yaml entry #{}, item #{}'
|
||||
.format(no + 1, entry))
|
||||
continue
|
||||
|
||||
# if we reach here, we successfully loaded our data
|
||||
response.append(plugin)
|
||||
|
||||
return response
|
||||
|
||||
def pop(self, index):
|
||||
"""
|
||||
Removes an indexed Notification Service from the stack and
|
||||
returns it.
|
||||
"""
|
||||
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
# Pop the element off of the stack
|
||||
return self._cached_servers.pop(index)
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the indexed server entry associated with the loaded
|
||||
notification servers
|
||||
"""
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
return self._cached_servers[index]
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Returns an iterator to our server list
|
||||
"""
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
return iter(self._cached_servers)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the total number of servers loaded
|
||||
"""
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
return len(self._cached_servers)
|
libs/apprise/config/ConfigFile.py (new file, 163 lines)
@@ -0,0 +1,163 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import io
|
||||
import os
|
||||
from os.path import expanduser
|
||||
from .ConfigBase import ConfigBase
|
||||
from ..common import ConfigFormat
|
||||
|
||||
|
||||
class ConfigFile(ConfigBase):
|
||||
"""
|
||||
A wrapper for File based configuration sources
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Local File'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'file'
|
||||
|
||||
def __init__(self, path, **kwargs):
|
||||
"""
|
||||
Initialize File Object
|
||||
|
||||
path identifies the local configuration file that should be loaded and parsed
|
||||
|
||||
"""
|
||||
super(ConfigFile, self).__init__(**kwargs)
|
||||
|
||||
# Store our file path as it was set
|
||||
self.path = path
|
||||
|
||||
return
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'encoding': self.encoding,
|
||||
}
|
||||
|
||||
if self.config_format:
|
||||
# A format was enforced; make sure it's passed back with the url
|
||||
args['format'] = self.config_format
|
||||
|
||||
return 'file://{path}?{args}'.format(
|
||||
path=self.quote(self.path),
|
||||
args=self.urlencode(args),
|
||||
)
|
||||
|
||||
def read(self, **kwargs):
|
||||
"""
|
||||
Perform retrieval of the configuration based on the specified request
|
||||
"""
|
||||
|
||||
response = None
|
||||
|
||||
path = os.path.expanduser(self.path)
|
||||
try:
|
||||
if self.max_buffer_size > 0 and \
|
||||
os.path.getsize(path) > self.max_buffer_size:
|
||||
|
||||
# Content exceeds maximum buffer size
|
||||
self.logger.error(
|
||||
'File size exceeds maximum allowable buffer length'
|
||||
' ({}KB).'.format(int(self.max_buffer_size / 1024)))
|
||||
return None
|
||||
|
||||
except OSError:
|
||||
# getsize() can throw this exception if the file is missing
# and/or simply isn't accessible
|
||||
self.logger.error(
|
||||
'File is not accessible: {}'.format(path))
|
||||
return None
|
||||
|
||||
# Always call throttle before any server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
# Python 3 just supports open(), however to remain compatible with
|
||||
# Python 2, we use the io module
|
||||
with io.open(path, "rt", encoding=self.encoding) as f:
|
||||
# Store our content for parsing
|
||||
response = f.read()
|
||||
|
||||
except (ValueError, UnicodeDecodeError):
|
||||
# A result of our strict encoding check; if we receive this
|
||||
# then the file we're opening is not something we can
|
||||
# understand the encoding of..
|
||||
|
||||
self.logger.error(
|
||||
'File not using expected encoding ({}) : {}'.format(
|
||||
self.encoding, path))
|
||||
return None
|
||||
|
||||
except (IOError, OSError):
|
||||
# IOError is present for backwards compatibility with Python
|
||||
# versions older then 3.3. >= 3.3 throw OSError now.
|
||||
|
||||
# Could not open and/or read the file; this is not a problem since
|
||||
# we scan a lot of default paths.
|
||||
self.logger.error(
|
||||
'File can not be opened for read: {}'.format(path))
|
||||
return None
|
||||
|
||||
# Detect config format based on file extension if it isn't already
|
||||
# enforced
|
||||
if self.config_format is None and \
|
||||
re.match(r'^.*\.ya?ml\s*$', path, re.I) is not None:
|
||||
|
||||
# YAML Filename Detected
|
||||
self.default_config_format = ConfigFormat.YAML
|
||||
|
||||
# Return our response object
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL so that we can handle all different file paths
|
||||
and return it as our path object
|
||||
|
||||
"""
|
||||
|
||||
results = ConfigBase.parse_url(url, verify_host=False)
|
||||
if not results:
|
||||
# We're done early; it's not a good URL
|
||||
return results
|
||||
|
||||
match = re.match(r'file://(?P<path>[^?]+)(\?.*)?', url, re.I)
|
||||
if not match:
|
||||
return None
|
||||
|
||||
results['path'] = expanduser(ConfigFile.unquote(match.group('path')))
|
||||
return results
|
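# A hedged usage sketch of the class above; the file path is a placeholder and
# feeding parse_url() results back into the constructor mirrors how the
# library itself instantiates configuration objects.
from apprise.config.ConfigFile import ConfigFile

results = ConfigFile.parse_url('file:///etc/apprise/apprise.yml?encoding=utf-8')
config = ConfigFile(**results)
print(config.url())            # round-trips the path, encoding and any enforced format
for server in config.servers():
    print(server.url())        # each notification service parsed out of the file
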
libs/apprise/config/ConfigHTTP.py (new file, 276 lines)
@@ -0,0 +1,276 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from .ConfigBase import ConfigBase
|
||||
from ..common import ConfigFormat
|
||||
|
||||
# Support YAML formats
|
||||
# text/yaml
|
||||
# text/x-yaml
|
||||
# application/yaml
|
||||
# application/x-yaml
|
||||
MIME_IS_YAML = re.compile('(text|application)/(x-)?yaml', re.I)
|
||||
|
||||
# Support TEXT formats
|
||||
# text/plain
|
||||
# text/html
|
||||
MIME_IS_TEXT = re.compile('text/(plain|html)', re.I)
|
||||
|
||||
|
||||
class ConfigHTTP(ConfigBase):
|
||||
"""
|
||||
A wrapper for HTTP based configuration sources
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'HTTP'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'http'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'https'
|
||||
|
||||
# The maximum number of seconds to wait for a connection to be established
|
||||
# before out-right just giving up
|
||||
connection_timeout_sec = 5.0
|
||||
|
||||
# If an HTTP error occurs, define the number of characters you still want
|
||||
# to read back. This is useful for debugging purposes, but nothing else.
|
||||
# The idea behind enforcing this kind of restriction is to prevent abuse
|
||||
# from queries to services that may be untrusted.
|
||||
max_error_buffer_size = 2048
|
||||
|
||||
def __init__(self, headers=None, **kwargs):
|
||||
"""
|
||||
Initialize HTTP Object
|
||||
|
||||
headers can be a dictionary of key/value pairs that you want to
|
||||
additionally include as part of the server headers to post with
|
||||
|
||||
"""
|
||||
super(ConfigHTTP, self).__init__(**kwargs)
|
||||
|
||||
self.schema = 'https' if self.secure else 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not isinstance(self.fullpath, six.string_types):
|
||||
self.fullpath = '/'
|
||||
|
||||
self.headers = {}
|
||||
if headers:
|
||||
# Store our extra headers
|
||||
self.headers.update(headers)
|
||||
|
||||
return
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'encoding': self.encoding,
|
||||
}
|
||||
|
||||
if self.config_format:
|
||||
# A format was enforced; make sure it's passed back with the url
|
||||
args['format'] = self.config_format
|
||||
|
||||
# Append our headers into our args
|
||||
args.update({'+{}'.format(k): v for k, v in self.headers.items()})
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=self.quote(self.user, safe=''),
|
||||
password=self.quote(self.password, safe=''),
|
||||
)
|
||||
elif self.user:
|
||||
auth = '{user}@'.format(
|
||||
user=self.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
return '{schema}://{auth}{hostname}{port}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=self.host,
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=self.urlencode(args),
|
||||
)
|
||||
|
||||
def read(self, **kwargs):
|
||||
"""
|
||||
Perform retrieval of the configuration based on the specified request
|
||||
"""
|
||||
|
||||
# Prepare our request headers
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
}
|
||||
|
||||
# Apply any/all header over-rides defined
|
||||
headers.update(self.headers)
|
||||
|
||||
auth = None
|
||||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if isinstance(self.port, int):
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += self.fullpath
|
||||
|
||||
self.logger.debug('HTTP POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
|
||||
# Prepare our response object
|
||||
response = None
|
||||
|
||||
# Where our request object will temporarily live.
|
||||
r = None
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
# Make our request
|
||||
r = requests.post(
|
||||
url,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
timeout=self.connection_timeout_sec,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
status_str = \
|
||||
ConfigBase.http_response_code_lookup(r.status_code)
|
||||
self.logger.error(
|
||||
'Failed to get HTTP configuration: '
|
||||
'{}{} error={}.'.format(
|
||||
status_str,
|
||||
',' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# Display payload for debug information only; Don't read any
|
||||
# more than the first X bytes since we're potentially accessing
|
||||
# content from untrusted servers.
|
||||
if self.max_error_buffer_size > 0:
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(
|
||||
r.content[0:self.max_error_buffer_size]))
|
||||
|
||||
# Close out our connection if it exists to eliminate any
|
||||
# potential inefficiencies with the Request connection pool as
|
||||
# documented on their site when using the stream=True option.
|
||||
r.close()
|
||||
|
||||
# Return None (signifying a failure)
|
||||
return None
|
||||
|
||||
# Store our response
|
||||
if self.max_buffer_size > 0 and \
int(r.headers.get('Content-Length', 0)) > self.max_buffer_size:
|
||||
|
||||
# Provide warning of data truncation
|
||||
self.logger.error(
|
||||
'HTTP config response exceeds maximum buffer length '
|
||||
'({}KB);'.format(int(self.max_buffer_size / 1024)))
|
||||
|
||||
# Close out our connection if it exists to eliminate any
|
||||
# potential inefficiencies with the Request connection pool as
|
||||
# documented on their site when using the stream=True option.
|
||||
r.close()
|
||||
|
||||
# Return None - buffer exceeded
|
||||
return None
|
||||
|
||||
else:
|
||||
# Store our result
|
||||
response = r.content
|
||||
|
||||
# Detect config format based on mime if the format isn't
|
||||
# already enforced
|
||||
content_type = r.headers.get(
|
||||
'Content-Type', 'application/octet-stream')
|
||||
if self.config_format is None and content_type:
|
||||
if MIME_IS_YAML.match(content_type) is not None:
|
||||
|
||||
# YAML data detected based on header content
|
||||
self.default_config_format = ConfigFormat.YAML
|
||||
|
||||
elif MIME_IS_TEXT.match(content_type) is not None:
|
||||
|
||||
# TEXT data detected based on header content
|
||||
self.default_config_format = ConfigFormat.TEXT
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.error(
|
||||
'A Connection error occurred retrieving HTTP '
|
||||
'configuration from %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return None (signifying a failure)
|
||||
return None
|
||||
|
||||
# Close out our connection if it exists to eliminate any potential
|
||||
# inefficiencies with the Request connection pool as documented on
|
||||
# their site when using the stream=True option.
|
||||
r.close()
|
||||
|
||||
# Return our response object
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = ConfigBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Add our headers that the user can potentially over-ride if they wish
|
||||
# to our returned result set
|
||||
results['headers'] = results['qsd-']
|
||||
results['headers'].update(results['qsd+'])
|
||||
|
||||
return results
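To make the header handling above concrete, here is a minimal sketch of how a ConfigHTTP URL round-trips: the '+'-prefixed query arguments folded into results['headers'] by parse_url() become extra HTTP headers on the POST issued by read(), and url() re-encodes them. This assumes the vendored apprise package is importable; the host, path, and header name/value are placeholders, not real settings.

from apprise.config import ConfigHTTP

# '+X-Api-Key=abc123' is folded into results['headers'] by parse_url() above
results = ConfigHTTP.parse_url(
    'http://user:pass@localhost:8080/config/?format=text&+X-Api-Key=abc123')

config = ConfigHTTP(**results)
print(config.headers)   # {'X-Api-Key': 'abc123'}
print(config.url())     # headers are re-encoded back as '+key=value' arguments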
libs/apprise/config/__init__.py (new file, 119 lines)
@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import re
|
||||
|
||||
from os import listdir
|
||||
from os.path import dirname
|
||||
from os.path import abspath
|
||||
|
||||
# Maintains a mapping of all of the configuration services
|
||||
SCHEMA_MAP = {}
|
||||
|
||||
__all__ = []
|
||||
|
||||
|
||||
# Load our Lookup Matrix
|
||||
def __load_matrix(path=abspath(dirname(__file__)), name='apprise.config'):
|
||||
"""
|
||||
Dynamically load our schema map; this allows us to gracefully
|
||||
skip over modules we simply don't have the dependencies for.
|
||||
|
||||
"""
|
||||
# Used for the detection of additional Notify Services objects
|
||||
# The .py extension is optional as we support loading directories too
|
||||
module_re = re.compile(r'^(?P<name>Config[a-z0-9]+)(\.py)?$', re.I)
|
||||
|
||||
for f in listdir(path):
|
||||
match = module_re.match(f)
|
||||
if not match:
|
||||
# keep going
|
||||
continue
|
||||
|
||||
# Store our notification/plugin name:
|
||||
plugin_name = match.group('name')
|
||||
try:
|
||||
module = __import__(
|
||||
'{}.{}'.format(name, plugin_name),
|
||||
globals(), locals(),
|
||||
fromlist=[plugin_name])
|
||||
|
||||
except ImportError:
|
||||
# No problem, we can't use this object
|
||||
continue
|
||||
|
||||
if not hasattr(module, plugin_name):
|
||||
# Not a library we can load as it doesn't follow the simple rule
|
||||
# that the class must bear the same name as the notification
|
||||
# file itself.
|
||||
continue
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(module, plugin_name)
|
||||
if not hasattr(plugin, 'app_id'):
|
||||
# Filter out non-notification modules
|
||||
continue
|
||||
|
||||
elif plugin_name in __all__:
|
||||
# we're already handling this object
|
||||
continue
|
||||
|
||||
# Add our module name to our __all__
|
||||
__all__.append(plugin_name)
|
||||
|
||||
# Ensure we provide the class as the reference to this directory and
|
||||
# not the module:
|
||||
globals()[plugin_name] = plugin
|
||||
|
||||
# Load protocol(s) if defined
|
||||
proto = getattr(plugin, 'protocol', None)
|
||||
if isinstance(proto, six.string_types):
|
||||
if proto not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[proto] = plugin
|
||||
|
||||
elif isinstance(proto, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in proto:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
# Load secure protocol(s) if defined
|
||||
protos = getattr(plugin, 'secure_protocol', None)
|
||||
if isinstance(protos, six.string_types):
|
||||
if protos not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[protos] = plugin
|
||||
|
||||
if isinstance(protos, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in protos:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
return SCHEMA_MAP
|
||||
|
||||
|
||||
# Dynamically build our schema base
|
||||
__load_matrix()
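As a hedged illustration of how the schema map built above is meant to be consumed (the helper below is hypothetical, not part of the package): the schema portion of a configuration URL selects the matching Config* class, which is then fed its own parse_url() results.

from apprise.config import SCHEMA_MAP

def config_from_url(url):
    # Look the schema (e.g. 'http', 'https', 'file') up in SCHEMA_MAP
    schema = url.split('://', 1)[0].lower()
    plugin = SCHEMA_MAP.get(schema)
    if plugin is None:
        # No Config* plugin registered for this schema
        return None
    results = plugin.parse_url(url)
    return plugin(**results) if results else None

config = config_from_url('https://localhost/my/apprise/config')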
|
libs/apprise/i18n/__init__.py (new file, empty)
libs/apprise/i18n/apprise.pot (new file, 292 lines)
@@ -0,0 +1,292 @@
# Translations template for apprise.
|
||||
# Copyright (C) 2019 Chris Caron
|
||||
# This file is distributed under the same license as the apprise project.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, 2019.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: apprise 0.7.8\n"
|
||||
"Report-Msgid-Bugs-To: lead2gold@gmail.com\n"
|
||||
"POT-Creation-Date: 2019-06-06 12:49-0400\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=utf-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Generated-By: Babel 2.7.0\n"
|
||||
|
||||
msgid "API Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Key ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Secret"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Account SID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Add Tokens"
|
||||
msgstr ""
|
||||
|
||||
msgid "Application Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Application Secret"
|
||||
msgstr ""
|
||||
|
||||
msgid "Auth Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Authorization Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Avatar Image"
|
||||
msgstr ""
|
||||
|
||||
msgid "Bot Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Bot Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Channels"
|
||||
msgstr ""
|
||||
|
||||
msgid "Consumer Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Consumer Secret"
|
||||
msgstr ""
|
||||
|
||||
msgid "Detect Bot Owner"
|
||||
msgstr ""
|
||||
|
||||
msgid "Device ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Display Footer"
|
||||
msgstr ""
|
||||
|
||||
msgid "Domain"
|
||||
msgstr ""
|
||||
|
||||
msgid "Duration"
|
||||
msgstr ""
|
||||
|
||||
msgid "Events"
|
||||
msgstr ""
|
||||
|
||||
msgid "Footer Logo"
|
||||
msgstr ""
|
||||
|
||||
msgid "From Email"
|
||||
msgstr ""
|
||||
|
||||
msgid "From Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "From Phone No"
|
||||
msgstr ""
|
||||
|
||||
msgid "Group"
|
||||
msgstr ""
|
||||
|
||||
msgid "HTTP Header"
|
||||
msgstr ""
|
||||
|
||||
msgid "Hostname"
|
||||
msgstr ""
|
||||
|
||||
msgid "Include Image"
|
||||
msgstr ""
|
||||
|
||||
msgid "Modal"
|
||||
msgstr ""
|
||||
|
||||
msgid "Notify Format"
|
||||
msgstr ""
|
||||
|
||||
msgid "Organization"
|
||||
msgstr ""
|
||||
|
||||
msgid "Overflow Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Password"
|
||||
msgstr ""
|
||||
|
||||
msgid "Path"
|
||||
msgstr ""
|
||||
|
||||
msgid "Port"
|
||||
msgstr ""
|
||||
|
||||
msgid "Priority"
|
||||
msgstr ""
|
||||
|
||||
msgid "Provider Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Region"
|
||||
msgstr ""
|
||||
|
||||
msgid "Region Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Remove Tokens"
|
||||
msgstr ""
|
||||
|
||||
msgid "Rooms"
|
||||
msgstr ""
|
||||
|
||||
msgid "SMTP Server"
|
||||
msgstr ""
|
||||
|
||||
msgid "Schema"
|
||||
msgstr ""
|
||||
|
||||
msgid "Secret Access Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Secret Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Secure Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Server Timeout"
|
||||
msgstr ""
|
||||
|
||||
msgid "Sound"
|
||||
msgstr ""
|
||||
|
||||
msgid "Source JID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Channel"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Chat ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Device"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Device ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Email"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Emails"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Encoded ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target JID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Phone No"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Room Alias"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Room ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Short Code"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Tag ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Topic"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target User"
|
||||
msgstr ""
|
||||
|
||||
msgid "Targets"
|
||||
msgstr ""
|
||||
|
||||
msgid "Text To Speech"
|
||||
msgstr ""
|
||||
|
||||
msgid "To Channel ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "To Email"
|
||||
msgstr ""
|
||||
|
||||
msgid "To User ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token A"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token B"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token C"
|
||||
msgstr ""
|
||||
|
||||
msgid "Urgency"
|
||||
msgstr ""
|
||||
|
||||
msgid "Use Avatar"
|
||||
msgstr ""
|
||||
|
||||
msgid "User"
|
||||
msgstr ""
|
||||
|
||||
msgid "User Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "User Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Username"
|
||||
msgstr ""
|
||||
|
||||
msgid "Verify SSL"
|
||||
msgstr ""
|
||||
|
||||
msgid "Version"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "X-Axis"
|
||||
msgstr ""
|
||||
|
||||
msgid "XEP"
|
||||
msgstr ""
|
||||
|
||||
msgid "Y-Axis"
|
||||
msgstr ""
|
||||
|
libs/apprise/i18n/en/LC_MESSAGES/apprise.po (new file, 293 lines)
@@ -0,0 +1,293 @@
# English translations for apprise.
|
||||
# Copyright (C) 2019 Chris Caron
|
||||
# This file is distributed under the same license as the apprise project.
|
||||
# Chris Caron <lead2gold@gmail.com>, 2019.
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: apprise 0.7.6\n"
|
||||
"Report-Msgid-Bugs-To: lead2gold@gmail.com\n"
|
||||
"POT-Creation-Date: 2019-05-28 16:56-0400\n"
|
||||
"PO-Revision-Date: 2019-05-24 20:00-0400\n"
|
||||
"Last-Translator: Chris Caron <lead2gold@gmail.com>\n"
|
||||
"Language: en\n"
|
||||
"Language-Team: en <LL@li.org>\n"
|
||||
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=utf-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Generated-By: Babel 2.6.0\n"
|
||||
|
||||
msgid "API Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Key ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Secret"
|
||||
msgstr ""
|
||||
|
||||
msgid "Access Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Account SID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Add Tokens"
|
||||
msgstr ""
|
||||
|
||||
msgid "Application Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Application Secret"
|
||||
msgstr ""
|
||||
|
||||
msgid "Auth Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Authorization Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Avatar Image"
|
||||
msgstr ""
|
||||
|
||||
msgid "Bot Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Bot Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Channels"
|
||||
msgstr ""
|
||||
|
||||
msgid "Consumer Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Consumer Secret"
|
||||
msgstr ""
|
||||
|
||||
msgid "Detect Bot Owner"
|
||||
msgstr ""
|
||||
|
||||
msgid "Device ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Display Footer"
|
||||
msgstr ""
|
||||
|
||||
msgid "Domain"
|
||||
msgstr ""
|
||||
|
||||
msgid "Duration"
|
||||
msgstr ""
|
||||
|
||||
msgid "Events"
|
||||
msgstr ""
|
||||
|
||||
msgid "Footer Logo"
|
||||
msgstr ""
|
||||
|
||||
msgid "From Email"
|
||||
msgstr ""
|
||||
|
||||
msgid "From Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "From Phone No"
|
||||
msgstr ""
|
||||
|
||||
msgid "Group"
|
||||
msgstr ""
|
||||
|
||||
msgid "HTTP Header"
|
||||
msgstr ""
|
||||
|
||||
msgid "Hostname"
|
||||
msgstr ""
|
||||
|
||||
msgid "Include Image"
|
||||
msgstr ""
|
||||
|
||||
msgid "Modal"
|
||||
msgstr ""
|
||||
|
||||
msgid "Notify Format"
|
||||
msgstr ""
|
||||
|
||||
msgid "Organization"
|
||||
msgstr ""
|
||||
|
||||
msgid "Overflow Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Password"
|
||||
msgstr ""
|
||||
|
||||
msgid "Port"
|
||||
msgstr ""
|
||||
|
||||
msgid "Priority"
|
||||
msgstr ""
|
||||
|
||||
msgid "Provider Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Region"
|
||||
msgstr ""
|
||||
|
||||
msgid "Region Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Remove Tokens"
|
||||
msgstr ""
|
||||
|
||||
msgid "Rooms"
|
||||
msgstr ""
|
||||
|
||||
msgid "SMTP Server"
|
||||
msgstr ""
|
||||
|
||||
msgid "Schema"
|
||||
msgstr ""
|
||||
|
||||
msgid "Secret Access Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Secret Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Secure Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Server Timeout"
|
||||
msgstr ""
|
||||
|
||||
msgid "Sound"
|
||||
msgstr ""
|
||||
|
||||
msgid "Source JID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Channel"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Chat ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Device"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Device ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Email"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Emails"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Encoded ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target JID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Phone No"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Room Alias"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Room ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Short Code"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Tag ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Topic"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target User"
|
||||
msgstr ""
|
||||
|
||||
msgid "Targets"
|
||||
msgstr ""
|
||||
|
||||
msgid "Text To Speech"
|
||||
msgstr ""
|
||||
|
||||
msgid "To Channel ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "To Email"
|
||||
msgstr ""
|
||||
|
||||
msgid "To User ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token A"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token B"
|
||||
msgstr ""
|
||||
|
||||
msgid "Token C"
|
||||
msgstr ""
|
||||
|
||||
msgid "Urgency"
|
||||
msgstr ""
|
||||
|
||||
msgid "Use Avatar"
|
||||
msgstr ""
|
||||
|
||||
msgid "User"
|
||||
msgstr ""
|
||||
|
||||
msgid "User Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "User Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Username"
|
||||
msgstr ""
|
||||
|
||||
msgid "Verify SSL"
|
||||
msgstr ""
|
||||
|
||||
msgid "Version"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "X-Axis"
|
||||
msgstr ""
|
||||
|
||||
msgid "XEP"
|
||||
msgstr ""
|
||||
|
||||
msgid "Y-Axis"
|
||||
msgstr ""
|
||||
|
||||
#~ msgid "Access Key Secret"
|
||||
#~ msgstr ""
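The msgid entries above are referenced lazily from the plugin metadata (see the gettext_lazy imports further down in this commit). A small usage sketch, assuming the vendored apprise package is importable:

from apprise.AppriseLocale import gettext_lazy as _

# The label is not translated until it is rendered, so importing plugin
# modules stays cheap even when no locale catalog has been loaded yet.
label = _('Access Token')   # one of the msgid entries listed above
print(str(label))           # resolved against the compiled catalog, if any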
|
||||
|
libs/apprise/logger.py (new file, 61 lines)
@@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import logging
|
||||
|
||||
# Define a verbosity level that is noisier than debug mode
|
||||
logging.TRACE = logging.DEBUG - 1
|
||||
|
||||
# Define a verbosity level that is always used even when no verbosity is set
|
||||
# from the command line. The idea here is to allow for deprecation notices
|
||||
logging.DEPRECATE = logging.ERROR + 1
|
||||
|
||||
# Assign our Levels into our logging object
|
||||
logging.addLevelName(logging.DEPRECATE, "DEPRECATION WARNING")
|
||||
logging.addLevelName(logging.TRACE, "TRACE")
|
||||
|
||||
|
||||
def trace(self, message, *args, **kwargs):
|
||||
"""
|
||||
Verbose Debug Logging - Trace
|
||||
"""
|
||||
if self.isEnabledFor(logging.TRACE):
|
||||
self._log(logging.TRACE, message, args, **kwargs)
|
||||
|
||||
|
||||
def deprecate(self, message, *args, **kwargs):
|
||||
"""
|
||||
Deprecation Warning Logging
|
||||
"""
|
||||
if self.isEnabledFor(logging.DEPRECATE):
|
||||
self._log(logging.DEPRECATE, message, args, **kwargs)
|
||||
|
||||
|
||||
# Assign our Loggers for use in Apprise
|
||||
logging.Logger.trace = trace
|
||||
logging.Logger.deprecate = deprecate
|
||||
|
||||
# Create ourselves a generic logging reference
|
||||
logger = logging.getLogger('apprise')
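A short usage sketch for the two custom levels registered above (assuming the vendored package is importable): TRACE sits one notch below DEBUG, while DEPRECATE sits just above ERROR so deprecation notices still appear at fairly quiet settings.

import logging
from apprise.logger import logger

logging.basicConfig(level=logging.TRACE)   # TRACE == DEBUG - 1
logger.trace('very chatty diagnostic output')
logger.deprecate('this option will be removed in a future release')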
|
|
@ -1,112 +1,54 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Base Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import logging
|
||||
from time import sleep
|
||||
try:
|
||||
# Python 2.7
|
||||
from urllib import unquote as _unquote
|
||||
from urllib import quote as _quote
|
||||
from urllib import urlencode as _urlencode
|
||||
|
||||
except ImportError:
|
||||
# Python 3.x
|
||||
from urllib.parse import unquote as _unquote
|
||||
from urllib.parse import quote as _quote
|
||||
from urllib.parse import urlencode as _urlencode
|
||||
|
||||
from ..utils import parse_url
|
||||
from ..utils import parse_bool
|
||||
from ..utils import is_hostname
|
||||
from ..URLBase import URLBase
|
||||
from ..common import NotifyType
|
||||
from ..common import NOTIFY_TYPES
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NOTIFY_FORMATS
|
||||
|
||||
from ..AppriseAsset import AppriseAsset
|
||||
|
||||
# use sax first because it's faster
|
||||
from xml.sax.saxutils import escape as sax_escape
|
||||
from ..common import OverflowMode
|
||||
from ..common import OVERFLOW_MODES
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
def _escape(text):
|
||||
"""
|
||||
saxutil escape tool
|
||||
"""
|
||||
return sax_escape(text, {"'": "&apos;", "\"": "&quot;"})
|
||||
|
||||
|
||||
HTTP_ERROR_MAP = {
|
||||
400: 'Bad Request - Unsupported Parameters.',
|
||||
401: 'Verification Failed.',
|
||||
404: 'Page not found.',
|
||||
405: 'Method not allowed.',
|
||||
500: 'Internal server error.',
|
||||
503: 'Servers are overloaded.',
|
||||
}
|
||||
|
||||
# HTML New Line Delimiter
|
||||
NOTIFY_NEWLINE = '\r\n'
|
||||
|
||||
# Used to break a path list into parts
|
||||
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Regular expression retrieved from:
|
||||
# http://www.regular-expressions.info/email.html
|
||||
IS_EMAIL_RE = re.compile(
|
||||
r"((?P<label>[^+]+)\+)?"
|
||||
r"(?P<userid>[a-z0-9$%=_~-]+"
|
||||
r"(?:\.[a-z0-9$%+=_~-]+)"
|
||||
r"*)@(?P<domain>(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+"
|
||||
r"[a-z0-9](?:[a-z0-9-]*"
|
||||
r"[a-z0-9]))?",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
|
||||
class NotifyBase(object):
|
||||
class NotifyBase(URLBase):
|
||||
"""
|
||||
This is the base class for all notification services
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = None
|
||||
|
||||
# The services URL
|
||||
service_url = None
|
||||
|
||||
# The default simple (insecure) protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocol:// (in this example they would specify 'protocol')
|
||||
protocol = None
|
||||
|
||||
# The default secure protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocols:// (in this example they would specify 'protocols')
|
||||
# This value can be the same as the defined protocol.
|
||||
secure_protocol = None
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = None
|
||||
|
||||
# Most Servers do not like more then 1 request per 5 seconds, so 5.5 gives
|
||||
# us a safe play range...
|
||||
throttle_attempt = 5.5
|
||||
# us a safe play range. Override the one defined already in the URLBase
|
||||
request_rate_per_sec = 5.5
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = None
|
||||
|
@ -114,71 +56,127 @@ class NotifyBase(object):
|
|||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 32768
|
||||
|
||||
# Defines the maximum allowable characters in the title
|
||||
# Defines the maximum allowable characters in the title; set this to zero
|
||||
# if a title can't be used. Titles that are not used but are defined are
|
||||
# automatically placed into the body
|
||||
title_maxlen = 250
|
||||
|
||||
# Set the maximum line count; if this is set to anything larger then zero
|
||||
# the message (prior to it being sent) will be truncated to this number
|
||||
# of lines. Setting this to zero disables this feature.
|
||||
body_max_line_count = 0
|
||||
|
||||
# Default Notify Format
|
||||
notify_format = NotifyFormat.TEXT
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger(__name__)
|
||||
# Default Overflow Mode
|
||||
overflow_mode = OverflowMode.UPSTREAM
|
||||
|
||||
# Default Title HTML Tagging
|
||||
# When a title is specified for a notification service that doesn't accept
|
||||
# titles, by default apprise tries to give a pleasant view and convert the
|
||||
# title so that it can be placed into the body. The default is to just
|
||||
# use a <b> tag. The below causes the <b>title</b> to get generated:
|
||||
default_html_tag_id = 'b'
|
||||
|
||||
# Define a default set of template arguments used for dynamically building
|
||||
# details about our individual plugins for developers.
|
||||
|
||||
# Define object templates
|
||||
templates = ()
|
||||
|
||||
# Provides a mapping of tokens, certain entries are fixed and automatically
|
||||
# configured if found (such as schema, host, user, pass, and port)
|
||||
template_tokens = {}
|
||||
|
||||
# Here is where we define all of the arguments we accept on the url
|
||||
# such as: schema://whatever/?overflow=upstream&format=text
|
||||
# These act the same way as tokens except they are optional and/or
|
||||
# have default values set if mandatory. This rule must be followed
|
||||
template_args = {
|
||||
'overflow': {
|
||||
'name': _('Overflow Mode'),
|
||||
'type': 'choice:string',
|
||||
'values': OVERFLOW_MODES,
|
||||
# Provide a default
|
||||
'default': overflow_mode,
|
||||
# look up default using the following parent class value at
|
||||
# runtime. The variable name identified here (in this case
|
||||
# overflow_mode) is checked and its result is placed over top of
|
||||
# the 'default'. This is done because once a parent class inherits
|
||||
# this one, the overflow_mode already set as a default 'could' be
|
||||
# potentially over-ridden and changed to a different value.
|
||||
'_lookup_default': 'overflow_mode',
|
||||
},
|
||||
'format': {
|
||||
'name': _('Notify Format'),
|
||||
'type': 'choice:string',
|
||||
'values': NOTIFY_FORMATS,
|
||||
# Provide a default
|
||||
'default': notify_format,
|
||||
# look up default using the following parent class value at
|
||||
# runtime.
|
||||
'_lookup_default': 'notify_format',
|
||||
},
|
||||
'verify': {
|
||||
'name': _('Verify SSL'),
|
||||
# SSL Certificate Authority Verification
|
||||
'type': 'bool',
|
||||
# Provide a default
|
||||
'default': URLBase.verify_certificate,
|
||||
# look up default using the following parent class value at
|
||||
# runtime.
|
||||
'_lookup_default': 'verify_certificate',
|
||||
},
|
||||
}
|
||||
|
||||
# kwargs are dynamically built because a prefix causes us to parse the
|
||||
# content slightly differently. The prefix is required and can be either
|
||||
# a (+ or -). Below would handle the +key=value:
|
||||
# {
|
||||
# 'headers': {
|
||||
# 'name': _('HTTP Header'),
|
||||
# 'prefix': '+',
|
||||
# 'type': 'string',
|
||||
# },
|
||||
# },
|
||||
#
|
||||
# In a kwarg situation, the 'key' is always presumed to be treated as
|
||||
# a string. When the 'type' is defined, it is being defined to respect
|
||||
# the 'value'.
|
||||
|
||||
template_kwargs = {}
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize some general logging and common server arguments that will
|
||||
keep things consistent when working with the notifiers that will
|
||||
inherit this class.
|
||||
Initialize some general configuration that will keep things consistent
|
||||
when working with the notifiers that will inherit this class.
|
||||
|
||||
"""
|
||||
|
||||
# Prepare our Assets
|
||||
self.asset = AppriseAsset()
|
||||
super(NotifyBase, self).__init__(**kwargs)
|
||||
|
||||
# Certificate Verification (for SSL calls); default to being enabled
|
||||
self.verify_certificate = kwargs.get('verify', True)
|
||||
|
||||
# Secure Mode
|
||||
self.secure = kwargs.get('secure', False)
|
||||
|
||||
self.host = kwargs.get('host', '')
|
||||
self.port = kwargs.get('port')
|
||||
if self.port:
|
||||
try:
|
||||
self.port = int(self.port)
|
||||
|
||||
except (TypeError, ValueError):
|
||||
self.port = None
|
||||
|
||||
self.user = kwargs.get('user')
|
||||
self.password = kwargs.get('password')
|
||||
|
||||
if 'notify_format' in kwargs:
|
||||
# Store the specified notify_format if specified
|
||||
notify_format = kwargs.get('notify_format')
|
||||
if 'format' in kwargs:
|
||||
# Store the specified format if specified
|
||||
notify_format = kwargs.get('format', '')
|
||||
if notify_format.lower() not in NOTIFY_FORMATS:
|
||||
self.logger.error(
|
||||
'Invalid notification format %s' % notify_format,
|
||||
)
|
||||
raise TypeError(
|
||||
'Invalid notification format %s' % notify_format,
|
||||
)
|
||||
msg = 'Invalid notification format {}'.format(notify_format)
|
||||
self.logger.error(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Provide override
|
||||
self.notify_format = notify_format
|
||||
|
||||
def throttle(self, throttle_time=None):
|
||||
"""
|
||||
A common throttle control
|
||||
"""
|
||||
self.logger.debug('Throttling...')
|
||||
if 'overflow' in kwargs:
|
||||
# Store the specified format if specified
|
||||
overflow = kwargs.get('overflow', '')
|
||||
if overflow.lower() not in OVERFLOW_MODES:
|
||||
msg = 'Invalid overflow method {}'.format(overflow)
|
||||
self.logger.error(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
throttle_time = throttle_time \
|
||||
if throttle_time is not None else self.throttle_attempt
|
||||
|
||||
# Perform throttle
|
||||
if throttle_time > 0:
|
||||
sleep(throttle_time)
|
||||
|
||||
return
|
||||
# Provide override
|
||||
self.overflow_mode = overflow
|
||||
|
||||
def image_url(self, notify_type, logo=False, extension=None):
|
||||
"""
|
||||
|
@ -242,152 +240,183 @@ class NotifyBase(object):
|
|||
color_type=color_type,
|
||||
)
|
||||
|
||||
@property
|
||||
def app_id(self):
|
||||
return self.asset.app_id
|
||||
|
||||
@property
|
||||
def app_desc(self):
|
||||
return self.asset.app_desc
|
||||
|
||||
@property
|
||||
def app_url(self):
|
||||
return self.asset.app_url
|
||||
|
||||
@staticmethod
|
||||
def escape_html(html, convert_new_lines=False, whitespace=True):
|
||||
def notify(self, body, title=None, notify_type=NotifyType.INFO,
|
||||
overflow=None, **kwargs):
|
||||
"""
|
||||
Takes html text as input and escapes it so that it won't
|
||||
conflict with any xml/html wrapping characters.
|
||||
"""
|
||||
escaped = _escape(html)
|
||||
|
||||
if whitespace:
|
||||
# Tidy up whitespace too
|
||||
escaped = escaped\
|
||||
.replace(u'\t', u'&emsp;')\
.replace(u' ', u'&nbsp;')
|
||||
|
||||
if convert_new_lines:
|
||||
return escaped.replace(u'\n', u'<br/>')
|
||||
|
||||
return escaped
|
||||
|
||||
@staticmethod
|
||||
def unquote(content, encoding='utf-8', errors='replace'):
|
||||
"""
|
||||
common unquote function
|
||||
Performs notification
|
||||
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _unquote(content, encoding=encoding, errors=errors)
|
||||
# Handle situations where the title is None
|
||||
title = '' if not title else title
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _unquote(content)
|
||||
# Apply our overflow (if defined)
|
||||
for chunk in self._apply_overflow(body=body, title=title,
|
||||
overflow=overflow):
|
||||
# Send notification
|
||||
if not self.send(body=chunk['body'], title=chunk['title'],
|
||||
notify_type=notify_type):
|
||||
|
||||
@staticmethod
|
||||
def quote(content, safe='/', encoding=None, errors=None):
|
||||
# Toggle our return status flag
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def _apply_overflow(self, body, title=None, overflow=None):
|
||||
"""
|
||||
common quote function
|
||||
Takes the message body and title as input. This function then
|
||||
applies any defined overflow restrictions associated with the
|
||||
notification service and may alter the message if/as required.
|
||||
|
||||
The function will always return a list object in the following
|
||||
structure:
|
||||
[
|
||||
{
|
||||
title: 'the title goes here',
|
||||
body: 'the message body goes here',
|
||||
},
|
||||
{
|
||||
title: 'the title goes here',
|
||||
body: 'the message body goes here',
|
||||
},
|
||||
|
||||
]
|
||||
"""
|
||||
|
||||
response = list()
|
||||
|
||||
# tidy
|
||||
title = '' if not title else title.strip()
|
||||
body = '' if not body else body.rstrip()
|
||||
|
||||
if overflow is None:
|
||||
# default
|
||||
overflow = self.overflow_mode
|
||||
|
||||
if self.title_maxlen <= 0 and len(title) > 0:
|
||||
if self.notify_format == NotifyFormat.MARKDOWN:
|
||||
# Content is appended to body as markdown
|
||||
body = '**{}**\r\n{}'.format(title, body)
|
||||
|
||||
elif self.notify_format == NotifyFormat.HTML:
|
||||
# Content is appended to body as html
|
||||
body = '<{open_tag}>{title}</{close_tag}>' \
|
||||
'<br />\r\n{body}'.format(
|
||||
open_tag=self.default_html_tag_id,
|
||||
title=self.escape_html(title),
|
||||
close_tag=self.default_html_tag_id,
|
||||
body=body)
|
||||
else:
|
||||
# Content is appended to body as text
|
||||
body = '{}\r\n{}'.format(title, body)
|
||||
|
||||
title = ''
|
||||
|
||||
# Enforce the line count first always
|
||||
if self.body_max_line_count > 0:
|
||||
# Limit the results to just the first body_max_line_count lines,
# otherwise there is just too much content to display
|
||||
body = re.split(r'\r*\n', body)
|
||||
body = '\r\n'.join(body[0:self.body_max_line_count])
|
||||
|
||||
if overflow == OverflowMode.UPSTREAM:
|
||||
# Nothing more to do
|
||||
response.append({'body': body, 'title': title})
|
||||
return response
|
||||
|
||||
elif len(title) > self.title_maxlen:
|
||||
# Truncate our Title
|
||||
title = title[:self.title_maxlen]
|
||||
|
||||
if self.body_maxlen > 0 and len(body) <= self.body_maxlen:
|
||||
response.append({'body': body, 'title': title})
|
||||
return response
|
||||
|
||||
if overflow == OverflowMode.TRUNCATE:
|
||||
# Truncate our body and return
|
||||
response.append({
|
||||
'body': body[:self.body_maxlen],
|
||||
'title': title,
|
||||
})
|
||||
# For truncate mode, we're done now
|
||||
return response
|
||||
|
||||
# If we reach here, then we are in SPLIT mode.
|
||||
# For here, we want to split the message as many times as we have to
|
||||
# in order to fit it within the designated limits.
|
||||
response = [{
|
||||
'body': body[i: i + self.body_maxlen],
|
||||
'title': title} for i in range(0, len(body), self.body_maxlen)]
|
||||
|
||||
return response
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Should perform the actual notification itself.
|
||||
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _quote(content, safe=safe, encoding=encoding, errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _quote(content, safe=safe)
|
||||
|
||||
@staticmethod
|
||||
def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
|
||||
"""
|
||||
common urlencode function
|
||||
|
||||
"""
|
||||
try:
|
||||
# Python v3.x
|
||||
return _urlencode(
|
||||
query, doseq=doseq, safe=safe, encoding=encoding,
|
||||
errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _urlencode(query)
|
||||
|
||||
@staticmethod
|
||||
def split_path(path, unquote=True):
|
||||
"""
|
||||
Splits a URL up into a list object.
|
||||
|
||||
"""
|
||||
if unquote:
|
||||
return PATHSPLIT_LIST_DELIM.split(
|
||||
NotifyBase.unquote(path).lstrip('/'))
|
||||
return PATHSPLIT_LIST_DELIM.split(path.lstrip('/'))
|
||||
|
||||
@staticmethod
|
||||
def is_email(address):
|
||||
"""
|
||||
Returns True if specified entry is an email address
|
||||
|
||||
"""
|
||||
return IS_EMAIL_RE.match(address) is not None
|
||||
|
||||
@staticmethod
|
||||
def is_hostname(hostname):
|
||||
"""
|
||||
Returns True if specified entry is a hostname
|
||||
|
||||
"""
|
||||
return is_hostname(hostname)
|
||||
raise NotImplementedError(
|
||||
"send() is not implimented by the child class.")
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url, verify_host=True):
|
||||
"""
|
||||
Parses the URL and returns it broken apart into a dictionary.
|
||||
"""Parses the URL and returns it broken apart into a dictionary.
|
||||
|
||||
This is very specific and customized for Apprise.
|
||||
|
||||
|
||||
Args:
|
||||
url (str): The URL you want to fully parse.
|
||||
verify_host (:obj:`bool`, optional): a flag kept with the parsed
|
||||
URL which some child classes will later use to verify SSL
|
||||
keys (if SSL transactions take place). Unless under very
|
||||
specific circumstances, it is strongly recommended that
|
||||
you leave this default value set to True.
|
||||
|
||||
Returns:
|
||||
A dictionary is returned containing the URL fully parsed if
|
||||
successful, otherwise None is returned.
|
||||
"""
|
||||
results = parse_url(
|
||||
url, default_schema='unknown', verify_host=verify_host)
|
||||
results = URLBase.parse_url(url, verify_host=verify_host)
|
||||
|
||||
if not results:
|
||||
# We're done; we failed to parse our url
|
||||
return results
|
||||
|
||||
# if our URL ends with an 's', then assume our secure flag is set.
|
||||
results['secure'] = (results['schema'][-1] == 's')
|
||||
|
||||
# Support SSL Certificate 'verify' keyword. Default to being enabled
|
||||
results['verify'] = verify_host
|
||||
|
||||
if 'verify' in results['qsd']:
|
||||
results['verify'] = parse_bool(
|
||||
results['qsd'].get('verify', True))
|
||||
|
||||
# Allow overriding the default format
|
||||
if 'format' in results['qsd']:
|
||||
results['format'] = results['qsd'].get('format')
|
||||
if results['format'] not in NOTIFY_FORMATS:
|
||||
NotifyBase.logger.warning(
|
||||
URLBase.logger.warning(
|
||||
'Unsupported format specified {}'.format(
|
||||
results['format']))
|
||||
del results['format']
|
||||
|
||||
# Password overrides
|
||||
if 'pass' in results['qsd']:
|
||||
results['password'] = results['qsd']['pass']
|
||||
|
||||
# User overrides
|
||||
if 'user' in results['qsd']:
|
||||
results['user'] = results['qsd']['user']
|
||||
# Allow overriding the default overflow
|
||||
if 'overflow' in results['qsd']:
|
||||
results['overflow'] = results['qsd'].get('overflow')
|
||||
if results['overflow'] not in OVERFLOW_MODES:
|
||||
URLBase.logger.warning(
|
||||
'Unsupported overflow specified {}'.format(
|
||||
results['overflow']))
|
||||
del results['overflow']
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
This is a base class that can be optionally over-ridden by child
|
||||
classes who can build their Apprise URL based on the one provided
|
||||
by the notification service they choose to use.
|
||||
|
||||
The intent of this is to make Apprise a little more user friendly
|
||||
to people who aren't familiar with constructing URLs and wish to
|
||||
use the ones that were just provided by their notification service
|
||||
that they're using.
|
||||
|
||||
This function will return None if the passed in URL can't be matched
|
||||
as belonging to the notification service. Otherwise this function
|
||||
should return the same set of results that parse_url() does.
|
||||
"""
|
||||
return None
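The overflow handling described in _apply_overflow() above is easiest to follow with a standalone sketch (illustrative only, not the library code): 'upstream' passes the body through untouched, 'truncate' clips it at body_maxlen, and 'split' returns the same list-of-dict structure broken into body_maxlen sized chunks.

def apply_overflow(body, title='', body_maxlen=32768, mode='upstream'):
    if mode == 'upstream' or body_maxlen <= 0 or len(body) <= body_maxlen:
        # Nothing to do; hand the message straight through
        return [{'title': title, 'body': body}]
    if mode == 'truncate':
        return [{'title': title, 'body': body[:body_maxlen]}]
    # 'split' mode: chunk the body so every piece fits within body_maxlen
    return [{'title': title, 'body': body[i:i + body_maxlen]}
            for i in range(0, len(body), body_maxlen)]

chunks = apply_overflow('x' * 25000, title='hi', body_maxlen=10000, mode='split')
print(len(chunks))   # 3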
|
||||
|
|
|
@ -1,27 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Boxcar Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
from json import dumps
|
||||
import requests
|
||||
import re
|
||||
from time import time
|
||||
import six
|
||||
import requests
|
||||
import hmac
|
||||
from json import dumps
|
||||
from time import time
|
||||
from hashlib import sha1
|
||||
from itertools import chain
|
||||
try:
|
||||
from urlparse import urlparse
|
||||
|
||||
|
@ -29,10 +38,10 @@ except ImportError:
|
|||
from urllib.parse import urlparse
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
|
||||
from ..utils import parse_bool
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Default to sending to all devices if nothing is specified
|
||||
DEFAULT_TAG = '@all'
|
||||
|
@ -44,8 +53,8 @@ DEFAULT_TAG = '@all'
|
|||
IS_TAG = re.compile(r'^[@](?P<name>[A-Z0-9]{1,63})$', re.I)
|
||||
|
||||
# Device tokens are only referenced when developing.
|
||||
# it's not likely you'll send a message directly to a device, but
|
||||
# if you do; this plugin supports it.
|
||||
# It's not likely you'll send a message directly to a device, but if you do;
|
||||
# this plugin supports it.
|
||||
IS_DEVICETOKEN = re.compile(r'^[A-Z0-9]{64}$', re.I)
|
||||
|
||||
# Both an access key and secret key are created and assigned to each project
|
||||
|
@ -53,8 +62,8 @@ IS_DEVICETOKEN = re.compile(r'^[A-Z0-9]{64}$', re.I)
|
|||
VALIDATE_ACCESS = re.compile(r'[A-Z0-9_-]{64}', re.I)
|
||||
VALIDATE_SECRET = re.compile(r'[A-Z0-9_-]{64}', re.I)
|
||||
|
||||
# Used to break apart list of potential tags by their delimiter
|
||||
# into a usable list.
|
||||
# Used to break apart list of potential tags by their delimiter into a useable
|
||||
# list.
|
||||
TAGS_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
|
||||
|
@ -84,7 +93,64 @@ class NotifyBoxcar(NotifyBase):
|
|||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 10000
|
||||
|
||||
def __init__(self, access, secret, recipients=None, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{access_key}/{secret_key}/',
|
||||
'{schema}://{access_key}/{secret_key}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'access_key': {
|
||||
'name': _('Access Key'),
|
||||
'type': 'string',
|
||||
'regex': (r'[A-Z0-9_-]{64}', 'i'),
|
||||
'private': True,
|
||||
'required': True,
|
||||
'map_to': 'access',
|
||||
},
|
||||
'secret_key': {
|
||||
'name': _('Secret Key'),
|
||||
'type': 'string',
|
||||
'regex': (r'[A-Z0-9_-]{64}', 'i'),
|
||||
'private': True,
|
||||
'required': True,
|
||||
'map_to': 'secret',
|
||||
},
|
||||
'target_tag': {
|
||||
'name': _('Target Tag ID'),
|
||||
'type': 'string',
|
||||
'prefix': '@',
|
||||
'regex': (r'[A-Z0-9]{1,63}', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_device': {
|
||||
'name': _('Target Device ID'),
|
||||
'type': 'string',
|
||||
'regex': (r'[A-Z0-9]{64}', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, access, secret, targets=None, include_image=True,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize Boxcar Object
|
||||
"""
|
||||
|
@ -101,67 +167,63 @@ class NotifyBoxcar(NotifyBase):
|
|||
self.access = access.strip()
|
||||
|
||||
except AttributeError:
|
||||
self.logger.warning(
|
||||
'The specified access key specified is invalid.',
|
||||
)
|
||||
raise TypeError(
|
||||
'The specified access key specified is invalid.',
|
||||
)
|
||||
msg = 'The specified access key is invalid.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
try:
|
||||
# Secret Key (associated with project)
|
||||
self.secret = secret.strip()
|
||||
|
||||
except AttributeError:
|
||||
self.logger.warning(
|
||||
'The specified secret key specified is invalid.',
|
||||
)
|
||||
raise TypeError(
|
||||
'The specified secret key specified is invalid.',
|
||||
)
|
||||
msg = 'The specified secret key is invalid.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_ACCESS.match(self.access):
|
||||
self.logger.warning(
|
||||
'The access key specified (%s) is invalid.' % self.access,
|
||||
)
|
||||
raise TypeError(
|
||||
'The access key specified (%s) is invalid.' % self.access,
|
||||
)
|
||||
msg = 'The access key specified ({}) is invalid.'\
|
||||
.format(self.access)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_SECRET.match(self.secret):
|
||||
self.logger.warning(
|
||||
'The secret key specified (%s) is invalid.' % self.secret,
|
||||
)
|
||||
raise TypeError(
|
||||
'The secret key specified (%s) is invalid.' % self.secret,
|
||||
)
|
||||
msg = 'The secret key specified ({}) is invalid.'\
|
||||
.format(self.secret)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not recipients:
|
||||
if not targets:
|
||||
self.tags.append(DEFAULT_TAG)
|
||||
recipients = []
|
||||
targets = []
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
recipients = [x for x in filter(bool, TAGS_LIST_DELIM.split(
|
||||
recipients,
|
||||
elif isinstance(targets, six.string_types):
|
||||
targets = [x for x in filter(bool, TAGS_LIST_DELIM.split(
|
||||
targets,
|
||||
))]
|
||||
|
||||
# Validate recipients and drop bad ones:
|
||||
for recipient in recipients:
|
||||
if IS_TAG.match(recipient):
|
||||
# Validate targets and drop bad ones:
|
||||
for target in targets:
|
||||
if IS_TAG.match(target):
|
||||
# store valid tag/alias
|
||||
self.tags.append(IS_TAG.match(recipient).group('name'))
|
||||
self.tags.append(IS_TAG.match(target).group('name'))
|
||||
|
||||
elif IS_DEVICETOKEN.match(recipient):
|
||||
elif IS_DEVICETOKEN.match(target):
|
||||
# store valid device
|
||||
self.device_tokens.append(recipient)
|
||||
self.device_tokens.append(target)
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Dropped invalid tag/alias/device_token '
|
||||
'(%s) specified.' % recipient,
|
||||
'({}) specified.'.format(target),
|
||||
)
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
return
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Boxcar Notification
|
||||
"""
|
||||
|
@ -193,7 +255,9 @@ class NotifyBoxcar(NotifyBase):
|
|||
payload['device_tokens'] = self.device_tokens
|
||||
|
||||
# Source picture should be <= 450 DP wide, ~2:1 aspect.
|
||||
image_url = self.image_url(notify_type)
|
||||
image_url = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
# Set our image
|
||||
payload['@img'] = image_url
|
||||
|
@ -211,7 +275,7 @@ class NotifyBoxcar(NotifyBase):
|
|||
sha1,
|
||||
)
|
||||
|
||||
params = self.urlencode({
|
||||
params = NotifyBoxcar.urlencode({
|
||||
"publishkey": self.access,
|
||||
"signature": h.hexdigest(),
|
||||
})
|
||||
|
@ -222,6 +286,9 @@ class NotifyBoxcar(NotifyBase):
|
|||
))
|
||||
self.logger.debug('Boxcar Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
notify_url,
|
||||
|
@ -232,20 +299,18 @@ class NotifyBoxcar(NotifyBase):
|
|||
|
||||
# Boxcar returns 201 (Created) when successful
|
||||
if r.status_code != requests.codes.created:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBoxcar.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -265,6 +330,29 @@ class NotifyBoxcar(NotifyBase):
|
|||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{access}/{secret}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
access=NotifyBoxcar.quote(self.access, safe=''),
|
||||
secret=NotifyBoxcar.quote(self.secret, safe=''),
|
||||
targets='/'.join([
|
||||
NotifyBoxcar.quote(x, safe='') for x in chain(
|
||||
self.tags, self.device_tokens) if x != DEFAULT_TAG]),
|
||||
args=NotifyBoxcar.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@ -278,23 +366,30 @@ class NotifyBoxcar(NotifyBase):
|
|||
return None
|
||||
|
||||
# The first token is stored in the hostname
|
||||
access = results['host']
|
||||
results['access'] = NotifyBoxcar.unquote(results['host'])
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
secret = NotifyBase.split_path(results['fullpath'])[0]
|
||||
# Get our entries; split_path() looks after unquoting content for us
|
||||
# by default
|
||||
entries = NotifyBoxcar.split_path(results['fullpath'])
|
||||
|
||||
# Our recipients
|
||||
recipients = ','.join(
|
||||
NotifyBase.split_path(results['fullpath'])[1:])
|
||||
try:
|
||||
# Now fetch the remaining tokens
|
||||
results['secret'] = entries.pop(0)
|
||||
|
||||
if not (access and secret):
|
||||
# If we did not recive an access and/or secret code
|
||||
# then we're done
|
||||
return None
|
||||
except IndexError:
|
||||
# secret wasn't specified
|
||||
results['secret'] = None
|
||||
|
||||
# Store our required content
|
||||
results['recipients'] = recipients if recipients else None
|
||||
results['access'] = access
|
||||
results['secret'] = secret
|
||||
# Our recipients make up the remaining entries of our array
|
||||
results['targets'] = entries
|
||||
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyBoxcar.parse_list(results['qsd'].get('to'))
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
return results
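The tag/device-token validation performed in __init__() above boils down to the two regular expressions defined near the top of the module. A standalone illustration (not the plugin code itself):

import re

IS_TAG = re.compile(r'^[@](?P<name>[A-Z0-9]{1,63})$', re.I)
IS_DEVICETOKEN = re.compile(r'^[A-Z0-9]{64}$', re.I)

def classify_targets(targets):
    tags, device_tokens = [], []
    for target in targets:
        if IS_TAG.match(target):
            # '@tag' entries are stored without their leading '@'
            tags.append(IS_TAG.match(target).group('name'))
        elif IS_DEVICETOKEN.match(target):
            device_tokens.append(target)
        # anything else is dropped (the plugin above logs a warning)
    return tags, device_tokens

print(classify_targets(['@general', 'A' * 64, 'not-valid!']))
# (['general'], ['AAAA...A'])  -> the invalid entry is skipped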
|
||||
|
|
libs/apprise/plugins/NotifyDBus.py (new file, 436 lines)
@@ -0,0 +1,436 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import GET_SCHEMA_RE
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Default our global support flag
|
||||
NOTIFY_DBUS_SUPPORT_ENABLED = False
|
||||
|
||||
# Image support is dependant on the GdkPixbuf library being available
|
||||
NOTIFY_DBUS_IMAGE_SUPPORT = False
|
||||
|
||||
# The following are required to hook into the notifications:
|
||||
NOTIFY_DBUS_INTERFACE = 'org.freedesktop.Notifications'
|
||||
NOTIFY_DBUS_SETTING_LOCATION = '/org/freedesktop/Notifications'
|
||||
|
||||
# Initialize our mainloops
|
||||
LOOP_GLIB = None
|
||||
LOOP_QT = None
|
||||
|
||||
|
||||
try:
|
||||
# dbus essentials
|
||||
from dbus import SessionBus
|
||||
from dbus import Interface
|
||||
from dbus import Byte
|
||||
from dbus import ByteArray
|
||||
|
||||
#
|
||||
# now we try to determine which mainloop(s) we can access
|
||||
#
|
||||
|
||||
# glib
|
||||
try:
|
||||
from dbus.mainloop.glib import DBusGMainLoop
|
||||
LOOP_GLIB = DBusGMainLoop()
|
||||
|
||||
except ImportError:
|
||||
# No problem
|
||||
pass
|
||||
|
||||
# qt
|
||||
try:
|
||||
from dbus.mainloop.qt import DBusQtMainLoop
|
||||
LOOP_QT = DBusQtMainLoop(set_as_default=True)
|
||||
|
||||
except ImportError:
|
||||
# No problem
|
||||
pass
|
||||
|
||||
# We're good as long as at least one
|
||||
NOTIFY_DBUS_SUPPORT_ENABLED = (
|
||||
LOOP_GLIB is not None or LOOP_QT is not None)
|
||||
|
||||
try:
|
||||
# The following is required for Image/Icon loading only
|
||||
import gi
|
||||
gi.require_version('GdkPixbuf', '2.0')
|
||||
from gi.repository import GdkPixbuf
|
||||
NOTIFY_DBUS_IMAGE_SUPPORT = True
|
||||
|
||||
except (ImportError, ValueError):
|
||||
# No problem; this will get caught in outer try/catch
|
||||
|
||||
# A ValueError will get thrown upon calling gi.require_version() if
|
||||
# GDK/GTK isn't installed on the system but gi is.
|
||||
pass
|
||||
|
||||
except ImportError:
|
||||
# No problem; we just simply can't support this plugin; we could
|
||||
# be in Microsoft Windows, or we just don't have the python-gobject
|
||||
# library available to us (or maybe one we don't support)?
|
||||
pass
|
||||
|
||||
# Define our supported protocols and the loop to assign them.
|
||||
# The key to value pairs are the actual supported schema's matched
|
||||
# up with the Main Loop they should reference when accessed.
|
||||
MAINLOOP_MAP = {
|
||||
'qt': LOOP_QT,
|
||||
'kde': LOOP_QT,
|
||||
'glib': LOOP_GLIB,
|
||||
'dbus': LOOP_QT if LOOP_QT else LOOP_GLIB,
|
||||
}
|
||||
|
||||
|
||||
# Urgencies
|
||||
class DBusUrgency(object):
|
||||
LOW = 0
|
||||
NORMAL = 1
|
||||
HIGH = 2
|
||||
|
||||
|
||||
# Define our urgency levels
|
||||
DBUS_URGENCIES = (
|
||||
DBusUrgency.LOW,
|
||||
DBusUrgency.NORMAL,
|
||||
DBusUrgency.HIGH,
|
||||
)
|
||||
|
||||
|
||||
class NotifyDBus(NotifyBase):
|
||||
"""
|
||||
A wrapper for local DBus/Qt Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'DBus Notification'
|
||||
|
||||
# The default protocols
|
||||
# Python 3 keys() does not return a list object; it's its own dict_keys()
|
||||
# object. If we referenced it directly, we wouldn't be backwards compatible with
|
||||
# Python v2. So converting the result set back into a list makes us
|
||||
# compatible
|
||||
|
||||
protocol = list(MAINLOOP_MAP.keys())
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dbus'
|
||||
|
||||
# No throttling required for DBus queries
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
# The number of seconds to keep the message present for
|
||||
message_timeout_ms = 13000
|
||||
|
||||
# Limit results to just the first 10 lines; otherwise there is just too much
|
||||
# content to display
|
||||
body_max_line_count = 10
|
||||
|
||||
# A title can not be used for SMS Messages. Setting this to zero will
|
||||
# cause any title (if defined) to get placed into the message body.
|
||||
title_maxlen = 0
|
||||
|
||||
# This entry is a bit hacky, but it allows us to unit-test this library
|
||||
# in an environment that simply doesn't have the gnome packages
|
||||
# available to us. It also allows us to handle situations where the
|
||||
# packages actually are present but we need to test that they aren't.
|
||||
# If anyone is seeing this and knows a better way of testing this
|
||||
# outside of what is defined in test/test_glib_plugin.py, please
|
||||
# let me know! :)
|
||||
_enabled = NOTIFY_DBUS_SUPPORT_ENABLED
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://_/',
|
||||
)
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'urgency': {
|
||||
'name': _('Urgency'),
|
||||
'type': 'choice:int',
|
||||
'values': DBUS_URGENCIES,
|
||||
'default': DBusUrgency.NORMAL,
|
||||
},
|
||||
'x': {
|
||||
'name': _('X-Axis'),
|
||||
'type': 'int',
|
||||
'min': 0,
|
||||
'map_to': 'x_axis',
|
||||
},
|
||||
'y': {
|
||||
'name': _('Y-Axis'),
|
||||
'type': 'int',
|
||||
'min': 0,
|
||||
'map_to': 'y_axis',
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, urgency=None, x_axis=None, y_axis=None,
|
||||
include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize DBus Object
|
||||
"""
|
||||
|
||||
super(NotifyDBus, self).__init__(**kwargs)
|
||||
|
||||
# Track our notifications
|
||||
self.registry = {}
|
||||
|
||||
# Store our schema; default to dbus
|
||||
self.schema = kwargs.get('schema', 'dbus')
|
||||
|
||||
if self.schema not in MAINLOOP_MAP:
|
||||
msg = 'The schema specified ({}) is not supported.' \
|
||||
.format(self.schema)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The urgency of the message
|
||||
if urgency not in DBUS_URGENCIES:
|
||||
self.urgency = DBusUrgency.NORMAL
|
||||
|
||||
else:
|
||||
self.urgency = urgency
|
||||
|
||||
# Our x/y axis settings
|
||||
self.x_axis = x_axis if isinstance(x_axis, int) else None
|
||||
self.y_axis = y_axis if isinstance(y_axis, int) else None
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform DBus Notification
|
||||
"""
|
||||
|
||||
if not self._enabled or MAINLOOP_MAP[self.schema] is None:
|
||||
self.logger.warning(
|
||||
"{} notifications could not be loaded.".format(self.schema))
|
||||
return False
|
||||
|
||||
# Acquire our session
|
||||
session = SessionBus(mainloop=MAINLOOP_MAP[self.schema])
|
||||
|
||||
# acquire our dbus object
|
||||
dbus_obj = session.get_object(
|
||||
NOTIFY_DBUS_INTERFACE,
|
||||
NOTIFY_DBUS_SETTING_LOCATION,
|
||||
)
|
||||
|
||||
# Acquire our dbus interface
|
||||
dbus_iface = Interface(
|
||||
dbus_obj,
|
||||
dbus_interface=NOTIFY_DBUS_INTERFACE,
|
||||
)
|
||||
|
||||
# image path
|
||||
icon_path = None if not self.include_image \
|
||||
else self.image_path(notify_type, extension='.ico')
|
||||
|
||||
# Our meta payload
|
||||
meta_payload = {
|
||||
"urgency": Byte(self.urgency)
|
||||
}
|
||||
|
||||
if not (self.x_axis is None and self.y_axis is None):
|
||||
# Set the x/y axis entries if these were set
|
||||
meta_payload['x'] = self.x_axis
|
||||
meta_payload['y'] = self.y_axis
|
||||
|
||||
if NOTIFY_DBUS_IMAGE_SUPPORT and icon_path:
|
||||
try:
|
||||
# Use Pixbuf to create the proper image type
|
||||
image = GdkPixbuf.Pixbuf.new_from_file(icon_path)
|
||||
|
||||
# Associate our image to our notification
|
||||
meta_payload['icon_data'] = (
|
||||
image.get_width(),
|
||||
image.get_height(),
|
||||
image.get_rowstride(),
|
||||
image.get_has_alpha(),
|
||||
image.get_bits_per_sample(),
|
||||
image.get_n_channels(),
|
||||
ByteArray(image.get_pixels())
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
"Could not load Gnome notification icon ({}): {}"
|
||||
.format(icon_path, e))
|
||||
|
||||
try:
|
||||
# Always call throttle() before any remote execution is made
|
||||
self.throttle()
|
||||
|
||||
dbus_iface.Notify(
|
||||
# Application Identifier
|
||||
self.app_id,
|
||||
# Message ID (0 = New Message)
|
||||
0,
|
||||
# Icon (str) - not used
|
||||
'',
|
||||
# Title
|
||||
str(title),
|
||||
# Body
|
||||
str(body),
|
||||
# Actions
|
||||
list(),
|
||||
# Meta
|
||||
meta_payload,
|
||||
# Message Timeout
|
||||
self.message_timeout_ms,
|
||||
)
|
||||
|
||||
self.logger.info('Sent DBus notification.')
|
||||
|
||||
except Exception:
|
||||
self.logger.warning('Failed to send DBus notification.')
|
||||
self.logger.exception('DBus Exception')
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
_map = {
|
||||
DBusUrgency.LOW: 'low',
|
||||
DBusUrgency.NORMAL: 'normal',
|
||||
DBusUrgency.HIGH: 'high',
|
||||
}
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'urgency': 'normal' if self.urgency not in _map
|
||||
else _map[self.urgency],
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# x in (x,y) screen coordinates
|
||||
if self.x_axis:
|
||||
args['x'] = str(self.x_axis)
|
||||
|
||||
# y in (x,y) screen coordinates
|
||||
if self.y_axis:
|
||||
args['y'] = str(self.y_axis)
|
||||
|
||||
return '{schema}://_/?{args}'.format(
|
||||
schema=self.protocol,
|
||||
args=NotifyDBus.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
There are no parameters necessary for this protocol; simply having
|
||||
gnome:// is all you need. This function just makes sure that
|
||||
is in place.
|
||||
|
||||
"""
|
||||
schema = GET_SCHEMA_RE.match(url)
|
||||
if schema is None:
|
||||
# Content is simply not parseable
|
||||
return None
|
||||
|
||||
results = NotifyBase.parse_url(url)
|
||||
if not results:
|
||||
results = {
|
||||
'schema': schema.group('schema').lower(),
|
||||
'user': None,
|
||||
'password': None,
|
||||
'port': None,
|
||||
'host': '_',
|
||||
'fullpath': None,
|
||||
'path': None,
|
||||
'url': url,
|
||||
'qsd': {},
|
||||
}
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
# DBus supports urgency, but we also support the keyword priority
|
||||
# so that it is consistent with some of the other plugins
|
||||
urgency = results['qsd'].get('urgency', results['qsd'].get('priority'))
|
||||
if urgency and len(urgency):
|
||||
_map = {
|
||||
'0': DBusUrgency.LOW,
|
||||
'l': DBusUrgency.LOW,
|
||||
'n': DBusUrgency.NORMAL,
|
||||
'1': DBusUrgency.NORMAL,
|
||||
'h': DBusUrgency.HIGH,
|
||||
'2': DBusUrgency.HIGH,
|
||||
}
|
||||
|
||||
try:
|
||||
# Attempt to index/retrieve our urgency
|
||||
results['urgency'] = _map[urgency[0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
# handle x,y coordinates
|
||||
try:
|
||||
results['x_axis'] = int(results['qsd'].get('x'))
|
||||
|
||||
except (TypeError, ValueError):
|
||||
# No x was set
|
||||
pass
|
||||
|
||||
try:
|
||||
results['y_axis'] = int(results['qsd'].get('y'))
|
||||
|
||||
except (TypeError, ValueError):
|
||||
# No y was set
|
||||
pass
|
||||
|
||||
return results
|
|
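The parse_url() handler above folds both the urgency= and priority= query parameters down to a single DBusUrgency constant, keyed on the first character only. A minimal standalone sketch of that lookup, assuming the same 0/l/n/1/h/2 keys used above:

# Sketch of the urgency lookup performed by NotifyDBus.parse_url()
URGENCY_MAP = {'0': 0, 'l': 0, 'n': 1, '1': 1, 'h': 2, '2': 2}  # LOW / NORMAL / HIGH

def resolve_urgency(value, default=1):
    # Only the first character matters, mirroring the plugin's behaviour;
    # anything unrecognised falls back to NORMAL.
    try:
        return URGENCY_MAP[value[0].lower()]
    except (KeyError, IndexError, TypeError):
        return default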
@ -1,20 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Discord Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# For this to work correctly you need to create a webhook. To do this just
|
||||
# click on the little gear icon next to the channel you're part of. From
|
||||
|
@ -38,10 +45,11 @@ import requests
|
|||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyDiscord(NotifyBase):
|
||||
|
@ -59,7 +67,7 @@ class NotifyDiscord(NotifyBase):
|
|||
secure_protocol = 'discord'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_discored'
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_discord'
|
||||
|
||||
# Discord Webhook
|
||||
notify_url = 'https://discordapp.com/api/webhooks'
|
||||
|
@ -70,11 +78,66 @@ class NotifyDiscord(NotifyBase):
|
|||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 2000
|
||||
|
||||
# Default Notify Format
|
||||
notify_format = NotifyFormat.MARKDOWN
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{webhook_id}/{webhook_token}',
|
||||
'{schema}://{botname}@{webhook_id}/{webhook_token}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'botname': {
|
||||
'name': _('Bot Name'),
|
||||
'type': 'string',
|
||||
'map_to': 'user',
|
||||
},
|
||||
'webhook_id': {
|
||||
'name': _('Webhook ID'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'webhook_token': {
|
||||
'name': _('Webhook Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'tts': {
|
||||
'name': _('Text To Speech'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
},
|
||||
'avatar': {
|
||||
'name': _('Avatar Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
},
|
||||
'footer': {
|
||||
'name': _('Display Footer'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
},
|
||||
'footer_logo': {
|
||||
'name': _('Footer Logo'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, webhook_id, webhook_token, tts=False, avatar=True,
|
||||
footer=False, thumbnail=True, **kwargs):
|
||||
footer=False, footer_logo=True, include_image=False,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize Discord Object
|
||||
|
||||
|
@ -82,14 +145,14 @@ class NotifyDiscord(NotifyBase):
|
|||
super(NotifyDiscord, self).__init__(**kwargs)
|
||||
|
||||
if not webhook_id:
|
||||
raise TypeError(
|
||||
'An invalid Client ID was specified.'
|
||||
)
|
||||
msg = 'An invalid Client ID was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not webhook_token:
|
||||
raise TypeError(
|
||||
'An invalid Webhook Token was specified.'
|
||||
)
|
||||
msg = 'An invalid Webhook Token was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Store our data
|
||||
self.webhook_id = webhook_id
|
||||
|
@ -101,15 +164,18 @@ class NotifyDiscord(NotifyBase):
|
|||
# Over-ride Avatar Icon
|
||||
self.avatar = avatar
|
||||
|
||||
# Place a footer icon
|
||||
# Place a footer
|
||||
self.footer = footer
|
||||
|
||||
# include a footer_logo in footer
|
||||
self.footer_logo = footer_logo
|
||||
|
||||
# Place a thumbnail image inline with the message body
|
||||
self.thumbnail = thumbnail
|
||||
self.include_image = include_image
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Discord Notification
|
||||
"""
|
||||
|
@ -127,11 +193,14 @@ class NotifyDiscord(NotifyBase):
|
|||
# If Text-To-Speech is set to True, then we do not want to wait
|
||||
# for the whole message before continuing. Otherwise, we wait
|
||||
'wait': self.tts is False,
|
||||
}
|
||||
|
||||
# Our color associated with our notification
|
||||
'color': self.color(notify_type, int),
|
||||
# Acquire image_url
|
||||
image_url = self.image_url(notify_type)
|
||||
|
||||
'embeds': [{
|
||||
if self.notify_format == NotifyFormat.MARKDOWN:
|
||||
# Use embeds for payload
|
||||
payload['embeds'] = [{
|
||||
'provider': {
|
||||
'name': self.app_id,
|
||||
'url': self.app_url,
|
||||
|
@ -139,10 +208,12 @@ class NotifyDiscord(NotifyBase):
|
|||
'title': title,
|
||||
'type': 'rich',
|
||||
'description': body,
|
||||
}]
|
||||
}
|
||||
|
||||
if self.notify_format == NotifyFormat.MARKDOWN:
|
||||
# Our color associated with our notification
|
||||
'color': self.color(notify_type, int),
|
||||
}]
|
||||
|
||||
# Break titles out so that we can sort them in embeds
|
||||
fields = self.extract_markdown_sections(body)
|
||||
|
||||
if len(fields) > 0:
|
||||
|
@ -153,25 +224,32 @@ class NotifyDiscord(NotifyBase):
|
|||
fields[0].get('name') + fields[0].get('value')
|
||||
payload['embeds'][0]['fields'] = fields[1:]
|
||||
|
||||
if self.footer:
|
||||
logo_url = self.image_url(notify_type, logo=True)
|
||||
payload['embeds'][0]['footer'] = {
|
||||
'text': self.app_desc,
|
||||
}
|
||||
if logo_url:
|
||||
payload['embeds'][0]['footer']['icon_url'] = logo_url
|
||||
if self.footer:
|
||||
# Acquire logo URL
|
||||
logo_url = self.image_url(notify_type, logo=True)
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
if self.thumbnail:
|
||||
# Set Footer text to our app description
|
||||
payload['embeds'][0]['footer'] = {
|
||||
'text': self.app_desc,
|
||||
}
|
||||
|
||||
if self.footer_logo and logo_url:
|
||||
payload['embeds'][0]['footer']['icon_url'] = logo_url
|
||||
|
||||
if self.include_image and image_url:
|
||||
payload['embeds'][0]['thumbnail'] = {
|
||||
'url': image_url,
|
||||
'height': 256,
|
||||
'width': 256,
|
||||
}
|
||||
|
||||
if self.avatar:
|
||||
payload['avatar_url'] = image_url
|
||||
else:
|
||||
# not markdown
|
||||
payload['content'] = \
|
||||
body if not title else "{}\r\n{}".format(title, body)
|
||||
|
||||
if self.avatar and image_url:
|
||||
payload['avatar_url'] = image_url
|
||||
|
||||
if self.user:
|
||||
# Optionally override the default username of the webhook
|
||||
|
@ -188,6 +266,10 @@ class NotifyDiscord(NotifyBase):
|
|||
notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Discord Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
notify_url,
|
||||
|
@ -197,20 +279,19 @@ class NotifyDiscord(NotifyBase):
|
|||
)
|
||||
if r.status_code not in (
|
||||
requests.codes.ok, requests.codes.no_content):
|
||||
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -228,6 +309,30 @@ class NotifyDiscord(NotifyBase):
|
|||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'tts': 'yes' if self.tts else 'no',
|
||||
'avatar': 'yes' if self.avatar else 'no',
|
||||
'footer': 'yes' if self.footer else 'no',
|
||||
'footer_logo': 'yes' if self.footer_logo else 'no',
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{webhook_id}/{webhook_token}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
webhook_id=NotifyDiscord.quote(self.webhook_id, safe=''),
|
||||
webhook_token=NotifyDiscord.quote(self.webhook_token, safe=''),
|
||||
args=NotifyDiscord.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@ -245,14 +350,14 @@ class NotifyDiscord(NotifyBase):
|
|||
return results
|
||||
|
||||
# Store our webhook ID
|
||||
webhook_id = results['host']
|
||||
webhook_id = NotifyDiscord.unquote(results['host'])
|
||||
|
||||
# Now fetch our tokens
|
||||
try:
|
||||
webhook_token = [x for x in filter(bool, NotifyBase.split_path(
|
||||
results['fullpath']))][0]
|
||||
webhook_token = \
|
||||
NotifyDiscord.split_path(results['fullpath'])[0]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
except IndexError:
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
webhook_token = None
|
||||
|
@ -266,15 +371,53 @@ class NotifyDiscord(NotifyBase):
|
|||
# Use Footer
|
||||
results['footer'] = parse_bool(results['qsd'].get('footer', False))
|
||||
|
||||
# Use Footer Logo
|
||||
results['footer_logo'] = \
|
||||
parse_bool(results['qsd'].get('footer_logo', True))
|
||||
|
||||
# Update Avatar Icon
|
||||
results['avatar'] = parse_bool(results['qsd'].get('avatar', True))
|
||||
|
||||
# Use Thumbnail
|
||||
results['thumbnail'] = \
|
||||
parse_bool(results['qsd'].get('thumbnail', True))
|
||||
if 'thumbnail' in results['qsd']:
|
||||
# Deprecation notice issued for v0.7.5
|
||||
NotifyDiscord.logger.deprecate(
|
||||
'The Discord URL contains the parameter '
|
||||
'"thumbnail=" which will be deprecated in an upcoming '
|
||||
'release. Please use "image=" instead.'
|
||||
)
|
||||
|
||||
# use image= for consistency with the other plugins but we also
|
||||
# support thumbnail= for backwards compatibility.
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get(
|
||||
'image', results['qsd'].get('thumbnail', False)))
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support https://discordapp.com/api/webhooks/WEBHOOK_ID/WEBHOOK_TOKEN
|
||||
"""
|
||||
|
||||
result = re.match(
|
||||
r'^https?://discordapp\.com/api/webhooks/'
|
||||
r'(?P<webhook_id>[0-9]+)/'
|
||||
r'(?P<webhook_token>[A-Z0-9_-]+)/?'
|
||||
r'(?P<args>\?[.+])?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifyDiscord.parse_url(
|
||||
'{schema}://{webhook_id}/{webhook_token}/{args}'.format(
|
||||
schema=NotifyDiscord.secure_protocol,
|
||||
webhook_id=result.group('webhook_id'),
|
||||
webhook_token=result.group('webhook_token'),
|
||||
args='' if not result.group('args')
|
||||
else result.group('args')))
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def extract_markdown_sections(markdown):
|
||||
"""
|
||||
|
@ -284,8 +427,8 @@ class NotifyDiscord(NotifyBase):
|
|||
|
||||
"""
|
||||
regex = re.compile(
|
||||
r'\s*#+\s*(?P<name>[^#\n]+)([ \r\t\v#]*)'
|
||||
r'(?P<value>(.+?)(\n(?!\s#))|\s*$)', flags=re.S)
|
||||
r'^\s*#+\s*(?P<name>[^#\n]+)([ \r\t\v#])?'
|
||||
r'(?P<value>([^ \r\t\v#].+?)(\n(?!\s#))|\s*$)', flags=re.S | re.M)
|
||||
|
||||
common = regex.finditer(markdown)
|
||||
fields = list()
|
||||
|
|
|
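The new parse_native_url() above means a raw Discord webhook URL can be handed straight to Apprise; it should be rewritten into the discord://webhook_id/webhook_token form before the normal parser runs. A hedged usage sketch (the ID and token below are placeholders for the values Discord generates for a channel webhook; the real webhook ID is numeric):

import apprise

notifier = apprise.Apprise()
# Either form should resolve to the same plugin configuration.
notifier.add('discord://WEBHOOK_ID/WEBHOOK_TOKEN/?footer=yes&image=yes')
notifier.add('https://discordapp.com/api/webhooks/WEBHOOK_ID/WEBHOOK_TOKEN')

notifier.notify(title='Bazarr', body='**Episode** subtitles downloaded.')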
@ -1,31 +1,41 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Email Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
|
||||
from datetime import datetime
|
||||
import six
|
||||
import smtplib
|
||||
from socket import error as SocketError
|
||||
|
||||
from email.mime.text import MIMEText
|
||||
from socket import error as SocketError
|
||||
from datetime import datetime
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NotifyType
|
||||
from ..utils import is_email
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class WebBaseLogin(object):
|
||||
|
@ -40,18 +50,33 @@ class WebBaseLogin(object):
|
|||
USERID = 'UserID'
|
||||
|
||||
|
||||
# Secure Email Modes
|
||||
class SecureMailMode(object):
|
||||
SSL = "ssl"
|
||||
STARTTLS = "starttls"
|
||||
|
||||
|
||||
# Define all of the secure modes (used during validation)
|
||||
SECURE_MODES = (
|
||||
SecureMailMode.SSL,
|
||||
SecureMailMode.STARTTLS,
|
||||
)
|
||||
|
||||
# To attempt to make this script stupid proof, if we detect an email address
|
||||
# that is part of this table, we can pre-use a lot more defaults if they
|
||||
# aren't otherwise specified in the user's input.
|
||||
WEBBASE_LOOKUP_TABLE = (
|
||||
EMAIL_TEMPLATES = (
|
||||
# Google GMail
|
||||
(
|
||||
'Google Mail',
|
||||
re.compile(r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@(?P<domain>gmail\.com)$', re.I),
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>gmail\.com)$', re.I),
|
||||
{
|
||||
'port': 587,
|
||||
'smtp_host': 'smtp.gmail.com',
|
||||
'secure': True,
|
||||
'secure_mode': SecureMailMode.STARTTLS,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
@ -59,11 +84,14 @@ WEBBASE_LOOKUP_TABLE = (
|
|||
# Pronto Mail
|
||||
(
|
||||
'Pronto Mail',
|
||||
re.compile(r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@(?P<domain>prontomail\.com)$', re.I),
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>prontomail\.com)$', re.I),
|
||||
{
|
||||
'port': 465,
|
||||
'smtp_host': 'secure.emailsrvr.com',
|
||||
'secure': True,
|
||||
'secure_mode': SecureMailMode.STARTTLS,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
@ -71,11 +99,14 @@ WEBBASE_LOOKUP_TABLE = (
|
|||
# Microsoft Hotmail
|
||||
(
|
||||
'Microsoft Hotmail',
|
||||
re.compile(r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@(?P<domain>(hotmail|live)\.com)$', re.I),
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>(hotmail|live)\.com)$', re.I),
|
||||
{
|
||||
'port': 587,
|
||||
'smtp_host': 'smtp.live.com',
|
||||
'secure': True,
|
||||
'secure_mode': SecureMailMode.STARTTLS,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
@ -83,11 +114,83 @@ WEBBASE_LOOKUP_TABLE = (
|
|||
# Yahoo Mail
|
||||
(
|
||||
'Yahoo Mail',
|
||||
re.compile(r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@(?P<domain>yahoo\.(ca|com))$', re.I),
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>yahoo\.(ca|com))$', re.I),
|
||||
{
|
||||
'port': 465,
|
||||
'smtp_host': 'smtp.mail.yahoo.com',
|
||||
'secure': True,
|
||||
'secure_mode': SecureMailMode.STARTTLS,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
||||
# Fast Mail (Series 1)
|
||||
(
|
||||
'Fast Mail',
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>fastmail\.(com|cn|co\.uk|com\.au|de|es|fm|fr|im|'
|
||||
r'in|jp|mx|net|nl|org|se|to|tw|uk|us))$', re.I),
|
||||
{
|
||||
'port': 465,
|
||||
'smtp_host': 'smtp.fastmail.com',
|
||||
'secure': True,
|
||||
'secure_mode': SecureMailMode.SSL,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
||||
# Fast Mail (Series 2)
|
||||
(
|
||||
'Fast Mail Extended Addresses',
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>123mail\.org|airpost\.net|eml\.cc|fmail\.co\.uk|'
|
||||
r'fmgirl\.com|fmguy\.com|mailbolt\.com|mailcan\.com|'
|
||||
r'mailhaven\.com|mailmight\.com|ml1\.net|mm\.st|myfastmail\.com|'
|
||||
r'proinbox\.com|promessage\.com|rushpost\.com|sent\.(as|at|com)|'
|
||||
r'speedymail\.org|warpmail\.net|xsmail\.com|150mail\.com|'
|
||||
r'150ml\.com|16mail\.com|2-mail\.com|4email\.net|50mail\.com|'
|
||||
r'allmail\.net|bestmail\.us|cluemail\.com|elitemail\.org|'
|
||||
r'emailcorner\.net|emailengine\.(net|org)|emailgroups\.net|'
|
||||
r'emailplus\.org|emailuser\.net|f-m\.fm|fast-email\.com|'
|
||||
r'fast-mail\.org|fastem\.com|fastemail\.us|fastemailer\.com|'
|
||||
r'fastest\.cc|fastimap\.com|fastmailbox\.net|fastmessaging\.com|'
|
||||
r'fea\.st|fmailbox\.com|ftml\.net|h-mail\.us|hailmail\.net|'
|
||||
r'imap-mail\.com|imap\.cc|imapmail\.org|inoutbox\.com|'
|
||||
r'internet-e-mail\.com|internet-mail\.org|internetemails\.net|'
|
||||
r'internetmailing\.net|jetemail\.net|justemail\.net|'
|
||||
r'letterboxes\.org|mail-central\.com|mail-page\.com|'
|
||||
r'mailandftp\.com|mailas\.com|mailc\.net|mailforce\.net|'
|
||||
r'mailftp\.com|mailingaddress\.org|mailite\.com|mailnew\.com|'
|
||||
r'mailsent\.net|mailservice\.ms|mailup\.net|mailworks\.org|'
|
||||
r'mymacmail\.com|nospammail\.net|ownmail\.net|petml\.com|'
|
||||
r'postinbox\.com|postpro\.net|realemail\.net|reallyfast\.biz|'
|
||||
r'reallyfast\.info|speedpost\.net|ssl-mail\.com|swift-mail\.com|'
|
||||
r'the-fastest\.net|the-quickest\.com|theinternetemail\.com|'
|
||||
r'veryfast\.biz|veryspeedy\.net|yepmail\.net)$', re.I),
|
||||
{
|
||||
'port': 465,
|
||||
'smtp_host': 'smtp.fastmail.com',
|
||||
'secure': True,
|
||||
'secure_mode': SecureMailMode.SSL,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
||||
# Zoho Mail
|
||||
(
|
||||
'Zoho Mail',
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>zoho\.com)$', re.I),
|
||||
{
|
||||
'port': 465,
|
||||
'smtp_host': 'smtp.zoho.com',
|
||||
'secure': True,
|
||||
'secure_mode': SecureMailMode.SSL,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
@ -95,7 +198,9 @@ WEBBASE_LOOKUP_TABLE = (
|
|||
# Catch All
|
||||
(
|
||||
'Custom',
|
||||
re.compile(r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@(?P<domain>.+)$', re.I),
|
||||
re.compile(
|
||||
r'^((?P<label>[^+]+)\+)?(?P<id>[^@]+)@'
|
||||
r'(?P<domain>.+)$', re.I),
|
||||
{
|
||||
# Setting smtp_host to None is a way of
|
||||
# auto-detecting it based on other parameters
|
||||
|
@ -125,18 +230,101 @@ class NotifyEmail(NotifyBase):
|
|||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_email'
|
||||
|
||||
# Default Notify Format
|
||||
notify_format = NotifyFormat.HTML
|
||||
|
||||
# Default Non-Encryption Port
|
||||
default_port = 25
|
||||
|
||||
# Default Secure Port
|
||||
default_secure_port = 587
|
||||
|
||||
# Default Secure Mode
|
||||
default_secure_mode = SecureMailMode.STARTTLS
|
||||
|
||||
# Default SMTP Timeout (in seconds)
|
||||
connect_timeout = 15
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{user}:{password}@{host}',
|
||||
'{schema}://{user}:{password}@{host}:{port}',
|
||||
'{schema}://{user}:{password}@{host}/{targets}',
|
||||
'{schema}://{user}:{password}@{host}:{port}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'user': {
|
||||
'name': _('User Name'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'host': {
|
||||
'name': _('Domain'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Target Emails'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'name': _('To Email'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'from': {
|
||||
'name': _('From Email'),
|
||||
'type': 'string',
|
||||
'map_to': 'from_addr',
|
||||
},
|
||||
'name': {
|
||||
'name': _('From Name'),
|
||||
'type': 'string',
|
||||
'map_to': 'from_name',
|
||||
},
|
||||
'smtp_host': {
|
||||
'name': _('SMTP Server'),
|
||||
'type': 'string',
|
||||
},
|
||||
'mode': {
|
||||
'name': _('Secure Mode'),
|
||||
'type': 'choice:string',
|
||||
'values': SECURE_MODES,
|
||||
'default': SecureMailMode.STARTTLS,
|
||||
'map_to': 'secure_mode',
|
||||
},
|
||||
'timeout': {
|
||||
'name': _('Server Timeout'),
|
||||
'type': 'int',
|
||||
'default': 15,
|
||||
'min': 5,
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, timeout=15, smtp_host=None, from_name=None,
|
||||
from_addr=None, secure_mode=None, targets=None, **kwargs):
|
||||
"""
|
||||
Initialize Email Object
|
||||
|
||||
The smtp_host and secure_mode can be automatically detected depending
|
||||
on how the URL was built
|
||||
"""
|
||||
super(NotifyEmail, self).__init__(**kwargs)
|
||||
|
||||
|
@ -150,26 +338,49 @@ class NotifyEmail(NotifyBase):
|
|||
|
||||
# Email SMTP Server Timeout
|
||||
try:
|
||||
self.timeout = int(kwargs.get('timeout', self.connect_timeout))
|
||||
self.timeout = int(timeout)
|
||||
|
||||
except (ValueError, TypeError):
|
||||
self.timeout = self.connect_timeout
|
||||
|
||||
# Acquire targets
|
||||
self.targets = parse_list(targets)
|
||||
|
||||
# Now we want to construct the To and From email
|
||||
# addresses from the URL provided
|
||||
self.from_name = kwargs.get('name', None)
|
||||
self.from_addr = kwargs.get('from', None)
|
||||
self.to_addr = kwargs.get('to', self.from_addr)
|
||||
self.from_name = from_name
|
||||
self.from_addr = from_addr
|
||||
|
||||
if not NotifyBase.is_email(self.from_addr):
|
||||
if not self.from_addr:
|
||||
# detect our email address
|
||||
self.from_addr = '{}@{}'.format(
|
||||
re.split(r'[\s@]+', self.user)[0],
|
||||
self.host,
|
||||
)
|
||||
|
||||
if not is_email(self.from_addr):
|
||||
# Parse Source domain based on from_addr
|
||||
raise TypeError('Invalid ~From~ email format: %s' % self.from_addr)
|
||||
msg = 'Invalid ~From~ email specified: {}'.format(self.from_addr)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not NotifyBase.is_email(self.to_addr):
|
||||
raise TypeError('Invalid ~To~ email format: %s' % self.to_addr)
|
||||
# If our target email list is empty we want to add ourselves to it
|
||||
if len(self.targets) == 0:
|
||||
self.targets.append(self.from_addr)
|
||||
|
||||
# Now detect the SMTP Server
|
||||
self.smtp_host = kwargs.get('smtp_host', '')
|
||||
self.smtp_host = \
|
||||
smtp_host if isinstance(smtp_host, six.string_types) else ''
|
||||
|
||||
# Now detect secure mode
|
||||
self.secure_mode = self.default_secure_mode \
|
||||
if not isinstance(secure_mode, six.string_types) \
|
||||
else secure_mode.lower()
|
||||
if self.secure_mode not in SECURE_MODES:
|
||||
msg = 'The secure mode specified ({}) is invalid.'\
|
||||
.format(secure_mode)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Apply any defaults based on certain known configurations
|
||||
self.NotifyEmailDefaults()
|
||||
|
@ -188,47 +399,53 @@ class NotifyEmail(NotifyBase):
|
|||
# over-riding any smarts to be applied
|
||||
return
|
||||
|
||||
for i in range(len(WEBBASE_LOOKUP_TABLE)): # pragma: no branch
|
||||
for i in range(len(EMAIL_TEMPLATES)): # pragma: no branch
|
||||
self.logger.debug('Scanning %s against %s' % (
|
||||
self.to_addr, WEBBASE_LOOKUP_TABLE[i][0]
|
||||
self.from_addr, EMAIL_TEMPLATES[i][0]
|
||||
))
|
||||
match = WEBBASE_LOOKUP_TABLE[i][1].match(self.from_addr)
|
||||
match = EMAIL_TEMPLATES[i][1].match(self.from_addr)
|
||||
if match:
|
||||
self.logger.info(
|
||||
'Applying %s Defaults' %
|
||||
WEBBASE_LOOKUP_TABLE[i][0],
|
||||
EMAIL_TEMPLATES[i][0],
|
||||
)
|
||||
self.port = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
self.port = EMAIL_TEMPLATES[i][2]\
|
||||
.get('port', self.port)
|
||||
self.secure = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
self.secure = EMAIL_TEMPLATES[i][2]\
|
||||
.get('secure', self.secure)
|
||||
|
||||
self.smtp_host = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
self.secure_mode = EMAIL_TEMPLATES[i][2]\
|
||||
.get('secure_mode', self.secure_mode)
|
||||
self.smtp_host = EMAIL_TEMPLATES[i][2]\
|
||||
.get('smtp_host', self.smtp_host)
|
||||
|
||||
if self.smtp_host is None:
|
||||
# Detect Server if possible
|
||||
self.smtp_host = re.split(r'[\s@]+', self.from_addr)[-1]
|
||||
# default to our host
|
||||
self.smtp_host = self.host
|
||||
|
||||
# Adjust email login based on the defined
|
||||
# usertype
|
||||
login_type = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
# Adjust email login based on the defined usertype. If no entry
|
||||
# was specified, then we default to having them all set (which
|
||||
# basically implies that there are no restrictions and we use
|
||||
# whatever was specified)
|
||||
login_type = EMAIL_TEMPLATES[i][2]\
|
||||
.get('login_type', [])
|
||||
|
||||
if NotifyBase.is_email(self.user) and \
|
||||
WebBaseLogin.EMAIL not in login_type:
|
||||
# Email specified but login type
|
||||
# not supported; switch it to user id
|
||||
self.user = match.group('id')
|
||||
if login_type:
|
||||
# only apply additional logic to our user if a login_type
|
||||
# was specified.
|
||||
if is_email(self.user) and \
|
||||
WebBaseLogin.EMAIL not in login_type:
|
||||
# Email specified but login type
|
||||
# not supported; switch it to user id
|
||||
self.user = match.group('id')
|
||||
|
||||
elif WebBaseLogin.USERID not in login_type:
|
||||
# user specified but login type
|
||||
# not supported; switch it to email
|
||||
self.user = '%s@%s' % (self.user, self.host)
|
||||
elif WebBaseLogin.USERID not in login_type:
|
||||
# user specified but login type
|
||||
# not supported; switch it to email
|
||||
self.user = '{}@{}'.format(self.user, self.host)
|
||||
|
||||
break
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Email Notification
|
||||
"""
|
||||
|
@ -237,70 +454,148 @@ class NotifyEmail(NotifyBase):
|
|||
if not from_name:
|
||||
from_name = self.app_desc
|
||||
|
||||
self.logger.debug('Email From: %s <%s>' % (
|
||||
self.from_addr, from_name))
|
||||
self.logger.debug('Email To: %s' % (self.to_addr))
|
||||
self.logger.debug('Login ID: %s' % (self.user))
|
||||
self.logger.debug('Delivery: %s:%d' % (self.smtp_host, self.port))
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Prepare Email Message
|
||||
if self.notify_format == NotifyFormat.HTML:
|
||||
email = MIMEText(body, 'html')
|
||||
email['Content-Type'] = 'text/html'
|
||||
# Create a copy of the targets list
|
||||
emails = list(self.targets)
|
||||
while len(emails):
|
||||
# Get our email to notify
|
||||
to_addr = emails.pop(0)
|
||||
|
||||
if not is_email(to_addr):
|
||||
self.logger.warning(
|
||||
'Invalid ~To~ email specified: {}'.format(to_addr))
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
self.logger.debug(
|
||||
'Email From: {} <{}>'.format(from_name, self.from_addr))
|
||||
self.logger.debug('Email To: {}'.format(to_addr))
|
||||
self.logger.debug('Login ID: {}'.format(self.user))
|
||||
self.logger.debug(
|
||||
'Delivery: {}:{}'.format(self.smtp_host, self.port))
|
||||
|
||||
# Prepare Email Message
|
||||
if self.notify_format == NotifyFormat.HTML:
|
||||
email = MIMEText(body, 'html')
|
||||
|
||||
else:
|
||||
email = MIMEText(body, 'plain')
|
||||
|
||||
email['Subject'] = title
|
||||
email['From'] = '{} <{}>'.format(from_name, self.from_addr)
|
||||
email['To'] = to_addr
|
||||
email['Date'] = \
|
||||
datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000")
|
||||
email['X-Application'] = self.app_id
|
||||
|
||||
# bind the socket variable to the current namespace
|
||||
socket = None
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
self.logger.debug('Connecting to remote SMTP server...')
|
||||
socket_func = smtplib.SMTP
|
||||
if self.secure and self.secure_mode == SecureMailMode.SSL:
|
||||
self.logger.debug('Securing connection with SSL...')
|
||||
socket_func = smtplib.SMTP_SSL
|
||||
|
||||
socket = socket_func(
|
||||
self.smtp_host,
|
||||
self.port,
|
||||
None,
|
||||
timeout=self.timeout,
|
||||
)
|
||||
|
||||
if self.secure and self.secure_mode == SecureMailMode.STARTTLS:
|
||||
# Handle Secure Connections
|
||||
self.logger.debug('Securing connection with STARTTLS...')
|
||||
socket.starttls()
|
||||
|
||||
if self.user and self.password:
|
||||
# Apply Login credetials
|
||||
self.logger.debug('Applying user credentials...')
|
||||
socket.login(self.user, self.password)
|
||||
|
||||
# Send the email
|
||||
socket.sendmail(
|
||||
self.from_addr, to_addr, email.as_string())
|
||||
|
||||
self.logger.info(
|
||||
'Sent Email notification to "{}".'.format(to_addr))
|
||||
|
||||
except (SocketError, smtplib.SMTPException, RuntimeError) as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Email '
|
||||
'notification to {}.'.format(self.smtp_host))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
|
||||
finally:
|
||||
# Gracefully terminate the connection with the server
|
||||
if socket is not None: # pragma: no branch
|
||||
socket.quit()
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'from': self.from_addr,
|
||||
'name': self.from_name,
|
||||
'mode': self.secure_mode,
|
||||
'smtp': self.smtp_host,
|
||||
'timeout': self.timeout,
|
||||
'user': self.user,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# pull email suffix from username (if present)
|
||||
user = self.user.split('@')[0]
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=NotifyEmail.quote(user, safe=''),
|
||||
password=NotifyEmail.quote(self.password, safe=''),
|
||||
)
|
||||
else:
|
||||
email = MIMEText(body, 'text')
|
||||
email['Content-Type'] = 'text/plain'
|
||||
|
||||
email['Subject'] = title
|
||||
email['From'] = '%s <%s>' % (from_name, self.from_addr)
|
||||
email['To'] = self.to_addr
|
||||
email['Date'] = datetime.utcnow()\
|
||||
.strftime("%a, %d %b %Y %H:%M:%S +0000")
|
||||
email['X-Application'] = self.app_id
|
||||
|
||||
# bind the socket variable to the current namespace
|
||||
socket = None
|
||||
try:
|
||||
self.logger.debug('Connecting to remote SMTP server...')
|
||||
socket = smtplib.SMTP(
|
||||
self.smtp_host,
|
||||
self.port,
|
||||
None,
|
||||
timeout=self.timeout,
|
||||
# user url
|
||||
auth = '{user}@'.format(
|
||||
user=NotifyEmail.quote(user, safe=''),
|
||||
)
|
||||
|
||||
if self.secure:
|
||||
# Handle Secure Connections
|
||||
self.logger.debug('Securing connection with TLS...')
|
||||
socket.starttls()
|
||||
# Default Port setup
|
||||
default_port = \
|
||||
self.default_secure_port if self.secure else self.default_port
|
||||
|
||||
if self.user and self.password:
|
||||
# Apply login credentials
|
||||
self.logger.debug('Applying user credentials...')
|
||||
socket.login(self.user, self.password)
|
||||
# a simple boolean check as to whether we display our target emails
|
||||
# or not
|
||||
has_targets = \
|
||||
not (len(self.targets) == 1 and self.targets[0] == self.from_addr)
|
||||
|
||||
# Send the email
|
||||
socket.sendmail(self.from_addr, self.to_addr, email.as_string())
|
||||
|
||||
self.logger.info('Sent Email notification to "%s".' % (
|
||||
self.to_addr,
|
||||
))
|
||||
|
||||
except (SocketError, smtplib.SMTPException, RuntimeError) as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Email '
|
||||
'notification to %s.' % self.smtp_host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
finally:
|
||||
# Gracefully terminate the connection with the server
|
||||
if socket is not None: # pragma: no branch
|
||||
socket.quit()
|
||||
|
||||
return True
|
||||
return '{schema}://{auth}{hostname}{port}/{targets}?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=NotifyEmail.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
targets='' if has_targets else '/'.join(
|
||||
[NotifyEmail.quote(x, safe='') for x in self.targets]),
|
||||
args=NotifyEmail.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
|
@ -315,50 +610,30 @@ class NotifyEmail(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# Default Format is HTML
|
||||
results['notify_format'] = NotifyFormat.HTML
|
||||
|
||||
to_addr = ''
|
||||
# The From address is a must; either through the use of templates
|
||||
# from= entry and/or merging the user and hostname together, this
|
||||
# must be calculated or parse_url will fail.
|
||||
from_addr = ''
|
||||
|
||||
# The server we connect to to send our mail to
|
||||
smtp_host = ''
|
||||
|
||||
if 'format' in results['qsd'] and len(results['qsd']['format']):
|
||||
# Extract email format (Text/Html)
|
||||
format = NotifyBase.unquote(results['qsd']['format']).lower()
|
||||
if len(format) > 0 and format[0] == 't':
|
||||
results['notify_format'] = NotifyFormat.TEXT
|
||||
# Get our potential email targets; if none our found we'll just
|
||||
# add one to ourselves
|
||||
results['targets'] = NotifyEmail.split_path(results['fullpath'])
|
||||
|
||||
# Attempt to detect 'from' email address
|
||||
if 'from' in results['qsd'] and len(results['qsd']['from']):
|
||||
from_addr = NotifyBase.unquote(results['qsd']['from'])
|
||||
|
||||
else:
|
||||
# get 'To' email address
|
||||
from_addr = '%s@%s' % (
|
||||
re.split(
|
||||
r'[\s@]+', NotifyBase.unquote(results['user']))[0],
|
||||
results.get('host', '')
|
||||
)
|
||||
# Lets be clever and attempt to make the from
|
||||
# address an email based on the to address
|
||||
from_addr = '%s@%s' % (
|
||||
re.split(r'[\s@]+', from_addr)[0],
|
||||
re.split(r'[\s@]+', from_addr)[-1],
|
||||
)
|
||||
from_addr = NotifyEmail.unquote(results['qsd']['from'])
|
||||
|
||||
# Attempt to detect 'to' email address
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
to_addr = NotifyBase.unquote(results['qsd']['to']).strip()
|
||||
|
||||
if not to_addr:
|
||||
# Send to ourselves if not otherwise specified to do so
|
||||
to_addr = from_addr
|
||||
results['targets'] += \
|
||||
NotifyEmail.parse_list(results['qsd']['to'])
|
||||
|
||||
if 'name' in results['qsd'] and len(results['qsd']['name']):
|
||||
# Extract from name to associate with from address
|
||||
results['name'] = NotifyBase.unquote(results['qsd']['name'])
|
||||
results['from_name'] = NotifyEmail.unquote(results['qsd']['name'])
|
||||
|
||||
if 'timeout' in results['qsd'] and len(results['qsd']['timeout']):
|
||||
# Extract the timeout to associate with smtp server
|
||||
|
@ -367,10 +642,13 @@ class NotifyEmail(NotifyBase):
|
|||
# Store SMTP Host if specified
|
||||
if 'smtp' in results['qsd'] and len(results['qsd']['smtp']):
|
||||
# Extract the smtp server
|
||||
smtp_host = NotifyBase.unquote(results['qsd']['smtp'])
|
||||
smtp_host = NotifyEmail.unquote(results['qsd']['smtp'])
|
||||
|
||||
results['to'] = to_addr
|
||||
results['from'] = from_addr
|
||||
if 'mode' in results['qsd'] and len(results['qsd']['mode']):
|
||||
# Extract the secure mode to over-ride the default
|
||||
results['secure_mode'] = results['qsd']['mode'].lower()
|
||||
|
||||
results['from_addr'] = from_addr
|
||||
results['smtp_host'] = smtp_host
|
||||
|
||||
return results
|
||||
|
|
|
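The email plugin rewrite above replaces the single to= address with a targets list and adds a mode= switch (ssl or starttls) alongside per-provider defaults from EMAIL_TEMPLATES. A hedged sketch of how a Gmail account would typically be wired up (placeholder credentials; the SMTP host, port 587 and STARTTLS mode come from the Google Mail template shown above):

import apprise

notifier = apprise.Apprise()
# mailtos:// selects the secure variant; additional targets may also be given in the path.
notifier.add('mailtos://USER:APP_PASSWORD@gmail.com?to=target@example.com')

notifier.notify(title='Bazarr', body='Subtitles downloaded.')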
@ -1,20 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Emby Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# For this plugin to work correct, the Emby server must be set up to allow
|
||||
# for remote connections.
|
||||
|
@ -28,9 +35,10 @@ from json import dumps
|
|||
from json import loads
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..utils import parse_bool
|
||||
from ..common import NotifyType
|
||||
from .. import __version__ as VERSION
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyEmby(NotifyBase):
|
||||
|
@ -65,6 +73,46 @@ class NotifyEmby(NotifyBase):
|
|||
# displayed for. The value is in milli-seconds
|
||||
emby_message_timeout_ms = 60000
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}',
|
||||
'{schema}://{host}:{port}',
|
||||
'{schema}://{user}:{password}@{host}',
|
||||
'{schema}://{user}:{password}@{host}:{port}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Username'),
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
},
|
||||
})
|
||||
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'modal': {
|
||||
'name': _('Modal'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, modal=False, **kwargs):
|
||||
"""
|
||||
Initialize Emby Object
|
||||
|
@ -89,9 +137,10 @@ class NotifyEmby(NotifyBase):
|
|||
self.modal = modal
|
||||
|
||||
if not self.user:
|
||||
# Token was None
|
||||
self.logger.warning('No Username was specified.')
|
||||
raise TypeError('No Username was specified.')
|
||||
# User was not specified
|
||||
msg = 'No Username was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
return
|
||||
|
||||
|
@ -160,20 +209,19 @@ class NotifyEmby(NotifyBase):
|
|||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyEmby.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to authenticate Emby user {} details: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -321,20 +369,19 @@ class NotifyEmby(NotifyBase):
|
|||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to acquire session for user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyEmby.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to acquire session for user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to acquire Emby session for user {}: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return sessions
|
||||
|
@ -404,20 +451,20 @@ class NotifyEmby(NotifyBase):
|
|||
# The below show up if we were 'just' logged out
|
||||
requests.codes.ok,
|
||||
requests.codes.no_content):
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to logoff user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to logoff user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyEmby.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
self.logger.warning(
|
||||
'Failed to logoff Emby user {}: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -438,7 +485,7 @@ class NotifyEmby(NotifyBase):
|
|||
self.user_id = None
|
||||
return True
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Emby Notification
|
||||
"""
|
||||
|
@ -487,6 +534,10 @@ class NotifyEmby(NotifyBase):
|
|||
session_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Emby Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before the requests are made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
session_url,
|
||||
|
@ -497,17 +548,19 @@ class NotifyEmby(NotifyBase):
|
|||
if r.status_code not in (
|
||||
requests.codes.ok,
|
||||
requests.codes.no_content):
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyEmby.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification '
|
||||
'(error=%s).' % (r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
|
@ -528,6 +581,40 @@ class NotifyEmby(NotifyBase):
|
|||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'modal': 'yes' if self.modal else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=NotifyEmby.quote(self.user, safe=''),
|
||||
password=NotifyEmby.quote(self.password, safe=''),
|
||||
)
|
||||
else: # self.user is set
|
||||
auth = '{user}@'.format(
|
||||
user=NotifyEmby.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
return '{schema}://{auth}{hostname}{port}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=NotifyEmby.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == self.emby_default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=NotifyEmby.urlencode(args),
|
||||
)
|
||||
|
||||
@property
|
||||
def is_authenticated(self):
|
||||
"""
|
||||
|
|
|
@ -1,26 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Faast Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
|
||||
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyFaast(NotifyBase):
|
||||
|
@ -46,15 +54,44 @@ class NotifyFaast(NotifyBase):
|
|||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
def __init__(self, authtoken, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{authtoken}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'authtoken': {
|
||||
'name': _('Authorization Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, authtoken, include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Faast Object
|
||||
"""
|
||||
super(NotifyFaast, self).__init__(**kwargs)
|
||||
|
||||
# Store the Authentication Token
|
||||
self.authtoken = authtoken
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
# Associate an image with our post
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Faast Notification
|
||||
"""
|
||||
|
@ -71,7 +108,10 @@ class NotifyFaast(NotifyBase):
|
|||
'message': body,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
# Acquire our image if we're configured to do so
|
||||
image_url = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
payload['icon_url'] = image_url
|
||||
|
||||
|
@ -79,6 +119,10 @@ class NotifyFaast(NotifyBase):
|
|||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Faast Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
|
@ -88,18 +132,17 @@ class NotifyFaast(NotifyBase):
|
|||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyFaast.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification:'
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -118,6 +161,25 @@ class NotifyFaast(NotifyBase):
|
|||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{authtoken}/?{args}'.format(
|
||||
schema=self.protocol,
|
||||
authtoken=NotifyFaast.quote(self.authtoken, safe=''),
|
||||
args=NotifyFaast.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@ -131,9 +193,11 @@ class NotifyFaast(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# Store our authtoken using the host
|
||||
results['authtoken'] = results['host']
|
||||
results['authtoken'] = NotifyFaast.unquote(results['host'])
|
||||
|
||||
# Include image with our post
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
return results
|
||||
|
|
381 libs/apprise/plugins/NotifyFlock.py Normal file
|
@ -0,0 +1,381 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# To use this plugin, you need to first access https://dev.flock.com/webhooks
|
||||
# Specifically https://dev.flock.com/webhooks/incoming
|
||||
#
|
||||
# To create a new incoming webhook for your account, you'll need to
|
||||
# follow the wizard to pre-determine the channel(s) you want your
|
||||
# message to broadcast to. When you've completed this, you will
|
||||
# receive a URL that looks something like this:
|
||||
# https://api.flock.com/hooks/sendMessage/134b8gh0-eba0-4fa9-ab9c-257ced0e8221
|
||||
# ^
|
||||
# |
|
||||
# This is important <----------------------------------------^
|
||||
#
|
||||
# It becomes your 'token' that you will pass into this class
|
||||
#
|
||||
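# A minimal usage sketch (illustrative only, not part of this diff). It assumes
# the `apprise` package is importable and re-uses the placeholder webhook token
# from the example URL above.
def _example_flock_notification():  # hypothetical helper, shown for context
    import apprise

    apobj = apprise.Apprise()
    # The token is the trailing component of the incoming-webhook URL above
    apobj.add('flock://134b8gh0-eba0-4fa9-ab9c-257ced0e8221')
    apobj.notify(title='Bazarr', body='Subtitle downloaded')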
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import parse_list
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
FLOCK_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
}
|
||||
|
||||
# Default User
|
||||
FLOCK_DEFAULT_USER = 'apprise'
|
||||
|
||||
# Used to detect a channel/user
|
||||
IS_CHANNEL_RE = re.compile(r'^(#|g:)(?P<id>[A-Z0-9_]{12})$', re.I)
|
||||
IS_USER_RE = re.compile(r'^(@|u:)?(?P<id>[A-Z0-9_]{12})$', re.I)
|
||||
|
||||
# Token required as part of the API request
|
||||
# /134b8gh0-eba0-4fa9-ab9c-257ced0e8221
|
||||
IS_API_TOKEN = re.compile(r'^[a-z0-9-]{24}$', re.I)
|
||||
|
||||
|
||||
class NotifyFlock(NotifyBase):
|
||||
"""
|
||||
A wrapper for Flock Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Flock'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://flock.com/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'flock'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_flock'
|
||||
|
||||
# Flock uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.flock.com/hooks/sendMessage'
|
||||
|
||||
# API Wrapper
|
||||
notify_api = 'https://api.flock.co/v1/chat.sendMessage'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{token}',
|
||||
'{schema}://{user}@{token}',
|
||||
'{schema}://{user}@{token}/{targets}',
|
||||
'{schema}://{token}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'token': {
|
||||
'name': _('Access Key'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z0-9-]{24}', 'i'),
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Bot Name'),
|
||||
'type': 'string',
|
||||
},
|
||||
'to_user': {
|
||||
'name': _('To User ID'),
|
||||
'type': 'string',
|
||||
'prefix': '@',
|
||||
'regex': (r'[A-Z0-9_]{12}', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'to_channel': {
|
||||
'name': _('To Channel ID'),
|
||||
'type': 'string',
|
||||
'prefix': '#',
|
||||
'regex': (r'[A-Z0-9_]{12}', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, token, targets=None, include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Flock Object
|
||||
"""
|
||||
super(NotifyFlock, self).__init__(**kwargs)
|
||||
|
||||
# Build ourselves a target list
|
||||
self.targets = list()
|
||||
|
||||
# Initialize our token object
|
||||
self.token = token.strip()
|
||||
|
||||
if not IS_API_TOKEN.match(self.token):
|
||||
msg = 'The Flock API Token specified ({}) is invalid.'.format(
|
||||
self.token)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Track any issues
|
||||
has_error = False
|
||||
|
||||
# Tidy our targets
|
||||
targets = parse_list(targets)
|
||||
|
||||
for target in targets:
|
||||
result = IS_USER_RE.match(target)
|
||||
if result:
|
||||
self.targets.append('u:' + result.group('id'))
|
||||
continue
|
||||
|
||||
result = IS_CHANNEL_RE.match(target)
|
||||
if result:
|
||||
self.targets.append('g:' + result.group('id'))
|
||||
continue
|
||||
|
||||
has_error = True
|
||||
self.logger.warning(
|
||||
'Ignoring invalid target ({}) specified.'.format(target))
|
||||
|
||||
if has_error and len(self.targets) == 0:
|
||||
# We have a bot token and no target(s) to message
|
||||
msg = 'No targets found with specified Flock Bot Token.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Flock Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
if self.notify_format == NotifyFormat.HTML:
|
||||
body = '<flockml>{}</flockml>'.format(body)
|
||||
|
||||
else:
|
||||
title = NotifyFlock.escape_html(title, whitespace=False)
|
||||
body = NotifyFlock.escape_html(body, whitespace=False)
|
||||
|
||||
body = '<flockml>{}{}</flockml>'.format(
|
||||
'' if not title else '<b>{}</b><br/>'.format(title), body)
|
||||
|
||||
payload = {
|
||||
'token': self.token,
|
||||
'flockml': body,
|
||||
'sendAs': {
|
||||
'name': FLOCK_DEFAULT_USER if not self.user else self.user,
|
||||
# A Profile Image is only configured if we're configured to
|
||||
# allow it
|
||||
'profileImage': None if not self.include_image
|
||||
else self.image_url(notify_type),
|
||||
}
|
||||
}
|
||||
|
||||
if len(self.targets):
|
||||
# Create a copy of our targets
|
||||
targets = list(self.targets)
|
||||
|
||||
while len(targets) > 0:
|
||||
# Get our first item
|
||||
target = targets.pop(0)
|
||||
|
||||
# Copy and update our payload
|
||||
_payload = payload.copy()
|
||||
_payload['to'] = target
|
||||
|
||||
if not self._post(self.notify_api, headers, _payload):
|
||||
has_error = True
|
||||
|
||||
else:
|
||||
# Webhook
|
||||
url = '{}/{}'.format(self.notify_url, self.token)
|
||||
if not self._post(url, headers, payload):
|
||||
has_error = True
|
||||
|
||||
return not has_error
|
||||
|
||||
def _post(self, url, headers, payload):
|
||||
"""
|
||||
A wrapper to the requests object
|
||||
"""
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
self.logger.debug('Flock POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate))
|
||||
self.logger.debug('Flock Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyFlock.http_response_code_lookup(
|
||||
r.status_code, FLOCK_HTTP_ERROR_MAP)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Flock notification : '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Flock notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Flock notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{token}/{targets}?{args}'\
|
||||
.format(
|
||||
schema=self.secure_protocol,
|
||||
token=NotifyFlock.quote(self.token, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifyFlock.quote(target, safe='')
|
||||
for target in self.targets]),
|
||||
args=NotifyFlock.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Get our entries; split_path() looks after unquoting content for us
|
||||
# by default
|
||||
results['targets'] = NotifyFlock.split_path(results['fullpath'])
|
||||
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += NotifyFlock.parse_list(results['qsd']['to'])
|
||||
|
||||
# The first token is stored in the hostname
|
||||
results['token'] = NotifyFlock.unquote(results['host'])
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support https://api.flock.com/hooks/sendMessage/TOKEN
|
||||
"""
|
||||
|
||||
result = re.match(
|
||||
r'^https?://api\.flock\.com/hooks/sendMessage/'
|
||||
r'(?P<token>[a-z0-9-]{24})/?'
|
||||
r'(?P<args>\?[.+])?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifyFlock.parse_url(
|
||||
'{schema}://{token}/{args}'.format(
|
||||
schema=NotifyFlock.secure_protocol,
|
||||
token=result.group('token'),
|
||||
args='' if not result.group('args')
|
||||
else result.group('args')))
|
||||
|
||||
return None
|
421 libs/apprise/plugins/NotifyGitter.py Normal file
|
@ -0,0 +1,421 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# Once you visit: https://developer.gitter.im/apps you'll get a personal
|
||||
# access token that will look something like this:
|
||||
# b5647881d563fm846dfbb2c27d1fe8f669b8f026
|
||||
|
||||
# Don't worry about generating an app; this token is all you need to form
|
||||
# your URL with. The syntax is as follows:
|
||||
# gitter://{token}/{channel}
|
||||
|
||||
# Hence a URL might look like the following:
|
||||
# gitter://b5647881d563fm846dfbb2c27d1fe8f669b8f026/apprise
|
||||
|
||||
# Note: You must have joined the channel to send a message to it!
|
||||
|
||||
# Official API reference: https://developer.gitter.im/docs/user-resource
|
||||
|
||||
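# A minimal usage sketch (illustrative only, not part of this diff). It assumes
# the `apprise` package is importable and re-uses the placeholder token and
# room name from the comments above.
def _example_gitter_notification():  # hypothetical helper, shown for context
    import apprise

    apobj = apprise.Apprise()
    # gitter://{token}/{channel}, as documented above
    apobj.add('gitter://b5647881d563fm846dfbb2c27d1fe8f669b8f026/apprise')
    # Gitter has no separate title field; the body carries the whole message
    apobj.notify(body='Subtitle downloaded')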
import re
|
||||
import requests
|
||||
from json import loads
|
||||
from json import dumps
|
||||
from datetime import datetime
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# API Gitter URL
|
||||
GITTER_API_URL = 'https://api.gitter.im/v1'
|
||||
|
||||
# Used to validate your personal access token
|
||||
VALIDATE_TOKEN = re.compile(r'^[a-z0-9]{40}$', re.I)
|
||||
|
||||
# Used to break path apart into list of targets
|
||||
TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
|
||||
class NotifyGitter(NotifyBase):
|
||||
"""
|
||||
A wrapper for Gitter Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Gitter'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://gitter.im/'
|
||||
|
||||
# All pushover requests are secure
|
||||
secure_protocol = 'gitter'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_gitter'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_32
|
||||
|
||||
# Gitter does not support a title
|
||||
title_maxlen = 0
|
||||
|
||||
# Gitter is kind enough to return how many more requests we're allowed to
|
||||
# continue to make within its header response as:
|
||||
# X-RateLimit-Reset: The epoch time (in seconds) we can expect our
|
||||
# rate-limit to be reset.
|
||||
# X-RateLimit-Remaining: an integer identifying how many requests we're
|
||||
# still allowed to make.
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# For Tracking Purposes
|
||||
ratelimit_reset = datetime.utcnow()
|
||||
|
||||
# Default to 1
|
||||
ratelimit_remaining = 1
|
||||
|
||||
# Default Notification Format
|
||||
notify_format = NotifyFormat.MARKDOWN
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{token}:{targets}/',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'token': {
|
||||
'name': _('Token'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z0-9]{40}', 'i'),
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Rooms'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, token, targets, include_image=False, **kwargs):
|
||||
"""
|
||||
Initialize Gitter Object
|
||||
"""
|
||||
super(NotifyGitter, self).__init__(**kwargs)
|
||||
|
||||
try:
|
||||
# The personal access token associated with the account
|
||||
self.token = token.strip()
|
||||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
msg = 'No API Token was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_TOKEN.match(self.token):
|
||||
msg = 'The Personal Access Token specified ({}) is invalid.' \
|
||||
.format(token)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Parse our targets
|
||||
self.targets = parse_list(targets)
|
||||
|
||||
# Used to track mapping of rooms to their numeric id lookup for
|
||||
# messaging
|
||||
self._room_mapping = None
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Gitter Notification
|
||||
"""
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Set up our image for display if configured to do so
|
||||
image_url = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
body = '![alt]({})\n{}'.format(image_url, body)
|
||||
|
||||
# Create a copy of the targets list
|
||||
targets = list(self.targets)
|
||||
if self._room_mapping is None:
|
||||
# Populate our room mapping
|
||||
self._room_mapping = {}
|
||||
postokay, response = self._fetch(url='rooms')
|
||||
if not postokay:
|
||||
return False
|
||||
|
||||
# Response generally looks like this:
|
||||
# [
|
||||
# {
|
||||
# noindex: False,
|
||||
# oneToOne: False,
|
||||
# avatarUrl: 'https://path/to/avatar/url',
|
||||
# url: '/apprise-notifications/community',
|
||||
# public: True,
|
||||
# tags: [],
|
||||
# lurk: False,
|
||||
# uri: 'apprise-notifications/community',
|
||||
# lastAccessTime: '2019-03-25T00:12:28.144Z',
|
||||
# topic: '',
|
||||
# roomMember: True,
|
||||
# groupId: '5c981cecd73408ce4fbbad2f',
|
||||
# githubType: 'REPO_CHANNEL',
|
||||
# unreadItems: 0,
|
||||
# mentions: 0,
|
||||
# security: 'PUBLIC',
|
||||
# userCount: 1,
|
||||
# id: '5c981cecd73408ce4fbbad31',
|
||||
# name: 'apprise/community'
|
||||
# }
|
||||
# ]
|
||||
for entry in response:
|
||||
self._room_mapping[entry['name'].lower().split('/')[0]] = {
|
||||
# The ID of the room
|
||||
'id': entry['id'],
|
||||
|
||||
# A descriptive name (useful for logging)
|
||||
'uri': entry['uri'],
|
||||
}
|
||||
|
||||
if len(targets) == 0:
|
||||
# No targets specified
|
||||
return False
|
||||
|
||||
while len(targets):
|
||||
target = targets.pop(0).lower()
|
||||
|
||||
if target not in self._room_mapping:
|
||||
self.logger.warning(
|
||||
'Failed to locate Gitter room {}'.format(target))
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
# prepare our payload
|
||||
payload = {
|
||||
'text': body,
|
||||
}
|
||||
|
||||
# Our Notification URL
|
||||
notify_url = 'rooms/{}/chatMessages'.format(
|
||||
self._room_mapping[target]['id'])
|
||||
|
||||
# Perform our query
|
||||
postokay, response = self._fetch(
|
||||
notify_url, payload=dumps(payload), method='POST')
|
||||
|
||||
if not postokay:
|
||||
# Flag our error
|
||||
has_error = True
|
||||
|
||||
return not has_error
|
||||
|
||||
def _fetch(self, url, payload=None, method='GET'):
|
||||
"""
|
||||
Wrapper to POST
|
||||
|
||||
"""
|
||||
|
||||
# Prepare our headers:
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Accept': 'application/json',
|
||||
'Authorization': 'Bearer ' + self.token,
|
||||
}
|
||||
if payload:
|
||||
# Only set our header payload if it's defined
|
||||
headers['Content-Type'] = 'application/json'
|
||||
|
||||
# Default content response object
|
||||
content = {}
|
||||
|
||||
# Update our URL
|
||||
url = '{}/{}'.format(GITTER_API_URL, url)
|
||||
|
||||
# Some Debug Logging
|
||||
self.logger.debug('Gitter {} URL: {} (cert_verify={})'.format(
|
||||
method,
|
||||
url, self.verify_certificate))
|
||||
if payload:
|
||||
self.logger.debug('Gitter Payload: {}' .format(payload))
|
||||
|
||||
# By default set wait to None
|
||||
wait = None
|
||||
|
||||
if self.ratelimit_remaining == 0:
|
||||
# Determine how long we should wait for or if we should wait at
|
||||
# all. This isn't fool-proof because we can't be sure the client
|
||||
# time (calling this script) is completely synced up with the
|
||||
# Gitter server. One would hope we're on NTP and our clocks are
|
||||
# the same allowing this to roll smoothly:
|
||||
|
||||
now = datetime.utcnow()
|
||||
if now < self.ratelimit_reset:
|
||||
# We need to throttle for the difference in seconds
|
||||
# We add 0.5 seconds to the end just to allow a grace
|
||||
# period.
|
||||
wait = (self.ratelimit_reset - now).total_seconds() + 0.5
|
||||
|
||||
# Always call throttle before any remote server i/o is made; for AWS
|
||||
# time plays a huge factor in the headers being sent with the payload.
|
||||
# So for AWS (SNS) requests we must throttle before they're generated
|
||||
# and not directly before the i/o call like other notification
|
||||
# services do.
|
||||
self.throttle(wait=wait)
|
||||
|
||||
# fetch function
|
||||
fn = requests.post if method == 'POST' else requests.get
|
||||
try:
|
||||
r = fn(
|
||||
url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyGitter.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Gitter POST to {}: '
|
||||
'{}error={}.'.format(
|
||||
url,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
return (False, content)
|
||||
|
||||
try:
|
||||
content = loads(r.content)
|
||||
|
||||
except (TypeError, ValueError):
|
||||
# ValueError = r.content is Unparsable
|
||||
# TypeError = r.content is None
|
||||
content = {}
|
||||
|
||||
try:
|
||||
self.ratelimit_remaining = \
|
||||
int(r.headers.get('X-RateLimit-Remaining'))
|
||||
self.ratelimit_reset = datetime.utcfromtimestamp(
|
||||
int(r.headers.get('X-RateLimit-Reset')))
|
||||
|
||||
except (TypeError, ValueError):
|
||||
# This is returned if we could not retrieve this information
|
||||
# gracefully accept this state and move on
|
||||
pass
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'Exception received when sending Gitter POST to {}: '.
|
||||
format(url))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
return (False, content)
|
||||
|
||||
return (True, content)
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{token}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
token=NotifyGitter.quote(self.token, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifyGitter.quote(x, safe='') for x in self.targets]),
|
||||
args=NotifyGitter.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
results['token'] = NotifyGitter.unquote(results['host'])
|
||||
|
||||
# Get our entries; split_path() looks after unquoting content for us
|
||||
# by default
|
||||
results['targets'] = NotifyGitter.split_path(results['fullpath'])
|
||||
|
||||
# Support the 'to' variable so that we can support targets this way too
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += NotifyGitter.parse_list(results['qsd']['to'])
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', False))
|
||||
|
||||
return results
|
278 libs/apprise/plugins/NotifyGnome.py Normal file
|
@ -0,0 +1,278 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Default our global support flag
|
||||
NOTIFY_GNOME_SUPPORT_ENABLED = False
|
||||
|
||||
try:
|
||||
# 3rd party modules (Gnome Only)
|
||||
import gi
|
||||
|
||||
# require_version() call is required otherwise we generate a warning
|
||||
gi.require_version("Notify", "0.7")
|
||||
|
||||
# We can import the actual libraries we care about now:
|
||||
from gi.repository import Notify
|
||||
from gi.repository import GdkPixbuf
|
||||
|
||||
# We're good to go!
|
||||
NOTIFY_GNOME_SUPPORT_ENABLED = True
|
||||
|
||||
except (ImportError, ValueError):
|
||||
# No problem; we just simply can't support this plugin; we could
|
||||
# be in microsoft windows, or we just don't have the python-gobject
|
||||
# library available to us (or maybe one we don't support)?
|
||||
|
||||
# Alternatively, a ValueError will get thrown upon calling
|
||||
# gi.require_version() if the requested Notify namespace isn't available
|
||||
pass
|
||||
|
||||
|
||||
# Urgencies
|
||||
class GnomeUrgency(object):
|
||||
LOW = 0
|
||||
NORMAL = 1
|
||||
HIGH = 2
|
||||
|
||||
|
||||
GNOME_URGENCIES = (
|
||||
GnomeUrgency.LOW,
|
||||
GnomeUrgency.NORMAL,
|
||||
GnomeUrgency.HIGH,
|
||||
)
|
||||
|
||||
|
||||
class NotifyGnome(NotifyBase):
|
||||
"""
|
||||
A wrapper for local Gnome Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Gnome Notification'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'gnome'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_gnome'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
# Disable throttle rate for Gnome requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Limit results to just the first 10 line otherwise there is just to much
|
||||
# content to display
|
||||
body_max_line_count = 10
|
||||
|
||||
# A title can not be used for Gnome Messages. Setting this to zero will
|
||||
# cause any title (if defined) to get placed into the message body.
|
||||
title_maxlen = 0
|
||||
|
||||
# This entry is a bit hacky, but it allows us to unit-test this library
|
||||
# in an environment that simply doesn't have the gnome packages
|
||||
# available to us. It also allows us to handle situations where the
|
||||
# packages actually are present but we need to test that they aren't.
|
||||
# If anyone is seeing this and knows a better way of testing this
|
||||
# outside of what is defined in test/test_gnome_plugin.py, please
|
||||
# let me know! :)
|
||||
_enabled = NOTIFY_GNOME_SUPPORT_ENABLED
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://_/',
|
||||
)
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'urgency': {
|
||||
'name': _('Urgency'),
|
||||
'type': 'choice:int',
|
||||
'values': GNOME_URGENCIES,
|
||||
'default': GnomeUrgency.NORMAL,
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, urgency=None, include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Gnome Object
|
||||
"""
|
||||
|
||||
super(NotifyGnome, self).__init__(**kwargs)
|
||||
|
||||
# The urgency of the message
|
||||
if urgency not in GNOME_URGENCIES:
|
||||
self.urgency = GnomeUrgency.NORMAL
|
||||
|
||||
else:
|
||||
self.urgency = urgency
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Gnome Notification
|
||||
"""
|
||||
|
||||
if not self._enabled:
|
||||
self.logger.warning(
|
||||
"Gnome Notifications are not supported by this system.")
|
||||
return False
|
||||
|
||||
try:
|
||||
# App initialization
|
||||
Notify.init(self.app_id)
|
||||
|
||||
# image path
|
||||
icon_path = None if not self.include_image \
|
||||
else self.image_path(notify_type, extension='.ico')
|
||||
|
||||
# Build message body
|
||||
notification = Notify.Notification.new(body)
|
||||
|
||||
# Assign urgency
|
||||
notification.set_urgency(self.urgency)
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
if icon_path:
|
||||
try:
|
||||
# Use Pixbuf to create the proper image type
|
||||
image = GdkPixbuf.Pixbuf.new_from_file(icon_path)
|
||||
|
||||
# Associate our image to our notification
|
||||
notification.set_icon_from_pixbuf(image)
|
||||
notification.set_image_from_pixbuf(image)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
"Could not load Gnome notification icon ({}): {}"
|
||||
.format(icon_path, e))
|
||||
|
||||
notification.show()
|
||||
self.logger.info('Sent Gnome notification.')
|
||||
|
||||
except Exception:
|
||||
self.logger.warning('Failed to send Gnome notification.')
|
||||
self.logger.exception('Gnome Exception')
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
_map = {
|
||||
GnomeUrgency.LOW: 'low',
|
||||
GnomeUrgency.NORMAL: 'normal',
|
||||
GnomeUrgency.HIGH: 'high',
|
||||
}
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'urgency': 'normal' if self.urgency not in _map
|
||||
else _map[self.urgency],
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://_/?{args}'.format(
|
||||
schema=self.protocol,
|
||||
args=NotifyGnome.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
There are no parameters necessary for this protocol; simply having
|
||||
gnome:// is all you need. This function just makes sure that
|
||||
is in place.
|
||||
|
||||
"""
|
||||
|
||||
results = NotifyBase.parse_url(url)
|
||||
if not results:
|
||||
results = {
|
||||
'schema': NotifyGnome.protocol,
|
||||
'user': None,
|
||||
'password': None,
|
||||
'port': None,
|
||||
'host': '_',
|
||||
'fullpath': None,
|
||||
'path': None,
|
||||
'url': url,
|
||||
'qsd': {},
|
||||
}
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
# Gnome supports urgency, but we also support the keyword priority
|
||||
# so that it is consistent with some of the other plugins
|
||||
urgency = results['qsd'].get('urgency', results['qsd'].get('priority'))
|
||||
if urgency and len(urgency):
|
||||
_map = {
|
||||
'0': GnomeUrgency.LOW,
|
||||
'l': GnomeUrgency.LOW,
|
||||
'n': GnomeUrgency.NORMAL,
|
||||
'1': GnomeUrgency.NORMAL,
|
||||
'h': GnomeUrgency.HIGH,
|
||||
'2': GnomeUrgency.HIGH,
|
||||
}
|
||||
|
||||
try:
|
||||
# Attempt to index/retrieve our urgency
|
||||
results['urgency'] = _map[urgency[0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
return results
|
290 libs/apprise/plugins/NotifyGotify.py Normal file
|
@ -0,0 +1,290 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# For this plugin to work correctly, the Gotify server must be set up to allow
|
||||
# for remote connections.
|
||||
|
||||
# Gotify Docker configuration: https://hub.docker.com/r/gotify/server
|
||||
# Example: https://github.com/gotify/server/blob/\
|
||||
# f2c2688f0b5e6a816bbcec768ca1c0de5af76b88/ADD_MESSAGE_EXAMPLES.md#python
|
||||
# API: https://gotify.net/docs/swagger-docs
|
||||
|
||||
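# A minimal usage sketch (illustrative only, not part of this diff). It assumes
# the `apprise` package is importable and that the hostname and application
# token below are placeholders for a reachable Gotify server.
def _example_gotify_notification():  # hypothetical helper, shown for context
    import apprise

    apobj = apprise.Apprise()
    # Matches the '{schema}://{host}/{token}' template below; use gotifys://
    # when the server is behind TLS
    apobj.add('gotify://gotify.local/AbCdEfGhIjKlMnO')
    apobj.notify(title='Bazarr', body='Subtitle downloaded')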
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
# Priorities
|
||||
class GotifyPriority(object):
|
||||
LOW = 0
|
||||
MODERATE = 3
|
||||
NORMAL = 5
|
||||
HIGH = 8
|
||||
EMERGENCY = 10
|
||||
|
||||
|
||||
GOTIFY_PRIORITIES = (
|
||||
GotifyPriority.LOW,
|
||||
GotifyPriority.MODERATE,
|
||||
GotifyPriority.NORMAL,
|
||||
GotifyPriority.HIGH,
|
||||
GotifyPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
|
||||
class NotifyGotify(NotifyBase):
|
||||
"""
|
||||
A wrapper for Gotify Notifications
|
||||
"""
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Gotify'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://github.com/gotify/server'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'gotify'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'gotifys'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_gotify'
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}/{token}',
|
||||
'{schema}://{host}:{port}/{token}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'token': {
|
||||
'name': _('Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'priority': {
|
||||
'name': _('Priority'),
|
||||
'type': 'choice:int',
|
||||
'values': GOTIFY_PRIORITIES,
|
||||
'default': GotifyPriority.NORMAL,
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, token, priority=None, **kwargs):
|
||||
"""
|
||||
Initialize Gotify Object
|
||||
|
||||
"""
|
||||
super(NotifyGotify, self).__init__(**kwargs)
|
||||
|
||||
if not isinstance(token, six.string_types):
|
||||
msg = 'An invalid Gotify token was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if priority not in GOTIFY_PRIORITIES:
|
||||
self.priority = GotifyPriority.NORMAL
|
||||
|
||||
else:
|
||||
self.priority = priority
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
# Our access token does not get created until we first
|
||||
# authenticate with our Gotify server. The same goes for the
|
||||
# user id below.
|
||||
self.token = token
|
||||
|
||||
return
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Gotify Notification
|
||||
"""
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if self.port:
|
||||
url += ':%d' % self.port
|
||||
|
||||
# Append our remaining path
|
||||
url += '/message'
|
||||
|
||||
# Define our parameters
|
||||
params = {
|
||||
'token': self.token,
|
||||
}
|
||||
|
||||
# Prepare Gotify Object
|
||||
payload = {
|
||||
'priority': self.priority,
|
||||
'title': title,
|
||||
'message': body,
|
||||
}
|
||||
|
||||
# Our headers
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
self.logger.debug('Gotify POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Gotify Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before the requests are made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
params=params,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyGotify.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Gotify notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Gotify notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Gotify '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'priority': self.priority,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
return '{schema}://{hostname}{port}/{token}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
hostname=NotifyGotify.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
token=NotifyGotify.quote(self.token, safe=''),
|
||||
args=NotifyGotify.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
if not results:
|
||||
# We're done early
|
||||
return results
|
||||
|
||||
# Retrieve our escaped entries found on the fullpath
|
||||
entries = NotifyBase.split_path(results['fullpath'])
|
||||
|
||||
# optionally find the provider key
|
||||
try:
|
||||
# The first entry is our token
|
||||
results['token'] = entries.pop(0)
|
||||
|
||||
except IndexError:
|
||||
# No token was set
|
||||
results['token'] = None
|
||||
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': GotifyPriority.LOW,
|
||||
'm': GotifyPriority.MODERATE,
|
||||
'n': GotifyPriority.NORMAL,
|
||||
'h': GotifyPriority.HIGH,
|
||||
'e': GotifyPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
_map[results['qsd']['priority'][0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
return results
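As a quick, hedged illustration of how the token and priority handling above fit together, the sketch below drives this plugin through apprise's public interface; the hostname and application token are invented placeholders, not values taken from this changeset.

import apprise

apobj = apprise.Apprise()
# gotify://{hostname}/{token} -- 'gotify.example.com' and the token are made up;
# 'priority=high' maps to the 'h' -> GotifyPriority.HIGH lookup shown above.
apobj.add('gotify://gotify.example.com/AzyXwVuTsRqP0o9?priority=high')
apobj.notify(title='Bazarr', body='Subtitle search finished.')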
|
|
@ -1,264 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Growl Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
|
||||
from .gntp import notifier
|
||||
from .gntp import errors
|
||||
from ..NotifyBase import NotifyBase
|
||||
from ...common import NotifyImageSize
|
||||
|
||||
|
||||
# Priorities
|
||||
class GrowlPriority(object):
|
||||
LOW = -2
|
||||
MODERATE = -1
|
||||
NORMAL = 0
|
||||
HIGH = 1
|
||||
EMERGENCY = 2
|
||||
|
||||
|
||||
GROWL_PRIORITIES = (
|
||||
GrowlPriority.LOW,
|
||||
GrowlPriority.MODERATE,
|
||||
GrowlPriority.NORMAL,
|
||||
GrowlPriority.HIGH,
|
||||
GrowlPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
GROWL_NOTIFICATION_TYPE = "New Messages"
|
||||
|
||||
|
||||
class NotifyGrowl(NotifyBase):
|
||||
"""
|
||||
A wrapper to Growl Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Growl'
|
||||
|
||||
# The services URL
|
||||
service_url = 'http://growl.info/'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'growl'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_growl'
|
||||
|
||||
# Default Growl Port
|
||||
default_port = 23053
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
def __init__(self, priority=None, version=2, **kwargs):
|
||||
"""
|
||||
Initialize Growl Object
|
||||
"""
|
||||
super(NotifyGrowl, self).__init__(**kwargs)
|
||||
|
||||
if not self.port:
|
||||
self.port = self.default_port
|
||||
|
||||
# The Priority of the message
|
||||
if priority not in GROWL_PRIORITIES:
|
||||
self.priority = GrowlPriority.NORMAL
|
||||
|
||||
else:
|
||||
self.priority = priority
|
||||
|
||||
# Always default the sticky flag to False
|
||||
self.sticky = False
|
||||
|
||||
# Store Version
|
||||
self.version = version
|
||||
|
||||
payload = {
|
||||
'applicationName': self.app_id,
|
||||
'notifications': [GROWL_NOTIFICATION_TYPE, ],
|
||||
'defaultNotifications': [GROWL_NOTIFICATION_TYPE, ],
|
||||
'hostname': self.host,
|
||||
'port': self.port,
|
||||
}
|
||||
|
||||
if self.password is not None:
|
||||
payload['password'] = self.password
|
||||
|
||||
self.logger.debug('Growl Registration Payload: %s' % str(payload))
|
||||
self.growl = notifier.GrowlNotifier(**payload)
|
||||
|
||||
try:
|
||||
self.growl.register()
|
||||
self.logger.debug(
|
||||
'Growl server registration completed successfully.'
|
||||
)
|
||||
|
||||
except errors.NetworkError:
|
||||
self.logger.warning(
|
||||
'A network error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
raise TypeError(
|
||||
'A network error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
|
||||
except errors.AuthError:
|
||||
self.logger.warning(
|
||||
'An authentication error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
raise TypeError(
|
||||
'An authentication error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
|
||||
except errors.UnsupportedError:
|
||||
self.logger.warning(
|
||||
'An unsupported error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
raise TypeError(
|
||||
'An unsupported error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Growl Notification
|
||||
"""
|
||||
|
||||
# Limit results to just the first 2 lines otherwise there is just too
|
||||
# much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
|
||||
icon = None
|
||||
if self.version >= 2:
|
||||
# URL Based
|
||||
icon = self.image_url(notify_type)
|
||||
|
||||
else:
|
||||
# Raw
|
||||
icon = self.image_raw(notify_type)
|
||||
|
||||
payload = {
|
||||
'noteType': GROWL_NOTIFICATION_TYPE,
|
||||
'title': title,
|
||||
'description': body,
|
||||
'icon': icon is not None,
|
||||
'sticky': False,
|
||||
'priority': self.priority,
|
||||
}
|
||||
self.logger.debug('Growl Payload: %s' % str(payload))
|
||||
|
||||
# Update icon of payload to be raw data; this is intentionally done
|
||||
# here after we emit the debug message above (so we don't try to
|
||||
# print the binary contents of an image)
|
||||
payload['icon'] = icon
|
||||
|
||||
try:
|
||||
response = self.growl.notify(**payload)
|
||||
if not isinstance(response, bool):
|
||||
self.logger.warning(
|
||||
'Growl notification failed to send with response: %s' %
|
||||
str(response),
|
||||
)
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Growl notification.')
|
||||
|
||||
except errors.BaseError as e:
|
||||
# Since Growl servers listen for UDP broadcasts, it's possible
|
||||
# that you will never get to this part of the code since there is
|
||||
# no acknowledgement as to whether it accepted what was sent to it
|
||||
# or not.
|
||||
|
||||
# However, if the host/server is unavailable, you will get to this
|
||||
# point of the code.
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Growl Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
version = None
|
||||
if 'version' in results['qsd'] and len(results['qsd']['version']):
|
||||
# Allow the user to specify the version of the protocol to use.
|
||||
try:
|
||||
version = int(
|
||||
NotifyBase.unquote(
|
||||
results['qsd']['version']).strip().split('.')[0])
|
||||
|
||||
except (AttributeError, IndexError, TypeError, ValueError):
|
||||
NotifyBase.logger.warning(
|
||||
'An invalid Growl version of "%s" was specified and will '
|
||||
'be ignored.' % results['qsd']['version']
|
||||
)
|
||||
pass
|
||||
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': GrowlPriority.LOW,
|
||||
'-2': GrowlPriority.LOW,
|
||||
'm': GrowlPriority.MODERATE,
|
||||
'-1': GrowlPriority.MODERATE,
|
||||
'n': GrowlPriority.NORMAL,
|
||||
'0': GrowlPriority.NORMAL,
|
||||
'h': GrowlPriority.HIGH,
|
||||
'1': GrowlPriority.HIGH,
|
||||
'e': GrowlPriority.EMERGENCY,
|
||||
'2': GrowlPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
_map[results['qsd']['priority'][0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
# Because of the URL formatting, the password is actually where the
|
||||
# username field is. For this reason, we just perform this small hack
|
||||
# to make it (the URL) conform correctly. The following strips out the
|
||||
# existing password entry (if exists) so that it can be swapped with
|
||||
# the new one we specify.
|
||||
if results.get('password', None) is None:
|
||||
results['password'] = results.get('user', None)
|
||||
|
||||
if version:
|
||||
results['version'] = version
|
||||
|
||||
return results
|
|
@ -1,21 +1,367 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
from . import NotifyGrowl
|
||||
from .gntp import notifier
|
||||
from .gntp import errors
|
||||
from ..NotifyBase import NotifyBase
|
||||
from ...common import NotifyImageSize
|
||||
from ...common import NotifyType
|
||||
from ...utils import parse_bool
|
||||
from ...AppriseLocale import gettext_lazy as _
|
||||
|
||||
__all__ = [
|
||||
'NotifyGrowl',
|
||||
]
|
||||
|
||||
# Priorities
|
||||
class GrowlPriority(object):
|
||||
LOW = -2
|
||||
MODERATE = -1
|
||||
NORMAL = 0
|
||||
HIGH = 1
|
||||
EMERGENCY = 2
|
||||
|
||||
|
||||
GROWL_PRIORITIES = (
|
||||
GrowlPriority.LOW,
|
||||
GrowlPriority.MODERATE,
|
||||
GrowlPriority.NORMAL,
|
||||
GrowlPriority.HIGH,
|
||||
GrowlPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
GROWL_NOTIFICATION_TYPE = "New Messages"
|
||||
|
||||
|
||||
class NotifyGrowl(NotifyBase):
|
||||
"""
|
||||
A wrapper to Growl Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Growl'
|
||||
|
||||
# The services URL
|
||||
service_url = 'http://growl.info/'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'growl'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_growl'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# Disable throttle rate for Growl requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# A title can not be used for Growl Messages. Setting this to zero will
|
||||
# cause any title (if defined) to get placed into the message body.
|
||||
title_maxlen = 0
|
||||
|
||||
# Limit results to just the first 2 lines otherwise there is just too much
|
||||
# content to display
|
||||
body_max_line_count = 2
|
||||
|
||||
# Default Growl Port
|
||||
default_port = 23053
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{apikey}',
|
||||
'{schema}://{apikey}/{providerkey}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'apikey': {
|
||||
'name': _('API Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'map_to': 'host',
|
||||
},
|
||||
'providerkey': {
|
||||
'name': _('Provider Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'map_to': 'fullpath',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'priority': {
|
||||
'name': _('Priority'),
|
||||
'type': 'choice:int',
|
||||
'values': GROWL_PRIORITIES,
|
||||
'default': GrowlPriority.NORMAL,
|
||||
},
|
||||
'version': {
|
||||
'name': _('Version'),
|
||||
'type': 'choice:int',
|
||||
'values': (1, 2),
|
||||
'default': 2,
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, priority=None, version=2, include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Growl Object
|
||||
"""
|
||||
super(NotifyGrowl, self).__init__(**kwargs)
|
||||
|
||||
if not self.port:
|
||||
self.port = self.default_port
|
||||
|
||||
# The Priority of the message
|
||||
if priority not in GROWL_PRIORITIES:
|
||||
self.priority = GrowlPriority.NORMAL
|
||||
|
||||
else:
|
||||
self.priority = priority
|
||||
|
||||
# Always default the sticky flag to False
|
||||
self.sticky = False
|
||||
|
||||
# Store Version
|
||||
self.version = version
|
||||
|
||||
payload = {
|
||||
'applicationName': self.app_id,
|
||||
'notifications': [GROWL_NOTIFICATION_TYPE, ],
|
||||
'defaultNotifications': [GROWL_NOTIFICATION_TYPE, ],
|
||||
'hostname': self.host,
|
||||
'port': self.port,
|
||||
}
|
||||
|
||||
if self.password is not None:
|
||||
payload['password'] = self.password
|
||||
|
||||
self.logger.debug('Growl Registration Payload: %s' % str(payload))
|
||||
self.growl = notifier.GrowlNotifier(**payload)
|
||||
|
||||
try:
|
||||
self.growl.register()
|
||||
self.logger.debug(
|
||||
'Growl server registration completed successfully.'
|
||||
)
|
||||
|
||||
except errors.NetworkError:
|
||||
msg = 'A network error occurred sending Growl ' \
|
||||
'notification to {}.'.format(self.host)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
except errors.AuthError:
|
||||
msg = 'An authentication error occurred sending Growl ' \
|
||||
'notification to {}.'.format(self.host)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
except errors.UnsupportedError:
|
||||
msg = 'An unsupported error occurred sending Growl ' \
|
||||
'notification to {}.'.format(self.host)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
return
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Growl Notification
|
||||
"""
|
||||
|
||||
icon = None
|
||||
if self.version >= 2:
|
||||
# URL Based
|
||||
icon = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
else:
|
||||
# Raw
|
||||
icon = None if not self.include_image \
|
||||
else self.image_raw(notify_type)
|
||||
|
||||
payload = {
|
||||
'noteType': GROWL_NOTIFICATION_TYPE,
|
||||
'title': title,
|
||||
'description': body,
|
||||
'icon': icon is not None,
|
||||
'sticky': False,
|
||||
'priority': self.priority,
|
||||
}
|
||||
self.logger.debug('Growl Payload: %s' % str(payload))
|
||||
|
||||
# Update icon of payload to be raw data; this is intentionally done
|
||||
# here after we emit the debug message above (so we don't try to
|
||||
# print the binary contents of an image)
|
||||
payload['icon'] = icon
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
response = self.growl.notify(**payload)
|
||||
if not isinstance(response, bool):
|
||||
self.logger.warning(
|
||||
'Growl notification failed to send with response: %s' %
|
||||
str(response),
|
||||
)
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Growl notification.')
|
||||
|
||||
except errors.BaseError as e:
|
||||
# Since Growl servers listen for UDP broadcasts, it's possible
|
||||
# that you will never get to this part of the code since there is
|
||||
# no acknowledgement as to whether it accepted what was sent to it
|
||||
# or not.
|
||||
|
||||
# However, if the host/server is unavailable, you will get to this
|
||||
# point of the code.
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Growl Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
_map = {
|
||||
GrowlPriority.LOW: 'low',
|
||||
GrowlPriority.MODERATE: 'moderate',
|
||||
GrowlPriority.NORMAL: 'normal',
|
||||
GrowlPriority.HIGH: 'high',
|
||||
GrowlPriority.EMERGENCY: 'emergency',
|
||||
}
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'priority':
|
||||
_map[GrowlPriority.NORMAL] if self.priority not in _map
|
||||
else _map[self.priority],
|
||||
'version': self.version,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
auth = ''
|
||||
if self.user:
|
||||
# The growl password is stored in the user field
|
||||
auth = '{password}@'.format(
|
||||
password=NotifyGrowl.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
return '{schema}://{auth}{hostname}{port}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=NotifyGrowl.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == self.default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=NotifyGrowl.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
version = None
|
||||
if 'version' in results['qsd'] and len(results['qsd']['version']):
|
||||
# Allow the user to specify the version of the protocol to use.
|
||||
try:
|
||||
version = int(
|
||||
NotifyGrowl.unquote(
|
||||
results['qsd']['version']).strip().split('.')[0])
|
||||
|
||||
except (AttributeError, IndexError, TypeError, ValueError):
|
||||
NotifyGrowl.logger.warning(
|
||||
'An invalid Growl version of "%s" was specified and will '
|
||||
'be ignored.' % results['qsd']['version']
|
||||
)
|
||||
pass
|
||||
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': GrowlPriority.LOW,
|
||||
'm': GrowlPriority.MODERATE,
|
||||
'n': GrowlPriority.NORMAL,
|
||||
'h': GrowlPriority.HIGH,
|
||||
'e': GrowlPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
_map[results['qsd']['priority'][0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
# Because of the URL formatting, the password is actually where the
|
||||
# username field is. For this reason, we just perform this small hack
|
||||
# to make it (the URL) conform correctly. The following strips out the
|
||||
# existing password entry (if exists) so that it can be swapped with
|
||||
# the new one we specify.
|
||||
if results.get('password', None) is None:
|
||||
results['password'] = results.get('user', None)
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
# Set our version
|
||||
if version:
|
||||
results['version'] = version
|
||||
|
||||
return results
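To tie the query-string handling above together, here is a hedged sketch of registering a Growl target through apprise; the host and password are placeholders, and the option names simply mirror the priority, version and image parsing shown above.

import apprise

apobj = apprise.Apprise()
# growl://{password}@{hostname} -- 192.0.2.10 and 'mypass' are illustrative only;
# 'priority=high' and 'image=no' exercise the qsd handling above.
apobj.add('growl://mypass@192.0.2.10?priority=high&version=2&image=no')
apobj.notify(title='Bazarr', body='Download complete.')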
|
||||
|
|
|
@ -18,10 +18,10 @@ __all__ = [
|
|||
'GrowlNotifier'
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = logging.getLogger('gntp')
|
||||
|
||||
|
||||
class GrowlNotifier(gntp.notifier.GrowlNotifier):
|
||||
class GrowlNotifier(notifier.GrowlNotifier):
|
||||
"""
|
||||
ConfigParser enhanced GrowlNotifier object
|
||||
|
||||
|
@ -36,7 +36,7 @@ class GrowlNotifier(gntp.notifier.GrowlNotifier):
|
|||
port = ?
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
config = gntp.shim.RawConfigParser({
|
||||
config = shim.RawConfigParser({
|
||||
'hostname': kwargs.get('hostname', 'localhost'),
|
||||
'password': kwargs.get('password'),
|
||||
'port': kwargs.get('port', 23053),
|
||||
|
@ -67,7 +67,7 @@ def mini(description, **kwargs):
|
|||
:param string description: Notification message
|
||||
"""
|
||||
kwargs['notifierFactory'] = GrowlNotifier
|
||||
gntp.notifier.mini(description, **kwargs)
|
||||
notifier.mini(description, **kwargs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -27,7 +27,7 @@ __all__ = [
|
|||
'GrowlNotifier',
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = logging.getLogger('gntp')
|
||||
|
||||
|
||||
class GrowlNotifier(object):
|
||||
|
|
|
@ -27,16 +27,16 @@ if PY3:
|
|||
from configparser import RawConfigParser
|
||||
else:
|
||||
def b(s):
|
||||
if isinstance(s, unicode):
|
||||
if isinstance(s, unicode): # noqa
|
||||
return s.encode('utf8', 'replace')
|
||||
return s
|
||||
|
||||
def u(s):
|
||||
if isinstance(s, unicode):
|
||||
if isinstance(s, unicode): # noqa
|
||||
return s
|
||||
if isinstance(s, int):
|
||||
s = str(s)
|
||||
return unicode(s, "utf8", "replace")
|
||||
return unicode(s, "utf8", "replace") # noqa
|
||||
|
||||
from StringIO import StringIO
|
||||
from ConfigParser import RawConfigParser
|
||||
|
|
|
@ -2,19 +2,28 @@
|
|||
#
|
||||
# IFTTT (If-This-Then-That)
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
# For this plugin to work, you need to add the Maker applet to your profile
|
||||
# Simply visit https://ifttt.com/search and search for 'Webhooks'
|
||||
|
@ -25,16 +34,19 @@
|
|||
# URL. For example, it might look like this:
|
||||
# https://maker.ifttt.com/use/a3nHB7gA9TfBQSqJAHklod
|
||||
#
|
||||
# In the above example a3nHB7gA9TfBQSqJAHklod becomes your {apikey}
|
||||
# In the above example a3nHB7gA9TfBQSqJAHklod becomes your {webhook_id}
|
||||
# You will need this to make this notification work correctly
|
||||
#
|
||||
# For each event you create you will assign it a name (this will be known as
|
||||
# the {event} when building your URL.
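# As a concrete (and purely illustrative) example: using the sample key above
# with an event you might have named 'bazarr_notice', the apprise URL would be
# built as:
#
#   ifttt://a3nHB7gA9TfBQSqJAHklod@bazarr_notice/
#
# Additional events can be appended as extra path components.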
|
||||
import re
|
||||
import requests
|
||||
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyIFTTT(NotifyBase):
|
||||
|
@ -50,7 +62,7 @@ class NotifyIFTTT(NotifyBase):
|
|||
service_url = 'https://ifttt.com/'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'ifttt'
|
||||
secure_protocol = 'ifttt'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_ifttt'
|
||||
|
@ -78,37 +90,102 @@ class NotifyIFTTT(NotifyBase):
|
|||
ifttt_default_type_key = 'value3'
|
||||
|
||||
# IFTTT uses the http protocol with JSON requests
|
||||
notify_url = 'https://maker.ifttt.com/trigger/{event}/with/key/{apikey}'
|
||||
notify_url = 'https://maker.ifttt.com/' \
|
||||
'trigger/{event}/with/key/{webhook_id}'
|
||||
|
||||
def __init__(self, apikey, event, event_args=None, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{webhook_id}/{events}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'webhook_id': {
|
||||
'name': _('Webhook ID'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'events': {
|
||||
'name': _('Events'),
|
||||
'type': 'list:string',
|
||||
'required': True,
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'alias_of': 'events',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our token control
|
||||
template_kwargs = {
|
||||
'add_tokens': {
|
||||
'name': _('Add Tokens'),
|
||||
'prefix': '+',
|
||||
},
|
||||
'del_tokens': {
|
||||
'name': _('Remove Tokens'),
|
||||
'prefix': '-',
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, webhook_id, events, add_tokens=None, del_tokens=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize IFTTT Object
|
||||
|
||||
add_tokens can optionally be a dictionary of key/value pairs
|
||||
that you want to include in the IFTTT post to the server.
|
||||
|
||||
del_tokens can optionally be a list/tuple/set of tokens
|
||||
that you want to eliminate from the IFTTT post. There isn't
|
||||
much real functionality to this one unless you want to remove
|
||||
reference to Value1, Value2, and/or Value3
|
||||
|
||||
"""
|
||||
super(NotifyIFTTT, self).__init__(**kwargs)
|
||||
|
||||
if not apikey:
|
||||
raise TypeError('You must specify the Webhooks apikey.')
|
||||
if not webhook_id:
|
||||
msg = 'You must specify the Webhooks webhook_id.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not event:
|
||||
raise TypeError('You must specify the Event you wish to trigger.')
|
||||
# Store our Events we wish to trigger
|
||||
self.events = parse_list(events)
|
||||
|
||||
if not self.events:
|
||||
msg = 'You must specify at least one event you wish to trigger on.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Store our APIKey
|
||||
self.apikey = apikey
|
||||
self.webhook_id = webhook_id
|
||||
|
||||
# Store our Event we wish to trigger
|
||||
self.event = event
|
||||
# Tokens to include in post
|
||||
self.add_tokens = {}
|
||||
if add_tokens:
|
||||
self.add_tokens.update(add_tokens)
|
||||
|
||||
if isinstance(event_args, dict):
|
||||
# Make a copy of the arguments so that they can't change
|
||||
# outside of this plugin
|
||||
self.event_args = event_args.copy()
|
||||
# Tokens to remove
|
||||
self.del_tokens = []
|
||||
if del_tokens is not None:
|
||||
if isinstance(del_tokens, (list, tuple, set)):
|
||||
self.del_tokens = del_tokens
|
||||
|
||||
else:
|
||||
# Force a dictionary
|
||||
self.event_args = dict()
|
||||
elif isinstance(del_tokens, dict):
|
||||
# Convert the dictionary into a list
|
||||
self.del_tokens = set(del_tokens.keys())
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
else:
|
||||
msg = 'del_tokens must be a list; {} was provided'.format(
|
||||
str(type(del_tokens)))
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform IFTTT Notification
|
||||
"""
|
||||
|
@ -125,72 +202,113 @@ class NotifyIFTTT(NotifyBase):
|
|||
self.ifttt_default_type_key: notify_type,
|
||||
}
|
||||
|
||||
# Update our payload using any other event_args specified
|
||||
payload.update(self.event_args)
|
||||
# Add any new tokens expected (this can also potentially override
|
||||
# any entries defined above)
|
||||
payload.update(self.add_tokens)
|
||||
|
||||
# Eliminate empty fields; users wishing to cancel the use of the
|
||||
# self.ifttt_default_ entries can preset these keys to being
|
||||
# empty so that they get caught here and removed.
|
||||
payload = {x: y for x, y in payload.items() if y}
|
||||
# Eliminate fields flagged for removal otherwise ensure all tokens are
|
||||
# lowercase since that is what the IFTTT server expects from us.
|
||||
payload = {x.lower(): y for x, y in payload.items()
|
||||
if x not in self.del_tokens}
|
||||
|
||||
# URL to transmit content via
|
||||
url = self.notify_url.format(
|
||||
apikey=self.apikey,
|
||||
event=self.event,
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Create a copy of our event list
|
||||
events = list(self.events)
|
||||
|
||||
while len(events):
|
||||
|
||||
# Retrieve an entry from our event list
|
||||
event = events.pop(0)
|
||||
|
||||
# URL to transmit content via
|
||||
url = self.notify_url.format(
|
||||
webhook_id=self.webhook_id,
|
||||
event=event,
|
||||
)
|
||||
|
||||
self.logger.debug('IFTTT POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('IFTTT Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response headers: %r" % r.headers)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response body: %r" % r.content)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyIFTTT.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
event,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent IFTTT notification to %s.' % event)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending IFTTT:%s ' % (
|
||||
event) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Store any new key/value pairs added to our list
|
||||
args.update({'+{}'.format(k): v for k, v in self.add_tokens})
|
||||
args.update({'-{}'.format(k): '' for k in self.del_tokens})
|
||||
|
||||
return '{schema}://{webhook_id}@{events}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
webhook_id=NotifyIFTTT.quote(self.webhook_id, safe=''),
|
||||
events='/'.join([NotifyIFTTT.quote(x, safe='')
|
||||
for x in self.events]),
|
||||
args=NotifyIFTTT.urlencode(args),
|
||||
)
|
||||
|
||||
self.logger.debug('IFTTT POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('IFTTT Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response status: %r" % r.status_code)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response headers: %r" % r.headers)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response body: %r" % r.content)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
self.event,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT:%s '
|
||||
'notification (error=%s).' % (
|
||||
self.event,
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.content)
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent IFTTT notification to Event %s.' % self.event)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending IFTTT:%s ' % (
|
||||
self.event) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@ -204,23 +322,50 @@ class NotifyIFTTT(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Our Webhook ID is the hostname if no user is specified
|
||||
results['webhook_id'] = \
|
||||
results['user'] if results['user'] else results['host']
|
||||
|
||||
# Unquote our Webhook ID
|
||||
results['webhook_id'] = NotifyIFTTT.unquote(results['webhook_id'])
|
||||
|
||||
# Our Event
|
||||
results['event'] = results['host']
|
||||
results['events'] = list()
|
||||
if results['user']:
|
||||
# If a user was defined, then the hostname is actually a event
|
||||
# too
|
||||
results['events'].append(NotifyIFTTT.unquote(results['host']))
|
||||
|
||||
# Our API Key
|
||||
results['apikey'] = results['user']
|
||||
# Now fetch the remaining tokens
|
||||
results['events'].extend(NotifyIFTTT.split_path(results['fullpath']))
|
||||
|
||||
# Store ValueX entries based on each entry past the host
|
||||
results['event_args'] = {
|
||||
'{0}{1}'.format(NotifyIFTTT.ifttt_default_key_prefix, n + 1):
|
||||
NotifyBase.unquote(x)
|
||||
for n, x in enumerate(
|
||||
NotifyBase.split_path(results['fullpath'])) if x}
|
||||
|
||||
# Allow users to set key=val parameters to specify more types
|
||||
# of payload options
|
||||
results['event_args'].update(
|
||||
{k: NotifyBase.unquote(v)
|
||||
for k, v in results['qsd'].items()})
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['events'] += \
|
||||
NotifyIFTTT.parse_list(results['qsd']['to'])
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support https://maker.ifttt.com/use/WEBHOOK_ID/EVENT_ID
|
||||
"""
|
||||
|
||||
result = re.match(
|
||||
r'^https?://maker\.ifttt\.com/use/'
|
||||
r'(?P<webhook_id>[A-Z0-9_-]+)'
|
||||
r'/?(?P<events>([A-Z0-9_-]+/?)+)?'
|
||||
r'/?(?P<args>\?[.+])?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifyIFTTT.parse_url(
|
||||
'{schema}://{webhook_id}{events}{args}'.format(
|
||||
schema=NotifyIFTTT.secure_protocol,
|
||||
webhook_id=result.group('webhook_id'),
|
||||
events='' if not result.group('events')
|
||||
else '@{}'.format(result.group('events')),
|
||||
args='' if not result.group('args')
|
||||
else result.group('args')))
|
||||
|
||||
return None
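A small, hedged sketch of what the helper above is for; the import path assumes this vendored layout, and the key and event name are the illustrative values used earlier in this file's comments.

from apprise.plugins.NotifyIFTTT import NotifyIFTTT

# Translates a native Maker/Webhooks URL into the ifttt:// form handled by
# parse_url(); returns None when the URL does not match the pattern above.
parsed = NotifyIFTTT.parse_native_url(
    'https://maker.ifttt.com/use/a3nHB7gA9TfBQSqJAHklod/bazarr_notice')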
|
||||
|
|
|
@ -1,28 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# JSON Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
from ..common import NotifyType
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyJSON(NotifyBase):
|
||||
|
@ -45,25 +53,112 @@ class NotifyJSON(NotifyBase):
|
|||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# Disable throttle rate for JSON requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}',
|
||||
'{schema}://{host}:{port}',
|
||||
'{schema}://{user}@{host}',
|
||||
'{schema}://{user}@{host}:{port}',
|
||||
'{schema}://{user}:{password}@{host}',
|
||||
'{schema}://{user}:{password}@{host}:{port}',
|
||||
)
|
||||
|
||||
# Define our tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Username'),
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
},
|
||||
})
|
||||
|
||||
# Define any kwargs we're using
|
||||
template_kwargs = {
|
||||
'headers': {
|
||||
'name': _('HTTP Header'),
|
||||
'prefix': '+',
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, headers=None, **kwargs):
|
||||
"""
|
||||
Initialize JSON Object
|
||||
|
||||
headers can be a dictionary of key/value pairs that you want to
|
||||
additionally include as part of the server headers to post with
|
||||
|
||||
"""
|
||||
super(NotifyJSON, self).__init__(**kwargs)
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not compat_is_basestring(self.fullpath):
|
||||
if not isinstance(self.fullpath, six.string_types):
|
||||
self.fullpath = '/'
|
||||
|
||||
self.headers = {}
|
||||
if headers:
|
||||
# Store our extra headers
|
||||
self.headers.update(headers)
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Append our headers into our args
|
||||
args.update({'+{}'.format(k): v for k, v in self.headers.items()})
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=NotifyJSON.quote(self.user, safe=''),
|
||||
password=NotifyJSON.quote(self.password, safe=''),
|
||||
)
|
||||
elif self.user:
|
||||
auth = '{user}@'.format(
|
||||
user=NotifyJSON.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
return '{schema}://{auth}{hostname}{port}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=NotifyJSON.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=NotifyJSON.urlencode(args),
|
||||
)
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform JSON Notification
|
||||
"""
|
||||
|
@ -84,11 +179,17 @@ class NotifyJSON(NotifyBase):
|
|||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
# Apply any/all header over-rides defined
|
||||
headers.update(self.headers)
|
||||
|
||||
auth = None
|
||||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
# Set our schema
|
||||
schema = 'https' if self.secure else 'http'
|
||||
|
||||
url = '%s://%s' % (schema, self.host)
|
||||
if isinstance(self.port, int):
|
||||
url += ':%d' % self.port
|
||||
|
||||
|
@ -98,6 +199,10 @@ class NotifyJSON(NotifyBase):
|
|||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('JSON Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
|
@ -107,17 +212,18 @@ class NotifyJSON(NotifyBase):
|
|||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyJSON.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification '
|
||||
'(error=%s).' % (r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -135,3 +241,27 @@ class NotifyJSON(NotifyBase):
|
|||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Add our headers that the user can potentially over-ride if they wish
|
||||
# to to our returned result set
|
||||
results['headers'] = results['qsd-']
|
||||
results['headers'].update(results['qsd+'])
|
||||
|
||||
# Tidy our header entries by unquoting them
|
||||
results['headers'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y)
|
||||
for x, y in results['headers'].items()}
|
||||
|
||||
return results
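To show how the '+key=value' header handling above is meant to be used, a hedged sketch follows; the host, port, credentials and header value are placeholders, and jsons:// is assumed to be the secure schema this plugin registers.

import apprise

apobj = apprise.Apprise()
# jsons://{user}:{password}@{host}:{port} -- every value below is illustrative.
# '+X-Token=abc123' becomes an extra HTTP header on the JSON POST, per the
# 'qsd+' handling above.
apobj.add('jsons://user:secret@webhook.example.com:8443/?+X-Token=abc123')
apobj.notify(title='Bazarr', body='Payload delivered as JSON.')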
|
||||
|
|
|
@ -1,20 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Join Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# Join URL: http://joaoapps.com/join/
|
||||
# To use this plugin, you need to first access (make sure your browser allows
|
||||
|
@ -30,24 +37,22 @@ import re
|
|||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Token required as part of the API request
|
||||
VALIDATE_APIKEY = re.compile(r'[A-Za-z0-9]{32}')
|
||||
VALIDATE_APIKEY = re.compile(r'[a-z0-9]{32}', re.I)
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
JOIN_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
JOIN_HTTP_ERROR_MAP.update({
|
||||
JOIN_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
|
||||
# Used to break path apart into list of devices
|
||||
DEVICE_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
}
|
||||
|
||||
# Used to detect a device
|
||||
IS_DEVICE_RE = re.compile(r'([A-Za-z0-9]{32})')
|
||||
IS_DEVICE_RE = re.compile(r'([a-z0-9]{32})', re.I)
|
||||
|
||||
# Used to detect a device
|
||||
IS_GROUP_RE = re.compile(
|
||||
|
@ -71,7 +76,7 @@ class NotifyJoin(NotifyBase):
|
|||
service_url = 'https://joaoapps.com/join/'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'join'
|
||||
secure_protocol = 'join'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_join'
|
||||
|
@ -83,65 +88,101 @@ class NotifyJoin(NotifyBase):
|
|||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# Limit results to just the first 2 lines otherwise there is just too much
|
||||
# content to display
|
||||
body_max_line_count = 2
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
def __init__(self, apikey, devices, **kwargs):
|
||||
# The default group to use if none is specified
|
||||
default_join_group = 'group.all'
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{apikey}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'apikey': {
|
||||
'name': _('API Key'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z0-9]{32}', 'i'),
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'device': {
|
||||
'name': _('Device ID'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z0-9]{32}', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'group': {
|
||||
'name': _('Group'),
|
||||
'type': 'choice:string',
|
||||
'values': (
|
||||
'all', 'android', 'chrome', 'windows10', 'phone', 'tablet',
|
||||
'pc'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
'required': True,
|
||||
},
|
||||
})
|
||||
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, apikey, targets, include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Join Object
|
||||
"""
|
||||
super(NotifyJoin, self).__init__(**kwargs)
|
||||
|
||||
if not VALIDATE_APIKEY.match(apikey.strip()):
|
||||
self.logger.warning(
|
||||
'The first API Token specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
|
||||
raise TypeError(
|
||||
'The first API Token specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
msg = 'The JOIN API Token specified ({}) is invalid.'\
|
||||
.format(apikey)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.apikey = apikey.strip()
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
self.devices = [x for x in filter(bool, DEVICE_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
|
||||
elif isinstance(devices, (set, tuple, list)):
|
||||
self.devices = devices
|
||||
|
||||
else:
|
||||
self.devices = list()
|
||||
# Parse devices specified
|
||||
self.devices = parse_list(targets)
|
||||
|
||||
if len(self.devices) == 0:
|
||||
# Default to everyone
|
||||
self.devices.append('group.all')
|
||||
self.devices.append(self.default_join_group)
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Join Notification
|
||||
"""
|
||||
|
||||
try:
|
||||
# Limit results to just the first 2 lines otherwise
|
||||
# there is just too much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
|
||||
except (AttributeError, TypeError):
|
||||
# body was None or not of a type string
|
||||
body = ''
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
return_status = True
|
||||
has_error = False
|
||||
|
||||
# Create a copy of the devices list
|
||||
devices = list(self.devices)
|
||||
|
@@ -149,14 +190,15 @@ class NotifyJoin(NotifyBase):
device = devices.pop(0)
group_re = IS_GROUP_RE.match(device)
if group_re:
device = 'group.%s' % group_re.group('name').lower()
device = 'group.{}'.format(group_re.group('name').lower())

elif not IS_DEVICE_RE.match(device):
self.logger.warning(
"The specified device/group '%s' is invalid; skipping." % (
device,
)
'Skipping specified invalid device/group "{}"'
.format(device)
)
# Mark our failure
has_error = True
continue

url_args = {
@@ -166,7 +208,10 @@ class NotifyJoin(NotifyBase):
'text': body,
}

image_url = self.image_url(notify_type)
# prepare our image for display if configured to do so
image_url = None if not self.include_image \
else self.image_url(notify_type)

if image_url:
url_args['icon'] = image_url

@@ -174,13 +219,16 @@ class NotifyJoin(NotifyBase):
payload = {}

# Prepare the URL
url = '%s?%s' % (self.notify_url, NotifyBase.urlencode(url_args))
url = '%s?%s' % (self.notify_url, NotifyJoin.urlencode(url_args))

self.logger.debug('Join POST URL: %s (cert_verify=%r)' % (
url, self.verify_certificate,
))
self.logger.debug('Join Payload: %s' % str(payload))

# Always call throttle before any remote server i/o is made
self.throttle()

try:
r = requests.post(
url,
@@ -188,26 +236,27 @@ class NotifyJoin(NotifyBase):
headers=headers,
verify=self.verify_certificate,
)

if r.status_code != requests.codes.ok:
# We had a problem
try:
self.logger.warning(
'Failed to send Join:%s '
'notification: %s (error=%s).' % (
device,
JOIN_HTTP_ERROR_MAP[r.status_code],
r.status_code))
status_str = \
NotifyJoin.http_response_code_lookup(
r.status_code, JOIN_HTTP_ERROR_MAP)

except KeyError:
self.logger.warning(
'Failed to send Join:%s '
'notification (error=%s).' % (
device,
r.status_code))
self.logger.warning(
'Failed to send Join notification to {}: '
'{}{}error={}.'.format(
device,
status_str,
', ' if status_str else '',
r.status_code))

# self.logger.debug('Response Details: %s' % r.raw.read())
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))

return_status = False
# Mark our failure
has_error = True
continue

else:
self.logger.info('Sent Join notification to %s.' % device)
@@ -218,13 +267,32 @@ class NotifyJoin(NotifyBase):
'notification.' % device
)
self.logger.debug('Socket Exception: %s' % str(e))
return_status = False

if len(devices):
# Prevent thrashing requests
self.throttle()
# Mark our failure
has_error = True
continue

return return_status
return not has_error

def url(self):
"""
Returns the URL built dynamically based on specified arguments.
"""

# Define any arguments set
args = {
'format': self.notify_format,
'overflow': self.overflow_mode,
'image': 'yes' if self.include_image else 'no',
'verify': 'yes' if self.verify_certificate else 'no',
}

return '{schema}://{apikey}/{devices}/?{args}'.format(
schema=self.secure_protocol,
apikey=NotifyJoin.quote(self.apikey, safe=''),
devices='/'.join([NotifyJoin.quote(x, safe='')
for x in self.devices]),
args=NotifyJoin.urlencode(args))

@staticmethod
def parse_url(url):
@@ -239,11 +307,30 @@ class NotifyJoin(NotifyBase):
# We're done early as we couldn't load the results
return results

# Apply our settings now
devices = ' '.join(
filter(bool, NotifyBase.split_path(results['fullpath'])))
# Our API Key is the hostname if no user is specified
results['apikey'] = \
results['user'] if results['user'] else results['host']

results['apikey'] = results['host']
results['devices'] = devices
# Unquote our API Key
results['apikey'] = NotifyJoin.unquote(results['apikey'])

# Our Devices
results['targets'] = list()
if results['user']:
# If a user was defined, then the hostname is actually a target
# too
results['targets'].append(NotifyJoin.unquote(results['host']))

# Now fetch the remaining tokens
results['targets'].extend(
NotifyJoin.split_path(results['fullpath']))

# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += NotifyJoin.parse_list(results['qsd']['to'])

# Include images with our message
results['include_image'] = \
parse_bool(results['qsd'].get('image', True))

return results
libs/apprise/plugins/NotifyMSTeams.py (new file, 395 lines)
@@ -0,0 +1,395 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

# To use this plugin, you need to create a webhook; you can read more about
# this here:
# https://dev.outlook.com/Connectors/\
# GetStarted#creating-messages-through-office-365-connectors-\
# in-microsoft-teams
#
# More details are here on API Construction:
# https://docs.microsoft.com/en-ca/outlook/actionable-messages/\
# message-card-reference
#
# I personally created a free account at teams.microsoft.com and then
# went to the store (bottom left-hand side of the Slack-like interface).
#
# From here you can search for 'Incoming Webhook'. Once you click on it,
# you can associate the webhook with your team. At this point, you can
# optionally also assign it a name and an avatar. Finally, you'll have to
# assign it a channel that it will notify.
#
# When you've completed this, it will generate a (webhook) URL that
# looks like:
# https://outlook.office.com/webhook/ \
# abcdefgf8-2f4b-4eca-8f61-225c83db1967@abcdefg2-5a99-4849-8efc-\
# c9e78d28e57d/IncomingWebhook/291289f63a8abd3593e834af4d79f9fe/\
# a2329f43-0ffb-46ab-948b-c9abdad9d643
#
# Yes... the URL is that big... But it looks like this (greatly simplified):
# https://outlook.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK
#                                    ^                    ^    ^
#                                    |                    |    |
# These are important <--------------^--------------------^----^
#
# You'll notice that the first token is actually 2 tokens separated by an
# @ symbol, but let's just ignore that and assume it's one great big token
# instead.
#
# These 3 tokens are what you'll need to build your URL with:
# msteams://ABCD/DEFG/HIJK
#
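#
# As a quick illustration (not part of this module), a URL built from those
# three tokens can be handed to the Apprise client like any other service
# URL; the token values below are placeholders:
#
#   import apprise
#
#   apobj = apprise.Apprise()
#   apobj.add('msteams://ABCD/DEFG/HIJK')
#   apobj.notify(title='Nightly backup', body='Completed without errors')
#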
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyFormat
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Used to prepare our UUID regex matching
|
||||
UUID4_RE = \
|
||||
r'[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}'
|
||||
|
||||
# Token required as part of the API request
|
||||
# /AAAAAAAAA@AAAAAAAAA/........./.........
|
||||
VALIDATE_TOKEN_A = re.compile(r'{}@{}'.format(UUID4_RE, UUID4_RE), re.I)
|
||||
|
||||
# Token required as part of the API request
|
||||
# /................../BBBBBBBBB/..........
|
||||
VALIDATE_TOKEN_B = re.compile(r'[A-Za-z0-9]{32}')
|
||||
|
||||
# Token required as part of the API request
|
||||
# /........./........./CCCCCCCCCCCCCCCCCCCCCCCC
|
||||
VALIDATE_TOKEN_C = re.compile(UUID4_RE, re.I)
|
||||
|
||||
|
||||
class NotifyMSTeams(NotifyBase):
|
||||
"""
|
||||
A wrapper for Microsoft Teams Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'MSTeams'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://teams.micrsoft.com/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'msteams'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_msteams'
|
||||
|
||||
# MSTeams uses the http protocol with JSON requests
|
||||
notify_url = 'https://outlook.office.com/webhook'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
# Default Notification Format
|
||||
notify_format = NotifyFormat.MARKDOWN
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{token_a}/{token_b}/{token_c}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'token_a': {
|
||||
'name': _('Token A'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'{}@{}'.format(UUID4_RE, UUID4_RE), 'i'),
|
||||
},
|
||||
'token_b': {
|
||||
'name': _('Token B'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[a-z0-9]{32}', 'i'),
|
||||
},
|
||||
'token_c': {
|
||||
'name': _('Token C'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (UUID4_RE, 'i'),
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, token_a, token_b, token_c, include_image=True,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize Microsoft Teams Object
|
||||
"""
|
||||
super(NotifyMSTeams, self).__init__(**kwargs)
|
||||
|
||||
if not token_a:
|
||||
msg = 'The first MSTeams API token is not specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not token_b:
|
||||
msg = 'The second MSTeams API token is not specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not token_c:
|
||||
msg = 'The third MSTeams API token is not specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_TOKEN_A.match(token_a.strip()):
|
||||
msg = 'The first MSTeams API token specified ({}) is invalid.'\
|
||||
.format(token_a)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_a = token_a.strip()
|
||||
|
||||
if not VALIDATE_TOKEN_B.match(token_b.strip()):
|
||||
msg = 'The second MSTeams API token specified ({}) is invalid.'\
|
||||
.format(token_b)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_b = token_b.strip()
|
||||
|
||||
if not VALIDATE_TOKEN_C.match(token_c.strip()):
|
||||
msg = 'The third MSTeams API token specified ({}) is invalid.'\
|
||||
.format(token_c)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_c = token_c.strip()
|
||||
|
||||
# Place a thumbnail image inline with the message body
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Microsoft Teams Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
url = '%s/%s/IncomingWebhook/%s/%s' % (
|
||||
self.notify_url,
|
||||
self.token_a,
|
||||
self.token_b,
|
||||
self.token_c,
|
||||
)
|
||||
|
||||
# Prepare our payload
|
||||
payload = {
|
||||
"@type": "MessageCard",
|
||||
"@context": "https://schema.org/extensions",
|
||||
"summary": self.app_desc,
|
||||
"themeColor": self.color(notify_type),
|
||||
"sections": [
|
||||
{
|
||||
"activityImage": None,
|
||||
"activityTitle": title,
|
||||
"text": body,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
# Acquire our to-be footer icon if configured to do so
|
||||
image_url = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
payload['sections'][0]['activityImage'] = image_url
|
||||
|
||||
self.logger.debug('MSTeams POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('MSTeams Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyMSTeams.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send MSTeams notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# We failed
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent MSTeams notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending MSTeams notification.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# We failed
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{token_a}/{token_b}/{token_c}/'\
|
||||
'?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
token_a=NotifyMSTeams.quote(self.token_a, safe=''),
|
||||
token_b=NotifyMSTeams.quote(self.token_b, safe=''),
|
||||
token_c=NotifyMSTeams.quote(self.token_c, safe=''),
|
||||
args=NotifyMSTeams.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url, verify_host=False)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Get unquoted entries
|
||||
entries = NotifyMSTeams.split_path(results['fullpath'])
|
||||
|
||||
if results.get('user'):
|
||||
# If a user was found, it's because it's still part of the first
|
||||
# token, so we concatenate them
|
||||
results['token_a'] = '{}@{}'.format(
|
||||
NotifyMSTeams.unquote(results['user']),
|
||||
NotifyMSTeams.unquote(results['host']),
|
||||
)
|
||||
|
||||
else:
|
||||
# The first token is stored in the hostname
|
||||
results['token_a'] = NotifyMSTeams.unquote(results['host'])
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
results['token_b'] = entries.pop(0)
|
||||
|
||||
except IndexError:
|
||||
# We're done
|
||||
results['token_b'] = None
|
||||
|
||||
try:
|
||||
results['token_c'] = entries.pop(0)
|
||||
|
||||
except IndexError:
|
||||
# We're done
|
||||
results['token_c'] = None
|
||||
|
||||
# Get Image
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support:
|
||||
https://outlook.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK
|
||||
"""
|
||||
|
||||
# We don't need to do incredibly detailed token matching as the purpose
# of this is just to detect that we're dealing with an msteams url
|
||||
# token parsing will occur once we initialize the function
|
||||
result = re.match(
|
||||
r'^https?://outlook\.office\.com/webhook/'
|
||||
r'(?P<token_a>[A-Z0-9-]+@[A-Z0-9-]+)/'
|
||||
r'IncomingWebhook/'
|
||||
r'(?P<token_b>[A-Z0-9]+)/'
|
||||
r'(?P<token_c>[A-Z0-9-]+)/?'
|
||||
r'(?P<args>\?[.+])?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifyMSTeams.parse_url(
|
||||
'{schema}://{token_a}/{token_b}/{token_c}/{args}'.format(
|
||||
schema=NotifyMSTeams.secure_protocol,
|
||||
token_a=result.group('token_a'),
|
||||
token_b=result.group('token_b'),
|
||||
token_c=result.group('token_c'),
|
||||
args='' if not result.group('args')
|
||||
else result.group('args')))
|
||||
|
||||
return None
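#
# A small sketch (not part of the module) of how the native URL detection
# above might be exercised; the webhook tokens are placeholders:
#
#   results = NotifyMSTeams.parse_native_url(
#       'https://outlook.office.com/webhook/'
#       'AAA@BBB/IncomingWebhook/CCC/DDD')
#
#   # When the pattern matches, the returned dictionary carries token_a,
#   # token_b and token_c, ready to be handed to NotifyMSTeams().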
libs/apprise/plugins/NotifyMailgun.py (new file, 391 lines)
@@ -0,0 +1,391 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

# Sign up @ https://www.mailgun.com/
#
# Each domain will have an API key associated with it. If you sign up you'll
# get a sandbox domain to use. Or if you set up your own, they'll have
# api keys associated with them too. Find out your API key by visiting
# https://app.mailgun.com/app/domains
#
# From here you can click on the domain you're interested in. You can acquire
# the API Key from here which will look something like:
# 4b4f2918c6c21ba0a26ad2af73c07f4d-dk5f51da-8f91a0df
#
# You'll also need to know the domain that is associated with your API key.
# This will be obvious with a paid account because it will be the domain name
# you've registered with them. But if you're using a test account, it will
# be the name of the sandbox you've set up such as:
# sandbox74bda3414c06kb5acb946.mailgun.org
#
# Knowing this, you can build your mailgun URL as follows:
# mailgun://{user}@{domain}/{apikey}
# mailgun://{user}@{domain}/{apikey}/{email}
#
# You can email as many addresses as you want as:
# mailgun://{user}@{domain}/{apikey}/{email1}/{email2}/{emailN}
#
# The {user}@{domain} effectively assembles the 'from' email address
# the email will be transmitted from. If no email address is specified,
# then it will also become the 'to' address.
#
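#
# As a quick sketch (not part of this module), such a URL can then be used
# through the Apprise client; the domain, key and addresses below are
# placeholders re-using the examples above:
#
#   import apprise
#
#   apobj = apprise.Apprise()
#   apobj.add('mailgun://noreply@example.com/'
#             '4b4f2918c6c21ba0a26ad2af73c07f4d-dk5f51da-8f91a0df/'
#             'alerts@example.com')
#   apobj.notify(title='Disk space warning', body='/var is 90% full')
#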
import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..utils import is_email
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Used to validate your personal access apikey
|
||||
VALIDATE_API_KEY = re.compile(r'^[a-z0-9]{32}-[a-z0-9]{8}-[a-z0-9]{8}$', re.I)
|
||||
|
||||
# Provide some known codes Mailgun uses and what they translate to:
|
||||
# Based on https://documentation.mailgun.com/en/latest/api-intro.html#errors
|
||||
MAILGUN_HTTP_ERROR_MAP = {
|
||||
400: 'A bad request was made to the server.',
|
||||
401: 'The provided API Key was not valid.',
|
||||
402: 'The request failed for a reason out of your control.',
|
||||
404: 'The requested API query is not valid.',
|
||||
413: 'Provided attachment is too big.',
|
||||
}
|
||||
|
||||
|
||||
# Priorities
|
||||
class MailgunRegion(object):
|
||||
US = 'us'
|
||||
EU = 'eu'
|
||||
|
||||
|
||||
# Mailgun APIs
|
||||
MAILGUN_API_LOOKUP = {
|
||||
MailgunRegion.US: 'https://api.mailgun.net/v3/',
|
||||
MailgunRegion.EU: 'https://api.eu.mailgun.net/v3/',
|
||||
}
|
||||
|
||||
# A List of our regions we can use for verification
|
||||
MAILGUN_REGIONS = (
|
||||
MailgunRegion.US,
|
||||
MailgunRegion.EU,
|
||||
)
|
||||
|
||||
|
||||
class NotifyMailgun(NotifyBase):
|
||||
"""
|
||||
A wrapper for Mailgun Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Mailgun'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://www.mailgun.com/'
|
||||
|
||||
# All pushover requests are secure
|
||||
secure_protocol = 'mailgun'
|
||||
|
||||
# Mailgun advertises they allow 300 requests per minute.
|
||||
# 60/300 = 0.2
|
||||
request_rate_per_sec = 0.20
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mailgun'
|
||||
|
||||
# The default region to use if one isn't otherwise specified
|
||||
mailgun_default_region = MailgunRegion.US
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{user}@{host}:{apikey}/',
|
||||
'{schema}://{user}@{host}:{apikey}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'user': {
|
||||
'name': _('User Name'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'host': {
|
||||
'name': _('Domain'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'apikey': {
|
||||
'name': _('API Key'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z0-9]{32}-[a-z0-9]{8}-[a-z0-9]{8}', 'i'),
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Target Emails'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'name': {
|
||||
'name': _('From Name'),
|
||||
'type': 'string',
|
||||
'map_to': 'from_name',
|
||||
},
|
||||
'region': {
|
||||
'name': _('Region Name'),
|
||||
'type': 'choice:string',
|
||||
'values': MAILGUN_REGIONS,
|
||||
'default': MailgunRegion.US,
|
||||
'map_to': 'region_name',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, apikey, targets, from_name=None, region_name=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize Mailgun Object
|
||||
"""
|
||||
super(NotifyMailgun, self).__init__(**kwargs)
|
||||
|
||||
try:
|
||||
# The personal access apikey associated with the account
|
||||
self.apikey = apikey.strip()
|
||||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
msg = 'No API Key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_API_KEY.match(self.apikey):
|
||||
msg = 'The API Key specified ({}) is invalid.' \
|
||||
.format(apikey)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Validate our username
|
||||
if not self.user:
|
||||
msg = 'No username was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Parse our targets
|
||||
self.targets = parse_list(targets)
|
||||
|
||||
# Store our region
|
||||
try:
|
||||
self.region_name = self.mailgun_default_region \
|
||||
if region_name is None else region_name.lower()
|
||||
|
||||
if self.region_name not in MAILGUN_REGIONS:
|
||||
# allow the outer except to handle this common response
|
||||
raise
|
||||
except:
|
||||
# Invalid region specified
|
||||
msg = 'The region specified ({}) is invalid.' \
|
||||
.format(region_name)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Get our From username (if specified)
|
||||
self.from_name = from_name
|
||||
|
||||
# Get our from email address
|
||||
self.from_addr = '{user}@{host}'.format(user=self.user, host=self.host)
|
||||
|
||||
if not is_email(self.from_addr):
|
||||
# Parse Source domain based on from_addr
|
||||
msg = 'Invalid ~From~ email format: {}'.format(self.from_addr)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Mailgun Notification
|
||||
"""
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Prepare our headers
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Accept': 'application/json',
|
||||
}
|
||||
|
||||
# Prepare our payload
|
||||
payload = {
|
||||
'from': '{name} <{addr}>'.format(
|
||||
name=self.app_id if not self.from_name else self.from_name,
|
||||
addr=self.from_addr),
|
||||
'subject': title,
|
||||
'text': body,
|
||||
}
|
||||
|
||||
# Prepare our URL as it's based on our hostname
|
||||
url = '{}{}/messages'.format(
|
||||
MAILGUN_API_LOOKUP[self.region_name], self.host)
|
||||
|
||||
# Create a copy of the targets list
|
||||
emails = list(self.targets)
|
||||
|
||||
if len(emails) == 0:
|
||||
# No email specified; use the from
|
||||
emails.append(self.from_addr)
|
||||
|
||||
while len(emails):
|
||||
# Get our email to notify
|
||||
email = emails.pop(0)
|
||||
|
||||
# Prepare our user
|
||||
payload['to'] = '{} <{}>'.format(email, email)
|
||||
|
||||
# Some Debug Logging
|
||||
self.logger.debug('Mailgun POST URL: {} (cert_verify={})'.format(
|
||||
url, self.verify_certificate))
|
||||
self.logger.debug('Mailgun Payload: {}' .format(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
auth=("api", self.apikey),
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, MAILGUN_API_LOOKUP)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Mailgun notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
email,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent Mailgun notification to {}.'.format(email))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Mailgun:%s ' % (
|
||||
email) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
'region': self.region_name,
|
||||
}
|
||||
|
||||
if self.from_name is not None:
|
||||
# from_name specified; pass it back on the url
|
||||
args['name'] = self.from_name
|
||||
|
||||
return '{schema}://{user}@{host}/{apikey}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
host=self.host,
|
||||
user=NotifyMailgun.quote(self.user, safe=''),
|
||||
apikey=NotifyMailgun.quote(self.apikey, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifyMailgun.quote(x, safe='') for x in self.targets]),
|
||||
args=NotifyMailgun.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Get our entries; split_path() looks after unquoting content for us
|
||||
# by default
|
||||
results['targets'] = NotifyMailgun.split_path(results['fullpath'])
|
||||
|
||||
# Our very first entry is reserved for our api key
|
||||
try:
|
||||
results['apikey'] = results['targets'].pop(0)
|
||||
|
||||
except IndexError:
|
||||
# We're done - no API Key found
|
||||
results['apikey'] = None
|
||||
|
||||
if 'name' in results['qsd'] and len(results['qsd']['name']):
|
||||
# Extract from name to associate with from address
|
||||
results['from_name'] = \
|
||||
NotifyMailgun.unquote(results['qsd']['name'])
|
||||
|
||||
if 'region' in results['qsd'] and len(results['qsd']['region']):
|
||||
# Extract from name to associate with from address
|
||||
results['region_name'] = \
|
||||
NotifyMailgun.unquote(results['qsd']['region'])
|
||||
|
||||
# Support the 'to' variable so that we can support targets this way too
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyMailgun.parse_list(results['qsd']['to'])
|
||||
|
||||
return results
libs/apprise/plugins/NotifyMatrix.py (new file, 1041 lines; diff suppressed because it is too large)
@ -1,35 +1,46 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# MatterMost Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_bool
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Some Reference Locations:
|
||||
# - https://docs.mattermost.com/developer/webhooks-incoming.html
|
||||
# - https://docs.mattermost.com/administration/config-settings.html
|
||||
|
||||
# Used to validate Authorization Token
|
||||
VALIDATE_AUTHTOKEN = re.compile(r'[A-Za-z0-9]{24,32}')
|
||||
VALIDATE_AUTHTOKEN = re.compile(r'[a-z0-9]{24,32}', re.I)
|
||||
|
||||
|
||||
class NotifyMatterMost(NotifyBase):
|
||||
|
@ -61,7 +72,71 @@ class NotifyMatterMost(NotifyBase):
|
|||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 4000
|
||||
|
||||
def __init__(self, authtoken, channel=None, **kwargs):
|
||||
# Mattermost does not have a title
|
||||
title_maxlen = 0
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}/{authtoken}',
|
||||
'{schema}://{host}/{authtoken}:{port}',
|
||||
'{schema}://{botname}@{host}/{authtoken}',
|
||||
'{schema}://{botname}@{host}:{port}/{authtoken}',
|
||||
'{schema}://{host}/{fullpath}/{authtoken}',
|
||||
'{schema}://{host}/{fullpath}{authtoken}:{port}',
|
||||
'{schema}://{botname}@{host}/{fullpath}/{authtoken}',
|
||||
'{schema}://{botname}@{host}:{port}/{fullpath}/{authtoken}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'authtoken': {
|
||||
'name': _('Access Key'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z0-9]{24,32}', 'i'),
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'fullpath': {
|
||||
'name': _('Path'),
|
||||
'type': 'string',
|
||||
},
|
||||
'botname': {
|
||||
'name': _('Bot Name'),
|
||||
'type': 'string',
|
||||
'map_to': 'user',
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'channels': {
|
||||
'name': _('Channels'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'channels',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, authtoken, fullpath=None, channels=None,
|
||||
include_image=False, **kwargs):
|
||||
"""
|
||||
Initialize MatterMost Object
|
||||
"""
|
||||
|
@ -73,39 +148,43 @@ class NotifyMatterMost(NotifyBase):
|
|||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
# Our API Key
|
||||
# our full path
|
||||
self.fullpath = '' if not isinstance(
|
||||
fullpath, six.string_types) else fullpath.strip()
|
||||
|
||||
# Our Authorization Token
|
||||
self.authtoken = authtoken
|
||||
|
||||
# Validate authtoken
|
||||
if not authtoken:
|
||||
self.logger.warning(
|
||||
'Missing MatterMost Authorization Token.'
|
||||
)
|
||||
raise TypeError(
|
||||
'Missing MatterMost Authorization Token.'
|
||||
)
|
||||
msg = 'Missing MatterMost Authorization Token.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_AUTHTOKEN.match(authtoken):
|
||||
self.logger.warning(
|
||||
'Invalid MatterMost Authorization Token Specified.'
|
||||
)
|
||||
raise TypeError(
|
||||
'Invalid MatterMost Authorization Token Specified.'
|
||||
)
|
||||
msg = 'Invalid MatterMost Authorization Token Specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# A Channel (optional)
|
||||
self.channel = channel
|
||||
# Optional Channels
|
||||
self.channels = parse_list(channels)
|
||||
|
||||
if not self.port:
|
||||
self.port = self.default_port
|
||||
|
||||
# Place a thumbnail image inline with the message body
|
||||
self.include_image = include_image
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform MatterMost Notification
|
||||
"""
|
||||
|
||||
# Create a copy of our channels, otherwise place a dummy entry
|
||||
channels = list(self.channels) if self.channels else [None, ]
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
|
@ -113,64 +192,127 @@ class NotifyMatterMost(NotifyBase):
|
|||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'text': '###### %s\n%s' % (title, body),
|
||||
'icon_url': self.image_url(notify_type),
|
||||
'text': body,
|
||||
'icon_url': None,
|
||||
}
|
||||
|
||||
if self.user:
|
||||
payload['username'] = self.user
|
||||
# Acquire our image url if configured to do so
|
||||
image_url = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
else:
|
||||
payload['username'] = self.app_id
|
||||
if image_url:
|
||||
# Set our image configuration if told to do so
|
||||
payload['icon_url'] = image_url
|
||||
|
||||
if self.channel:
|
||||
payload['channel'] = self.channel
|
||||
# Set our user
|
||||
payload['username'] = self.user if self.user else self.app_id
|
||||
|
||||
url = '%s://%s:%d' % (self.schema, self.host, self.port)
|
||||
url += '/hooks/%s' % self.authtoken
|
||||
# For error tracking
|
||||
has_error = False
|
||||
|
||||
while len(channels):
|
||||
# Pop a channel off of the list
|
||||
channel = channels.pop(0)
|
||||
|
||||
if channel:
|
||||
payload['channel'] = channel
|
||||
|
||||
url = '{}://{}:{}{}/hooks/{}'.format(
|
||||
self.schema, self.host, self.port, self.fullpath,
|
||||
self.authtoken)
|
||||
|
||||
self.logger.debug('MatterMost POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('MatterMost Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyMatterMost.http_response_code_lookup(
|
||||
r.status_code)
|
||||
|
||||
self.logger.debug('MatterMost POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('MatterMost Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send MatterMost notification:'
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
'Failed to send MatterMost notification{}: '
|
||||
'{}{}error={}.'.format(
|
||||
'' if not channel
|
||||
else ' to channel {}'.format(channel),
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send MatterMost notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
else:
|
||||
self.logger.info('Sent MatterMost notification.')
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending MatterMost '
|
||||
'notification.'
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent MatterMost notification{}.'.format(
|
||||
'' if not channel
|
||||
else ' to channel {}'.format(channel)))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending MatterMost '
|
||||
'notification{}.'.format(
|
||||
'' if not channel
|
||||
else ' to channel {}'.format(channel)))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
# Return our overall status
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
if self.channels:
|
||||
# historically the value only accepted one channel and is
|
||||
# therefore identified as 'channel'. Channels have always been
|
||||
# optional, so that is why this setting is nested in an if block
|
||||
args['channel'] = ','.join(self.channels)
|
||||
|
||||
default_port = 443 if self.secure else self.default_port
|
||||
default_schema = self.secure_protocol if self.secure else self.protocol
|
||||
|
||||
return \
|
||||
'{schema}://{hostname}{port}{fullpath}{authtoken}/?{args}'.format(
|
||||
schema=default_schema,
|
||||
hostname=NotifyMatterMost.quote(self.host, safe=''),
|
||||
port='' if not self.port or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
fullpath='/' if not self.fullpath else '{}/'.format(
|
||||
NotifyMatterMost.quote(self.fullpath, safe='/')),
|
||||
authtoken=NotifyMatterMost.quote(self.authtoken, safe=''),
|
||||
args=NotifyMatterMost.urlencode(args),
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
|
@ -185,15 +327,33 @@ class NotifyMatterMost(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
authtoken = NotifyBase.split_path(results['fullpath'])[0]
|
||||
# Acquire our tokens; the last one will always be our authtoken
|
||||
# all entries before it will be our path
|
||||
tokens = NotifyMatterMost.split_path(results['fullpath'])
|
||||
|
||||
# Apply our settings now
|
||||
results['authtoken'] = None if not tokens else tokens.pop()
|
||||
|
||||
# Store our path
|
||||
results['fullpath'] = '' if not tokens \
|
||||
else '/{}'.format('/'.join(tokens))
|
||||
|
||||
# Define our optional list of channels to notify
|
||||
results['channels'] = list()
|
||||
|
||||
# Support both 'to' (for yaml configuration) and channel=
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
# Allow the user to specify the channel to post to
|
||||
results['channels'].append(
|
||||
NotifyMatterMost.parse_list(results['qsd']['to']))
|
||||
|
||||
channel = None
|
||||
if 'channel' in results['qsd'] and len(results['qsd']['channel']):
|
||||
# Allow the user to specify the channel to post to
|
||||
channel = NotifyBase.unquote(results['qsd']['channel']).strip()
|
||||
results['channels'].append(
|
||||
NotifyMatterMost.parse_list(results['qsd']['channel']))
|
||||
|
||||
results['authtoken'] = authtoken
|
||||
results['channel'] = channel
|
||||
# Image manipulation
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', False))
|
||||
|
||||
return results
|
||||
|
|
|
@ -1,26 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Prowl Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Used to validate API Key
|
||||
VALIDATE_APIKEY = re.compile(r'[A-Za-z0-9]{40}')
|
||||
|
@ -46,12 +54,11 @@ PROWL_PRIORITIES = (
|
|||
ProwlPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PROWL_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
HTTP_ERROR_MAP.update({
|
||||
# Provide some known codes Prowl uses and what they translate to:
|
||||
PROWL_HTTP_ERROR_MAP = {
|
||||
406: 'IP address has exceeded API limit',
|
||||
409: 'Request not approved.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class NotifyProwl(NotifyBase):
|
||||
|
@ -74,12 +81,47 @@ class NotifyProwl(NotifyBase):
|
|||
# Prowl uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.prowlapp.com/publicapi/add'
|
||||
|
||||
# Disable throttle rate for Prowl requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 10000
|
||||
|
||||
# Defines the maximum allowable characters in the title
|
||||
title_maxlen = 1024
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{apikey}',
|
||||
'{schema}://{apikey}/{providerkey}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'apikey': {
|
||||
'name': _('API Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'providerkey': {
|
||||
'name': _('Provider Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'priority': {
|
||||
'name': _('Priority'),
|
||||
'type': 'choice:int',
|
||||
'values': PROWL_PRIORITIES,
|
||||
'default': ProwlPriority.NORMAL,
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, apikey, providerkey=None, priority=None, **kwargs):
|
||||
"""
|
||||
Initialize Prowl Object
|
||||
|
@ -93,12 +135,9 @@ class NotifyProwl(NotifyBase):
|
|||
self.priority = priority
|
||||
|
||||
if not VALIDATE_APIKEY.match(apikey):
|
||||
self.logger.warning(
|
||||
'The API key specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
raise TypeError(
|
||||
'The API key specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
msg = 'The API key specified ({}) is invalid.'.format(apikey)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Store the API key
|
||||
self.apikey = apikey
|
||||
|
@ -106,18 +145,17 @@ class NotifyProwl(NotifyBase):
|
|||
# Store the provider key (if specified)
|
||||
if providerkey:
|
||||
if not VALIDATE_PROVIDERKEY.match(providerkey):
|
||||
self.logger.warning(
|
||||
'The Provider key specified (%s) '
|
||||
'is invalid.' % providerkey)
|
||||
msg = \
|
||||
'The Provider key specified ({}) is invalid.' \
|
||||
.format(providerkey)
|
||||
|
||||
raise TypeError(
|
||||
'The Provider key specified (%s) '
|
||||
'is invalid.' % providerkey)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Store the Provider Key
|
||||
self.providerkey = providerkey
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Prowl Notification
|
||||
"""
|
||||
|
@ -143,6 +181,10 @@ class NotifyProwl(NotifyBase):
|
|||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Prowl Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
|
@ -152,20 +194,18 @@ class NotifyProwl(NotifyBase):
|
|||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification: '
|
||||
'%s (error=%s).' % (
|
||||
PROWL_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, PROWL_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification:'
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -183,6 +223,36 @@ class NotifyProwl(NotifyBase):
|
|||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
_map = {
|
||||
ProwlPriority.LOW: 'low',
|
||||
ProwlPriority.MODERATE: 'moderate',
|
||||
ProwlPriority.NORMAL: 'normal',
|
||||
ProwlPriority.HIGH: 'high',
|
||||
ProwlPriority.EMERGENCY: 'emergency',
|
||||
}
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'priority': 'normal' if self.priority not in _map
|
||||
else _map[self.priority],
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{apikey}/{providerkey}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
apikey=NotifyProwl.quote(self.apikey, safe=''),
|
||||
providerkey='' if not self.providerkey
|
||||
else NotifyProwl.quote(self.providerkey, safe=''),
|
||||
args=NotifyProwl.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@ -196,28 +266,24 @@ class NotifyProwl(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
# Set the API Key
|
||||
results['apikey'] = NotifyProwl.unquote(results['host'])
|
||||
|
||||
# optionally find the provider key
|
||||
# Optionally try to find the provider key
|
||||
try:
|
||||
providerkey = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][0]
|
||||
results['providerkey'] = \
|
||||
NotifyProwl.split_path(results['fullpath'])[0]
|
||||
|
||||
except (AttributeError, IndexError):
|
||||
providerkey = None
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': ProwlPriority.LOW,
|
||||
'-2': ProwlPriority.LOW,
|
||||
'm': ProwlPriority.MODERATE,
|
||||
'-1': ProwlPriority.MODERATE,
|
||||
'n': ProwlPriority.NORMAL,
|
||||
'0': ProwlPriority.NORMAL,
|
||||
'h': ProwlPriority.HIGH,
|
||||
'1': ProwlPriority.HIGH,
|
||||
'e': ProwlPriority.EMERGENCY,
|
||||
'2': ProwlPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
|
@ -227,7 +293,4 @@ class NotifyProwl(NotifyBase):
|
|||
# No priority was set
|
||||
pass
|
||||
|
||||
results['apikey'] = results['host']
|
||||
results['providerkey'] = providerkey
|
||||
|
||||
return results
|
||||
|
|
|
@ -1,43 +1,44 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# PushBullet Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from .NotifyBase import IS_EMAIL_RE
|
||||
|
||||
from ..utils import compat_is_basestring
|
||||
from ..utils import GET_EMAIL_RE
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Flag used as a placeholder to sending to all devices
|
||||
PUSHBULLET_SEND_TO_ALL = 'ALL_DEVICES'
|
||||
|
||||
# Used to break apart list of potential recipients by their delimiter
|
||||
# into a usable list.
|
||||
RECIPIENTS_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHBULLET_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHBULLET_HTTP_ERROR_MAP.update({
|
||||
# Provide some known codes Pushbullet uses and what they translate to:
|
||||
PUSHBULLET_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class NotifyPushBullet(NotifyBase):
|
||||
|
@ -60,27 +61,62 @@ class NotifyPushBullet(NotifyBase):
|
|||
# PushBullet uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.pushbullet.com/v2/pushes'
|
||||
|
||||
def __init__(self, accesstoken, recipients=None, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{accesstoken}',
|
||||
'{schema}://{accesstoken}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'accesstoken': {
|
||||
'name': _('Access Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'target_device': {
|
||||
'name': _('Target Device'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_channel': {
|
||||
'name': _('Target Channel'),
|
||||
'type': 'string',
|
||||
'prefix': '#',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_email': {
|
||||
'name': _('Target Email'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, accesstoken, targets=None, **kwargs):
|
||||
"""
|
||||
Initialize PushBullet Object
|
||||
"""
|
||||
super(NotifyPushBullet, self).__init__(**kwargs)
|
||||
|
||||
self.accesstoken = accesstoken
|
||||
if compat_is_basestring(recipients):
|
||||
self.recipients = [x for x in filter(
|
||||
bool, RECIPIENTS_LIST_DELIM.split(recipients))]
|
||||
|
||||
elif isinstance(recipients, (set, tuple, list)):
|
||||
self.recipients = recipients
|
||||
self.targets = parse_list(targets)
|
||||
if len(self.targets) == 0:
|
||||
self.targets = (PUSHBULLET_SEND_TO_ALL, )
|
||||
|
||||
else:
|
||||
self.recipients = list()
|
||||
|
||||
if len(self.recipients) == 0:
|
||||
self.recipients = (PUSHBULLET_SEND_TO_ALL, )
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform PushBullet Notification
|
||||
"""
|
||||
|
@@ -94,10 +130,10 @@ class NotifyPushBullet(NotifyBase):
|
|||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Create a copy of the recipients list
|
||||
recipients = list(self.recipients)
|
||||
while len(recipients):
|
||||
recipient = recipients.pop(0)
|
||||
# Create a copy of the targets list
|
||||
targets = list(self.targets)
|
||||
while len(targets):
|
||||
recipient = targets.pop(0)
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
|
@@ -110,7 +146,7 @@ class NotifyPushBullet(NotifyBase):
|
|||
# Send to all
|
||||
pass
|
||||
|
||||
elif IS_EMAIL_RE.match(recipient):
|
||||
elif GET_EMAIL_RE.match(recipient):
|
||||
payload['email'] = recipient
|
||||
self.logger.debug(
|
||||
"Recipient '%s' is an email address" % recipient)
|
||||
|
@@ -128,6 +164,10 @@ class NotifyPushBullet(NotifyBase):
|
|||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('PushBullet Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
|
@@ -139,23 +179,24 @@ class NotifyPushBullet(NotifyBase):
|
|||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to '
|
||||
'"%s": %s (error=%s).' % (
|
||||
recipient,
|
||||
PUSHBULLET_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyPushBullet.http_response_code_lookup(
|
||||
r.status_code, PUSHBULLET_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to '
|
||||
'"%s" (error=%s).' % (recipient, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to {}:'
|
||||
'{}{}error={}.'.format(
|
||||
recipient,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
|
@@ -167,14 +208,37 @@ class NotifyPushBullet(NotifyBase):
|
|||
'notification to "%s".' % (recipient),
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
if len(recipients):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
targets = '/'.join([NotifyPushBullet.quote(x) for x in self.targets])
|
||||
if targets == PUSHBULLET_SEND_TO_ALL:
|
||||
# keyword is reserved for internal usage only; it's safe to remove
|
||||
# it from the targets list
|
||||
targets = ''
|
||||
|
||||
return '{schema}://{accesstoken}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
accesstoken=NotifyPushBullet.quote(self.accesstoken, safe=''),
|
||||
targets=targets,
|
||||
args=NotifyPushBullet.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@@ -188,10 +252,17 @@ class NotifyPushBullet(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
recipients = NotifyBase.unquote(results['fullpath'])
|
||||
# Fetch our targets
|
||||
results['targets'] = \
|
||||
NotifyPushBullet.split_path(results['fullpath'])
|
||||
|
||||
results['accesstoken'] = results['host']
|
||||
results['recipients'] = recipients
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyPushBullet.parse_list(results['qsd']['to'])
|
||||
|
||||
# Setup the token; we store it in Access Token for global
|
||||
# plugin consistency with naming conventions
|
||||
results['accesstoken'] = NotifyPushBullet.unquote(results['host'])
|
||||
|
||||
return results
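For reference, a minimal usage sketch of the URL form this parser accepts, assuming the bundled apprise package and its pbul:// schema; the access token, device name, channel and email below are placeholders:

import apprise

apobj = apprise.Apprise()
# pbul://{accesstoken}/{targets} - targets may mix device names, #channels and emails
apobj.add('pbul://o.placeholderaccesstoken/my-phone/#family/user@example.com')
apobj.notify(title='Bazarr', body='Subtitles downloaded')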
|
||||
|
|
|
@@ -1,161 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Pushalot Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHALOT_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHALOT_HTTP_ERROR_MAP.update({
|
||||
406: 'Message throttle limit hit.',
|
||||
410: 'AuthorizedToken is no longer valid.',
|
||||
})
|
||||
|
||||
# Used to validate Authorization Token
|
||||
VALIDATE_AUTHTOKEN = re.compile(r'[A-Za-z0-9]{32}')
|
||||
|
||||
|
||||
class NotifyPushalot(NotifyBase):
|
||||
"""
|
||||
A wrapper for Pushalot Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Pushalot'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://pushalot.com/'
|
||||
|
||||
# The default protocol is always secured
|
||||
secure_protocol = 'palot'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushalot'
|
||||
|
||||
# Pushalot uses the http protocol with JSON requests
|
||||
notify_url = 'https://pushalot.com/api/sendmessage'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
def __init__(self, authtoken, is_important=False, **kwargs):
|
||||
"""
|
||||
Initialize Pushalot Object
|
||||
"""
|
||||
super(NotifyPushalot, self).__init__(**kwargs)
|
||||
|
||||
# Is Important Flag
|
||||
self.is_important = is_important
|
||||
|
||||
self.authtoken = authtoken
|
||||
# Validate authtoken
|
||||
if not VALIDATE_AUTHTOKEN.match(authtoken):
|
||||
self.logger.warning(
|
||||
'Invalid Pushalot Authorization Token Specified.'
|
||||
)
|
||||
raise TypeError(
|
||||
'Invalid Pushalot Authorization Token Specified.'
|
||||
)
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Pushalot Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'AuthorizationToken': self.authtoken,
|
||||
'IsImportant': self.is_important,
|
||||
'Title': title,
|
||||
'Body': body,
|
||||
'Source': self.app_id,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
payload['Image'] = image_url
|
||||
|
||||
self.logger.debug('Pushalot POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Pushalot Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushalot notification: '
|
||||
'%s (error=%s).' % (
|
||||
PUSHALOT_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushalot notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Pushalot notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Pushalot notification.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
results['authtoken'] = results['host']
|
||||
|
||||
return results
|
353 libs/apprise/plugins/NotifyPushed.py Normal file
|
@@ -0,0 +1,353 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
from itertools import chain
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Used to detect and parse channels
|
||||
IS_CHANNEL = re.compile(r'^#(?P<name>[A-Za-z0-9]+)$')
|
||||
|
||||
# Used to detect and parse a users push id
|
||||
IS_USER_PUSHED_ID = re.compile(r'^@(?P<name>[A-Za-z0-9]+)$')
|
||||
|
||||
|
||||
class NotifyPushed(NotifyBase):
|
||||
"""
|
||||
A wrapper for Pushed Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Pushed'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://pushed.co/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'pushed'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushed'
|
||||
|
||||
# Pushed uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.pushed.co/1/push'
|
||||
|
||||
# A title can not be used for Pushed Messages. Setting this to zero will
|
||||
# cause any title (if defined) to get placed into the message body.
|
||||
title_maxlen = 0
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 140
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{app_key}/{app_secret}',
|
||||
'{schema}://{app_key}/{app_secret}@{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'app_key': {
|
||||
'name': _('Application Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'app_secret': {
|
||||
'name': _('Application Secret'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'target_user': {
|
||||
'name': _('Target User'),
|
||||
'prefix': '@',
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_channel': {
|
||||
'name': _('Target Channel'),
|
||||
'type': 'string',
|
||||
'prefix': '#',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, app_key, app_secret, targets=None, **kwargs):
|
||||
"""
|
||||
Initialize Pushed Object
|
||||
|
||||
"""
|
||||
super(NotifyPushed, self).__init__(**kwargs)
|
||||
|
||||
if not app_key:
|
||||
msg = 'An invalid Application Key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not app_secret:
|
||||
msg = 'An invalid Application Secret was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Initialize channel list
|
||||
self.channels = list()
|
||||
|
||||
# Initialize user list
|
||||
self.users = list()
|
||||
|
||||
# Validate recipients and drop bad ones:
|
||||
for target in parse_list(targets):
|
||||
result = IS_CHANNEL.match(target)
|
||||
if result:
|
||||
# store valid channel
|
||||
self.channels.append(result.group('name'))
|
||||
continue
|
||||
|
||||
result = IS_USER_PUSHED_ID.match(target)
|
||||
if result:
|
||||
# store valid user
|
||||
self.users.append(result.group('name'))
|
||||
continue
|
||||
|
||||
self.logger.warning(
|
||||
'Dropped invalid channel/userid '
|
||||
'(%s) specified.' % target,
|
||||
)
|
||||
|
||||
# Store our data
|
||||
self.app_key = app_key
|
||||
self.app_secret = app_secret
|
||||
|
||||
return
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Pushed Notification
|
||||
"""
|
||||
|
||||
# Initialize our error tracking
|
||||
has_error = False
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'app_key': self.app_key,
|
||||
'app_secret': self.app_secret,
|
||||
'target_type': 'app',
|
||||
'content': body,
|
||||
}
|
||||
|
||||
# So the logic is as follows:
|
||||
# - if no user/channel was specified, then we just simply notify the
|
||||
# app.
|
||||
# - if there are user/channels specified, then we only alert them
|
||||
# while respecting throttle limits (in the event there are a lot of
|
||||
# entries).
|
||||
|
||||
if len(self.channels) + len(self.users) == 0:
|
||||
# Just notify the app
|
||||
return self._send(
|
||||
payload=payload, notify_type=notify_type, **kwargs)
|
||||
|
||||
# If our code reaches here, we want to target channels and users (by
|
||||
# their Pushed_ID) instead...
|
||||
|
||||
# Generate a copy of our original list
|
||||
channels = list(self.channels)
|
||||
users = list(self.users)
|
||||
|
||||
# Copy our payload
|
||||
_payload = dict(payload)
|
||||
_payload['target_type'] = 'channel'
|
||||
|
||||
while len(channels) > 0:
|
||||
# Get Channel
|
||||
_payload['target_alias'] = channels.pop(0)
|
||||
|
||||
if not self._send(
|
||||
payload=_payload, notify_type=notify_type, **kwargs):
|
||||
|
||||
# toggle flag
|
||||
has_error = True
|
||||
|
||||
# Copy our payload
|
||||
_payload = dict(payload)
|
||||
_payload['target_type'] = 'pushed_id'
|
||||
|
||||
# Send all our defined User Pushed ID's
|
||||
while len(users):
|
||||
# Get User's Pushed ID
|
||||
_payload['pushed_id'] = users.pop(0)
|
||||
|
||||
if not self._send(
|
||||
payload=_payload, notify_type=notify_type, **kwargs):
|
||||
|
||||
# toggle flag
|
||||
has_error = True
|
||||
|
||||
return not has_error
|
||||
|
||||
def _send(self, payload, notify_type, **kwargs):
|
||||
"""
|
||||
A lower level call that directly pushes a payload to the Pushed
|
||||
Notification servers. This should never be called directly; it is
|
||||
referenced automatically through the send() function.
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
self.logger.debug('Pushed POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Pushed Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyPushed.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Pushed notification:'
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Pushed notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Pushed notification.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{app_key}/{app_secret}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
app_key=NotifyPushed.quote(self.app_key, safe=''),
|
||||
app_secret=NotifyPushed.quote(self.app_secret, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifyPushed.quote(x) for x in chain(
|
||||
# Channels are prefixed with a pound/hashtag symbol
|
||||
['#{}'.format(x) for x in self.channels],
|
||||
# Users are prefixed with an @ symbol
|
||||
['@{}'.format(x) for x in self.users],
|
||||
)]),
|
||||
args=NotifyPushed.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# The first token is stored in the hostname
|
||||
app_key = NotifyPushed.unquote(results['host'])
|
||||
|
||||
entries = NotifyPushed.split_path(results['fullpath'])
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
app_secret = entries.pop(0)
|
||||
|
||||
except IndexError:
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
app_secret = None
|
||||
app_key = None
|
||||
|
||||
# Get our recipients (based on remaining entries)
|
||||
results['targets'] = entries
|
||||
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyPushed.parse_list(results['qsd']['to'])
|
||||
|
||||
results['app_key'] = app_key
|
||||
results['app_secret'] = app_secret
|
||||
|
||||
return results
|
|
@@ -1,85 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Pushjet Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
import re
|
||||
from .pushjet import errors
|
||||
from .pushjet import pushjet
|
||||
|
||||
from ..NotifyBase import NotifyBase
|
||||
|
||||
PUBLIC_KEY_RE = re.compile(
|
||||
r'^[a-z0-9]{4}-[a-z0-9]{6}-[a-z0-9]{12}-[a-z0-9]{5}-[a-z0-9]{9}$', re.I)
|
||||
|
||||
SECRET_KEY_RE = re.compile(r'^[a-z0-9]{32}$', re.I)
|
||||
|
||||
|
||||
class NotifyPushjet(NotifyBase):
|
||||
"""
|
||||
A wrapper for Pushjet Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Pushjet'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://pushjet.io/'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'pjet'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'pjets'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushjet'
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize Pushjet Object
|
||||
"""
|
||||
super(NotifyPushjet, self).__init__(**kwargs)
|
||||
|
||||
def notify(self, title, body, notify_type):
|
||||
"""
|
||||
Perform Pushjet Notification
|
||||
"""
|
||||
try:
|
||||
if self.user and self.host:
|
||||
server = "http://"
|
||||
if self.secure:
|
||||
server = "https://"
|
||||
|
||||
server += self.host
|
||||
if self.port:
|
||||
server += ":" + str(self.port)
|
||||
|
||||
api = pushjet.Api(server)
|
||||
service = api.Service(secret_key=self.user)
|
||||
|
||||
else:
|
||||
api = pushjet.Api(pushjet.DEFAULT_API_URL)
|
||||
service = api.Service(secret_key=self.host)
|
||||
|
||||
service.send(body, title)
|
||||
self.logger.info('Sent Pushjet notification.')
|
||||
|
||||
except (errors.PushjetError, ValueError) as e:
|
||||
self.logger.warning('Failed to send Pushjet notification.')
|
||||
self.logger.debug('Pushjet Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
|
@@ -1,21 +1,175 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
from . import NotifyPushjet
|
||||
import re
|
||||
from . import pushjet
|
||||
|
||||
__all__ = [
|
||||
'NotifyPushjet',
|
||||
]
|
||||
from ..NotifyBase import NotifyBase
|
||||
from ...common import NotifyType
|
||||
from ...AppriseLocale import gettext_lazy as _
|
||||
|
||||
PUBLIC_KEY_RE = re.compile(
|
||||
r'^[a-z0-9]{4}-[a-z0-9]{6}-[a-z0-9]{12}-[a-z0-9]{5}-[a-z0-9]{9}$', re.I)
|
||||
|
||||
SECRET_KEY_RE = re.compile(r'^[a-z0-9]{32}$', re.I)
|
||||
|
||||
|
||||
class NotifyPushjet(NotifyBase):
|
||||
"""
|
||||
A wrapper for Pushjet Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Pushjet'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'pjet'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'pjets'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushjet'
|
||||
|
||||
# Disable throttle rate for Pushjet requests since they are normally
|
||||
# local anyway (the remote/online service is no more)
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{secret_key}@{host}',
|
||||
'{schema}://{secret_key}@{host}:{port}',
|
||||
)
|
||||
|
||||
# Define our tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'secret_key': {
|
||||
'name': _('Secret Key'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
'private': True,
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, secret_key, **kwargs):
|
||||
"""
|
||||
Initialize Pushjet Object
|
||||
"""
|
||||
super(NotifyPushjet, self).__init__(**kwargs)
|
||||
|
||||
if not secret_key:
|
||||
# You must provide a Pushjet key to work with
|
||||
msg = 'You must specify a Pushjet Secret Key.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# store our key
|
||||
self.secret_key = secret_key
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Pushjet Notification
|
||||
"""
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
server = "https://" if self.secure else "http://"
|
||||
|
||||
server += self.host
|
||||
if self.port:
|
||||
server += ":" + str(self.port)
|
||||
|
||||
try:
|
||||
api = pushjet.pushjet.Api(server)
|
||||
service = api.Service(secret_key=self.secret_key)
|
||||
|
||||
service.send(body, title)
|
||||
self.logger.info('Sent Pushjet notification.')
|
||||
|
||||
except (pushjet.errors.PushjetError, ValueError) as e:
|
||||
self.logger.warning('Failed to send Pushjet notification.')
|
||||
self.logger.debug('Pushjet Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
return '{schema}://{secret_key}@{hostname}{port}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
secret_key=NotifyPushjet.quote(self.secret_key, safe=''),
|
||||
hostname=NotifyPushjet.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=NotifyPushjet.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
Syntax:
|
||||
pjet://secret_key@hostname
|
||||
pjet://secret_key@hostname:port
|
||||
pjets://secret_key@hostname
|
||||
pjets://secret_key@hostname:port
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Store it as its value
|
||||
results['secret_key'] = \
|
||||
NotifyPushjet.unquote(results.get('user'))
|
||||
|
||||
return results
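A minimal usage sketch matching the pjet:// / pjets:// syntax documented above, assuming the bundled apprise package; the hostname, port and secret key refer to a hypothetical self-hosted Pushjet server:

import apprise

apobj = apprise.Apprise()
apobj.add('pjet://mysecretkey@pushjet.local:7171')    # plain http
apobj.add('pjets://mysecretkey@pushjet.example.com')  # https
apobj.notify(body='Wanted subtitles were found', title='Bazarr')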
|
||||
|
|
|
@@ -1,8 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import requests
|
||||
from functools import partial
|
||||
|
||||
from six import text_type
|
||||
from six.moves.urllib.parse import urljoin
|
||||
|
||||
from .utilities import (
|
||||
NoNoneDict,
|
||||
requires_secret_key, with_api_bound,
|
||||
|
@@ -10,14 +14,6 @@ from .utilities import (
|
|||
)
|
||||
from .errors import NonexistentError, SubscriptionError, RequestError, ServerError
|
||||
|
||||
import sys
|
||||
if sys.version_info[0] >= 3:
|
||||
from urllib.parse import urljoin
|
||||
unicode_type = str
|
||||
else:
|
||||
from urlparse import urljoin
|
||||
unicode_type = unicode
|
||||
|
||||
DEFAULT_API_URL = 'https://api.pushjet.io/'
|
||||
|
||||
class PushjetModel(object):
|
||||
|
@@ -52,8 +48,8 @@ class Service(PushjetModel):
|
|||
raise ValueError("Invalid secret key provided.")
|
||||
elif public_key and not is_valid_public_key(public_key):
|
||||
raise ValueError("Invalid public key provided.")
|
||||
self.secret_key = unicode_type(secret_key) if secret_key else None
|
||||
self.public_key = unicode_type(public_key) if public_key else None
|
||||
self.secret_key = text_type(secret_key) if secret_key else None
|
||||
self.public_key = text_type(public_key) if public_key else None
|
||||
self.refresh()
|
||||
|
||||
def _request(self, endpoint, method, is_secret, params=None, data=None):
|
||||
|
@@ -97,8 +93,8 @@ class Service(PushjetModel):
|
|||
if not data:
|
||||
return
|
||||
self._request('service', 'PATCH', is_secret=True, data=data)
|
||||
self.name = unicode_type(name)
|
||||
self.icon_url = unicode_type(icon_url)
|
||||
self.name = text_type(name)
|
||||
self.icon_url = text_type(icon_url)
|
||||
|
||||
@requires_secret_key
|
||||
def delete(self):
|
||||
|
@@ -171,10 +167,10 @@ class Device(PushjetModel):
|
|||
return "<Pushjet Device: {}>".format(self.uuid)
|
||||
|
||||
def __init__(self, uuid):
|
||||
uuid = unicode_type(uuid)
|
||||
uuid = text_type(uuid)
|
||||
if not is_valid_uuid(uuid):
|
||||
raise ValueError("Invalid UUID provided. Try uuid.uuid4().")
|
||||
self.uuid = unicode_type(uuid)
|
||||
self.uuid = text_type(uuid)
|
||||
|
||||
def _request(self, endpoint, method, params=None, data=None):
|
||||
params = (params or {})
|
||||
|
@@ -292,7 +288,7 @@ class Api(object):
|
|||
return "<Pushjet Api: {}>".format(self.url).encode(sys.stdout.encoding, errors='replace')
|
||||
|
||||
def __init__(self, url):
|
||||
self.url = unicode_type(url)
|
||||
self.url = text_type(url)
|
||||
self.Service = with_api_bound(Service, self)
|
||||
self.Device = with_api_bound(Device, self)
|
||||
|
||||
|
|
|
@@ -1,27 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Pushover Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
|
||||
from ..utils import compat_is_basestring
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Flag used as a placeholder to sending to all devices
|
||||
PUSHOVER_SEND_TO_ALL = 'ALL_DEVICES'
|
||||
|
@@ -30,7 +39,7 @@ PUSHOVER_SEND_TO_ALL = 'ALL_DEVICES'
|
|||
VALIDATE_TOKEN = re.compile(r'^[a-z0-9]{30}$', re.I)
|
||||
|
||||
# Used to detect a User and/or Group
|
||||
VALIDATE_USERGROUP = re.compile(r'^[a-z0-9]{30}$', re.I)
|
||||
VALIDATE_USER_KEY = re.compile(r'^[a-z0-9]{30}$', re.I)
|
||||
|
||||
# Used to detect a Device
|
||||
VALIDATE_DEVICE = re.compile(r'^[a-z0-9_]{1,25}$', re.I)
|
||||
|
@@ -45,6 +54,57 @@ class PushoverPriority(object):
|
|||
EMERGENCY = 2
|
||||
|
||||
|
||||
# Sounds
|
||||
class PushoverSound(object):
|
||||
PUSHOVER = 'pushover'
|
||||
BIKE = 'bike'
|
||||
BUGLE = 'bugle'
|
||||
CASHREGISTER = 'cashregister'
|
||||
CLASSICAL = 'classical'
|
||||
COSMIC = 'cosmic'
|
||||
FALLING = 'falling'
|
||||
GAMELAN = 'gamelan'
|
||||
INCOMING = 'incoming'
|
||||
INTERMISSION = 'intermission'
|
||||
MAGIC = 'magic'
|
||||
MECHANICAL = 'mechanical'
|
||||
PIANOBAR = 'pianobar'
|
||||
SIREN = 'siren'
|
||||
SPACEALARM = 'spacealarm'
|
||||
TUGBOAT = 'tugboat'
|
||||
ALIEN = 'alien'
|
||||
CLIMB = 'climb'
|
||||
PERSISTENT = 'persistent'
|
||||
ECHO = 'echo'
|
||||
UPDOWN = 'updown'
|
||||
NONE = 'none'
|
||||
|
||||
|
||||
PUSHOVER_SOUNDS = (
|
||||
PushoverSound.PUSHOVER,
|
||||
PushoverSound.BIKE,
|
||||
PushoverSound.BUGLE,
|
||||
PushoverSound.CASHREGISTER,
|
||||
PushoverSound.CLASSICAL,
|
||||
PushoverSound.COSMIC,
|
||||
PushoverSound.FALLING,
|
||||
PushoverSound.GAMELAN,
|
||||
PushoverSound.INCOMING,
|
||||
PushoverSound.INTERMISSION,
|
||||
PushoverSound.MAGIC,
|
||||
PushoverSound.MECHANICAL,
|
||||
PushoverSound.PIANOBAR,
|
||||
PushoverSound.SIREN,
|
||||
PushoverSound.SPACEALARM,
|
||||
PushoverSound.TUGBOAT,
|
||||
PushoverSound.ALIEN,
|
||||
PushoverSound.CLIMB,
|
||||
PushoverSound.PERSISTENT,
|
||||
PushoverSound.ECHO,
|
||||
PushoverSound.UPDOWN,
|
||||
PushoverSound.NONE,
|
||||
)
|
||||
|
||||
PUSHOVER_PRIORITIES = (
|
||||
PushoverPriority.LOW,
|
||||
PushoverPriority.MODERATE,
|
||||
|
@@ -53,14 +113,10 @@ PUSHOVER_PRIORITIES = (
|
|||
PushoverPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
# Used to break path apart into list of devices
|
||||
DEVICE_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHOVER_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHOVER_HTTP_ERROR_MAP.update({
|
||||
PUSHOVER_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class NotifyPushover(NotifyBase):
|
||||
|
@@ -86,7 +142,65 @@ class NotifyPushover(NotifyBase):
|
|||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 512
|
||||
|
||||
def __init__(self, token, devices=None, priority=None, **kwargs):
|
||||
# Default Pushover sound
|
||||
default_pushover_sound = PushoverSound.PUSHOVER
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{user_key}@{token}',
|
||||
'{schema}://{user_key}@{token}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'user_key': {
|
||||
'name': _('User Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[a-z0-9]{30}', 'i'),
|
||||
'map_to': 'user',
|
||||
},
|
||||
'token': {
|
||||
'name': _('Access Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[a-z0-9]{30}', 'i'),
|
||||
},
|
||||
'target_device': {
|
||||
'name': _('Target Device'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z0-9_]{1,25}', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'priority': {
|
||||
'name': _('Priority'),
|
||||
'type': 'choice:int',
|
||||
'values': PUSHOVER_PRIORITIES,
|
||||
'default': PushoverPriority.NORMAL,
|
||||
},
|
||||
'sound': {
|
||||
'name': _('Sound'),
|
||||
'type': 'string',
|
||||
'regex': (r'[a-z]{1,12}', 'i'),
|
||||
'default': PushoverSound.PUSHOVER,
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, token, targets=None, priority=None, sound=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize Pushover Object
|
||||
"""
|
||||
|
@@ -98,30 +212,26 @@ class NotifyPushover(NotifyBase):
|
|||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
self.logger.warning('No API Token was specified.')
|
||||
raise TypeError('No API Token was specified.')
|
||||
msg = 'No API Token was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_TOKEN.match(self.token):
|
||||
self.logger.warning(
|
||||
'The API Token specified (%s) is invalid.' % token,
|
||||
)
|
||||
raise TypeError(
|
||||
'The API Token specified (%s) is invalid.' % token,
|
||||
)
|
||||
msg = 'The API Token specified ({}) is invalid.'.format(token)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
self.devices = [x for x in filter(bool, DEVICE_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
self.targets = parse_list(targets)
|
||||
if len(self.targets) == 0:
|
||||
self.targets = (PUSHOVER_SEND_TO_ALL, )
|
||||
|
||||
elif isinstance(devices, (set, tuple, list)):
|
||||
self.devices = devices
|
||||
|
||||
else:
|
||||
self.devices = list()
|
||||
|
||||
if len(self.devices) == 0:
|
||||
self.devices = (PUSHOVER_SEND_TO_ALL, )
|
||||
# Setup our sound
|
||||
self.sound = NotifyPushover.default_pushover_sound \
|
||||
if not isinstance(sound, six.string_types) else sound.lower()
|
||||
if self.sound and self.sound not in PUSHOVER_SOUNDS:
|
||||
msg = 'The sound specified ({}) is invalid.'.format(sound)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The Priority of the message
|
||||
if priority not in PUSHOVER_PRIORITIES:
|
||||
|
@@ -131,18 +241,16 @@ class NotifyPushover(NotifyBase):
|
|||
self.priority = priority
|
||||
|
||||
if not self.user:
|
||||
self.logger.warning('No user was specified.')
|
||||
raise TypeError('No user was specified.')
|
||||
msg = 'No user key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_USERGROUP.match(self.user):
|
||||
self.logger.warning(
|
||||
'The user/group specified (%s) is invalid.' % self.user,
|
||||
)
|
||||
raise TypeError(
|
||||
'The user/group specified (%s) is invalid.' % self.user,
|
||||
)
|
||||
if not VALIDATE_USER_KEY.match(self.user):
|
||||
msg = 'The user key specified (%s) is invalid.' % self.user
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Pushover Notification
|
||||
"""
|
||||
|
@@ -157,7 +265,7 @@ class NotifyPushover(NotifyBase):
|
|||
has_error = False
|
||||
|
||||
# Create a copy of the devices list
|
||||
devices = list(self.devices)
|
||||
devices = list(self.targets)
|
||||
while len(devices):
|
||||
device = devices.pop(0)
|
||||
|
||||
|
@@ -165,6 +273,8 @@ class NotifyPushover(NotifyBase):
|
|||
self.logger.warning(
|
||||
'The device specified (%s) is invalid.' % device,
|
||||
)
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
|
@@ -176,12 +286,17 @@ class NotifyPushover(NotifyBase):
|
|||
'title': title,
|
||||
'message': body,
|
||||
'device': device,
|
||||
'sound': self.sound,
|
||||
}
|
||||
|
||||
self.logger.debug('Pushover POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Pushover Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
|
@@ -192,25 +307,24 @@ class NotifyPushover(NotifyBase):
|
|||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
device,
|
||||
PUSHOVER_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyPushover.http_response_code_lookup(
|
||||
r.status_code, PUSHOVER_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover:%s '
|
||||
'notification (error=%s).' % (
|
||||
device,
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
device,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
|
@@ -222,14 +336,54 @@ class NotifyPushover(NotifyBase):
|
|||
device) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
if len(devices):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
_map = {
|
||||
PushoverPriority.LOW: 'low',
|
||||
PushoverPriority.MODERATE: 'moderate',
|
||||
PushoverPriority.NORMAL: 'normal',
|
||||
PushoverPriority.HIGH: 'high',
|
||||
PushoverPriority.EMERGENCY: 'emergency',
|
||||
}
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'priority':
|
||||
_map[PushoverPriority.NORMAL] if self.priority not in _map
|
||||
else _map[self.priority],
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Escape our devices
|
||||
devices = '/'.join([NotifyPushover.quote(x, safe='')
|
||||
for x in self.targets])
|
||||
|
||||
if devices == PUSHOVER_SEND_TO_ALL:
|
||||
# keyword is reserved for internal usage only; it's safe to remove
|
||||
# it from the devices list
|
||||
devices = ''
|
||||
|
||||
return '{schema}://{auth}{token}/{devices}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
auth='' if not self.user
|
||||
else '{user}@'.format(
|
||||
user=NotifyPushover.quote(self.user, safe='')),
|
||||
token=NotifyPushover.quote(self.token, safe=''),
|
||||
devices=devices,
|
||||
args=NotifyPushover.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@@ -243,21 +397,14 @@ class NotifyPushover(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
devices = NotifyBase.unquote(results['fullpath'])
|
||||
|
||||
# Set our priority
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': PushoverPriority.LOW,
|
||||
'-2': PushoverPriority.LOW,
|
||||
'm': PushoverPriority.MODERATE,
|
||||
'-1': PushoverPriority.MODERATE,
|
||||
'n': PushoverPriority.NORMAL,
|
||||
'0': PushoverPriority.NORMAL,
|
||||
'h': PushoverPriority.HIGH,
|
||||
'1': PushoverPriority.HIGH,
|
||||
'e': PushoverPriority.EMERGENCY,
|
||||
'2': PushoverPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
|
@@ -267,7 +414,20 @@ class NotifyPushover(NotifyBase):
|
|||
# No priority was set
|
||||
pass
|
||||
|
||||
results['token'] = results['host']
|
||||
results['devices'] = devices
|
||||
# Retrieve all of our targets
|
||||
results['targets'] = NotifyPushover.split_path(results['fullpath'])
|
||||
|
||||
# Get the sound
|
||||
if 'sound' in results['qsd'] and len(results['qsd']['sound']):
|
||||
results['sound'] = \
|
||||
NotifyPushover.unquote(results['qsd']['sound'])
|
||||
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyPushover.parse_list(results['qsd']['to'])
|
||||
|
||||
# Token
|
||||
results['token'] = NotifyPushover.unquote(results['host'])
|
||||
|
||||
return results
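A minimal usage sketch for the parsing above, assuming the pover:// schema apprise registers for this plugin; the 30-character user key and token, the device name and the query arguments are all placeholders:

import apprise

apobj = apprise.Apprise()
# pover://{user_key}@{token}/{device}?sound=...&priority=...
apobj.add('pover://abcdefghijklmnopqrstuvwxyz0123@zyxwvutsrqponmlkjihgfedcba9876/my-phone?sound=siren&priority=high')
apobj.notify(body='Download complete', title='Bazarr')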
|
||||
|
|
|
@@ -1,44 +1,76 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Notify Rocket.Chat Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import loads
|
||||
from json import dumps
|
||||
from itertools import chain
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..utils import compat_is_basestring
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
IS_CHANNEL = re.compile(r'^#(?P<name>[A-Za-z0-9]+)$')
|
||||
IS_CHANNEL = re.compile(r'^#(?P<name>[A-Za-z0-9_-]+)$')
|
||||
IS_USER = re.compile(r'^@(?P<name>[A-Za-z0-9._-]+)$')
|
||||
IS_ROOM_ID = re.compile(r'^(?P<name>[A-Za-z0-9]+)$')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
RC_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
RC_HTTP_ERROR_MAP.update({
|
||||
RC_HTTP_ERROR_MAP = {
|
||||
400: 'Channel/RoomId is wrong format, or missing from server.',
|
||||
401: 'Authentication tokens provided is invalid or missing.',
|
||||
})
|
||||
}
|
||||
|
||||
# Used to break apart list of potential tags by their delimiter
|
||||
# into a usable list.
|
||||
LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
|
||||
class RocketChatAuthMode(object):
|
||||
"""
|
||||
The Chat Authentication mode is detected
|
||||
"""
|
||||
# providing a webhook
|
||||
WEBHOOK = "webhook"
|
||||
|
||||
# Providing a username and password (default)
|
||||
BASIC = "basic"
|
||||
|
||||
|
||||
# Define our authentication modes
|
||||
ROCKETCHAT_AUTH_MODES = (
|
||||
RocketChatAuthMode.WEBHOOK,
|
||||
RocketChatAuthMode.BASIC,
|
||||
)
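For reference, a hedged sketch of how the two modes are expected to be addressed as URLs, assuming the rocket:// schema this plugin registers; hostnames, credentials and the webhook token are placeholders (basic mode needs a user:password pair plus at least one #channel or room, webhook mode only needs the incoming-webhook token):

import apprise

apobj = apprise.Apprise()
# Basic mode: authenticate against the REST API and post to a channel
apobj.add('rocket://botuser:botpass@chat.example.com/#subtitles')
# Webhook mode: token generated by a Rocket.Chat incoming webhook
apobj.add('rocket://webhooktoken@chat.example.com/?mode=webhook')
apobj.notify(body='New subtitles grabbed', title='Bazarr')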
|
||||
|
||||
|
||||
class NotifyRocketChat(NotifyBase):
|
||||
"""
|
||||
A wrapper for Notify Rocket.Chat Notifications
|
||||
|
@@ -59,20 +91,104 @@ class NotifyRocketChat(NotifyBase):
|
|||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_rocketchat'
|
||||
|
||||
# Defines the maximum allowable characters in the title
|
||||
title_maxlen = 200
|
||||
# Allows the user to specify the NotifyImageSize object; this is supported
|
||||
# through the webhook
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
def __init__(self, recipients=None, **kwargs):
|
||||
# The title is not used
|
||||
title_maxlen = 0
|
||||
|
||||
# The maximum size of the message
|
||||
body_maxlen = 1000
|
||||
|
||||
# Default to markdown
|
||||
notify_format = NotifyFormat.MARKDOWN
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{user}:{password}@{host}:{port}/{targets}',
|
||||
'{schema}://{user}:{password}@{host}/{targets}',
|
||||
'{schema}://{webhook}@{host}',
|
||||
'{schema}://{webhook}@{host}:{port}',
|
||||
'{schema}://{webhook}@{host}/{targets}',
|
||||
'{schema}://{webhook}@{host}:{port}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template arguments
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Username'),
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
},
|
||||
'webhook': {
|
||||
'name': _('Webhook'),
|
||||
'type': 'string',
|
||||
},
|
||||
'target_channel': {
|
||||
'name': _('Target Channel'),
|
||||
'type': 'string',
|
||||
'prefix': '#',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_user': {
|
||||
'name': _('Target User'),
|
||||
'type': 'string',
|
||||
'prefix': '@',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_room': {
|
||||
'name': _('Target Room ID'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'mode': {
|
||||
'name': _('Webhook Mode'),
|
||||
'type': 'choice:string',
|
||||
'values': ROCKETCHAT_AUTH_MODES,
|
||||
},
|
||||
'avatar': {
|
||||
'name': _('Use Avatar'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, webhook=None, targets=None, mode=None, avatar=True,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize Notify Rocket.Chat Object
|
||||
"""
|
||||
super(NotifyRocketChat, self).__init__(**kwargs)
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
# Set our schema
|
||||
self.schema = 'https' if self.secure else 'http'
|
||||
|
||||
# Prepare our URL
|
||||
self.api_url = '%s://%s' % (self.schema, self.host)
|
||||
|
@@ -80,27 +196,58 @@ class NotifyRocketChat(NotifyBase):
|
|||
if isinstance(self.port, int):
|
||||
self.api_url += ':%d' % self.port
|
||||
|
||||
self.api_url += '/api/v1/'
|
||||
|
||||
# Initialize channels list
|
||||
self.channels = list()
|
||||
|
||||
# Initialize room list
|
||||
self.rooms = list()
|
||||
|
||||
if recipients is None:
|
||||
recipients = []
|
||||
# Initialize user list (webhook only)
|
||||
self.users = list()
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
recipients = [x for x in filter(bool, LIST_DELIM.split(
|
||||
recipients,
|
||||
))]
|
||||
# Assign our webhook (if defined)
|
||||
self.webhook = webhook
|
||||
|
||||
elif not isinstance(recipients, (set, tuple, list)):
|
||||
recipients = []
|
||||
# Place an avatar image to associate with our content
|
||||
self.avatar = avatar
|
||||
|
||||
# Used to track token headers upon authentication (if successful)
|
||||
# This is only used if not on webhook mode
|
||||
self.headers = {}
|
||||
|
||||
# Authentication mode
|
||||
self.mode = None \
|
||||
if not isinstance(mode, six.string_types) \
|
||||
else mode.lower()
|
||||
|
||||
if self.mode and self.mode not in ROCKETCHAT_AUTH_MODES:
|
||||
msg = 'The authentication mode specified ({}) is invalid.'.format(
|
||||
mode)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Detect our mode if it wasn't specified
|
||||
if not self.mode:
|
||||
if self.webhook is not None:
|
||||
# Just a username was specified, we treat this as a webhook
|
||||
self.mode = RocketChatAuthMode.WEBHOOK
|
||||
else:
|
||||
self.mode = RocketChatAuthMode.BASIC
|
||||
|
||||
if self.mode == RocketChatAuthMode.BASIC \
|
||||
and not (self.user and self.password):
|
||||
# Username & Password is required for Rocket Chat to work
|
||||
msg = 'No Rocket.Chat user/pass combo was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
elif self.mode == RocketChatAuthMode.WEBHOOK and not self.webhook:
|
||||
msg = 'No Rocket.Chat Incoming Webhook was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Validate recipients and drop bad ones:
|
||||
for recipient in recipients:
|
||||
for recipient in parse_list(targets):
|
||||
result = IS_CHANNEL.match(recipient)
|
||||
if result:
|
||||
# store valid device
|
||||
|
@ -113,121 +260,248 @@ class NotifyRocketChat(NotifyBase):
|
|||
self.rooms.append(result.group('name'))
|
||||
continue
|
||||
|
||||
result = IS_USER.match(recipient)
|
||||
if result:
|
||||
# store valid room
|
||||
self.users.append(result.group('name'))
|
||||
continue
|
||||
|
||||
self.logger.warning(
|
||||
'Dropped invalid channel/room ' +
|
||||
'(%s) specified.' % recipient,
|
||||
'Dropped invalid channel/room/user '
|
||||
'({}) specified.'.format(recipient),
|
||||
)
|
||||
|
||||
if len(self.rooms) == 0 and len(self.channels) == 0:
|
||||
raise TypeError(
|
||||
'No Rocket.Chat room and/or channels specified to notify.'
|
||||
if self.mode == RocketChatAuthMode.BASIC and \
|
||||
len(self.rooms) == 0 and len(self.channels) == 0:
|
||||
msg = 'No Rocket.Chat room and/or channels specified to notify.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
return
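# For illustration, the validation loop above sorts targets roughly like
# this (made-up values):
#
#   '#general'   -> self.channels  (channel, '#' prefix)
#   '@jsmith'    -> self.users     (user, '@' prefix; webhook mode)
#   'HZfig4BFca' -> self.rooms     (bare room id)
#   '!bogus!'    -> dropped with a warning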
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
'avatar': 'yes' if self.avatar else 'no',
|
||||
'mode': self.mode,
|
||||
}
|
||||
|
||||
# Determine Authentication
|
||||
if self.mode == RocketChatAuthMode.BASIC:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=NotifyRocketChat.quote(self.user, safe=''),
|
||||
password=NotifyRocketChat.quote(self.password, safe=''),
|
||||
)
|
||||
else:
|
||||
auth = '{user}{webhook}@'.format(
|
||||
user='{}:'.format(NotifyRocketChat.quote(self.user, safe=''))
|
||||
if self.user else '',
|
||||
webhook=NotifyRocketChat.quote(self.webhook, safe=''),
|
||||
)
|
||||
|
||||
# Used to track token headers upon authentication (if successful)
|
||||
self.headers = {}
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
return '{schema}://{auth}{hostname}{port}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=NotifyRocketChat.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
targets='/'.join(
|
||||
[NotifyRocketChat.quote(x, safe='') for x in chain(
|
||||
# Channels are prefixed with a pound/hashtag symbol
|
||||
['#{}'.format(x) for x in self.channels],
|
||||
# Rooms are as is
|
||||
self.rooms,
|
||||
# Users
|
||||
['@{}'.format(x) for x in self.users],
|
||||
)]),
|
||||
args=NotifyRocketChat.urlencode(args),
|
||||
)
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
wrapper to send_notification since we can alert more than one channel

|
||||
wrapper to _send since we can alert more than one channel
|
||||
"""
|
||||
|
||||
# Call the _send_ function applicable to whatever mode we're in
|
||||
# - calls _send_webhook_notification if the mode variable is set
|
||||
# - calls _send_basic_notification if the mode variable is not set
|
||||
return getattr(self, '_send_{}_notification'.format(self.mode))(
|
||||
body=body, title=title, notify_type=notify_type, **kwargs)
|
||||
|
||||
def _send_webhook_notification(self, body, title='',
|
||||
notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Sends a webhook notification
|
||||
"""
|
||||
|
||||
# Our payload object
|
||||
payload = self._payload(body, title, notify_type)
|
||||
|
||||
# Assemble our webhook URL
|
||||
path = 'hooks/{}'.format(self.webhook)
|
||||
|
||||
# Build our list of channels/rooms/users (if any identified)
|
||||
targets = ['@{}'.format(u) for u in self.users]
|
||||
targets.extend(['#{}'.format(c) for c in self.channels])
|
||||
targets.extend(['{}'.format(r) for r in self.rooms])
|
||||
|
||||
if len(targets) == 0:
|
||||
# We can take an early exit
|
||||
return self._send(
|
||||
payload, notify_type=notify_type, path=path, **kwargs)
|
||||
|
||||
# Otherwise we want to iterate over each of the targets
|
||||
|
||||
# Initialize our error tracking
|
||||
has_error = False
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
while len(targets):
|
||||
# Retrieve our target
|
||||
target = targets.pop(0)
|
||||
|
||||
# Assign our channel/room/user
|
||||
payload['channel'] = target
|
||||
|
||||
if not self._send(
|
||||
dumps(payload), notify_type=notify_type, path=path,
|
||||
headers=headers, **kwargs):
|
||||
|
||||
# toggle flag
|
||||
has_error = True
|
||||
|
||||
return not has_error
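# Each webhook delivery above reduces to a single HTTP POST; a standalone
# sketch with placeholder host, token and channel (the exact endpoint is
# whatever self.api_url resolves to):
import requests
from json import dumps

r = requests.post(
    'https://chat.example.com/api/v1/hooks/hookid/hooktoken',
    data=dumps({'text': 'test body', 'channel': '#general'}),
    headers={'User-Agent': 'apprise', 'Content-Type': 'application/json'},
    verify=True,
)
assert r.status_code == requests.codes.ok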
|
||||
|
||||
def _send_basic_notification(self, body, title='',
|
||||
notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Authenticates with the server using a user/pass combo for
|
||||
notifications.
|
||||
"""
|
||||
# Track whether we authenticated okay
|
||||
|
||||
if not self.login():
|
||||
return False
|
||||
|
||||
# Prepare our message
|
||||
text = '*%s*\r\n%s' % (title.replace('*', '\\*'), body)
|
||||
# prepare JSON Object
|
||||
payload = self._payload(body, title, notify_type)
|
||||
|
||||
# Initialize our error tracking
|
||||
has_error = False
|
||||
|
||||
# Create a copy of our rooms and channels to notify against
|
||||
# Create a copy of our channels to notify against
|
||||
channels = list(self.channels)
|
||||
rooms = list(self.rooms)
|
||||
|
||||
_payload = payload.copy()
|
||||
while len(channels) > 0:
|
||||
# Get Channel
|
||||
channel = channels.pop(0)
|
||||
_payload['channel'] = channel
|
||||
|
||||
if not self.send_notification(
|
||||
{
|
||||
'text': text,
|
||||
'channel': channel,
|
||||
}, notify_type=notify_type, **kwargs):
|
||||
if not self._send(
|
||||
_payload, notify_type=notify_type, headers=self.headers,
|
||||
**kwargs):
|
||||
|
||||
# toggle flag
|
||||
has_error = True
|
||||
|
||||
if len(channels) + len(rooms) > 0:
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
# Send all our defined room id's
|
||||
# Create a copy of our room id's to notify against
|
||||
rooms = list(self.rooms)
|
||||
_payload = payload.copy()
|
||||
while len(rooms):
|
||||
# Get Room
|
||||
room = rooms.pop(0)
|
||||
_payload['roomId'] = room
|
||||
|
||||
if not self.send_notification(
|
||||
{
|
||||
'text': text,
|
||||
'roomId': room,
|
||||
}, notify_type=notify_type, **kwargs):
|
||||
if not self._send(
|
||||
payload, notify_type=notify_type, headers=self.headers,
|
||||
**kwargs):
|
||||
|
||||
# toggle flag
|
||||
has_error = True
|
||||
|
||||
if len(rooms) > 0:
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
# logout
|
||||
self.logout()
|
||||
|
||||
return not has_error
|
||||
|
||||
def send_notification(self, payload, notify_type, **kwargs):
|
||||
def _payload(self, body, title='', notify_type=NotifyType.INFO):
|
||||
"""
|
||||
Prepares a payload object
|
||||
"""
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
"text": body,
|
||||
}
|
||||
|
||||
# apply our images if they're set to be displayed
|
||||
image_url = self.image_url(notify_type)
|
||||
if self.avatar:
|
||||
payload['avatar'] = image_url
|
||||
|
||||
return payload
|
||||
|
||||
def _send(self, payload, notify_type, path='api/v1/chat.postMessage',
|
||||
headers=None, **kwargs):
|
||||
"""
|
||||
Perform Notify Rocket.Chat Notification
|
||||
"""
|
||||
|
||||
api_url = '{}/{}'.format(self.api_url, path)
|
||||
|
||||
self.logger.debug('Rocket.Chat POST URL: %s (cert_verify=%r)' % (
|
||||
self.api_url + 'chat.postMessage', self.verify_certificate,
|
||||
))
|
||||
api_url, self.verify_certificate))
|
||||
self.logger.debug('Rocket.Chat Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.api_url + 'chat.postMessage',
|
||||
api_url,
|
||||
data=payload,
|
||||
headers=self.headers,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat notification: ' +
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyRocketChat.http_response_code_lookup(
|
||||
r.status_code, RC_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat notification ' +
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat {}:notification: '
|
||||
'{}{}error={}.'.format(
|
||||
self.mode,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.debug('Rocket.Chat Server Response: %s.' % r.text)
|
||||
self.logger.info('Sent Rocket.Chat notification.')
|
||||
self.logger.info(
|
||||
'Sent Rocket.Chat {}:notification.'.format(self.mode))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Rocket.Chat ' +
|
||||
'notification.')
|
||||
'A Connection error occurred sending Rocket.Chat '
|
||||
'{}:notification.'.format(self.mode))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
|
@ -240,41 +514,46 @@ class NotifyRocketChat(NotifyBase):
|
|||
login to our server
|
||||
|
||||
"""
|
||||
|
||||
payload = {
|
||||
'username': self.user,
|
||||
'password': self.password,
|
||||
}
|
||||
|
||||
api_url = '{}/{}'.format(self.api_url, 'api/v1/login')
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.api_url + 'login',
|
||||
api_url,
|
||||
data=payload,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate with Rocket.Chat server: ' +
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyRocketChat.http_response_code_lookup(
|
||||
r.status_code, RC_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate with Rocket.Chat server ' +
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to authenticate {} with Rocket.Chat: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.debug('Rocket.Chat authentication successful')
|
||||
response = loads(r.text)
|
||||
response = loads(r.content)
|
||||
if response.get('status') != "success":
|
||||
self.logger.warning(
|
||||
'Could not authenticate with Rocket.Chat server.')
|
||||
'Could not authenticate {} with Rocket.Chat.'.format(
|
||||
self.user))
|
||||
return False
|
||||
|
||||
# Set our headers for further communication
|
||||
|
@ -285,8 +564,8 @@ class NotifyRocketChat(NotifyBase):
|
|||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred authenticating to the ' +
|
||||
'Rocket.Chat server.')
|
||||
'A Connection error occurred authenticating {} on '
|
||||
'Rocket.Chat.'.format(self.user))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
|
@ -296,26 +575,30 @@ class NotifyRocketChat(NotifyBase):
|
|||
"""
|
||||
logout of our server
|
||||
"""
|
||||
|
||||
api_url = '{}/{}'.format(self.api_url, 'api/v1/logout')
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.api_url + 'logout',
|
||||
api_url,
|
||||
headers=self.headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to log off Rocket.Chat server: ' +
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyRocketChat.http_response_code_lookup(
|
||||
r.status_code, RC_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to log off Rocket.Chat server ' +
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to logoff {} from Rocket.Chat: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -323,11 +606,11 @@ class NotifyRocketChat(NotifyBase):
|
|||
else:
|
||||
self.logger.debug(
|
||||
'Rocket.Chat log off successful; response %s.' % (
|
||||
r.text))
|
||||
r.content))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred logging off the ' +
|
||||
'A Connection error occurred logging off the '
|
||||
'Rocket.Chat server')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
@ -341,13 +624,69 @@ class NotifyRocketChat(NotifyBase):
|
|||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
# Attempt to detect the webhook (if specified in the URL)
|
||||
# If no webhook is specified, then we just pass along as if nothing
|
||||
# happened. However if we do find a webhook, we want to rebuild our
|
||||
# URL without it since it conflicts with standard URLs. Support
|
||||
# %2F since that is a forward slash escaped
|
||||
|
||||
# rocket://webhook@host
|
||||
# rocket://user:webhook@host
|
||||
match = re.match(
|
||||
r'^\s*(?P<schema>[^:]+://)((?P<user>[^:]+):)?'
|
||||
r'(?P<webhook>[a-z0-9]+(/|%2F)'
|
||||
r'[a-z0-9]+)\@(?P<url>.+)$', url, re.I)
|
||||
|
||||
except TypeError:
|
||||
# Not a string
|
||||
return None
|
||||
|
||||
if match:
|
||||
# Re-assemble our URL without the webhook
|
||||
url = '{schema}{user}{url}'.format(
|
||||
schema=match.group('schema'),
|
||||
user='{}@'.format(match.group('user'))
|
||||
if match.group('user') else '',
|
||||
url=match.group('url'),
|
||||
)
|
||||
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
results['recipients'] = NotifyBase.unquote(results['fullpath'])
|
||||
if match:
|
||||
# store our webhook
|
||||
results['webhook'] = \
|
||||
NotifyRocketChat.unquote(match.group('webhook'))
|
||||
|
||||
# Take on the password too in the event we're in basic mode
|
||||
# We do not unquote() as this is done at a later state
|
||||
results['password'] = match.group('webhook')
|
||||
|
||||
# Apply our targets
|
||||
results['targets'] = NotifyRocketChat.split_path(results['fullpath'])
|
||||
|
||||
# The user may have forced the mode
|
||||
if 'mode' in results['qsd'] and len(results['qsd']['mode']):
|
||||
results['mode'] = \
|
||||
NotifyRocketChat.unquote(results['qsd']['mode'])
|
||||
|
||||
# avatar icon
|
||||
results['avatar'] = \
|
||||
parse_bool(results['qsd'].get('avatar', True))
|
||||
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyRocketChat.parse_list(results['qsd']['to'])
|
||||
|
||||
# The 'webhook' over-ride (if specified)
|
||||
if 'webhook' in results['qsd'] and len(results['qsd']['webhook']):
|
||||
results['webhook'] = \
|
||||
NotifyRocketChat.unquote(results['qsd']['webhook'])
|
||||
|
||||
return results
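# For illustration, a URL like the following (placeholder values) is caught
# by the pattern above: the 'hookid/hooktoken' pair is stripped out as the
# webhook, the remainder is re-parsed normally, and '#general' ends up in
# the targets list:
#
#   rocket://hookid%2Fhooktoken@chat.example.com/#general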
|
||||
|
|
377
libs/apprise/plugins/NotifyRyver.py
Normal file
|
@ -0,0 +1,377 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# To use this plugin, you need to first generate a webhook.
|
||||
|
||||
# When you're complete, you will receive a URL that looks something like this:
|
||||
# https://apprise.ryver.com/application/webhook/ckhrjW8w672m6HG
|
||||
# ^ ^
|
||||
# | |
|
||||
# These are important <---^----------------------------------------^
|
||||
#
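# Expressed as an Apprise URL, that webhook would typically look like one of
# the following (organization and token are the sample values above; the bot
# name is a placeholder):
#
#   ryver://apprise/ckhrjW8w672m6HG
#   ryver://botname@apprise/ckhrjW8w672m6HG?mode=slack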
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Token required as part of the API request
|
||||
VALIDATE_TOKEN = re.compile(r'[A-Z0-9]{15}', re.I)
|
||||
|
||||
# Organization required as part of the API request
|
||||
VALIDATE_ORG = re.compile(r'[A-Z0-9_-]{3,32}', re.I)
|
||||
|
||||
|
||||
class RyverWebhookMode(object):
|
||||
"""
|
||||
Ryver supports two webhook modes
|
||||
"""
|
||||
SLACK = 'slack'
|
||||
RYVER = 'ryver'
|
||||
|
||||
|
||||
# Define the types in a list for validation purposes
|
||||
RYVER_WEBHOOK_MODES = (
|
||||
RyverWebhookMode.SLACK,
|
||||
RyverWebhookMode.RYVER,
|
||||
)
|
||||
|
||||
|
||||
class NotifyRyver(NotifyBase):
|
||||
"""
|
||||
A wrapper for Ryver Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Ryver'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://ryver.com/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'ryver'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_ryver'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{organization}/{token}',
|
||||
'{schema}://{user}@{organization}/{token}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'organization': {
|
||||
'name': _('Organization'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'token': {
|
||||
'name': _('Token'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
'private': True,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Bot Name'),
|
||||
'type': 'string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'mode': {
|
||||
'name': _('Webhook Mode'),
|
||||
'type': 'choice:string',
|
||||
'values': RYVER_WEBHOOK_MODES,
|
||||
'default': RyverWebhookMode.RYVER,
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, organization, token, mode=RyverWebhookMode.RYVER,
|
||||
include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Ryver Object
|
||||
"""
|
||||
super(NotifyRyver, self).__init__(**kwargs)
|
||||
|
||||
if not token:
|
||||
msg = 'No Ryver token was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not organization:
|
||||
msg = 'No Ryver organization was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_TOKEN.match(token.strip()):
|
||||
msg = 'The Ryver token specified ({}) is invalid.'\
|
||||
.format(token)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_ORG.match(organization.strip()):
|
||||
msg = 'The Ryver organization specified ({}) is invalid.'\
|
||||
.format(organization)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Store our webhook mode
|
||||
self.mode = None \
|
||||
if not isinstance(mode, six.string_types) else mode.lower()
|
||||
|
||||
if self.mode not in RYVER_WEBHOOK_MODES:
|
||||
msg = 'The Ryver webhook mode specified ({}) is invalid.' \
|
||||
.format(mode)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The organization associated with the account
|
||||
self.organization = organization.strip()
|
||||
|
||||
# The token associated with the account
|
||||
self.token = token.strip()
|
||||
|
||||
# Place an image inline with the message body
|
||||
self.include_image = include_image
|
||||
|
||||
# Slack formatting requirements are defined here which Ryver supports:
|
||||
# https://api.slack.com/docs/message-formatting
|
||||
self._re_formatting_map = {
|
||||
# New lines must become the string version
|
||||
r'\r\*\n': '\\n',
|
||||
# Escape other special characters
|
||||
r'&': '&amp;',
|
||||
r'<': '&lt;',
|
||||
r'>': '&gt;',
|
||||
}
|
||||
|
||||
# Iterate over above list and store content accordingly
|
||||
self._re_formatting_rules = re.compile(
|
||||
r'(' + '|'.join(self._re_formatting_map.keys()) + r')',
|
||||
re.IGNORECASE,
|
||||
)
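# A quick sketch of what the formatting rules above do when mode=slack
# (input value is illustrative):
#
#   >>> self._re_formatting_rules.sub(
#   ...     lambda x: self._re_formatting_map[x.group()], 'a & b <c>')
#   'a &amp; b &lt;c&gt;'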
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Ryver Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
if self.mode == RyverWebhookMode.SLACK:
|
||||
# Perform Slack formatting
|
||||
title = self._re_formatting_rules.sub( # pragma: no branch
|
||||
lambda x: self._re_formatting_map[x.group()], title,
|
||||
)
|
||||
body = self._re_formatting_rules.sub( # pragma: no branch
|
||||
lambda x: self._re_formatting_map[x.group()], body,
|
||||
)
|
||||
|
||||
url = 'https://{}.ryver.com/application/webhook/{}'.format(
|
||||
self.organization,
|
||||
self.token,
|
||||
)
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'body': body if not title else '**{}**\r\n{}'.format(title, body),
|
||||
'createSource': {
|
||||
'displayName': self.user,
|
||||
'avatar': None,
|
||||
},
|
||||
}
|
||||
|
||||
# Acquire our image url if configured to do so
|
||||
image_url = None if not self.include_image else \
|
||||
self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
payload['createSource']['avatar'] = image_url
|
||||
|
||||
self.logger.debug('Ryver POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Ryver Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Ryver notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Ryver notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Ryver:%s ' % (
|
||||
self.organization) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
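# The request above therefore carries a JSON document shaped roughly like
# this (placeholder values):
#
#   {
#       "body": "**test title**\r\ntest body",
#       "createSource": {"displayName": "botname", "avatar": "https://..."}
#   }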
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'mode': self.mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Determine if there is a botname present
|
||||
botname = ''
|
||||
if self.user:
|
||||
botname = '{botname}@'.format(
|
||||
botname=NotifyRyver.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
return '{schema}://{botname}{organization}/{token}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
botname=botname,
|
||||
organization=NotifyRyver.quote(self.organization, safe=''),
|
||||
token=NotifyRyver.quote(self.token, safe=''),
|
||||
args=NotifyRyver.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# The first token is stored in the hostname
|
||||
results['organization'] = NotifyRyver.unquote(results['host'])
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
results['token'] = \
|
||||
NotifyRyver.split_path(results['fullpath'])[0]
|
||||
|
||||
except IndexError:
|
||||
# no token
|
||||
results['token'] = None
|
||||
|
||||
if 'webhook' in results['qsd']:
|
||||
# Deprecation Notice issued for v0.7.5
|
||||
NotifyRyver.logger.deprecate(
|
||||
'The Ryver URL contains the parameter '
|
||||
'"webhook=" which will be deprecated in an upcoming '
|
||||
'release. Please use "mode=" instead.'
|
||||
)
|
||||
|
||||
# use mode= for consistency with the other plugins but we also
|
||||
# support webhook= for backwards compatibility.
|
||||
results['mode'] = results['qsd'].get(
|
||||
'mode', results['qsd'].get(
|
||||
'webhook', RyverWebhookMode.RYVER))
|
||||
|
||||
# use image= for consistency with the other plugins
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support https://RYVER_ORG.ryver.com/application/webhook/TOKEN
|
||||
"""
|
||||
|
||||
result = re.match(
|
||||
r'^https?://(?P<org>[A-Z0-9_-]+)\.ryver\.com/application/webhook/'
|
||||
r'(?P<webhook_token>[A-Z0-9]+)/?'
|
||||
r'(?P<args>\?[.+])?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifyRyver.parse_url(
|
||||
'{schema}://{org}/{webhook_token}/{args}'.format(
|
||||
schema=NotifyRyver.secure_protocol,
|
||||
org=result.group('org'),
|
||||
webhook_token=result.group('webhook_token'),
|
||||
args='' if not result.group('args')
|
||||
else result.group('args')))
|
||||
|
||||
return None
|
674
libs/apprise/plugins/NotifySNS.py
Normal file
|
@ -0,0 +1,674 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import hmac
|
||||
import requests
|
||||
from hashlib import sha256
|
||||
from datetime import datetime
|
||||
from collections import OrderedDict
|
||||
from xml.etree import ElementTree
|
||||
from itertools import chain
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Some Phone Number Detection
|
||||
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
|
||||
|
||||
# Topic Detection
|
||||
# Summary: 256 Characters max, only alpha/numeric plus underscore (_) and
|
||||
# dash (-) additionally allowed.
|
||||
#
|
||||
# Source: https://docs.aws.amazon.com/AWSSimpleQueueService/latest\
|
||||
# /SQSDeveloperGuide/sqs-limits.html#limits-queues
|
||||
#
|
||||
# Allow a starting hashtag (#) specification to help eliminate possible
|
||||
# ambiguity between a topic that is comprised of all digits and a phone number
|
||||
IS_TOPIC = re.compile(r'^#?(?P<name>[A-Za-z0-9_-]+)\s*$')
|
||||
|
||||
# Because our AWS Access Key Secret contains slashes, we actually use the
|
||||
# region as a delimiter. This is a bit hacky; but it's much easier than having
|
||||
# users of this product search though this Access Key Secret and escape all
|
||||
# of the forward slashes!
|
||||
IS_REGION = re.compile(
|
||||
r'^\s*(?P<country>[a-z]{2})-(?P<area>[a-z]+)-(?P<no>[0-9]+)\s*$', re.I)
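# For illustration, the region token lets a URL such as the following split
# unambiguously even though the secret itself contains slashes (all values
# are placeholders):
#
#   sns://AKIAIOSFODNN7EXAMPLE/wJalrXUt/nFEMI/K7MDENG/us-east-1/+15551234567/#alerts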
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
AWS_HTTP_ERROR_MAP = {
|
||||
403: 'Unauthorized - Invalid Access/Secret Key Combination.',
|
||||
}
|
||||
|
||||
|
||||
class NotifySNS(NotifyBase):
|
||||
"""
|
||||
A wrapper for AWS SNS (Amazon Simple Notification)
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'AWS Simple Notification Service (SNS)'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://aws.amazon.com/sns/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'sns'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sns'
|
||||
|
||||
# AWS is pretty good for handling data load so request limits
|
||||
# can occur in much shorter bursts
|
||||
request_rate_per_sec = 2.5
|
||||
|
||||
# The maximum length of the body
|
||||
# Source: https://docs.aws.amazon.com/sns/latest/api/API_Publish.html
|
||||
body_maxlen = 140
|
||||
|
||||
# A title can not be used for SMS Messages. Setting this to zero will
|
||||
# cause any title (if defined) to get placed into the message body.
|
||||
title_maxlen = 0
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{access_key_id}/{secret_access_key}{region}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'access_key_id': {
|
||||
'name': _('Access Key ID'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'secret_access_key': {
|
||||
'name': _('Secret Access Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'region': {
|
||||
'name': _('Region'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
'regex': (r'[a-z]{2}-[a-z]+-[0-9]+', 'i'),
|
||||
'map_to': 'region_name',
|
||||
},
|
||||
'target_phone_no': {
|
||||
'name': _('Target Phone No'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
'regex': (r'[0-9\s)(+-]+', 'i')
|
||||
},
|
||||
'target_topic': {
|
||||
'name': _('Target Topic'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
'prefix': '#',
|
||||
'regex': (r'[A-Za-z0-9_-]+', 'i'),
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, access_key_id, secret_access_key, region_name,
|
||||
targets=None, **kwargs):
|
||||
"""
|
||||
Initialize Notify AWS SNS Object
|
||||
"""
|
||||
super(NotifySNS, self).__init__(**kwargs)
|
||||
|
||||
if not access_key_id:
|
||||
msg = 'An invalid AWS Access Key ID was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not secret_access_key:
|
||||
msg = 'An invalid AWS Secret Access Key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not (region_name and IS_REGION.match(region_name)):
|
||||
msg = 'An invalid AWS Region was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Initialize topic list
|
||||
self.topics = list()
|
||||
|
||||
# Initialize numbers list
|
||||
self.phone = list()
|
||||
|
||||
# Store our AWS API Key
|
||||
self.aws_access_key_id = access_key_id
|
||||
|
||||
# Store our AWS API Secret Access key
|
||||
self.aws_secret_access_key = secret_access_key
|
||||
|
||||
# Acquire our AWS Region Name:
|
||||
# eg. us-east-1, cn-north-1, us-west-2, ...
|
||||
self.aws_region_name = region_name
|
||||
|
||||
# Set our notify_url based on our region
|
||||
self.notify_url = 'https://sns.{}.amazonaws.com/'\
|
||||
.format(self.aws_region_name)
|
||||
|
||||
# AWS Service Details
|
||||
self.aws_service_name = 'sns'
|
||||
self.aws_canonical_uri = '/'
|
||||
|
||||
# AWS Authentication Details
|
||||
self.aws_auth_version = 'AWS4'
|
||||
self.aws_auth_algorithm = 'AWS4-HMAC-SHA256'
|
||||
self.aws_auth_request = 'aws4_request'
|
||||
|
||||
# Get our targets
|
||||
targets = parse_list(targets)
|
||||
|
||||
# Validate targets and drop bad ones:
|
||||
for target in targets:
|
||||
result = IS_PHONE_NO.match(target)
|
||||
if result:
|
||||
# Further check our phone # for its digit count
|
||||
# if it has fewer than 11 or more than 14 digits, we can assume it's
|
||||
# a poorly specified phone no and emit a warning
|
||||
result = ''.join(re.findall(r'\d+', result.group('phone')))
|
||||
if len(result) < 11 or len(result) > 14:
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'(%s) specified.' % target,
|
||||
)
|
||||
continue
|
||||
|
||||
# store valid phone number
|
||||
self.phone.append('+{}'.format(result))
|
||||
continue
|
||||
|
||||
result = IS_TOPIC.match(target)
|
||||
if result:
|
||||
# store valid topic
|
||||
self.topics.append(result.group('name'))
|
||||
continue
|
||||
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone/topic '
|
||||
'(%s) specified.' % target,
|
||||
)
|
||||
|
||||
if len(self.phone) == 0 and len(self.topics) == 0:
|
||||
self.logger.warning(
|
||||
'There are no valid target(s) identified to notify.')
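# For illustration, targets=['+1 555 123-4567', '#alerts', '12'] would end
# up as self.phone == ['+15551234567'] and self.topics == ['alerts'], with
# '12' dropped for having too few digits.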
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
wrapper to _post() since we can alert more than one phone no/topic
|
||||
"""
|
||||
|
||||
# Initialize our error tracking
|
||||
error_count = 0
|
||||
|
||||
# Create a copy of our phone #'s to notify against
|
||||
phone = list(self.phone)
|
||||
topics = list(self.topics)
|
||||
|
||||
while len(phone) > 0:
|
||||
|
||||
# Get Phone No
|
||||
no = phone.pop(0)
|
||||
|
||||
# Prepare SNS Message Payload
|
||||
payload = {
|
||||
'Action': u'Publish',
|
||||
'Message': body,
|
||||
'Version': u'2010-03-31',
|
||||
'PhoneNumber': no,
|
||||
}
|
||||
|
||||
(result, _) = self._post(payload=payload, to=no)
|
||||
if not result:
|
||||
error_count += 1
|
||||
|
||||
# Send all our defined topic id's
|
||||
while len(topics):
|
||||
|
||||
# Get Topic
|
||||
topic = topics.pop(0)
|
||||
|
||||
# First ensure our topic exists, if it doesn't, it gets created
|
||||
payload = {
|
||||
'Action': u'CreateTopic',
|
||||
'Version': u'2010-03-31',
|
||||
'Name': topic,
|
||||
}
|
||||
|
||||
(result, response) = self._post(payload=payload, to=topic)
|
||||
if not result:
|
||||
error_count += 1
|
||||
continue
|
||||
|
||||
# Get the Amazon Resource Name
|
||||
topic_arn = response.get('topic_arn')
|
||||
if not topic_arn:
|
||||
# Could not acquire our topic; we're done
|
||||
error_count += 1
|
||||
continue
|
||||
|
||||
# Build our payload now that we know our topic_arn
|
||||
payload = {
|
||||
'Action': u'Publish',
|
||||
'Version': u'2010-03-31',
|
||||
'TopicArn': topic_arn,
|
||||
'Message': body,
|
||||
}
|
||||
|
||||
# Send our payload to AWS
|
||||
(result, _) = self._post(payload=payload, to=topic)
|
||||
if not result:
|
||||
error_count += 1
|
||||
|
||||
return error_count == 0
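# For a topic target the loop above therefore issues two SNS API calls,
# roughly (the ARN below is a placeholder):
#
#   Action=CreateTopic&Name=alerts&Version=2010-03-31
#       -> <TopicArn>arn:aws:sns:us-east-1:000000000000:alerts</TopicArn>
#   Action=Publish&TopicArn=arn:aws:sns:us-east-1:000000000000:alerts&Message=...&Version=2010-03-31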
|
||||
|
||||
def _post(self, payload, to):
|
||||
"""
|
||||
Wrapper to requests.post() to manage its response better and make
|
||||
the send() function cleaner and easier to maintain.
|
||||
|
||||
This function returns True if the _post was successful and False
|
||||
if it wasn't.
|
||||
"""
|
||||
|
||||
# Always call throttle before any remote server i/o is made; for AWS
|
||||
# time plays a huge factor in the headers being sent with the payload.
|
||||
# So for AWS (SNS) requests we must throttle before they're generated
|
||||
# and not directly before the i/o call like other notification
|
||||
# services do.
|
||||
self.throttle()
|
||||
|
||||
# Convert our payload from a dict() into a urlencoded string
|
||||
payload = NotifySNS.urlencode(payload)
|
||||
|
||||
# Prepare our Notification URL
|
||||
# Prepare our AWS Headers based on our payload
|
||||
headers = self.aws_prepare_request(payload)
|
||||
|
||||
self.logger.debug('AWS POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('AWS Payload: %s' % str(payload))
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifySNS.http_response_code_lookup(
|
||||
r.status_code, AWS_HTTP_ERROR_MAP)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send AWS notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
to,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
return (False, NotifySNS.aws_response_to_dict(r.content))
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent AWS notification to "%s".' % (to))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending AWS '
|
||||
'notification to "%s".' % (to),
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return (False, NotifySNS.aws_response_to_dict(None))
|
||||
|
||||
return (True, NotifySNS.aws_response_to_dict(r.content))
|
||||
|
||||
def aws_prepare_request(self, payload, reference=None):
|
||||
"""
|
||||
Takes the intended payload and returns the headers for it.
|
||||
|
||||
The payload is presumed to have been already urlencoded()
|
||||
|
||||
"""
|
||||
|
||||
# Define our AWS header
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
|
||||
|
||||
# Populated below
|
||||
'Content-Length': 0,
|
||||
'Authorization': None,
|
||||
'X-Amz-Date': None,
|
||||
}
|
||||
|
||||
# Get a reference time (used for header construction)
|
||||
reference = datetime.utcnow()
|
||||
|
||||
# Provide Content-Length
|
||||
headers['Content-Length'] = str(len(payload))
|
||||
|
||||
# Amazon Date Format
|
||||
amzdate = reference.strftime('%Y%m%dT%H%M%SZ')
|
||||
headers['X-Amz-Date'] = amzdate
|
||||
|
||||
# Credential Scope
|
||||
scope = '{date}/{region}/{service}/{request}'.format(
|
||||
date=reference.strftime('%Y%m%d'),
|
||||
region=self.aws_region_name,
|
||||
service=self.aws_service_name,
|
||||
request=self.aws_auth_request,
|
||||
)
|
||||
|
||||
# Similar to headers; but a subset. keys must be lowercase
|
||||
signed_headers = OrderedDict([
|
||||
('content-type', headers['Content-Type']),
|
||||
('host', '{service}.{region}.amazonaws.com'.format(
|
||||
service=self.aws_service_name,
|
||||
region=self.aws_region_name)),
|
||||
('x-amz-date', headers['X-Amz-Date']),
|
||||
])
|
||||
|
||||
#
|
||||
# Build Canonical Request Object
|
||||
#
|
||||
canonical_request = '\n'.join([
|
||||
# Method
|
||||
u'POST',
|
||||
|
||||
# URL
|
||||
self.aws_canonical_uri,
|
||||
|
||||
# Query String (none set for POST)
|
||||
'',
|
||||
|
||||
# Header Content (must include \n at end!)
|
||||
# All entries except characters in amazon date must be
|
||||
# lowercase
|
||||
'\n'.join(['%s:%s' % (k, v)
|
||||
for k, v in signed_headers.items()]) + '\n',
|
||||
|
||||
# Header Entries (in same order identified above)
|
||||
';'.join(signed_headers.keys()),
|
||||
|
||||
# Payload
|
||||
sha256(payload.encode('utf-8')).hexdigest(),
|
||||
])
|
||||
|
||||
# Prepare Unsigned Signature
|
||||
to_sign = '\n'.join([
|
||||
self.aws_auth_algorithm,
|
||||
amzdate,
|
||||
scope,
|
||||
sha256(canonical_request.encode('utf-8')).hexdigest(),
|
||||
])
|
||||
|
||||
# Our Authorization header
|
||||
headers['Authorization'] = ', '.join([
|
||||
'{algorithm} Credential={key}/{scope}'.format(
|
||||
algorithm=self.aws_auth_algorithm,
|
||||
key=self.aws_access_key_id,
|
||||
scope=scope,
|
||||
),
|
||||
'SignedHeaders={signed_headers}'.format(
|
||||
signed_headers=';'.join(signed_headers.keys()),
|
||||
),
|
||||
'Signature={signature}'.format(
|
||||
signature=self.aws_auth_signature(to_sign, reference)
|
||||
),
|
||||
])
|
||||
|
||||
return headers
|
||||
|
||||
def aws_auth_signature(self, to_sign, reference):
|
||||
"""
|
||||
Generates a AWS v4 signature based on provided payload
|
||||
which should be in the form of a string.
|
||||
"""
|
||||
|
||||
def _sign(key, msg, to_hex=False):
|
||||
"""
|
||||
Perform AWS Signing
|
||||
"""
|
||||
if to_hex:
|
||||
return hmac.new(key, msg.encode('utf-8'), sha256).hexdigest()
|
||||
return hmac.new(key, msg.encode('utf-8'), sha256).digest()
|
||||
|
||||
_date = _sign((
|
||||
self.aws_auth_version +
|
||||
self.aws_secret_access_key).encode('utf-8'),
|
||||
reference.strftime('%Y%m%d'))
|
||||
|
||||
_region = _sign(_date, self.aws_region_name)
|
||||
_service = _sign(_region, self.aws_service_name)
|
||||
_signed = _sign(_service, self.aws_auth_request)
|
||||
return _sign(_signed, to_sign, to_hex=True)
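# The derivation above follows the standard AWS Signature Version 4 key
# chain; a standalone sketch using only the standard library (secret, date,
# region and string-to-sign are placeholders):
import hmac
from hashlib import sha256

def _derive(key, msg):
    # HMAC-SHA256 each stage with the previous stage's digest as the key
    return hmac.new(key, msg.encode('utf-8'), sha256).digest()

k_date = _derive(('AWS4' + 'PLACEHOLDER_SECRET').encode('utf-8'), '20190308')
k_region = _derive(k_date, 'us-east-1')
k_service = _derive(k_region, 'sns')
k_signing = _derive(k_service, 'aws4_request')
signature = hmac.new(
    k_signing, 'string-to-sign'.encode('utf-8'), sha256).hexdigest()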
|
||||
|
||||
@staticmethod
|
||||
def aws_response_to_dict(aws_response):
|
||||
"""
|
||||
Takes an AWS Response object as input and returns it as a dictionary
|
||||
but not before extracting out what is useful to us first.
|
||||
|
||||
eg:
|
||||
IN:
|
||||
<CreateTopicResponse
|
||||
xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
|
||||
<CreateTopicResult>
|
||||
<TopicArn>arn:aws:sns:us-east-1:000000000000:abcd</TopicArn>
|
||||
</CreateTopicResult>
|
||||
<ResponseMetadata>
|
||||
<RequestId>604bef0f-369c-50c5-a7a4-bbd474c83d6a</RequestId>
|
||||
</ResponseMetadata>
|
||||
</CreateTopicResponse>
|
||||
|
||||
OUT:
|
||||
{
|
||||
type: 'CreateTopicResponse',
|
||||
request_id: '604bef0f-369c-50c5-a7a4-bbd474c83d6a',
|
||||
topic_arn: 'arn:aws:sns:us-east-1:000000000000:abcd',
|
||||
}
|
||||
"""
|
||||
|
||||
# Define ourselves a set of directives we want to keep if found and
|
||||
# then identify the value we want to map them to in our response
|
||||
# object
|
||||
aws_keep_map = {
|
||||
'RequestId': 'request_id',
|
||||
'TopicArn': 'topic_arn',
|
||||
'MessageId': 'message_id',
|
||||
|
||||
# Error Message Handling
|
||||
'Type': 'error_type',
|
||||
'Code': 'error_code',
|
||||
'Message': 'error_message',
|
||||
}
|
||||
|
||||
# A default response object that we'll manipulate as we pull more data
|
||||
# from our AWS Response object
|
||||
response = {
|
||||
'type': None,
|
||||
'request_id': None,
|
||||
}
|
||||
|
||||
try:
|
||||
# we build our tree, but not before first eliminating any
|
||||
# reference to namespacing (if present) as it makes parsing
|
||||
# the tree so much easier.
|
||||
root = ElementTree.fromstring(
|
||||
re.sub(' xmlns="[^"]+"', '', aws_response, count=1))
|
||||
|
||||
# Store our response tag object name
|
||||
response['type'] = str(root.tag)
|
||||
|
||||
def _xml_iter(root, response):
|
||||
if len(root) > 0:
|
||||
for child in root:
|
||||
# use recursion to parse everything
|
||||
_xml_iter(child, response)
|
||||
|
||||
elif root.tag in aws_keep_map.keys():
|
||||
response[aws_keep_map[root.tag]] = (root.text).strip()
|
||||
|
||||
# Recursively iterate over our AWS Response to extract the
|
||||
# fields we're interested in in efforts to populate our response
|
||||
# object.
|
||||
_xml_iter(root, response)
|
||||
|
||||
except (ElementTree.ParseError, TypeError):
|
||||
# bad data just causes us to generate a bad response
|
||||
pass
|
||||
|
||||
return response
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{key_id}/{key_secret}/{region}/{targets}/'\
|
||||
'?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
key_id=NotifySNS.quote(self.aws_access_key_id, safe=''),
|
||||
key_secret=NotifySNS.quote(
|
||||
self.aws_secret_access_key, safe=''),
|
||||
region=NotifySNS.quote(self.aws_region_name, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifySNS.quote(x) for x in chain(
|
||||
# Phone # are prefixed with a plus symbol
|
||||
['+{}'.format(x) for x in self.phone],
|
||||
# Topics are prefixed with a pound/hashtag symbol
|
||||
['#{}'.format(x) for x in self.topics],
|
||||
)]),
|
||||
args=NotifySNS.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# The AWS Access Key ID is stored in the hostname
|
||||
access_key_id = NotifySNS.unquote(results['host'])
|
||||
|
||||
# Our AWS Access Key Secret contains slashes in it which unfortunately
|
||||
# means it is of variable length after the hostname. Since we require
|
||||
# that the user provides the region code, we intentionally use this
|
||||
# as our delimiter to detect where our Secret is.
|
||||
secret_access_key = None
|
||||
region_name = None
|
||||
|
||||
# We need to iterate over each entry in the fullpath and find our
|
||||
# region. Once we get there we stop and build our secret from our
|
||||
# accumulated data.
|
||||
secret_access_key_parts = list()
|
||||
|
||||
# Start with a list of entries to work with
|
||||
entries = NotifySNS.split_path(results['fullpath'])
|
||||
|
||||
# Section 1: Get Region and Access Secret
|
||||
index = 0
|
||||
for i, entry in enumerate(entries):
|
||||
|
||||
# Are we at the region yet?
|
||||
result = IS_REGION.match(entry)
|
||||
if result:
|
||||
# We found our Region; Rebuild our access key secret based on
|
||||
# all entries we found prior to this:
|
||||
secret_access_key = '/'.join(secret_access_key_parts)
|
||||
|
||||
# Ensure region is nicely formatted
|
||||
region_name = "{country}-{area}-{no}".format(
|
||||
country=result.group('country').lower(),
|
||||
area=result.group('area').lower(),
|
||||
no=result.group('no'),
|
||||
)
|
||||
|
||||
# Track our index as we'll use this to grab the remaining
|
||||
# content in the next Section
|
||||
index = i + 1
|
||||
|
||||
# We're done with Section 1
|
||||
break
|
||||
|
||||
# Store our secret parts
|
||||
secret_access_key_parts.append(entry)
|
||||
|
||||
# Section 2: Get our Recipients (basically all remaining entries)
|
||||
results['targets'] = entries[index:]
|
||||
|
||||
# Support the 'to' variable so that we can support rooms this way too
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifySNS.parse_list(results['qsd']['to'])
|
||||
|
||||
# Store our other detected data (if at all)
|
||||
results['region_name'] = region_name
|
||||
results['access_key_id'] = access_key_id
|
||||
results['secret_access_key'] = secret_access_key
|
||||
|
||||
# Return our result set
|
||||
return results
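# Walking a placeholder URL through the two sections above:
#
#   sns://ACCESSKEY/secret/part/us-east-1/+15551234567/#alerts
#     host                -> access_key_id 'ACCESSKEY'
#     entries             -> ['secret', 'part', 'us-east-1', '+15551234567', '#alerts']
#     region found at i=2 -> secret_access_key 'secret/part', region_name 'us-east-1'
#     targets             -> ['+15551234567', '#alerts']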
|
|
@ -1,20 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Slack Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# To use this plugin, you need to first access https://api.slack.com
|
||||
# Specifically https://my.slack.com/services/new/incoming-webhook/
|
||||
|
@ -34,9 +41,12 @@ from json import dumps
|
|||
from time import time
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyFormat
|
||||
from ..utils import parse_bool
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Token required as part of the API request
|
||||
# /AAAAAAAAA/........./........................
|
||||
|
@ -54,16 +64,15 @@ VALIDATE_TOKEN_C = re.compile(r'[A-Za-z0-9]{24}')
|
|||
SLACK_DEFAULT_USER = 'apprise'
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
SLACK_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
SLACK_HTTP_ERROR_MAP.update({
|
||||
SLACK_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
# Used to break path apart into list of channels
|
||||
CHANNEL_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
|
||||
|
||||
# Used to detect a channel
|
||||
IS_CHANNEL_RE = re.compile(r'[+#@]?([A-Z0-9_]{1,32})', re.I)
|
||||
IS_VALID_TARGET_RE = re.compile(r'[+#@]?([A-Z0-9_]{1,32})', re.I)
|
||||
|
||||
|
||||
class NotifySlack(NotifyBase):
|
||||
|
@ -92,41 +101,127 @@ class NotifySlack(NotifyBase):
|
|||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
def __init__(self, token_a, token_b, token_c, channels, **kwargs):
|
||||
# Default Notification Format
|
||||
notify_format = NotifyFormat.MARKDOWN
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{token_a}/{token_b}{token_c}',
|
||||
'{schema}://{botname}@{token_a}/{token_b}{token_c}',
|
||||
'{schema}://{token_a}/{token_b}{token_c}/{targets}',
|
||||
'{schema}://{botname}@{token_a}/{token_b}{token_c}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'botname': {
|
||||
'name': _('Bot Name'),
|
||||
'type': 'string',
|
||||
'map_to': 'user',
|
||||
},
|
||||
'token_a': {
|
||||
'name': _('Token A'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[A-Z0-9]{9}', 'i'),
|
||||
},
|
||||
'token_b': {
|
||||
'name': _('Token B'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[A-Z0-9]{9}', 'i'),
|
||||
},
|
||||
'token_c': {
|
||||
'name': _('Token C'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[A-Za-z0-9]{24}', 'i'),
|
||||
},
|
||||
'target_encoded_id': {
|
||||
'name': _('Target Encoded ID'),
|
||||
'type': 'string',
|
||||
'prefix': '+',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_user': {
|
||||
'name': _('Target User'),
|
||||
'type': 'string',
|
||||
'prefix': '@',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'target_channels': {
|
||||
'name': _('Target Channel'),
|
||||
'type': 'string',
|
||||
'prefix': '#',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
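# Editorial illustration, not part of this diff (placeholder targets): the
# prefixes above mean a targets path such as '#general/@jsmith/+C024BE91L'
# can mix channels, users and encoded ids in a single slack:// URL.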
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, token_a, token_b, token_c, targets,
|
||||
include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Slack Object
|
||||
"""
|
||||
super(NotifySlack, self).__init__(**kwargs)
|
||||
|
||||
if not token_a:
|
||||
msg = 'The first API token is not specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not token_b:
|
||||
msg = 'The second API token is not specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not token_c:
|
||||
msg = 'The third API token is not specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_TOKEN_A.match(token_a.strip()):
|
||||
self.logger.warning(
|
||||
'The first API Token specified (%s) is invalid.' % token_a,
|
||||
)
|
||||
raise TypeError(
|
||||
'The first API Token specified (%s) is invalid.' % token_a,
|
||||
)
|
||||
msg = 'The first API token specified ({}) is invalid.'\
|
||||
.format(token_a)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_a = token_a.strip()
|
||||
|
||||
if not VALIDATE_TOKEN_B.match(token_b.strip()):
|
||||
self.logger.warning(
|
||||
'The second API Token specified (%s) is invalid.' % token_b,
|
||||
)
|
||||
raise TypeError(
|
||||
'The second API Token specified (%s) is invalid.' % token_b,
|
||||
)
|
||||
msg = 'The second API token specified ({}) is invalid.'\
|
||||
.format(token_b)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_b = token_b.strip()
|
||||
|
||||
if not VALIDATE_TOKEN_C.match(token_c.strip()):
|
||||
self.logger.warning(
|
||||
'The third API Token specified (%s) is invalid.' % token_c,
|
||||
)
|
||||
raise TypeError(
|
||||
'The third API Token specified (%s) is invalid.' % token_c,
|
||||
)
|
||||
msg = 'The third API token specified ({}) is invalid.'\
|
||||
.format(token_c)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_c = token_c.strip()
|
||||
|
@@ -134,22 +229,14 @@ class NotifySlack(NotifyBase):
|
|||
if not self.user:
|
||||
self.logger.warning(
|
||||
'No user was specified; using %s.' % SLACK_DEFAULT_USER)
|
||||
self.user = SLACK_DEFAULT_USER
|
||||
|
||||
if compat_is_basestring(channels):
|
||||
self.channels = [x for x in filter(bool, CHANNEL_LIST_DELIM.split(
|
||||
channels,
|
||||
))]
|
||||
|
||||
elif isinstance(channels, (set, tuple, list)):
|
||||
self.channels = channels
|
||||
|
||||
else:
|
||||
self.channels = list()
|
||||
|
||||
# Build list of channels
|
||||
self.channels = parse_list(targets)
|
||||
if len(self.channels) == 0:
|
||||
self.logger.warning('No channel(s) were specified.')
|
||||
raise TypeError('No channel(s) were specified.')
|
||||
# No problem; the webhook is smart enough to just notify the
|
||||
# channel it was created for; adding 'None' is just used as
|
||||
# a flag lower to not set the channels
|
||||
self.channels.append(None)
|
||||
|
||||
# Formatting requirements are defined here:
|
||||
# https://api.slack.com/docs/message-formatting
|
||||
|
@@ -168,7 +255,10 @@ class NotifySlack(NotifyBase):
|
|||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
# Place a thumbnail image inline with the message body
|
||||
self.include_image = include_image
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Slack Notification
|
||||
"""
|
||||
|
@@ -179,7 +269,7 @@ class NotifySlack(NotifyBase):
|
|||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
notify_okay = True
|
||||
has_error = False
|
||||
|
||||
# Perform Formatting
|
||||
title = self._re_formatting_rules.sub( # pragma: no branch
|
||||
|
@@ -195,47 +285,52 @@ class NotifySlack(NotifyBase):
|
|||
self.token_c,
|
||||
)
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'username': self.user if self.user else SLACK_DEFAULT_USER,
|
||||
# Use Markdown language
|
||||
'mrkdwn': (self.notify_format == NotifyFormat.MARKDOWN),
|
||||
'attachments': [{
|
||||
'title': title,
|
||||
'text': body,
|
||||
'color': self.color(notify_type),
|
||||
# Time
|
||||
'ts': time(),
|
||||
'footer': self.app_id,
|
||||
}],
|
||||
}
|
||||
|
||||
# Create a copy of the channel list
|
||||
channels = list(self.channels)
|
||||
while len(channels):
|
||||
channel = channels.pop(0)
|
||||
if not IS_CHANNEL_RE.match(channel):
|
||||
self.logger.warning(
|
||||
"The specified channel '%s' is invalid; skipping." % (
|
||||
channel,
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
if len(channel) > 1 and channel[0] == '+':
|
||||
# Treat as encoded id if prefixed with a +
|
||||
_channel = channel[1:]
|
||||
if channel is not None:
|
||||
# Channel over-ride was specified
|
||||
if not IS_VALID_TARGET_RE.match(channel):
|
||||
self.logger.warning(
|
||||
"The specified target {} is invalid;"
|
||||
"skipping.".format(channel))
|
||||
|
||||
elif len(channel) > 1 and channel[0] == '@':
|
||||
# Treat @ value 'as is'
|
||||
_channel = channel
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
# Prefix with channel hash tag
|
||||
_channel = '#%s' % channel
|
||||
if len(channel) > 1 and channel[0] == '+':
|
||||
# Treat as encoded id if prefixed with a +
|
||||
payload['channel'] = channel[1:]
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'channel': _channel,
|
||||
'username': self.user,
|
||||
# Use Markdown language
|
||||
'mrkdwn': True,
|
||||
'attachments': [{
|
||||
'title': title,
|
||||
'text': body,
|
||||
'color': self.color(notify_type),
|
||||
# Time
|
||||
'ts': time(),
|
||||
'footer': self.app_id,
|
||||
}],
|
||||
}
|
||||
elif len(channel) > 1 and channel[0] == '@':
|
||||
# Treat @ value 'as is'
|
||||
payload['channel'] = channel
|
||||
|
||||
else:
|
||||
# Prefix with channel hash tag
|
||||
payload['channel'] = '#%s' % channel
|
||||
|
||||
# Acquire our to-be footer icon if configured to do so
|
||||
image_url = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
payload['attachments'][0]['footer_icon'] = image_url
|
||||
|
@@ -244,6 +339,9 @@ class NotifySlack(NotifyBase):
|
|||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Slack Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
|
@@ -253,42 +351,77 @@ class NotifySlack(NotifyBase):
|
|||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Slack:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
channel,
|
||||
SLACK_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifySlack.http_response_code_lookup(
|
||||
r.status_code, SLACK_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Slack:%s '
|
||||
'notification (error=%s).' % (
|
||||
channel,
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Slack notification{}: '
|
||||
'{}{}error={}.'.format(
|
||||
' to {}'.format(channel)
|
||||
if channel is not None else '',
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
notify_okay = False
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Slack notification.')
|
||||
self.logger.info(
|
||||
'Sent Slack notification{}.'.format(
|
||||
' to {}'.format(channel)
|
||||
if channel is not None else ''))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Slack:%s ' % (
|
||||
channel) + 'notification.'
|
||||
)
|
||||
'A Connection error occured sending Slack '
|
||||
'notification{}.'.format(
|
||||
' to {}'.format(channel)
|
||||
if channel is not None else ''))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
notify_okay = False
|
||||
|
||||
if len(channels):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return notify_okay
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Determine if there is a botname present
|
||||
botname = ''
|
||||
if self.user:
|
||||
botname = '{botname}@'.format(
|
||||
botname=NotifySlack.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
return '{schema}://{botname}{token_a}/{token_b}/{token_c}/{targets}/'\
|
||||
'?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
botname=botname,
|
||||
token_a=NotifySlack.quote(self.token_a, safe=''),
|
||||
token_b=NotifySlack.quote(self.token_b, safe=''),
|
||||
token_c=NotifySlack.quote(self.token_c, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifySlack.quote(x, safe='') for x in self.channels]),
|
||||
args=NotifySlack.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
|
@@ -303,26 +436,64 @@ class NotifySlack(NotifyBase):
|
|||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
# Get unquoted entries
|
||||
entries = NotifySlack.split_path(results['fullpath'])
|
||||
|
||||
# The first token is stored in the hostname
|
||||
token_a = results['host']
|
||||
results['token_a'] = NotifySlack.unquote(results['host'])
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
token_b, token_c = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][0:2]
|
||||
results['token_b'] = entries.pop(0)
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
except IndexError:
|
||||
# We're done
|
||||
return None
|
||||
results['token_b'] = None
|
||||
|
||||
channels = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][2:]
|
||||
try:
|
||||
results['token_c'] = entries.pop(0)
|
||||
|
||||
results['token_a'] = token_a
|
||||
results['token_b'] = token_b
|
||||
results['token_c'] = token_c
|
||||
results['channels'] = channels
|
||||
except IndexError:
|
||||
# We're done
|
||||
results['token_c'] = None
|
||||
|
||||
# assign remaining entries to the channels we wish to notify
|
||||
results['targets'] = entries
|
||||
|
||||
# Support the 'to' variable so that we can support rooms this way too
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += [x for x in filter(
|
||||
bool, CHANNEL_LIST_DELIM.split(
|
||||
NotifySlack.unquote(results['qsd']['to'])))]
|
||||
|
||||
# Get Image
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support https://hooks.slack.com/services/TOKEN_A/TOKEN_B/TOKEN_C
|
||||
"""
|
||||
|
||||
result = re.match(
|
||||
r'^https?://hooks\.slack\.com/services/'
|
||||
r'(?P<token_a>[A-Z0-9]{9})/'
|
||||
r'(?P<token_b>[A-Z0-9]{9})/'
|
||||
r'(?P<token_c>[A-Z0-9]{24})/?'
|
||||
r'(?P<args>\?[.+])?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifySlack.parse_url(
|
||||
'{schema}://{token_a}/{token_b}/{token_c}/{args}'.format(
|
||||
schema=NotifySlack.secure_protocol,
|
||||
token_a=result.group('token_a'),
|
||||
token_b=result.group('token_b'),
|
||||
token_c=result.group('token_c'),
|
||||
args='' if not result.group('args')
|
||||
else result.group('args')))
|
||||
|
||||
return None
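# --- Editorial note, not part of this diff; tokens are placeholders. Because
# parse_native_url() is defined, a raw incoming-webhook URL can be handed to
# Apprise directly and is rewritten into the slack:// form shown above.
import apprise

apobj = apprise.Apprise()
apobj.add('https://hooks.slack.com/services/'
          'T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX')
apobj.notify(body='Native webhook URL example')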
|
||||
|
|
|
@@ -1,246 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Stride Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
# When you sign-up with stride.com they'll ask if you want to join a channel
|
||||
# or create your own.
|
||||
#
|
||||
# Once you get set up, you'll have the option of creating a channel.
|
||||
#
|
||||
# Now you'll want to connect apprise up. To do this, you need to go to
|
||||
# the App Manager and choose to 'Connect your own app'. It will get you
|
||||
# to provide a 'token name' which can be whatever you want. Call it
|
||||
# 'Apprise' if you want (it really doesn't matter) and then click the
|
||||
# 'Create' button.
|
||||
#
|
||||
# When it completes it will generate a token that looks something like:
|
||||
# HQFtq4pF8rKFOlKTm9Th
|
||||
#
|
||||
# This will become your AUTH_TOKEN
|
||||
#
|
||||
# It will also provide you a conversation URL that might look like:
|
||||
# https://api.atlassian.com/site/ce171c45-79ae-4fec-a73d-5a4b7a322872/\
|
||||
# conversation/a54a80b3-eaad-4564-9a3a-f6653bcfb100/message
|
||||
#
|
||||
# Simplified, it looks like this:
|
||||
# https://api.atlassian.com/site/CLOUD_ID/conversation/CONVO_ID/message
|
||||
#
|
||||
# This plugin will simply work using the url of:
|
||||
# stride://AUTH_TOKEN/CLOUD_ID/CONVO_ID
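# --- Editorial note, not part of this diff: the Stride plugin is removed by
# this change; the mapping above is kept only for reference. Using the example
# token and conversation URL from the comments above, the Apprise URL would be:
#     stride://HQFtq4pF8rKFOlKTm9Th/ce171c45-79ae-4fec-a73d-5a4b7a322872/a54a80b3-eaad-4564-9a3a-f6653bcfb100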
|
||||
#
|
||||
import requests
|
||||
import re
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
# A Simple UUID4 checker
|
||||
IS_VALID_TOKEN = re.compile(
|
||||
r'([0-9a-f]{8})-*([0-9a-f]{4})-*(4[0-9a-f]{3})-*'
|
||||
r'([89ab][0-9a-f]{3})-*([0-9a-f]{12})', re.I)
|
||||
|
||||
|
||||
class NotifyStride(NotifyBase):
|
||||
"""
|
||||
A wrapper to Stride Notifications
|
||||
|
||||
"""
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Stride'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://www.stride.com/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'stride'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_stride'
|
||||
|
||||
# Stride Webhook
|
||||
notify_url = 'https://api.atlassian.com/site/{cloud_id}/' \
|
||||
'conversation/{convo_id}/message'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_256
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 2000
|
||||
|
||||
def __init__(self, auth_token, cloud_id, convo_id, **kwargs):
|
||||
"""
|
||||
Initialize Stride Object
|
||||
|
||||
"""
|
||||
super(NotifyStride, self).__init__(**kwargs)
|
||||
|
||||
if not auth_token:
|
||||
raise TypeError(
|
||||
'An invalid Authorization token was specified.'
|
||||
)
|
||||
|
||||
if not cloud_id:
|
||||
raise TypeError('No Cloud ID was specified.')
|
||||
|
||||
cloud_id_re = IS_VALID_TOKEN.match(cloud_id)
|
||||
if cloud_id_re is None:
|
||||
raise TypeError('The specified Cloud ID is not a valid UUID.')
|
||||
|
||||
if not convo_id:
|
||||
raise TypeError('No Conversation ID was specified.')
|
||||
|
||||
convo_id_re = IS_VALID_TOKEN.match(convo_id)
|
||||
if convo_id_re is None:
|
||||
raise TypeError(
|
||||
'The specified Conversation ID is not a valid UUID.')
|
||||
|
||||
# Store our validated token
|
||||
self.cloud_id = '{0}-{1}-{2}-{3}-{4}'.format(
|
||||
cloud_id_re.group(0),
|
||||
cloud_id_re.group(1),
|
||||
cloud_id_re.group(2),
|
||||
cloud_id_re.group(3),
|
||||
cloud_id_re.group(4),
|
||||
)
|
||||
|
||||
# Store our validated token
|
||||
self.convo_id = '{0}-{1}-{2}-{3}-{4}'.format(
|
||||
convo_id_re.group(0),
|
||||
convo_id_re.group(1),
|
||||
convo_id_re.group(2),
|
||||
convo_id_re.group(3),
|
||||
convo_id_re.group(4),
|
||||
)
|
||||
|
||||
self.auth_token = auth_token
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Stride Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Authorization': 'Bearer {auth_token}'.format(
|
||||
auth_token=self.auth_token),
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# Prepare JSON Object
|
||||
payload = {
|
||||
"body": {
|
||||
"version": 1,
|
||||
"type": "doc",
|
||||
"content": [{
|
||||
"type": "paragraph",
|
||||
"content": [{
|
||||
"type": "text",
|
||||
"text": body,
|
||||
}],
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
# Construct Notify URL
|
||||
notify_url = self.notify_url.format(
|
||||
cloud_id=self.cloud_id,
|
||||
convo_id=self.convo_id,
|
||||
)
|
||||
|
||||
self.logger.debug('Stride POST URL: %s (cert_verify=%r)' % (
|
||||
notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Stride Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code not in (
|
||||
requests.codes.ok, requests.codes.no_content):
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Stride notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Stride notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Stride notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Stride '
|
||||
'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
Syntax:
|
||||
stride://auth_token/cloud_id/convo_id
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Store our Authentication Token
|
||||
auth_token = results['host']
|
||||
|
||||
# Now fetch our tokens
|
||||
try:
|
||||
(ta, tb) = [x for x in filter(bool, NotifyBase.split_path(
|
||||
results['fullpath']))][0:2]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
ta = None
|
||||
tb = None
|
||||
|
||||
results['cloud_id'] = ta
|
||||
results['convo_id'] = tb
|
||||
results['auth_token'] = auth_token
|
||||
|
||||
return results
|
|
@@ -1,20 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Telegram Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# To use this plugin, you need to first access https://api.telegram.org
|
||||
# You need to create a bot and acquire its Token Identifier (bot_token)
|
||||
|
@@ -38,7 +45,7 @@
|
|||
# api key that the BotFather gave you.
|
||||
#
|
||||
# For example, a url might look like this:
|
||||
# https://api.telegram.org/bot123456789:alphanumeri_characters/getMe
|
||||
# https://api.telegram.org/bot123456789:alphanumeric_characters/getMe
|
||||
#
|
||||
# Development API Reference::
|
||||
# - https://core.telegram.org/bots/api
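# --- Editorial sketch, not part of this diff; token and chat id are
# placeholders. The BotFather token (the "123456789:..." value above) becomes
# the first element of the tgram:// URL, followed by one or more chat ids.
import apprise

apobj = apprise.Apprise()
apobj.add('tgram://123456789:abcdefgh_placeholder_token/12315944')
apobj.notify(title='Example', body='Hello from Apprise')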
|
||||
|
@@ -51,10 +58,12 @@ from json import loads
|
|||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
from ..common import NotifyFormat
|
||||
from ..utils import parse_bool
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
TELEGRAM_IMAGE_XY = NotifyImageSize.XY_256
|
||||
|
||||
|
@@ -73,9 +82,6 @@ IS_CHAT_ID_RE = re.compile(
|
|||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Used to break path apart into list of chat identifiers
|
||||
CHAT_ID_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
|
||||
|
||||
|
||||
class NotifyTelegram(NotifyBase):
|
||||
"""
|
||||
|
@@ -102,8 +108,55 @@ class NotifyTelegram(NotifyBase):
|
|||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 4096
|
||||
|
||||
def __init__(self, bot_token, chat_ids, detect_bot_owner=True,
|
||||
include_image=True, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{bot_token}',
|
||||
'{schema}://{bot_token}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'bot_token': {
|
||||
'name': _('Bot Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'(bot)?[0-9]+:[a-z0-9_-]+', 'i'),
|
||||
},
|
||||
'target_user': {
|
||||
'name': _('Target Chat ID'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
'regex': (r'((-?[0-9]{1,32})|([a-z_-][a-z0-9_-]+))', 'i'),
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
'detect': {
|
||||
'name': _('Detect Bot Owner'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'detect_owner',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, bot_token, targets, detect_owner=True,
|
||||
include_image=False, **kwargs):
|
||||
"""
|
||||
Initialize Telegram Object
|
||||
"""
|
||||
|
@@ -114,42 +167,38 @@ class NotifyTelegram(NotifyBase):
|
|||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
self.logger.warning('No Bot Token was specified.')
|
||||
raise TypeError('No Bot Token was specified.')
|
||||
err = 'No Bot Token was specified.'
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
result = VALIDATE_BOT_TOKEN.match(self.bot_token)
|
||||
if not result:
|
||||
raise TypeError(
|
||||
'The Bot Token specified (%s) is invalid.' % bot_token,
|
||||
)
|
||||
err = 'The Bot Token specified (%s) is invalid.' % bot_token
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
# Store our Bot Token
|
||||
self.bot_token = result.group('key')
|
||||
|
||||
if compat_is_basestring(chat_ids):
|
||||
self.chat_ids = [x for x in filter(bool, CHAT_ID_LIST_DELIM.split(
|
||||
chat_ids,
|
||||
))]
|
||||
# Parse our list
|
||||
self.targets = parse_list(targets)
|
||||
|
||||
elif isinstance(chat_ids, (set, tuple, list)):
|
||||
self.chat_ids = list(chat_ids)
|
||||
|
||||
else:
|
||||
self.chat_ids = list()
|
||||
self.detect_owner = detect_owner
|
||||
|
||||
if self.user:
|
||||
# Treat this as a channel too
|
||||
self.chat_ids.append(self.user)
|
||||
self.targets.append(self.user)
|
||||
|
||||
if len(self.chat_ids) == 0 and detect_bot_owner:
|
||||
if len(self.targets) == 0 and self.detect_owner:
|
||||
_id = self.detect_bot_owner()
|
||||
if _id:
|
||||
# Store our id
|
||||
self.chat_ids = [str(_id)]
|
||||
self.targets.append(str(_id))
|
||||
|
||||
if len(self.chat_ids) == 0:
|
||||
self.logger.warning('No chat_id(s) were specified.')
|
||||
raise TypeError('No chat_id(s) were specified.')
|
||||
if len(self.targets) == 0:
|
||||
err = 'No chat_id(s) were specified.'
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
|
@@ -169,15 +218,25 @@ class NotifyTelegram(NotifyBase):
|
|||
'sendPhoto'
|
||||
)
|
||||
|
||||
# Acquire our image path if configured to do so; we don't bother
|
||||
# checking to see if self.include_image is set here because the
|
||||
# send_image() function itself (this function) checks this flag
|
||||
# already
|
||||
path = self.image_path(notify_type)
|
||||
|
||||
if not path:
|
||||
# No image to send
|
||||
self.logger.debug(
|
||||
'Telegram Image does not exist for %s' % (
|
||||
notify_type))
|
||||
return None
|
||||
'Telegram Image does not exist for %s' % (notify_type))
|
||||
|
||||
files = {'photo': (basename(path), open(path), 'rb')}
|
||||
# No need to fail; we may have been configured this way through
|
||||
# the apprise.AssetObject()
|
||||
return True
|
||||
|
||||
# Configure file payload (for upload)
|
||||
files = {
|
||||
'photo': (basename(path), open(path), 'rb'),
|
||||
}
|
||||
|
||||
payload = {
|
||||
'chat_id': chat_id,
|
||||
|
@@ -197,19 +256,18 @@ class NotifyTelegram(NotifyBase):
|
|||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to post Telegram Image: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyTelegram.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram Image. (error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram Image: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
return False
|
||||
|
||||
except requests.RequestException as e:
|
||||
|
@@ -250,38 +308,36 @@ class NotifyTelegram(NotifyBase):
|
|||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyTelegram.http_response_code_lookup(r.status_code)
|
||||
|
||||
try:
|
||||
# Try to get the error message if we can:
|
||||
error_msg = loads(r.content)['description']
|
||||
|
||||
except:
|
||||
except Exception:
|
||||
error_msg = None
|
||||
|
||||
try:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user: (%s) %s.' % (
|
||||
r.status_code, error_msg))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user. (error=%s).' % (
|
||||
'Failed to detect the Telegram user: (%s) %s.' % (
|
||||
r.status_code, error_msg))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to detect the Telegram user: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
return 0
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A connection error occured detecting Telegram User.')
|
||||
'A connection error occured detecting the Telegram User.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return 0
|
||||
|
||||
|
@@ -328,7 +384,7 @@ class NotifyTelegram(NotifyBase):
|
|||
|
||||
return 0
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Telegram Notification
|
||||
"""
|
||||
|
@@ -349,29 +405,58 @@ class NotifyTelegram(NotifyBase):
|
|||
|
||||
payload = {}
|
||||
|
||||
# HTML Spaces (&nbsp;) and tabs (&emsp;) aren't supported
|
||||
# See https://core.telegram.org/bots/api#html-style
|
||||
title = re.sub('&nbsp;?', ' ', title, re.I)
|
||||
body = re.sub('&nbsp;?', ' ', body, re.I)
|
||||
# Tabs become 3 spaces
|
||||
title = re.sub('&emsp;?', '   ', title, re.I)
|
||||
body = re.sub('&emsp;?', '   ', body, re.I)
|
||||
# Prepare Email Message
|
||||
if self.notify_format == NotifyFormat.MARKDOWN:
|
||||
payload['parse_mode'] = 'MARKDOWN'
|
||||
|
||||
# HTML
|
||||
title = NotifyBase.escape_html(title, whitespace=False)
|
||||
body = NotifyBase.escape_html(body, whitespace=False)
|
||||
else:
|
||||
# Either TEXT or HTML; if TEXT we'll make it HTML
|
||||
payload['parse_mode'] = 'HTML'
|
||||
|
||||
payload['parse_mode'] = 'HTML'
|
||||
# HTML Spaces (&nbsp;) and tabs (&emsp;) aren't supported
|
||||
# See https://core.telegram.org/bots/api#html-style
|
||||
body = re.sub('&nbsp;?', ' ', body, re.I)
|
||||
|
||||
payload['text'] = '<b>%s</b>\r\n%s' % (
|
||||
title,
|
||||
body,
|
||||
)
|
||||
# Tabs become 3 spaces
|
||||
body = re.sub('&emsp;?', '   ', body, re.I)
|
||||
|
||||
if title:
|
||||
# HTML Spaces (&nbsp;) and tabs (&emsp;) aren't supported
|
||||
# See https://core.telegram.org/bots/api#html-style
|
||||
title = re.sub('&nbsp;?', ' ', title, re.I)
|
||||
|
||||
# Tabs become 3 spaces
|
||||
title = re.sub('&emsp;?', '   ', title, re.I)
|
||||
|
||||
# HTML
|
||||
title = NotifyTelegram.escape_html(title, whitespace=False)
|
||||
|
||||
# HTML
|
||||
body = NotifyTelegram.escape_html(body, whitespace=False)
|
||||
|
||||
if title and self.notify_format == NotifyFormat.TEXT:
|
||||
# Text HTML Formatting
|
||||
payload['text'] = '<b>%s</b>\r\n%s' % (
|
||||
title,
|
||||
body,
|
||||
)
|
||||
|
||||
elif title:
|
||||
# Already HTML; trust developer has wrapped
|
||||
# the title appropriately
|
||||
payload['text'] = '%s\r\n%s' % (
|
||||
title,
|
||||
body,
|
||||
)
|
||||
|
||||
else:
|
||||
# Assign the body
|
||||
payload['text'] = body
|
||||
|
||||
# Create a copy of the chat_ids list
|
||||
chat_ids = list(self.chat_ids)
|
||||
while len(chat_ids):
|
||||
chat_id = chat_ids.pop(0)
|
||||
targets = list(self.targets)
|
||||
while len(targets):
|
||||
chat_id = targets.pop(0)
|
||||
chat_id = IS_CHAT_ID_RE.match(chat_id)
|
||||
if not chat_id:
|
||||
self.logger.warning(
|
||||
|
@@ -379,6 +464,8 @@ class NotifyTelegram(NotifyBase):
|
|||
chat_id,
|
||||
)
|
||||
)
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
|
@@ -390,14 +477,14 @@ class NotifyTelegram(NotifyBase):
|
|||
# ID
|
||||
payload['chat_id'] = int(chat_id.group('idno'))
|
||||
|
||||
# Always call throttle before any remote server i/o is made;
|
||||
# Telegram throttles to occur before sending the image so that
|
||||
# content can arrive together.
|
||||
self.throttle()
|
||||
|
||||
if self.include_image is True:
|
||||
# Send an image
|
||||
if self.send_image(
|
||||
payload['chat_id'], notify_type) is not None:
|
||||
# We sent a post (whether we were successful or not)
|
||||
# we still hit the remote server... just throttle
|
||||
# before our next hit server query
|
||||
self.throttle()
|
||||
self.send_image(payload['chat_id'], notify_type)
|
||||
|
||||
self.logger.debug('Telegram POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
|
@@ -414,40 +501,29 @@ class NotifyTelegram(NotifyBase):
|
|||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyTelegram.http_response_code_lookup(r.status_code)
|
||||
|
||||
try:
|
||||
# Try to get the error message if we can:
|
||||
error_msg = loads(r.content)['description']
|
||||
|
||||
except:
|
||||
except Exception:
|
||||
error_msg = None
|
||||
|
||||
try:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification: (%s) %s.' % (
|
||||
payload['chat_id'],
|
||||
r.status_code, error_msg))
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram notification to {}: '
|
||||
'{}, error={}.'.format(
|
||||
payload['chat_id'],
|
||||
error_msg if error_msg else status_str,
|
||||
r.status_code))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
payload['chat_id'],
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification (error=%s).' % (
|
||||
payload['chat_id'], r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Telegram notification.')
|
||||
|
@@ -458,15 +534,36 @@ class NotifyTelegram(NotifyBase):
|
|||
payload['chat_id']) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
finally:
|
||||
if len(chat_ids):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': self.include_image,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
'detect': 'yes' if self.detect_owner else 'no',
|
||||
}
|
||||
|
||||
# No need to check the user token because the user automatically gets
|
||||
# appended into the list of chat ids
|
||||
return '{schema}://{bot_token}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
bot_token=NotifyTelegram.quote(self.bot_token, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifyTelegram.quote('@{}'.format(x)) for x in self.targets]),
|
||||
args=NotifyTelegram.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@@ -474,21 +571,21 @@ class NotifyTelegram(NotifyBase):
|
|||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
# This is a dirty hack; but it's the only work around to
|
||||
# tgram:// messages since the bot_token has a colon in it.
|
||||
# It invalidates an normal URL.
|
||||
# This is a dirty hack; but it's the only work around to tgram://
|
||||
# messages since the bot_token has a colon in it. It invalidates a
|
||||
# normal URL.
|
||||
|
||||
# This hack searches for this bogus URL and corrects it
|
||||
# so we can properly load it further down. The other
|
||||
# alternative is to ask users to actually change the colon
|
||||
# into a slash (which will work too), but it's more likely
|
||||
# to cause confusion... So this is the next best thing
|
||||
# This hack searches for this bogus URL and corrects it so we can
|
||||
# properly load it further down. The other alternative is to ask users
|
||||
# to actually change the colon into a slash (which will work too), but
|
||||
# it's more likely to cause confusion... So this is the next best thing
|
||||
# we also check for %3A (in case the URL is encoded) as %3A == :
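# Editorial illustration, not part of this diff (placeholder token): the
# correction below rewrites
#     tgram://123456789:placeholder_token/12315944
# into
#     tgram://123456789/placeholder_token/12315944
# so the colon inside the bot token no longer breaks standard URL parsing.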
|
||||
try:
|
||||
tgram = re.match(
|
||||
r'(?P<protocol>%s://)(bot)?(?P<prefix>([a-z0-9_-]+)'
|
||||
r'(:[a-z0-9_-]+)?@)?(?P<btoken_a>[0-9]+):+'
|
||||
r'(?P<remaining>.*)$' % NotifyTelegram.secure_protocol,
|
||||
url, re.I)
|
||||
r'(?P<protocol>{schema}://)(bot)?(?P<prefix>([a-z0-9_-]+)'
|
||||
r'(:[a-z0-9_-]+)?@)?(?P<btoken_a>[0-9]+)(:|%3A)+'
|
||||
r'(?P<remaining>.*)$'.format(
|
||||
schema=NotifyTelegram.secure_protocol), url, re.I)
|
||||
|
||||
except (TypeError, AttributeError):
|
||||
# url is bad; force tgram to be None
|
||||
|
@@ -500,14 +597,11 @@ class NotifyTelegram(NotifyBase):
|
|||
|
||||
if tgram.group('prefix'):
|
||||
# Try again
|
||||
results = NotifyBase.parse_url(
|
||||
'%s%s%s/%s' % (
|
||||
tgram.group('protocol'),
|
||||
tgram.group('prefix'),
|
||||
tgram.group('btoken_a'),
|
||||
tgram.group('remaining'),
|
||||
),
|
||||
)
|
||||
results = NotifyBase.parse_url('%s%s%s/%s' % (
|
||||
tgram.group('protocol'),
|
||||
tgram.group('prefix'),
|
||||
tgram.group('btoken_a'),
|
||||
tgram.group('remaining')))
|
||||
|
||||
else:
|
||||
# Try again
|
||||
|
@@ -520,26 +614,34 @@ class NotifyTelegram(NotifyBase):
|
|||
)
|
||||
|
||||
# The first token is stored in the hostname
|
||||
bot_token_a = results['host']
|
||||
bot_token_a = NotifyTelegram.unquote(results['host'])
|
||||
|
||||
# Get a nice unquoted list of path entries
|
||||
entries = NotifyTelegram.split_path(results['fullpath'])
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
bot_token_b = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][0]
|
||||
bot_token_b = entries.pop(0)
|
||||
|
||||
bot_token = '%s:%s' % (bot_token_a, bot_token_b)
|
||||
|
||||
chat_ids = ','.join(
|
||||
[x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][1:])
|
||||
# Store our chat ids (as these are the remaining entries)
|
||||
results['targets'] = entries
|
||||
|
||||
# Support the 'to' variable so that we can support rooms this way too
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyTelegram.parse_list(results['qsd']['to'])
|
||||
|
||||
# Store our bot token
|
||||
results['bot_token'] = bot_token
|
||||
|
||||
# Store our chat ids
|
||||
results['chat_ids'] = chat_ids
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', False))
|
||||
|
||||
# Include images with our message
|
||||
results['detect_owner'] = \
|
||||
parse_bool(results['qsd'].get('detect', True))
|
||||
|
||||
return results
|
||||
|
|
|
@@ -1,180 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (Super) Toasty Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Used to break apart list of potential devices by their delimiter
|
||||
# into a usable list.
|
||||
DEVICES_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
|
||||
class NotifyToasty(NotifyBase):
|
||||
"""
|
||||
A wrapper for Toasty Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Toasty'
|
||||
|
||||
# The services URL
|
||||
service_url = 'http://supertoasty.com/'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'toasty'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_toasty'
|
||||
|
||||
# Toasty uses the http protocol with JSON requests
|
||||
notify_url = 'http://api.supertoasty.com/notify/'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
def __init__(self, devices, **kwargs):
|
||||
"""
|
||||
Initialize Toasty Object
|
||||
"""
|
||||
super(NotifyToasty, self).__init__(**kwargs)
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
self.devices = [x for x in filter(bool, DEVICES_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
|
||||
elif isinstance(devices, (set, tuple, list)):
|
||||
self.devices = devices
|
||||
|
||||
else:
|
||||
self.devices = list()
|
||||
|
||||
if len(devices) == 0:
|
||||
raise TypeError('You must specify at least 1 device.')
|
||||
|
||||
if not self.user:
|
||||
raise TypeError('You must specify a username.')
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Toasty Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'multipart/form-data',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Create a copy of the devices list
|
||||
devices = list(self.devices)
|
||||
while len(devices):
|
||||
device = devices.pop(0)
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'sender': NotifyBase.quote(self.user),
|
||||
'title': NotifyBase.quote(title),
|
||||
'text': NotifyBase.quote(body),
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
payload['image'] = image_url
|
||||
|
||||
# URL to transmit content via
|
||||
url = '%s%s' % (self.notify_url, device)
|
||||
|
||||
self.logger.debug('Toasty POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Toasty Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.get(
|
||||
url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Toasty:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
device,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Toasty:%s '
|
||||
'notification (error=%s).' % (
|
||||
device,
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
has_error = True
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent Toasty notification to %s.' % device)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Toasty:%s ' % (
|
||||
device) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
if len(devices):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
return not has_error
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
devices = NotifyBase.unquote(results['fullpath'])
|
||||
|
||||
# Store our devices
|
||||
results['devices'] = '%s/%s' % (results['host'], devices)
|
||||
|
||||
return results
|
450
libs/apprise/plugins/NotifyTwilio.py
Normal file
|
@@ -0,0 +1,450 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# To use this service you will need a Twilio account, from which you can get your
|
||||
# AUTH_TOKEN and ACCOUNT SID right from your console/dashboard at:
|
||||
# https://www.twilio.com/console
|
||||
#
|
||||
# You will also need to send the SMS From a phone number or account id name.
|
||||
|
||||
# This is identified as the source (or where the SMS message will originate
|
||||
# from). Activated phone numbers can be found on your dashboard here:
|
||||
# - https://www.twilio.com/console/phone-numbers/incoming
|
||||
#
|
||||
# Alternatively, you can open your wallet and request a different Twilio
|
||||
# phone # from:
|
||||
# https://www.twilio.com/console/phone-numbers/search
|
||||
#
|
||||
# or consider purchasing a short-code from here:
|
||||
# https://www.twilio.com/docs/glossary/what-is-a-short-code
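# --- Editorial sketch, not part of this diff; all values are placeholders.
# The Account SID, Auth Token and source number combine into the twilio://
# URL handled by this plugin: twilio://{sid}:{token}@{from_no}/{target_no}.
import apprise

apobj = apprise.Apprise()
apobj.add('twilio://AC00000000000000000000000000000000'
          ':00000000000000000000000000000000@15551234567/15557654321')
apobj.notify(body='Example SMS body')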
|
||||
#
|
||||
import re
|
||||
import requests
|
||||
from json import loads
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
# Used to validate your personal access apikey
|
||||
VALIDATE_AUTH_TOKEN = re.compile(r'^[a-f0-9]{32}$', re.I)
|
||||
VALIDATE_ACCOUNT_SID = re.compile(r'^AC[a-f0-9]{32}$', re.I)
|
||||
|
||||
# Some Phone Number Detection
|
||||
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
|
||||
|
||||
|
||||
class NotifyTwilio(NotifyBase):
|
||||
"""
|
||||
A wrapper for Twilio Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Twilio'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://www.twilio.com/'
|
||||
|
||||
# All pushover requests are secure
|
||||
secure_protocol = 'twilio'
|
||||
|
||||
# Allow 300 requests per minute.
|
||||
# 60/300 = 0.2
|
||||
request_rate_per_sec = 0.20
|
||||
|
||||
# the number of seconds undelivered messages should linger for
|
||||
# in the Twilio queue
|
||||
validity_period = 14400
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twilio'
|
||||
|
||||
# Twilio uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.twilio.com/2010-04-01/Accounts/' \
|
||||
'{sid}/Messages.json'
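# Editorial illustration, not part of this diff (placeholder SID): for an
# account sid of 'AC00000000000000000000000000000000', notify_url resolves to
#     https://api.twilio.com/2010-04-01/Accounts/AC00000000000000000000000000000000/Messages.json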
|
||||
|
||||
# The maximum length of the body
|
||||
body_maxlen = 140
|
||||
|
||||
# A title can not be used for SMS Messages. Setting this to zero will
|
||||
# cause any title (if defined) to get placed into the message body.
|
||||
title_maxlen = 0
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{account_sid}:{auth_token}@{from_phone}',
|
||||
'{schema}://{account_sid}:{auth_token}@{from_phone}/{targets}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'account_sid': {
|
||||
'name': _('Account SID'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'AC[a-f0-9]{32}', 'i'),
|
||||
},
|
||||
'auth_token': {
|
||||
'name': _('Auth Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[a-f0-9]{32}', 'i'),
|
||||
},
|
||||
'from_phone': {
|
||||
'name': _('From Phone No'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
'regex': (r'\+?[0-9\s)(+-]+', 'i'),
|
||||
'map_to': 'source',
|
||||
},
|
||||
'target_phone': {
|
||||
'name': _('Target Phone No'),
|
||||
'type': 'string',
|
||||
'prefix': '+',
|
||||
'regex': (r'[0-9\s)(+-]+', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'short_code': {
|
||||
'name': _('Target Short Code'),
|
||||
'type': 'string',
|
||||
'regex': (r'[0-9]{5,6}', 'i'),
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
'from': {
|
||||
'alias_of': 'from_phone',
|
||||
},
|
||||
'sid': {
|
||||
'alias_of': 'account_sid',
|
||||
},
|
||||
'token': {
|
||||
'alias_of': 'auth_token',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, account_sid, auth_token, source, targets=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Initialize Twilio Object
|
||||
"""
|
||||
super(NotifyTwilio, self).__init__(**kwargs)
|
||||
|
||||
try:
|
||||
# The Account SID associated with the account
|
||||
self.account_sid = account_sid.strip()
|
||||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
msg = 'No Account SID was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_ACCOUNT_SID.match(self.account_sid):
|
||||
msg = 'The Account SID specified ({}) is invalid.' \
|
||||
.format(account_sid)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
try:
|
||||
# The authentication token associated with the account
|
||||
self.auth_token = auth_token.strip()
|
||||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
msg = 'No Auth Token was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_AUTH_TOKEN.match(self.auth_token):
|
||||
msg = 'The Auth Token specified ({}) is invalid.' \
|
||||
.format(auth_token)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The Source Phone # and/or short-code
|
||||
self.source = source
|
||||
|
||||
if not IS_PHONE_NO.match(self.source):
|
||||
msg = 'The Account (From) Phone # or Short-code specified ' \
|
||||
'({}) is invalid.'.format(source)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Tidy source
|
||||
self.source = re.sub(r'[^\d]+', '', self.source)
|
||||
|
||||
if len(self.source) < 11 or len(self.source) > 14:
|
||||
# https://www.twilio.com/docs/glossary/what-is-a-short-code
|
||||
# A short code is a special 5 or 6 digit telephone number
|
||||
# that's shorter than a full phone number.
|
||||
if len(self.source) not in (5, 6):
|
||||
msg = 'The Account (From) Phone # specified ' \
|
||||
'({}) is invalid.'.format(source)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# else... it is a short code so we're okay
|
||||
|
||||
else:
|
||||
# We're dealing with a phone number; so we need to just
|
||||
# place a plus symbol at the end of it
|
||||
self.source = '+{}'.format(self.source)
|
||||
|
||||
# Parse our targets
|
||||
self.targets = list()
|
||||
|
||||
for target in parse_list(targets):
|
||||
# Validate targets and drop bad ones:
|
||||
result = IS_PHONE_NO.match(target)
|
||||
if result:
|
||||
# Further check our phone # for its digit count
|
||||
# if it's less than 10, then we can assume it's
|
||||
# a poorly specified phone no and spit a warning
|
||||
result = ''.join(re.findall(r'\d+', result.group('phone')))
|
||||
if len(result) < 11 or len(result) > 14:
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'({}) specified.'.format(target),
|
||||
)
|
||||
continue
|
||||
|
||||
# store valid phone number
|
||||
self.targets.append('+{}'.format(result))
|
||||
continue
|
||||
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'({}) specified.'.format(target),
|
||||
)
|
||||
|
||||
if len(self.targets) == 0:
|
||||
msg = 'There are no valid targets identified to notify.'
|
||||
if len(self.source) in (5, 6):
|
||||
# raise a warning since we're a short-code. We need
|
||||
# a number to message
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Twilio Notification
|
||||
"""
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Prepare our headers
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Accept': 'application/json',
|
||||
}
|
||||
|
||||
# Prepare our payload
|
||||
payload = {
|
||||
'Body': body,
|
||||
'From': self.source,
|
||||
|
||||
# The To gets populated in the loop below
|
||||
'To': None,
|
||||
}
|
||||
|
||||
# Prepare our Twilio URL
|
||||
url = self.notify_url.format(sid=self.account_sid)
|
||||
|
||||
# Create a copy of the targets list
|
||||
targets = list(self.targets)
|
||||
|
||||
# Set up our authentication
|
||||
auth = (self.account_sid, self.auth_token)
|
||||
|
||||
if len(targets) == 0:
|
||||
# No targets specified; use our own phone no
|
||||
targets.append(self.source)
|
||||
|
||||
while len(targets):
|
||||
# Get our target to notify
|
||||
target = targets.pop(0)
|
||||
|
||||
# Prepare our user
|
||||
payload['To'] = target
|
||||
|
||||
# Some Debug Logging
|
||||
self.logger.debug('Twilio POST URL: {} (cert_verify={})'.format(
|
||||
url, self.verify_certificate))
|
||||
self.logger.debug('Twilio Payload: {}' .format(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
auth=auth,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code not in (
|
||||
requests.codes.created, requests.codes.ok):
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
# set up our status code to use
|
||||
status_code = r.status_code
|
||||
|
||||
try:
|
||||
# Update our status response if we can
|
||||
json_response = loads(r.content)
|
||||
status_code = json_response.get('code', status_code)
|
||||
status_str = json_response.get('message', status_str)
|
||||
|
||||
except (AttributeError, ValueError):
|
||||
# could not parse JSON response... just use the status
|
||||
# we already have.
|
||||
|
||||
# AttributeError means r.content was None
|
||||
pass
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Twilio notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
target,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent Twilio notification to {}.'.format(target))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Twilio:%s ' % (
|
||||
target) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{sid}:{token}@{source}/{targets}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
sid=self.account_sid,
|
||||
token=self.auth_token,
|
||||
source=NotifyTwilio.quote(self.source, safe=''),
|
||||
targets='/'.join(
|
||||
[NotifyTwilio.quote(x, safe='') for x in self.targets]),
|
||||
args=NotifyTwilio.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url, verify_host=False)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Get our entries; split_path() looks after unquoting content for us
|
||||
# by default
|
||||
results['targets'] = NotifyTwilio.split_path(results['fullpath'])
|
||||
|
||||
# The hostname is our source number
|
||||
results['source'] = NotifyTwilio.unquote(results['host'])
|
||||
|
||||
# Get our account_side and auth_token from the user/pass config
|
||||
results['account_sid'] = NotifyTwilio.unquote(results['user'])
|
||||
results['auth_token'] = NotifyTwilio.unquote(results['password'])
|
||||
|
||||
# Auth Token
|
||||
if 'token' in results['qsd'] and len(results['qsd']['token']):
|
||||
# Extract the account sid from an argument
|
||||
results['auth_token'] = \
|
||||
NotifyTwilio.unquote(results['qsd']['token'])
|
||||
|
||||
# Account SID
|
||||
if 'sid' in results['qsd'] and len(results['qsd']['sid']):
|
||||
# Extract the account sid from an argument
|
||||
results['account_sid'] = \
|
||||
NotifyTwilio.unquote(results['qsd']['sid'])
|
||||
|
||||
# Support the 'from' and 'source' variable so that we can support
|
||||
# targets this way too.
|
||||
# The 'from' makes it easier to use yaml configuration
|
||||
if 'from' in results['qsd'] and len(results['qsd']['from']):
|
||||
results['source'] = \
|
||||
NotifyTwilio.unquote(results['qsd']['from'])
|
||||
if 'source' in results['qsd'] and len(results['qsd']['source']):
|
||||
results['source'] = \
|
||||
NotifyTwilio.unquote(results['qsd']['source'])
|
||||
|
||||
# Support the 'to' variable so that we can support targets this way too
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyTwilio.parse_list(results['qsd']['to'])
|
||||
|
||||
return results
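The parser above will accept the Account SID, Auth Token, source number and targets either in the URL authority/path or through the sid=, token=, from= and to= query arguments. A minimal usage sketch, assuming the standard Apprise object API; every credential below is a placeholder:

import apprise

# Both forms below should resolve to the same NotifyTwilio configuration.
apobj = apprise.Apprise()
apobj.add('twilio://AC0123456789abcdef0123456789abcdef:'
          '0123456789abcdef0123456789abcdef@15551234567/15557654321')
apobj.add('twilio://15551234567/15557654321'
          '?sid=AC0123456789abcdef0123456789abcdef'
          '&token=0123456789abcdef0123456789abcdef')
apobj.notify(body='Test notification')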
|
|
@@ -1,161 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Twitter Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
from . import tweepy
|
||||
from ..NotifyBase import NotifyBase
|
||||
|
||||
|
||||
class NotifyTwitter(NotifyBase):
|
||||
"""
|
||||
A wrapper to Twitter Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Twitter'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://twitter.com/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'tweet'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twitter'
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
# This is used during a Private DM Message Size (not Public Tweets
|
||||
# which are limited to 240 characters)
|
||||
body_maxlen = 4096
|
||||
|
||||
def __init__(self, ckey, csecret, akey, asecret, **kwargs):
|
||||
"""
|
||||
Initialize Twitter Object
|
||||
|
||||
"""
|
||||
super(NotifyTwitter, self).__init__(**kwargs)
|
||||
|
||||
if not ckey:
|
||||
raise TypeError(
|
||||
'An invalid Consumer API Key was specified.'
|
||||
)
|
||||
|
||||
if not csecret:
|
||||
raise TypeError(
|
||||
'An invalid Consumer Secret API Key was specified.'
|
||||
)
|
||||
|
||||
if not akey:
|
||||
raise TypeError(
|
||||
'An invalid Acess Token API Key was specified.'
|
||||
)
|
||||
|
||||
if not asecret:
|
||||
raise TypeError(
|
||||
'An invalid Acess Token Secret API Key was specified.'
|
||||
)
|
||||
|
||||
if not self.user:
|
||||
raise TypeError(
|
||||
'No user was specified.'
|
||||
)
|
||||
|
||||
# Store our data
|
||||
self.ckey = ckey
|
||||
self.csecret = csecret
|
||||
self.akey = akey
|
||||
self.asecret = asecret
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Twitter Notification
|
||||
"""
|
||||
|
||||
try:
|
||||
# Attempt to Establish a connection to Twitter
|
||||
self.auth = tweepy.OAuthHandler(self.ckey, self.csecret)
|
||||
|
||||
# Apply our Access Tokens
|
||||
self.auth.set_access_token(self.akey, self.asecret)
|
||||
|
||||
except Exception:
|
||||
self.logger.warning(
|
||||
'Twitter authentication failed; '
|
||||
'please verify your configuration.'
|
||||
)
|
||||
return False
|
||||
|
||||
text = '%s\r\n%s' % (title, body)
|
||||
try:
|
||||
# Get our API
|
||||
api = tweepy.API(self.auth)
|
||||
|
||||
# Send our Direct Message
|
||||
api.send_direct_message(self.user, text=text)
|
||||
self.logger.info('Sent Twitter DM notification.')
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Twitter '
|
||||
'direct message to %s.' % self.user)
|
||||
self.logger.debug('Twitter Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# The first token is stored in the hostname
|
||||
consumer_key = results['host']
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
consumer_secret, access_token_key, access_token_secret = \
|
||||
[x for x in filter(bool, NotifyBase.split_path(
|
||||
results['fullpath']))][0:3]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
consumer_secret = None
|
||||
access_token_key = None
|
||||
access_token_secret = None
|
||||
|
||||
results['ckey'] = consumer_key
|
||||
results['csecret'] = consumer_secret
|
||||
results['akey'] = access_token_key
|
||||
results['asecret'] = access_token_secret
|
||||
|
||||
return results
|
|
@@ -1,21 +1,272 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
from . import NotifyTwitter
|
||||
from . import tweepy
|
||||
from ..NotifyBase import NotifyBase
|
||||
from ...common import NotifyType
|
||||
from ...utils import parse_list
|
||||
from ...AppriseLocale import gettext_lazy as _
|
||||
|
||||
__all__ = [
|
||||
'NotifyTwitter',
|
||||
]
|
||||
|
||||
class NotifyTwitter(NotifyBase):
|
||||
"""
|
||||
A wrapper to Twitter Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Twitter'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://twitter.com/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'tweet'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twitter'
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
# This is used during a Private DM Message Size (not Public Tweets
|
||||
# which are limited to 240 characters)
|
||||
body_maxlen = 4096
|
||||
|
||||
# Twitter does have titles when creating a message
|
||||
title_maxlen = 0
|
||||
|
||||
templates = (
|
||||
'{schema}://{user}@{ckey}{csecret}/{akey}/{asecret}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'ckey': {
|
||||
'name': _('Consumer Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'csecret': {
|
||||
'name': _('Consumer Secret'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'akey': {
|
||||
'name': _('Access Key'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'asecret': {
|
||||
'name': _('Access Secret'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'user': {
|
||||
'name': _('User'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, ckey, csecret, akey, asecret, targets=None, **kwargs):
|
||||
"""
|
||||
Initialize Twitter Object
|
||||
|
||||
"""
|
||||
super(NotifyTwitter, self).__init__(**kwargs)
|
||||
|
||||
if not ckey:
|
||||
msg = 'An invalid Consumer API Key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not csecret:
|
||||
msg = 'An invalid Consumer Secret API Key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not akey:
|
||||
msg = 'An invalid Access Token API Key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not asecret:
|
||||
msg = 'An invalid Access Token Secret API Key was specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Identify our targets
|
||||
self.targets = parse_list(targets)
|
||||
|
||||
if len(self.targets) == 0 and not self.user:
|
||||
msg = 'No user(s) were specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Store our data
|
||||
self.ckey = ckey
|
||||
self.csecret = csecret
|
||||
self.akey = akey
|
||||
self.asecret = asecret
|
||||
|
||||
return
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Twitter Notification
|
||||
"""
|
||||
|
||||
try:
|
||||
# Attempt to Establish a connection to Twitter
|
||||
self.auth = tweepy.OAuthHandler(self.ckey, self.csecret)
|
||||
|
||||
# Apply our Access Tokens
|
||||
self.auth.set_access_token(self.akey, self.asecret)
|
||||
|
||||
except Exception:
|
||||
self.logger.warning(
|
||||
'Twitter authentication failed; '
|
||||
'please verify your configuration.'
|
||||
)
|
||||
return False
|
||||
|
||||
# Get ourselves a list of targets
|
||||
users = list(self.targets)
|
||||
if not users:
|
||||
# notify ourselves
|
||||
users.append(self.user)
|
||||
|
||||
# Error Tracking
|
||||
has_error = False
|
||||
|
||||
while len(users) > 0:
|
||||
# Get our user
|
||||
user = users.pop(0)
|
||||
|
||||
# Always call throttle before any remote server i/o is made to
|
||||
# avoid thrashing the remote server and risk being blocked.
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
# Get our API
|
||||
api = tweepy.API(self.auth)
|
||||
|
||||
# Send our Direct Message
|
||||
api.send_direct_message(user, text=body)
|
||||
self.logger.info(
|
||||
'Sent Twitter DM notification to {}.'.format(user))
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Twitter '
|
||||
'direct message to %s.' % user)
|
||||
self.logger.debug('Twitter Exception: %s' % str(e))
|
||||
|
||||
# Track our error
|
||||
has_error = True
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
if len(self.targets) > 0:
|
||||
args['to'] = ','.join([NotifyTwitter.quote(x, safe='')
|
||||
for x in self.targets])
|
||||
|
||||
return '{schema}://{auth}{ckey}/{csecret}/{akey}/{asecret}' \
|
||||
'/?{args}'.format(
|
||||
auth='' if not self.user else '{user}@'.format(
|
||||
user=NotifyTwitter.quote(self.user, safe='')),
|
||||
schema=self.secure_protocol,
|
||||
ckey=NotifyTwitter.quote(self.ckey, safe=''),
|
||||
asecret=NotifyTwitter.quote(self.csecret, safe=''),
|
||||
akey=NotifyTwitter.quote(self.akey, safe=''),
|
||||
csecret=NotifyTwitter.quote(self.asecret, safe=''),
|
||||
args=NotifyTwitter.urlencode(args))
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# The first token is stored in the hostname
|
||||
consumer_key = NotifyTwitter.unquote(results['host'])
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
consumer_secret, access_token_key, access_token_secret = \
|
||||
NotifyTwitter.split_path(results['fullpath'])[0:3]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
consumer_secret = None
|
||||
access_token_key = None
|
||||
access_token_secret = None
|
||||
|
||||
results['ckey'] = consumer_key
|
||||
results['csecret'] = consumer_secret
|
||||
results['akey'] = access_token_key
|
||||
results['asecret'] = access_token_secret
|
||||
|
||||
# Support the to= allowing one to identify more than one user to tweet
|
||||
# too
|
||||
results['targets'] = NotifyTwitter.parse_list(results['qsd'].get('to'))
|
||||
|
||||
return results
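Tying the template and parser together: the four OAuth tokens travel in the host and path, while the account(s) to direct-message come from the user portion or the to= argument. A hypothetical sketch with placeholder keys, assuming the standard Apprise object API:

import apprise

# tweet://{user}@{ckey}/{csecret}/{akey}/{asecret} - every value here is a placeholder
apobj = apprise.Apprise()
apobj.add('tweet://my_account@consumer_key/consumer_secret/access_key/access_secret')
apobj.notify(body='Bazarr downloaded new subtitles')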
|
||||
|
|
268 libs/apprise/plugins/NotifyWebexTeams.py Normal file
|
@@ -0,0 +1,268 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# At the time I created this plugin, their website had lots of issues with the
|
||||
# Firefox Browser. I fell back to Chrome and had no problems.
|
||||
|
||||
# To use this plugin, you need to first access https://teams.webex.com and
|
||||
# make yourself an account if you don't already have one. You'll want to
|
||||
# create at least one 'space' before getting the 'incoming webhook'.
|
||||
#
|
||||
# Next you'll need to install the 'Incoming webhook' plugin found under
|
||||
# the 'other' category here: https://apphub.webex.com/integrations/
|
||||
|
||||
# These links may not always work as time goes by and websites always
|
||||
# change, but at the time of creating this plugin this was a direct link
|
||||
# to it: https://apphub.webex.com/integrations/incoming-webhooks-cisco-systems
|
||||
|
||||
# If you're logged in, you'll be able to click on the 'Connect' button. From
|
||||
# there you'll need to accept the permissions it will ask of you. Give the
|
||||
# webhook a name such as 'apprise'.
|
||||
# When you're complete, you will receive a URL that looks something like this:
|
||||
# https://api.ciscospark.com/v1/webhooks/incoming/\
|
||||
# Y3lzY29zcGkyazovL3VzL1dFQkhPT0sajkkzYWU4fTMtMGE4Yy00
|
||||
#
|
||||
# The last part of the URL is all you need to be interested in. Think of this
|
||||
# url as:
|
||||
# https://api.ciscospark.com/v1/webhooks/incoming/{token}
|
||||
#
|
||||
# You will need to assemble all of your URLs for this plugin to work as:
|
||||
# wxteams://{token}
|
||||
#
|
||||
# Resources
|
||||
# - https://developer.webex.com/docs/api/basics - markdown/post syntax
|
||||
# - https://developer.cisco.com/ecosystem/webex/apps/\
|
||||
# incoming-webhooks-cisco-systems/ - Simple webhook example
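Putting the notes above together: once the 80 character token has been copied from the end of the incoming-webhook URL, it is the only value needed to build the wxteams:// URL. A brief sketch using a placeholder token and the standard Apprise object API:

import apprise

token = 'a' * 80  # placeholder for the real 80 character webhook token
apobj = apprise.Apprise()
apobj.add('wxteams://{}'.format(token))
apobj.notify(body='**Bazarr** found new subtitles')  # markdown is the default format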
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyFormat
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Token required as part of the API request
|
||||
VALIDATE_TOKEN = re.compile(r'[a-z0-9]{80}', re.I)
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
# Based on: https://developer.webex.com/docs/api/basics/rate-limiting
|
||||
WEBEX_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
415: 'Unsuported media specified',
|
||||
429: 'To many consecutive requests were made.',
|
||||
503: 'Service is overloaded, try again later',
|
||||
}
|
||||
|
||||
|
||||
class NotifyWebexTeams(NotifyBase):
|
||||
"""
|
||||
A wrapper for Webex Teams Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Cisco Webex Teams'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://webex.teams.com/'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'wxteams'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_wxteams'
|
||||
|
||||
# Webex Teams uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.ciscospark.com/v1/webhooks/incoming/'
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
# We don't support titles for Webex notifications
|
||||
title_maxlen = 0
|
||||
|
||||
# Default to markdown; fall back to text
|
||||
notify_format = NotifyFormat.MARKDOWN
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{token}',
|
||||
)
|
||||
|
||||
# Define our template tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'token': {
|
||||
'name': _('Token'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
'regex': (r'[a-z0-9]{80}', 'i'),
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, token, **kwargs):
|
||||
"""
|
||||
Initialize Webex Teams Object
|
||||
"""
|
||||
super(NotifyWebexTeams, self).__init__(**kwargs)
|
||||
|
||||
if not token:
|
||||
msg = 'The Webex Teams token is not specified.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
if not VALIDATE_TOKEN.match(token.strip()):
|
||||
msg = 'The Webex Teams token specified ({}) is invalid.'\
|
||||
.format(token)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# The token associated with the account
|
||||
self.token = token.strip()
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Webex Teams Notification
|
||||
"""
|
||||
|
||||
# Setup our headers
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# Prepare our URL
|
||||
url = '{}/{}'.format(self.notify_url, self.token)
|
||||
|
||||
payload = {
|
||||
'markdown' if (self.notify_format == NotifyFormat.MARKDOWN)
|
||||
else 'text': body,
|
||||
}
|
||||
|
||||
self.logger.debug('Webex Teams POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Webex Teams Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code not in (
|
||||
requests.codes.ok, requests.codes.no_content):
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyWebexTeams.http_response_code_lookup(
|
||||
r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Webex Teams notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent Webex Teams notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured sending Webex Teams '
|
||||
'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://{token}/?{args}'.format(
|
||||
schema=self.secure_protocol,
|
||||
token=NotifyWebexTeams.quote(self.token, safe=''),
|
||||
args=NotifyWebexTeams.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url, verify_host=False)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# The first token is stored in the hostname
|
||||
results['token'] = NotifyWebexTeams.unquote(results['host'])
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support https://api.ciscospark.com/v1/webhooks/incoming/WEBHOOK_TOKEN
|
||||
"""
|
||||
|
||||
result = re.match(
|
||||
r'^https?://api\.ciscospark\.com/v[1-9][0-9]*/webhooks/incoming/'
|
||||
r'(?P<webhook_token>[A-Z0-9_-]+)/?'
|
||||
r'(?P<args>\?[.+])?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifyWebexTeams.parse_url(
|
||||
'{schema}://{webhook_token}/{args}'.format(
|
||||
schema=NotifyWebexTeams.secure_protocol,
|
||||
webhook_token=result.group('webhook_token'),
|
||||
args='' if not result.group('args')
|
||||
else result.group('args')))
|
||||
|
||||
return None
|
|
@@ -1,28 +1,38 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Windows Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
|
||||
import re
|
||||
from time import sleep
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# Default our global support flag
|
||||
NOTIFY_WINDOWS_SUPPORT_ENABLED = False
|
||||
|
@@ -56,9 +66,20 @@ class NotifyWindows(NotifyBase):
|
|||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_windows'
|
||||
|
||||
# Disable throttle rate for Windows requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
# Limit results to just the first 2 lines otherwise there is just too much
|
||||
# content to display
|
||||
body_max_line_count = 2
|
||||
|
||||
# The number of seconds to display the popup for
|
||||
default_popup_duration_sec = 12
|
||||
|
||||
# This entry is a bit hacky, but it allows us to unit-test this library
|
||||
# in an environment that simply doesn't have the windows packages
|
||||
# available to us. It also allows us to handle situations where the
|
||||
|
@@ -68,18 +89,44 @@ class NotifyWindows(NotifyBase):
|
|||
# let me know! :)
|
||||
_enabled = NOTIFY_WINDOWS_SUPPORT_ENABLED
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://_/',
|
||||
)
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'duration': {
|
||||
'name': _('Duration'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'default': 12,
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, include_image=True, duration=None, **kwargs):
|
||||
"""
|
||||
Initialize Windows Object
|
||||
"""
|
||||
|
||||
super(NotifyWindows, self).__init__(**kwargs)
|
||||
|
||||
# Number of seconds to display notification for
|
||||
self.duration = 12
|
||||
self.duration = self.default_popup_duration_sec \
|
||||
if not (isinstance(duration, int) and duration > 0) else duration
|
||||
|
||||
# Define our handler
|
||||
self.hwnd = None
|
||||
|
||||
super(NotifyWindows, self).__init__(**kwargs)
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def _on_destroy(self, hwnd, msg, wparam, lparam):
|
||||
"""
|
||||
|
@@ -92,7 +139,7 @@ class NotifyWindows(NotifyBase):
|
|||
|
||||
return None
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Windows Notification
|
||||
"""
|
||||
|
@@ -102,11 +149,8 @@ class NotifyWindows(NotifyBase):
|
|||
"Windows Notifications are not supported by this system.")
|
||||
return False
|
||||
|
||||
# Limit results to just the first 2 line otherwise
|
||||
# there is just to much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
# Register destruction callback
|
||||
|
@@ -127,20 +171,26 @@ class NotifyWindows(NotifyBase):
|
|||
self.hinst, None)
|
||||
win32gui.UpdateWindow(self.hwnd)
|
||||
|
||||
# image path
|
||||
icon_path = self.image_path(notify_type, extension='.ico')
|
||||
icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
|
||||
# image path (if configured to acquire)
|
||||
icon_path = None if not self.include_image \
|
||||
else self.image_path(notify_type, extension='.ico')
|
||||
|
||||
try:
|
||||
hicon = win32gui.LoadImage(
|
||||
self.hinst, icon_path, win32con.IMAGE_ICON, 0, 0,
|
||||
icon_flags)
|
||||
if icon_path:
|
||||
icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
"Could not load windows notification icon ({}): {}"
|
||||
.format(icon_path, e))
|
||||
try:
|
||||
hicon = win32gui.LoadImage(
|
||||
self.hinst, icon_path, win32con.IMAGE_ICON, 0, 0,
|
||||
icon_flags)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
"Could not load windows notification icon ({}): {}"
|
||||
.format(icon_path, e))
|
||||
|
||||
# disable icon
|
||||
hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION)
|
||||
else:
|
||||
# disable icon
|
||||
hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION)
|
||||
|
||||
|
@@ -160,13 +210,32 @@ class NotifyWindows(NotifyBase):
|
|||
|
||||
self.logger.info('Sent Windows notification.')
|
||||
|
||||
except Exception as e:
|
||||
except Exception:
|
||||
self.logger.warning('Failed to send Windows notification.')
|
||||
self.logger.exception('Windows Exception')
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'duration': str(self.duration),
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
return '{schema}://_/?{args}'.format(
|
||||
schema=self.protocol,
|
||||
args=NotifyWindows.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@@ -176,15 +245,31 @@ class NotifyWindows(NotifyBase):
|
|||
|
||||
"""
|
||||
|
||||
# return a very basic set of requirements
|
||||
return {
|
||||
'schema': NotifyWindows.protocol,
|
||||
'user': None,
|
||||
'password': None,
|
||||
'port': None,
|
||||
'host': 'localhost',
|
||||
'fullpath': None,
|
||||
'path': None,
|
||||
'url': url,
|
||||
'qsd': {},
|
||||
}
|
||||
results = NotifyBase.parse_url(url)
|
||||
if not results:
|
||||
results = {
|
||||
'schema': NotifyWindows.protocol,
|
||||
'user': None,
|
||||
'password': None,
|
||||
'port': None,
|
||||
'host': '_',
|
||||
'fullpath': None,
|
||||
'path': None,
|
||||
'url': url,
|
||||
'qsd': {},
|
||||
}
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
# Set duration
|
||||
try:
|
||||
results['duration'] = int(results['qsd'].get('duration'))
|
||||
|
||||
except (TypeError, ValueError):
|
||||
# Not a valid integer; ignore entry
|
||||
pass
|
||||
|
||||
# return results
|
||||
return results
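The image= and duration= arguments parsed above map straight onto the constructor keywords, so the popup behaviour can be tuned from the URL alone. A hypothetical sketch (Windows host assumed, standard Apprise object API):

import apprise

apobj = apprise.Apprise()
# keep the popup up for 20 seconds and skip loading the .ico image
apobj.add('windows://_/?duration=20&image=no')
apobj.notify(title='Bazarr', body='Subtitles downloaded')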
|
||||
|
|
|
@@ -1,29 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# XBMC/KODI Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyXBMC(NotifyBase):
|
||||
|
@@ -37,41 +44,103 @@ class NotifyXBMC(NotifyBase):
|
|||
# The services URL
|
||||
service_url = 'http://kodi.tv/'
|
||||
|
||||
xbmc_protocol = 'xbmc'
|
||||
xbmc_secure_protocol = 'xbmcs'
|
||||
kodi_protocol = 'kodi'
|
||||
kodi_secure_protocol = 'kodis'
|
||||
|
||||
# The default protocols
|
||||
protocol = ('xbmc', 'kodi')
|
||||
protocol = (xbmc_protocol, kodi_protocol)
|
||||
|
||||
# The default secure protocols
|
||||
secure_protocol = ('xbmc', 'kodis')
|
||||
secure_protocol = (xbmc_secure_protocol, kodi_secure_protocol)
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_kodi'
|
||||
|
||||
# Disable throttle rate for XBMC/KODI requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Limit results to just the first 2 lines otherwise there is just too much
|
||||
# content to display
|
||||
body_max_line_count = 2
|
||||
|
||||
# XBMC uses the http protocol with JSON requests
|
||||
xbmc_default_port = 8080
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
# The number of seconds to display the popup for
|
||||
default_popup_duration_sec = 12
|
||||
|
||||
# XBMC default protocol version (v2)
|
||||
xbmc_remote_protocol = 2
|
||||
|
||||
# KODI default protocol version (v6)
|
||||
kodi_remote_protocol = 6
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}',
|
||||
'{schema}://{host}:{port}',
|
||||
'{schema}://{user}:{password}@{host}',
|
||||
'{schema}://{user}:{password}@{host}:{port}',
|
||||
)
|
||||
|
||||
# Define our tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Username'),
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'duration': {
|
||||
'name': _('Duration'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'default': 12,
|
||||
},
|
||||
'image': {
|
||||
'name': _('Include Image'),
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'map_to': 'include_image',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, include_image=True, duration=None, **kwargs):
|
||||
"""
|
||||
Initialize XBMC/KODI Object
|
||||
"""
|
||||
super(NotifyXBMC, self).__init__(**kwargs)
|
||||
|
||||
# Number of micro-seconds to display notification for
|
||||
self.duration = 12000
|
||||
# Number of seconds to display notification for
|
||||
self.duration = self.default_popup_duration_sec \
|
||||
if not (isinstance(duration, int) and duration > 0) else duration
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
# Build our schema
|
||||
self.schema = 'https' if self.secure else 'http'
|
||||
|
||||
# Prepare the default header
|
||||
self.headers = {
|
||||
|
@@ -82,6 +151,10 @@ class NotifyXBMC(NotifyBase):
|
|||
# Default protocol
|
||||
self.protocol = kwargs.get('protocol', self.xbmc_remote_protocol)
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def _payload_60(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Builds payload for KODI API v6.0
|
||||
|
@@ -96,13 +169,17 @@ class NotifyXBMC(NotifyBase):
|
|||
'params': {
|
||||
'title': title,
|
||||
'message': body,
|
||||
# displaytime is defined in microseconds
|
||||
'displaytime': self.duration,
|
||||
# displaytime is defined in microseconds so we need to just
|
||||
# do some simple math
|
||||
'displaytime': int(self.duration * 1000),
|
||||
},
|
||||
'id': 1,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
# Acquire our image url if configured to do so
|
||||
image_url = None if not self.include_image else \
|
||||
self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
payload['params']['image'] = image_url
|
||||
if notify_type is NotifyType.FAILURE:
|
||||
|
@@ -130,29 +207,27 @@ class NotifyXBMC(NotifyBase):
|
|||
'params': {
|
||||
'title': title,
|
||||
'message': body,
|
||||
# displaytime is defined in microseconds
|
||||
'displaytime': self.duration,
|
||||
# displaytime is defined in microseconds so we need to just
|
||||
# do some simple math
|
||||
'displaytime': int(self.duration * 1000),
|
||||
},
|
||||
'id': 1,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
# Include our logo if configured to do so
|
||||
image_url = None if not self.include_image \
|
||||
else self.image_url(notify_type)
|
||||
|
||||
if image_url:
|
||||
payload['params']['image'] = image_url
|
||||
|
||||
return (self.headers, dumps(payload))
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform XBMC/KODI Notification
|
||||
"""
|
||||
|
||||
# Limit results to just the first 2 line otherwise
|
||||
# there is just to much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
|
||||
if self.protocol == self.xbmc_remote_protocol:
|
||||
# XBMC v2.0
|
||||
(headers, payload) = self._payload_20(
|
||||
|
@@ -177,6 +252,10 @@ class NotifyXBMC(NotifyBase):
|
|||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('XBMC/KODI Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
|
@@ -187,17 +266,17 @@ class NotifyXBMC(NotifyBase):
|
|||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification:'
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyXBMC.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@@ -217,6 +296,48 @@ class NotifyXBMC(NotifyBase):
|
|||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'image': 'yes' if self.include_image else 'no',
|
||||
'duration': str(self.duration),
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=NotifyXBMC.quote(self.user, safe=''),
|
||||
password=NotifyXBMC.quote(self.password, safe=''),
|
||||
)
|
||||
elif self.user:
|
||||
auth = '{user}@'.format(
|
||||
user=NotifyXBMC.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
default_schema = self.xbmc_protocol if (
|
||||
self.protocol <= self.xbmc_remote_protocol) else self.kodi_protocol
|
||||
default_port = 443 if self.secure else self.xbmc_default_port
|
||||
if self.secure:
|
||||
# Append 's' to schema
|
||||
default_schema += 's'
|
||||
|
||||
return '{schema}://{auth}{hostname}{port}/?{args}'.format(
|
||||
schema=default_schema,
|
||||
auth=auth,
|
||||
hostname=NotifyXBMC.quote(self.host, safe=''),
|
||||
port='' if not self.port or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=NotifyXBMC.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
|
@@ -243,4 +364,16 @@ class NotifyXBMC(NotifyBase):
|
|||
# KODI Support
|
||||
results['protocol'] = NotifyXBMC.kodi_remote_protocol
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', True))
|
||||
|
||||
# Set duration
|
||||
try:
|
||||
results['duration'] = abs(int(results['qsd'].get('duration')))
|
||||
|
||||
except (TypeError, ValueError):
|
||||
# Not a valid integer; ignore entry
|
||||
pass
|
||||
|
||||
return results
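With the duration= and image= handling above, a complete KODI target fits in one URL. A hypothetical sketch (placeholder host and credentials, standard Apprise object API):

import apprise

apobj = apprise.Apprise()
# 30 second on-screen popup, without the notification image
apobj.add('kodi://user:password@192.168.1.20:8080/?duration=30&image=no')
apobj.notify(title='Bazarr', body='New subtitles are available')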
|
||||
|
|
|
@@ -1,28 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# XML Notify Wrapper
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
from ..common import NotifyType
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyXML(NotifyBase):
|
||||
|
@@ -45,9 +53,62 @@ class NotifyXML(NotifyBase):
|
|||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# Disable throttle rate for JSON requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Define object templates
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}',
|
||||
'{schema}://{host}:{port}',
|
||||
'{schema}://{user}@{host}',
|
||||
'{schema}://{user}@{host}:{port}',
|
||||
'{schema}://{user}:{password}@{host}',
|
||||
'{schema}://{user}:{password}@{host}:{port}',
|
||||
)
|
||||
|
||||
# Define our tokens; these are the minimum tokens required to
|
||||
# be passed into this function (as arguments). The syntax appends any
|
||||
# previously defined in the base package and builds onto them
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Username'),
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
},
|
||||
})
|
||||
|
||||
# Define any kwargs we're using
|
||||
template_kwargs = {
|
||||
'headers': {
|
||||
'name': _('HTTP Header'),
|
||||
'prefix': '+',
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, headers=None, **kwargs):
|
||||
"""
|
||||
Initialize XML Object
|
||||
|
||||
headers can be a dictionary of key/value pairs that you want to
|
||||
additionally include as part of the server headers to post with
|
||||
|
||||
"""
|
||||
super(NotifyXML, self).__init__(**kwargs)
|
||||
|
||||
|
@@ -66,19 +127,56 @@ class NotifyXML(NotifyBase):
|
|||
</soapenv:Body>
|
||||
</soapenv:Envelope>"""
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not compat_is_basestring(self.fullpath):
|
||||
if not isinstance(self.fullpath, six.string_types):
|
||||
self.fullpath = '/'
|
||||
|
||||
self.headers = {}
|
||||
if headers:
|
||||
# Store our extra headers
|
||||
self.headers.update(headers)
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
# Append our headers into our args
|
||||
args.update({'+{}'.format(k): v for k, v in self.headers.items()})
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=NotifyXML.quote(self.user, safe=''),
|
||||
password=NotifyXML.quote(self.password, safe=''),
|
||||
)
|
||||
elif self.user:
|
||||
auth = '{user}@'.format(
|
||||
user=NotifyXML.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
return '{schema}://{auth}{hostname}{port}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=NotifyXML.quote(self.host, safe=''),
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=NotifyXML.urlencode(args),
|
||||
)
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform XML Notification
|
||||
"""
|
||||
|
@@ -89,10 +187,14 @@ class NotifyXML(NotifyBase):
|
|||
'Content-Type': 'application/xml'
|
||||
}
|
||||
|
||||
# Apply any/all header over-rides defined
|
||||
headers.update(self.headers)
|
||||
|
||||
re_map = {
|
||||
'{MESSAGE_TYPE}': NotifyBase.quote(notify_type),
|
||||
'{SUBJECT}': NotifyBase.quote(title),
|
||||
'{MESSAGE}': NotifyBase.quote(body),
|
||||
'{MESSAGE_TYPE}': NotifyXML.escape_html(
|
||||
notify_type, whitespace=False),
|
||||
'{SUBJECT}': NotifyXML.escape_html(title, whitespace=False),
|
||||
'{MESSAGE}': NotifyXML.escape_html(body, whitespace=False),
|
||||
}
|
||||
|
||||
# Iterate over above list and store content accordingly
|
||||
|
@ -105,7 +207,10 @@ class NotifyXML(NotifyBase):
|
|||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
# Set our schema
|
||||
schema = 'https' if self.secure else 'http'
|
||||
|
||||
url = '%s://%s' % (schema, self.host)
|
||||
if isinstance(self.port, int):
|
||||
url += ':%d' % self.port
|
||||
|
||||
|
@ -116,6 +221,10 @@ class NotifyXML(NotifyBase):
|
|||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('XML Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
|
@ -125,17 +234,18 @@ class NotifyXML(NotifyBase):
|
|||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyXML.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -153,3 +263,27 @@ class NotifyXML(NotifyBase):
|
|||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Add our headers that the user can potentially over-ride if they wish
|
||||
# to our returned result set
|
||||
results['headers'] = results['qsd-']
|
||||
results['headers'].update(results['qsd+'])
|
||||
|
||||
# Tidy our header entries by unquoting them
|
||||
results['headers'] = {NotifyXML.unquote(x): NotifyXML.unquote(y)
|
||||
for x, y in results['headers'].items()}
|
||||
|
||||
return results
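# Illustrative sketch: how a '+' prefixed query argument ends up as a custom
# HTTP header for this notifier. The URL and header name below are made up
# for illustration; the module path assumes the layout shown in this diff.
from apprise.plugins.NotifyXML import NotifyXML

results = NotifyXML.parse_url('xml://user:pass@host/?+X-Api-Key=abc123')
# results['headers'] -> {'X-Api-Key': 'abc123'}; send() later merges these
# into the headers posted along with the XML payload.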
|
||||
|
|
425
libs/apprise/plugins/NotifyXMPP.py
Normal file
|
@ -0,0 +1,425 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import ssl
|
||||
from os.path import isfile
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
# xep string parser
|
||||
XEP_PARSE_RE = re.compile('^[^1-9]*(?P<xep>[1-9][0-9]{0,3})$')
|
||||
|
||||
# Default our global support flag
|
||||
NOTIFY_XMPP_SUPPORT_ENABLED = False
|
||||
|
||||
# Taken from https://golang.org/src/crypto/x509/root_linux.go
|
||||
CA_CERTIFICATE_FILE_LOCATIONS = [
|
||||
# Debian/Ubuntu/Gentoo etc.
|
||||
"/etc/ssl/certs/ca-certificates.crt",
|
||||
# Fedora/RHEL 6
|
||||
"/etc/pki/tls/certs/ca-bundle.crt",
|
||||
# OpenSUSE
|
||||
"/etc/ssl/ca-bundle.pem",
|
||||
# OpenELEC
|
||||
"/etc/pki/tls/cacert.pem",
|
||||
# CentOS/RHEL 7
|
||||
"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
|
||||
]
|
||||
|
||||
try:
|
||||
# Import sleekxmpp if available
|
||||
import sleekxmpp
|
||||
|
||||
NOTIFY_XMPP_SUPPORT_ENABLED = True
|
||||
|
||||
except ImportError:
|
||||
# No problem; we just simply can't support this plugin because we're
|
||||
# either using Linux, or simply do not have sleekxmpp installed.
|
||||
pass
|
||||
|
||||
|
||||
class NotifyXMPP(NotifyBase):
|
||||
"""
|
||||
A wrapper for XMPP Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'XMPP'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'xmpp'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'xmpps'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_xmpp'
|
||||
|
||||
# The default XMPP port
|
||||
default_unsecure_port = 5222
|
||||
|
||||
# The default XMPP secure port
|
||||
default_secure_port = 5223
|
||||
|
||||
# XMPP does not support a title
|
||||
title_maxlen = 0
|
||||
|
||||
# This entry is a bit hacky, but it allows us to unit-test this library
|
||||
# in an environment that simply doesn't have the sleekxmpp package
|
||||
# available to us.
|
||||
#
|
||||
# If anyone is seeing this and knows a better way of testing this
|
||||
# outside of what is defined in test/test_xmpp_plugin.py, please
|
||||
# let me know! :)
|
||||
_enabled = NOTIFY_XMPP_SUPPORT_ENABLED
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}',
|
||||
'{schema}://{password}@{host}',
|
||||
'{schema}://{password}@{host}:{port}',
|
||||
'{schema}://{user}:{password}@{host}',
|
||||
'{schema}://{user}:{password}@{host}:{port}',
|
||||
'{schema}://{host}/{targets}',
|
||||
'{schema}://{password}@{host}/{targets}',
|
||||
'{schema}://{password}@{host}:{port}/{targets}',
|
||||
'{schema}://{user}:{password}@{host}/{targets}',
|
||||
'{schema}://{user}:{password}@{host}:{port}/{targets}',
|
||||
)
|
||||
|
||||
# Define our tokens
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Username'),
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
'required': True,
|
||||
},
|
||||
'target_jid': {
|
||||
'name': _('Target JID'),
|
||||
'type': 'string',
|
||||
'map_to': 'targets',
|
||||
},
|
||||
'targets': {
|
||||
'name': _('Targets'),
|
||||
'type': 'list:string',
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'to': {
|
||||
'alias_of': 'targets',
|
||||
},
|
||||
'xep': {
|
||||
'name': _('XEP'),
|
||||
'type': 'list:string',
|
||||
'prefix': 'xep-',
|
||||
'regex': (r'[1-9][0-9]{0,3}', 'i'),
|
||||
},
|
||||
'jid': {
|
||||
'name': _('Source JID'),
|
||||
'type': 'string',
|
||||
},
|
||||
})
|
||||
|
||||
def __init__(self, targets=None, jid=None, xep=None, **kwargs):
|
||||
"""
|
||||
Initialize XMPP Object
|
||||
"""
|
||||
super(NotifyXMPP, self).__init__(**kwargs)
|
||||
|
||||
# JID Details:
|
||||
# - JID's normally have an @ symbol in them, but it is not required
|
||||
# - Each allowable portion of a JID MUST NOT be more than 1023 bytes
|
||||
# in length.
|
||||
# - JID's can identify resource paths at the end separated by slashes
|
||||
# hence the following is valid: user@example.com/resource/path
|
||||
|
||||
# Since JID's can clash with URLs offered by apprise (specifically the
|
||||
# resource paths) we need to allow users an alternative character to
|
||||
# represent the slashes. The grammar is defined here:
|
||||
# https://xmpp.org/extensions/xep-0029.html as follows:
|
||||
#
|
||||
# <JID> ::= [<node>"@"]<domain>["/"<resource>]
|
||||
# <node> ::= <conforming-char>[<conforming-char>]*
|
||||
# <domain> ::= <hname>["."<hname>]*
|
||||
# <resource> ::= <any-char>[<any-char>]*
|
||||
# <hname> ::= <let>|<dig>[[<let>|<dig>|"-"]*<let>|<dig>]
|
||||
# <let> ::= [a-z] | [A-Z]
|
||||
# <dig> ::= [0-9]
|
||||
# <conforming-char> ::= #x21 | [#x23-#x25] | [#x28-#x2E] |
|
||||
# [#x30-#x39] | #x3B | #x3D | #x3F |
|
||||
# [#x41-#x7E] | [#x80-#xD7FF] |
|
||||
# [#xE000-#xFFFD] | [#x10000-#x10FFFF]
|
||||
# <any-char> ::= [#x20-#xD7FF] | [#xE000-#xFFFD] |
|
||||
# [#x10000-#x10FFFF]
|
||||
|
||||
# The best way to do this is to choose characters that aren't allowed
|
||||
# in this case we will use comma and/or space.
|
||||
|
||||
# Assemble our jid using the information available to us:
|
||||
self.jid = jid
|
||||
|
||||
if not (self.user or self.password):
|
||||
# you must provide a jid/pass for this to work; if no password
|
||||
# is specified then the user field acts as the password instead
|
||||
# so we know that if there is no user specified, our url was
|
||||
# really busted up.
|
||||
msg = 'You must specify a XMPP password'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# See https://xmpp.org/extensions/ for details on xep values
|
||||
if xep is None:
|
||||
# Default xep setting
|
||||
self.xep = [
|
||||
# xep_0030: Service Discovery
|
||||
30,
|
||||
# xep_0199: XMPP Ping
|
||||
199,
|
||||
]
|
||||
|
||||
else:
|
||||
# Prepare the list
|
||||
_xep = parse_list(xep)
|
||||
self.xep = []
|
||||
|
||||
for xep in _xep:
|
||||
result = XEP_PARSE_RE.match(xep)
|
||||
if result is not None:
|
||||
self.xep.append(int(result.group('xep')))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
"Could not load XMPP xep {}".format(xep))
|
||||
|
||||
# By default we send ourselves a message
|
||||
if targets:
|
||||
self.targets = parse_list(targets)
|
||||
|
||||
else:
|
||||
self.targets = list()
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform XMPP Notification
|
||||
"""
|
||||
|
||||
if not self._enabled:
|
||||
self.logger.warning(
|
||||
'XMPP Notifications are not supported by this system '
|
||||
'- install sleekxmpp.')
|
||||
return False
|
||||
|
||||
# Detect our JID if it isn't otherwise specified
|
||||
jid = self.jid
|
||||
password = self.password
|
||||
if not jid:
|
||||
if self.user and self.password:
|
||||
# xmpp://user:password@hostname
|
||||
jid = '{}@{}'.format(self.user, self.host)
|
||||
|
||||
else:
|
||||
# xmpp://password@hostname
|
||||
jid = self.host
|
||||
password = self.password if self.password else self.user
|
||||
|
||||
# Prepare our object
|
||||
xmpp = sleekxmpp.ClientXMPP(jid, password)
|
||||
|
||||
for xep in self.xep:
|
||||
# Load xep entries
|
||||
xmpp.register_plugin('xep_{0:04d}'.format(xep))
|
||||
|
||||
if self.secure:
|
||||
xmpp.ssl_version = ssl.PROTOCOL_TLSv1
|
||||
# If the python version supports it, use highest TLS version
|
||||
# automatically
|
||||
if hasattr(ssl, "PROTOCOL_TLS"):
|
||||
# Use the best version of TLS available to us
|
||||
xmpp.ssl_version = ssl.PROTOCOL_TLS
|
||||
|
||||
xmpp.ca_certs = None
|
||||
if self.verify_certificate:
|
||||
# Set the ca_certs variable for certificate verification
|
||||
xmpp.ca_certs = next(
|
||||
(cert for cert in CA_CERTIFICATE_FILE_LOCATIONS
|
||||
if isfile(cert)), None)
|
||||
|
||||
if xmpp.ca_certs is None:
|
||||
self.logger.warning(
|
||||
'XMPP Secure communication can not be verified; '
|
||||
'no CA certificate found')
|
||||
|
||||
# Acquire our port number
|
||||
if not self.port:
|
||||
port = self.default_secure_port \
|
||||
if self.secure else self.default_unsecure_port
|
||||
|
||||
else:
|
||||
port = self.port
|
||||
|
||||
# Establish our connection
|
||||
if not xmpp.connect((self.host, port)):
|
||||
return False
|
||||
|
||||
xmpp.send_presence()
|
||||
|
||||
try:
|
||||
xmpp.get_roster()
|
||||
|
||||
except sleekxmpp.exceptions.IqError as e:
|
||||
self.logger.warning('There was an error getting the XMPP roster.')
|
||||
self.logger.debug(e.iq['error']['condition'])
|
||||
xmpp.disconnect()
|
||||
return False
|
||||
|
||||
except sleekxmpp.exceptions.IqTimeout:
|
||||
self.logger.warning('XMPP Server is taking too long to respond.')
|
||||
xmpp.disconnect()
|
||||
return False
|
||||
|
||||
targets = list(self.targets)
|
||||
if not targets:
|
||||
# We always default to notifying ourselves
|
||||
targets.append(jid)
|
||||
|
||||
while len(targets) > 0:
|
||||
|
||||
# Get next target (via JID)
|
||||
target = targets.pop(0)
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
# The message we wish to send, and the JID that
|
||||
# will receive it.
|
||||
xmpp.send_message(mto=target, mbody=body, mtype='chat')
|
||||
|
||||
# Using wait=True ensures that the send queue will be
|
||||
# emptied before ending the session.
|
||||
xmpp.disconnect(wait=True)
|
||||
|
||||
return True
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'format': self.notify_format,
|
||||
'overflow': self.overflow_mode,
|
||||
'verify': 'yes' if self.verify_certificate else 'no',
|
||||
}
|
||||
|
||||
if self.jid:
|
||||
args['jid'] = self.jid
|
||||
|
||||
if self.xep:
|
||||
# xep are integers, so we need to just iterate over a list and
|
||||
# switch them to a string
|
||||
args['xep'] = ','.join([str(xep) for xep in self.xep])
|
||||
|
||||
# Target JID(s) can clash with our existing paths, so we just use comma
|
||||
# and/or space as delimiters - %20 = space
|
||||
jids = '%20'.join([NotifyXMPP.quote(x, safe='') for x in self.targets])
|
||||
|
||||
default_port = self.default_secure_port \
|
||||
if self.secure else self.default_unsecure_port
|
||||
|
||||
default_schema = self.secure_protocol if self.secure else self.protocol
|
||||
|
||||
if self.user and self.password:
|
||||
auth = '{}:{}'.format(
|
||||
NotifyXMPP.quote(self.user, safe=''),
|
||||
NotifyXMPP.quote(self.password, safe=''))
|
||||
|
||||
else:
|
||||
auth = self.password if self.password else self.user
|
||||
|
||||
return '{schema}://{auth}@{hostname}{port}/{jids}?{args}'.format(
|
||||
auth=auth,
|
||||
schema=default_schema,
|
||||
hostname=NotifyXMPP.quote(self.host, safe=''),
|
||||
port='' if not self.port or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
jids=jids,
|
||||
args=NotifyXMPP.urlencode(args),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Get our targets; we ignore path slashes since they identify
|
||||
# our resources
|
||||
results['targets'] = NotifyXMPP.parse_list(results['fullpath'])
|
||||
|
||||
# Over-ride the xep plugins
|
||||
if 'xep' in results['qsd'] and len(results['qsd']['xep']):
|
||||
results['xep'] = \
|
||||
NotifyXMPP.parse_list(results['qsd']['xep'])
|
||||
|
||||
# Over-ride the default (and detected) jid
|
||||
if 'jid' in results['qsd'] and len(results['qsd']['jid']):
|
||||
results['jid'] = NotifyXMPP.unquote(results['qsd']['jid'])
|
||||
|
||||
# Over-ride the default (and detected) jid
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyXMPP.parse_list(results['qsd']['to'])
|
||||
|
||||
return results
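# Illustrative sketch: the URL pieces this plugin recognizes. The addresses
# below are made up for illustration; the module path assumes the layout
# shown in this diff.
from apprise.plugins.NotifyXMPP import NotifyXMPP

results = NotifyXMPP.parse_url(
    'xmpps://bot:secret@example.com?to=alice@example.com&xep=30,199')
# results['targets'] carries the ?to= entries (in addition to any JIDs given
# as path elements), results['xep'] over-rides the default XEP plugin list,
# and an optional ?jid= argument over-rides the detected source JID.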
|
|
@ -1,72 +1,64 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Our service wrappers
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# Used for Testing; specifically test_email_plugin.py needs access
|
||||
# to the modules WEBBASE_LOOKUP_TABLE and WebBaseLogin objects
|
||||
import six
|
||||
import re
|
||||
import copy
|
||||
|
||||
from os import listdir
|
||||
from os.path import dirname
|
||||
from os.path import abspath
|
||||
|
||||
# Used for testing
|
||||
from . import NotifyEmail as NotifyEmailBase
|
||||
|
||||
from .NotifyBoxcar import NotifyBoxcar
|
||||
from .NotifyDiscord import NotifyDiscord
|
||||
from .NotifyEmail import NotifyEmail
|
||||
from .NotifyEmby import NotifyEmby
|
||||
from .NotifyFaast import NotifyFaast
|
||||
from .NotifyGrowl.NotifyGrowl import NotifyGrowl
|
||||
from .NotifyIFTTT import NotifyIFTTT
|
||||
from .NotifyJoin import NotifyJoin
|
||||
from .NotifyJSON import NotifyJSON
|
||||
from .NotifyMatterMost import NotifyMatterMost
|
||||
from .NotifyProwl import NotifyProwl
|
||||
from .NotifyPushalot import NotifyPushalot
|
||||
from .NotifyPushBullet import NotifyPushBullet
|
||||
from .NotifyPushjet.NotifyPushjet import NotifyPushjet
|
||||
from .NotifyPushover import NotifyPushover
|
||||
from .NotifyRocketChat import NotifyRocketChat
|
||||
from .NotifySlack import NotifySlack
|
||||
from .NotifyStride import NotifyStride
|
||||
from .NotifyTelegram import NotifyTelegram
|
||||
from .NotifyToasty import NotifyToasty
|
||||
from .NotifyTwitter.NotifyTwitter import NotifyTwitter
|
||||
from .NotifyXBMC import NotifyXBMC
|
||||
from .NotifyXML import NotifyXML
|
||||
from .NotifyWindows import NotifyWindows
|
||||
|
||||
# Required until re-factored into base code
|
||||
from .NotifyPushjet import pushjet
|
||||
from .NotifyGrowl import gntp
|
||||
from .NotifyTwitter import tweepy
|
||||
|
||||
# NotifyBase object is passed in as a module not class
|
||||
from . import NotifyBase
|
||||
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NOTIFY_IMAGE_SIZES
|
||||
from ..common import NotifyType
|
||||
from ..common import NOTIFY_TYPES
|
||||
from ..utils import parse_list
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
from ..AppriseLocale import LazyTranslation
|
||||
|
||||
# Maintains a mapping of all of the Notification services
|
||||
SCHEMA_MAP = {}
|
||||
|
||||
__all__ = [
|
||||
# Notification Services
|
||||
'NotifyBoxcar', 'NotifyEmail', 'NotifyEmby', 'NotifyDiscord',
|
||||
'NotifyFaast', 'NotifyGrowl', 'NotifyIFTTT', 'NotifyJoin', 'NotifyJSON',
|
||||
'NotifyMatterMost', 'NotifyProwl', 'NotifyPushalot',
|
||||
'NotifyPushBullet', 'NotifyPushjet', 'NotifyPushover', 'NotifyRocketChat',
|
||||
'NotifySlack', 'NotifyStride', 'NotifyToasty', 'NotifyTwitter',
|
||||
'NotifyTelegram', 'NotifyXBMC', 'NotifyXML', 'NotifyWindows',
|
||||
|
||||
# Reference
|
||||
'NotifyImageSize', 'NOTIFY_IMAGE_SIZES', 'NotifyType', 'NOTIFY_TYPES',
|
||||
'NotifyBase',
|
||||
|
||||
# NotifyEmail Base References (used for Testing)
|
||||
# NotifyEmail Base Module (used for NotifyEmail testing)
|
||||
'NotifyEmailBase',
|
||||
|
||||
# gntp (used for NotifyGrowl Testing)
|
||||
|
@ -78,3 +70,347 @@ __all__ = [
|
|||
# tweepy (used for NotifyTwitter Testing)
|
||||
'tweepy',
|
||||
]
|
||||
|
||||
# we mirror our base purely for the ability to reset everything; this
|
||||
# is generally only used in testing and should not be used by developers
|
||||
MODULE_MAP = {}
|
||||
|
||||
|
||||
# Load our Lookup Matrix
|
||||
def __load_matrix(path=abspath(dirname(__file__)), name='apprise.plugins'):
|
||||
"""
|
||||
Dynamically load our schema map; this allows us to gracefully
|
||||
skip over modules we simply don't have the dependencies for.
|
||||
|
||||
"""
|
||||
# Used for the detection of additional Notify Services objects
|
||||
# The .py extension is optional as we support loading directories too
|
||||
module_re = re.compile(r'^(?P<name>Notify[a-z0-9]+)(\.py)?$', re.I)
|
||||
|
||||
for f in listdir(path):
|
||||
match = module_re.match(f)
|
||||
if not match:
|
||||
# keep going
|
||||
continue
|
||||
|
||||
# Store our notification/plugin name:
|
||||
plugin_name = match.group('name')
|
||||
try:
|
||||
module = __import__(
|
||||
'{}.{}'.format(name, plugin_name),
|
||||
globals(), locals(),
|
||||
fromlist=[plugin_name])
|
||||
|
||||
except ImportError:
|
||||
# No problem, we can't use this object
|
||||
continue
|
||||
|
||||
if not hasattr(module, plugin_name):
|
||||
# Not a library we can load as it doesn't follow the simple rule
|
||||
# that the class must bear the same name as the notification
|
||||
# file itself.
|
||||
continue
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(module, plugin_name)
|
||||
if not hasattr(plugin, 'app_id'):
|
||||
# Filter out non-notification modules
|
||||
continue
|
||||
|
||||
elif plugin_name in MODULE_MAP:
|
||||
# we're already handling this object
|
||||
continue
|
||||
|
||||
# Add our plugin name to our module map
|
||||
MODULE_MAP[plugin_name] = {
|
||||
'plugin': plugin,
|
||||
'module': module,
|
||||
}
|
||||
|
||||
# Add our module name to our __all__
|
||||
__all__.append(plugin_name)
|
||||
|
||||
# Load our module into memory so it's accessible to all
|
||||
globals()[plugin_name] = plugin
|
||||
|
||||
# Load protocol(s) if defined
|
||||
proto = getattr(plugin, 'protocol', None)
|
||||
if isinstance(proto, six.string_types):
|
||||
if proto not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[proto] = plugin
|
||||
|
||||
elif isinstance(proto, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in proto:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
# Load secure protocol(s) if defined
|
||||
protos = getattr(plugin, 'secure_protocol', None)
|
||||
if isinstance(protos, six.string_types):
|
||||
if protos not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[protos] = plugin
|
||||
|
||||
if isinstance(protos, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in protos:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
return SCHEMA_MAP
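# Illustrative sketch: after __load_matrix() has run at import time, a URL
# schema can be resolved straight to its plugin class through SCHEMA_MAP.
from apprise import plugins

plugin = plugins.SCHEMA_MAP.get('xmpp')   # -> NotifyXMPP, or None if not registered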
|
||||
|
||||
|
||||
# Reset our Lookup Matrix
|
||||
def __reset_matrix():
|
||||
"""
|
||||
Restores the Lookup matrix to its base setting. This is only used during
|
||||
testing and should not be directly called.
|
||||
"""
|
||||
|
||||
# Reset our schema map
|
||||
SCHEMA_MAP.clear()
|
||||
|
||||
# Iterate over our module map so we can clear out our __all__ and globals
|
||||
for plugin_name in MODULE_MAP.keys():
|
||||
# Clear out globals
|
||||
del globals()[plugin_name]
|
||||
|
||||
# Remove element from plugins
|
||||
__all__.remove(plugin_name)
|
||||
|
||||
# Clear out our module map
|
||||
MODULE_MAP.clear()
|
||||
|
||||
|
||||
# Dynamically build our schema base
|
||||
__load_matrix()
|
||||
|
||||
|
||||
def _sanitize_token(tokens, default_delimiter):
|
||||
"""
|
||||
This is called by the details() function and sanitizes the output by
|
||||
populating expected and consistent arguments if they weren't otherwise
|
||||
specified.
|
||||
|
||||
"""
|
||||
|
||||
# Iterate over our tokens
|
||||
for key in tokens.keys():
|
||||
|
||||
for element in tokens[key].keys():
|
||||
# Perform translations (if detected to do so)
|
||||
if isinstance(tokens[key][element], LazyTranslation):
|
||||
tokens[key][element] = str(tokens[key][element])
|
||||
|
||||
if 'alias_of' in tokens[key]:
|
||||
# Do not touch this field
|
||||
continue
|
||||
|
||||
if 'map_to' not in tokens[key]:
|
||||
# Default type to key
|
||||
tokens[key]['map_to'] = key
|
||||
|
||||
if 'type' not in tokens[key]:
|
||||
# Default type to string
|
||||
tokens[key]['type'] = 'string'
|
||||
|
||||
elif tokens[key]['type'].startswith('list') \
|
||||
and 'delim' not in tokens[key]:
|
||||
# Default list delimiter (if not otherwise specified)
|
||||
tokens[key]['delim'] = default_delimiter
|
||||
|
||||
elif tokens[key]['type'].startswith('choice') \
|
||||
and 'default' not in tokens[key] \
|
||||
and 'values' in tokens[key] \
|
||||
and len(tokens[key]['values']) == 1:
|
||||
# If there is only one choice; then make it the default
|
||||
tokens[key]['default'] = \
|
||||
tokens[key]['values'][0]
|
||||
|
||||
if 'regex' in tokens[key]:
|
||||
# Verify that we are a tuple; convert strings to tuples
|
||||
if isinstance(tokens[key]['regex'], six.string_types):
|
||||
# Default tuple setup
|
||||
tokens[key]['regex'] = \
|
||||
(tokens[key]['regex'], None)
|
||||
|
||||
elif not isinstance(tokens[key]['regex'], (list, tuple)):
|
||||
# Invalid regex
|
||||
del tokens[key]['regex']
|
||||
|
||||
if 'required' not in tokens[key]:
|
||||
# Default required is False
|
||||
tokens[key]['required'] = False
|
||||
|
||||
if 'private' not in tokens[key]:
|
||||
# Private flag defaults to False if not set
|
||||
tokens[key]['private'] = False
|
||||
return
|
||||
|
||||
|
||||
def details(plugin):
|
||||
"""
|
||||
Provides templates that can be used by developers to build URLs
|
||||
dynamically.
|
||||
|
||||
If a list of templates is provided, then they will be used over
|
||||
the default value.
|
||||
|
||||
If a list of tokens are provided, then they will over-ride any
|
||||
additional settings built from this script and/or will be appended
|
||||
to them afterwards.
|
||||
"""
|
||||
|
||||
# Our unique list of parsing will be based on the provided templates
|
||||
# if none are provided we will use our own
|
||||
templates = tuple(plugin.templates)
|
||||
|
||||
# The syntax is simple
|
||||
# {
|
||||
# # The token_name must tie back to an entry found in the
|
||||
# # templates list.
|
||||
# 'token_name': {
|
||||
#
|
||||
# # types can be 'string', 'int', 'choice', 'list, 'float'
|
||||
# # both choice and list may additionally have a : identify
|
||||
# # what the list/choice type is comprised of; the default
|
||||
# # is string.
|
||||
# 'type': 'choice:string',
|
||||
#
|
||||
# # values will only exist when the type is a fixed
|
||||
# # list of inputs (generated from type choice for example)
|
||||
#
|
||||
# # If this is a choice:bool then you should ALWAYS define
|
||||
# # this list as a (True, False) such as ('Yes, 'No') or
|
||||
# # ('Enabled', 'Disabled'), etc
|
||||
# 'values': [ 'http', 'https' ],
|
||||
#
|
||||
# # Identifies if the entry specified is required or not
|
||||
# 'required': True,
|
||||
#
|
||||
# # Identify a default value
|
||||
# 'default': 'http',
|
||||
#
|
||||
# # Optional Verification Entries min and max are for floats
|
||||
# # and/or integers
|
||||
# 'min': 4,
|
||||
# 'max': 5,
|
||||
#
|
||||
# # A list will always identify a delimiter. If this is
|
||||
# # part of a path, this may be a '/', or it could be a
|
||||
# # comma and/or space. delimiters are always in a list
|
||||
# # eg (if space and/or comma is a delimiter the entry
|
||||
# # would look like: 'delim': [',' , ' ' ]
|
||||
# 'delim': None,
|
||||
#
|
||||
# # Use regex if you want to share the regular expression
|
||||
# # required to validate the field. The regex will never
|
||||
# # accommodate the prefix (if one is specified). That is
|
||||
# # up to the user building the URLs to include the prefix
|
||||
# # on the URL when constructing it.
|
||||
# # The format is ('regex', 'reg options')
|
||||
# 'regex': (r'[A-Z0-9]+', 'i'),
|
||||
#
|
||||
# # A Prefix is always a string, to differentiate between
|
||||
# # multiple arguments, sometimes content is prefixed.
|
||||
# 'prefix': '@',
|
||||
#
|
||||
# # By default the key of this object is to be interpreted
|
||||
# # as the argument to the notification in question. However
|
||||
# # To accommodate cases where there are multiple types that
|
||||
# # all map to the same entry, one can find a map_to value.
|
||||
# 'map_to': 'function_arg',
|
||||
#
|
||||
# # Some arguments act as an alias_of an already defined object
|
||||
# # This plays a role more with configuration file generation
|
||||
# # since yaml files allow you to define different arguments
|
||||
# # in line to simplify things. If this directive is set, then
|
||||
# # it should be treated exactly the same as the object it is
|
||||
# # an alias of
|
||||
# 'alias_of': 'function_arg',
|
||||
#
|
||||
# # Advise developers to consider the potential sensitivity
|
||||
# # of this field owned by the user. This is for passwords,
|
||||
# # and api keys, etc...
|
||||
# 'private': False,
|
||||
# },
|
||||
# }
|
||||
|
||||
# Template tokens identify the arguments required to initialize the
|
||||
# plugin itself. It identifies all of the tokens and provides some
|
||||
# details on their use. Each token defined should in some way map
|
||||
# back to at least one URL {token} defined in the templates
|
||||
|
||||
# Since we nest a dictionary within a dictionary, a simple copy isn't
|
||||
# enough. a deepcopy allows us to manipulate this object in this
|
||||
# function without obstructing the original.
|
||||
template_tokens = copy.deepcopy(plugin.template_tokens)
|
||||
|
||||
# Arguments and/or Options either have a default value and/or are
|
||||
# optional to be set.
|
||||
#
|
||||
# Since we nest a dictionary within a dictionary, a simple copy isn't
|
||||
# enough. a deepcopy allows us to manipulate this object in this
|
||||
# function without obstructing the original.
|
||||
template_args = copy.deepcopy(plugin.template_args)
|
||||
|
||||
# Our template keyword arguments ?+key=value&-key=value
|
||||
# Basically the user provides both the key and the value. This is only
|
||||
# possible by identifying the key prefix required for them to be
|
||||
# interpreted hence the +/- keys are built into apprise by default for easy
|
||||
# reference. In these cases, entry might look like '+' being the prefix:
|
||||
# {
|
||||
# 'arg_name': {
|
||||
# 'name': 'label',
|
||||
# 'prefix': '+',
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# Since we nest a dictionary within a dictionary, a simple copy isn't
|
||||
# enough. a deepcopy allows us to manipulate this object in this
|
||||
# function without obstructing the original.
|
||||
template_kwargs = copy.deepcopy(plugin.template_kwargs)
|
||||
|
||||
# We automatically create a schema entry
|
||||
template_tokens['schema'] = {
|
||||
'name': _('Schema'),
|
||||
'type': 'choice:string',
|
||||
'required': True,
|
||||
'values': parse_list(plugin.secure_protocol, plugin.protocol)
|
||||
}
|
||||
|
||||
# Sanitize our tokens
|
||||
_sanitize_token(template_tokens, default_delimiter=('/', ))
|
||||
# Delimiter(s) are space and/or comma
|
||||
_sanitize_token(template_args, default_delimiter=(',', ' '))
|
||||
_sanitize_token(template_kwargs, default_delimiter=(',', ' '))
|
||||
|
||||
# Argument/Option Handling
|
||||
for key in list(template_args.keys()):
|
||||
|
||||
# _lookup_default looks up what the default value is
|
||||
if '_lookup_default' in template_args[key]:
|
||||
template_args[key]['default'] = getattr(
|
||||
plugin, template_args[key]['_lookup_default'])
|
||||
|
||||
# Tidy as we don't want to pass this along in response
|
||||
del template_args[key]['_lookup_default']
|
||||
|
||||
# _exists_if causes the argument to only exist IF after checking
|
||||
# the return of an internal variable requiring a check
|
||||
if '_exists_if' in template_args[key]:
|
||||
if not getattr(plugin,
|
||||
template_args[key]['_exists_if']):
|
||||
# Remove entire object
|
||||
del template_args[key]
|
||||
|
||||
else:
|
||||
# We only need to remove this key
|
||||
del template_args[key]['_exists_if']
|
||||
|
||||
return {
|
||||
'templates': templates,
|
||||
'tokens': template_tokens,
|
||||
'args': template_args,
|
||||
'kwargs': template_kwargs,
|
||||
}
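# Illustrative sketch: what the structure returned by details() looks like
# for one of the plugins defined in this diff. The call assumes details() and
# the plugin classes are importable from apprise.plugins as laid out above.
from apprise.plugins import details, NotifyXMPP

info = details(NotifyXMPP)
info['templates']   # tuple of URL templates such as '{schema}://{host}'
info['tokens']      # per-token metadata: 'type', 'required', 'map_to', ...
info['args']        # optional ?key=value arguments (e.g. 'to', 'xep')
info['kwargs']      # prefixed keyword arguments (NotifyXML, for example, defines '+header')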
|
||||
|
|
|
@ -1,23 +1,32 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# A simple collection of general functions
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# This file is part of apprise.
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
|
||||
import six
|
||||
import contextlib
|
||||
import os
|
||||
from os.path import expanduser
|
||||
|
||||
try:
|
||||
|
@ -25,17 +34,12 @@ try:
|
|||
from urllib import unquote
|
||||
from urllib import quote
|
||||
from urlparse import urlparse
|
||||
from urlparse import parse_qsl
|
||||
|
||||
except ImportError:
|
||||
# Python 3.x
|
||||
from urllib.parse import unquote
|
||||
from urllib.parse import quote
|
||||
from urllib.parse import urlparse
|
||||
from urllib.parse import parse_qsl
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# URL Indexing Table for returns via parse_url()
|
||||
VALID_URL_RE = re.compile(
|
||||
|
@ -84,6 +88,31 @@ TIDY_NUX_TRIM_RE = re.compile(
|
|||
),
|
||||
)
|
||||
|
||||
# The handling of custom arguments passed in the URL; we treat any
|
||||
# argument (which would otherwise appear in the qsd area of our parse_url()
|
||||
# function differently if they start with a + or - value
|
||||
NOTIFY_CUSTOM_ADD_TOKENS = re.compile(r'^( |\+)(?P<key>.*)\s*')
|
||||
NOTIFY_CUSTOM_DEL_TOKENS = re.compile(r'^-(?P<key>.*)\s*')
|
||||
|
||||
# Used for attempting to acquire the schema if the URL can't be parsed.
|
||||
GET_SCHEMA_RE = re.compile(r'\s*(?P<schema>[a-z0-9]{2,9})://.*$', re.I)
|
||||
|
||||
# Regular expression based and expanded from:
|
||||
# http://www.regular-expressions.info/email.html
|
||||
GET_EMAIL_RE = re.compile(
|
||||
r"((?P<label>[^+]+)\+)?"
|
||||
r"(?P<userid>[a-z0-9$%=_~-]+"
|
||||
r"(?:\.[a-z0-9$%+=_~-]+)"
|
||||
r"*)@(?P<domain>(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+"
|
||||
r"[a-z0-9](?:[a-z0-9-]*"
|
||||
r"[a-z0-9]))?",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Regular expression used to distinguish between multiple URLs
|
||||
URL_DETECTION_RE = re.compile(
|
||||
r'([a-z0-9]+?:\/\/.*?)[\s,]*(?=$|[a-z0-9]+?:\/\/)', re.I)
|
||||
|
||||
|
||||
def is_hostname(hostname):
|
||||
"""
|
||||
|
@ -99,18 +128,22 @@ def is_hostname(hostname):
|
|||
return all(allowed.match(x) for x in hostname.split("."))
|
||||
|
||||
|
||||
def compat_is_basestring(content):
|
||||
"""
|
||||
Python 3 support for checking if content is unicode and/or
|
||||
of a string type
|
||||
"""
|
||||
try:
|
||||
# Python v2.x
|
||||
return isinstance(content, basestring)
|
||||
def is_email(address):
|
||||
"""Determine if the specified entry is an email address
|
||||
|
||||
except NameError:
|
||||
# Python v3.x
|
||||
return isinstance(content, str)
|
||||
Args:
|
||||
address (str): The string you want to check.
|
||||
|
||||
Returns:
|
||||
bool: Returns True if the address specified is an email address
|
||||
and False if it isn't.
|
||||
"""
|
||||
|
||||
try:
|
||||
return GET_EMAIL_RE.match(address) is not None
|
||||
except TypeError:
|
||||
# invalid syntax
|
||||
return False
|
||||
|
||||
|
||||
def tidy_path(path):
|
||||
|
@ -133,6 +166,81 @@ def tidy_path(path):
|
|||
return path
|
||||
|
||||
|
||||
def parse_qsd(qs):
|
||||
"""
|
||||
Query String Dictionary Builder
|
||||
|
||||
A custom implementation of the parse_qsl() function already provided
|
||||
by Python. This function is slightly more lightweight and gives us
|
||||
more control over parsing out arguments such as the plus/+ symbol
|
||||
at the head of a key/value pair.
|
||||
|
||||
qs should be a query string part made up as part of the URL such as
|
||||
a=1&c=2&d=
|
||||
|
||||
a=1 gets interpreted as { 'a': '1' }
|
||||
a= gets interpreted as { 'a': '' }
|
||||
a gets interpreted as { 'a': '' }
|
||||
|
||||
|
||||
This function returns a result object that fits with the apprise
|
||||
expected parameters (populating the 'qsd' portion of the dictionary
|
||||
"""
|
||||
|
||||
# Our return result set:
|
||||
result = {
|
||||
# The arguments passed in (the parsed query). This is in a dictionary
|
||||
# of {'key': 'val', etc }. Keys are all made lowercase before storing
|
||||
# to simplify access to them.
|
||||
'qsd': {},
|
||||
|
||||
# Detected Entries that start with + or - are additionally stored in
|
||||
# these values (un-touched). The +/- however are stripped from their
|
||||
# name before they are stored here.
|
||||
'qsd+': {},
|
||||
'qsd-': {},
|
||||
}
|
||||
|
||||
pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
|
||||
for name_value in pairs:
|
||||
nv = name_value.split('=', 1)
|
||||
# Handle case of a control-name with no equal sign
|
||||
if len(nv) != 2:
|
||||
nv.append('')
|
||||
|
||||
# Apprise keys can start with a + symbol; so we need to skip over
|
||||
# the very first entry
|
||||
key = '{}{}'.format(
|
||||
'' if len(nv[0]) == 0 else nv[0][0],
|
||||
'' if len(nv[0]) <= 1 else nv[0][1:].replace('+', ' '),
|
||||
)
|
||||
|
||||
key = unquote(key)
|
||||
key = '' if not key else key
|
||||
|
||||
val = nv[1].replace('+', ' ')
|
||||
val = unquote(val)
|
||||
val = '' if not val else val.strip()
|
||||
|
||||
# Always Query String Dictionary (qsd) for every entry we have
|
||||
# content is always made lowercase for easy indexing
|
||||
result['qsd'][key.lower().strip()] = val
|
||||
|
||||
# Check for tokens that start with a addition/plus symbol (+)
|
||||
k = NOTIFY_CUSTOM_ADD_TOKENS.match(key)
|
||||
if k is not None:
|
||||
# Store content 'as-is'
|
||||
result['qsd+'][k.group('key')] = val
|
||||
|
||||
# Check for tokens that start with a subtraction/hyphen symbol (-)
|
||||
k = NOTIFY_CUSTOM_DEL_TOKENS.match(key)
|
||||
if k is not None:
|
||||
# Store content 'as-is'
|
||||
result['qsd-'][k.group('key')] = val
|
||||
|
||||
return result
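# Illustrative sketch: how parse_qsd() splits out the '+' and '-' prefixed
# query arguments described above. The query string is made up for
# illustration; the module path assumes the layout shown in this diff.
from apprise.utils import parse_qsd

result = parse_qsd('mode=fast&+X-Token=abc&-X-Drop=1')
# result['qsd']  -> {'mode': 'fast', '+x-token': 'abc', '-x-drop': '1'}
# result['qsd+'] -> {'X-Token': 'abc'}   # '+' stripped, original case kept
# result['qsd-'] -> {'X-Drop': '1'}      # '-' stripped, original case kept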
|
||||
|
||||
|
||||
def parse_url(url, default_schema='http', verify_host=True):
|
||||
"""A function that greatly simplifies the parsing of a url
|
||||
specified by the end user.
|
||||
|
@ -156,7 +264,7 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
content could not be extracted.
|
||||
"""
|
||||
|
||||
if not compat_is_basestring(url):
|
||||
if not isinstance(url, six.string_types):
|
||||
# Simple error checking
|
||||
return None
|
||||
|
||||
|
@ -180,10 +288,17 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
'schema': None,
|
||||
# The schema
|
||||
'url': None,
|
||||
# The arguments passed in (the parsed query)
|
||||
# This is in a dictionary of {'key': 'val', etc }
|
||||
# The arguments passed in (the parsed query). This is in a dictionary
|
||||
# of {'key': 'val', etc }. Keys are all made lowercase before storing
|
||||
# to simplify access to them.
|
||||
# qsd = Query String Dictionary
|
||||
'qsd': {}
|
||||
'qsd': {},
|
||||
|
||||
# Detected Entries that start with + or - are additionally stored in
|
||||
# these values (un-touched). The +/- however are stripped from their
|
||||
# name before they are stored here.
|
||||
'qsd+': {},
|
||||
'qsd-': {},
|
||||
}
|
||||
|
||||
qsdata = ''
|
||||
|
@ -210,17 +325,22 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
# No qsdata
|
||||
pass
|
||||
|
||||
# Parse Query Arguments ?val=key&key=val
|
||||
# while ensuring that all keys are lowercase
|
||||
if qsdata:
|
||||
result.update(parse_qsd(qsdata))
|
||||
|
||||
# Now do a proper extraction of data
|
||||
parsed = urlparse('http://%s' % host)
|
||||
|
||||
# Parse results
|
||||
result['host'] = parsed[1].strip()
|
||||
|
||||
if not result['host']:
|
||||
# Nothing more we can do without a hostname
|
||||
return None
|
||||
|
||||
result['fullpath'] = quote(unquote(tidy_path(parsed[2].strip())))
|
||||
|
||||
try:
|
||||
# Handle trailing slashes removed by tidy_path
|
||||
if result['fullpath'][-1] not in ('/', '\\') and \
|
||||
|
@ -232,16 +352,6 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
# and therefore, no trailing slash
|
||||
pass
|
||||
|
||||
# Parse Query Arugments ?val=key&key=val
|
||||
# while ensureing that all keys are lowercase
|
||||
if qsdata:
|
||||
result['qsd'] = dict([(k.lower().strip(), v.strip())
|
||||
for k, v in parse_qsl(
|
||||
qsdata,
|
||||
keep_blank_values=True,
|
||||
strict_parsing=False,
|
||||
)])
|
||||
|
||||
if not result['fullpath']:
|
||||
# Default
|
||||
result['fullpath'] = None
|
||||
|
@ -256,7 +366,7 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
result['query'] = None
|
||||
try:
|
||||
(result['user'], result['host']) = \
|
||||
re.split(r'[\s@]+', result['host'])[:2]
|
||||
re.split(r'[@]+', result['host'])[:2]
|
||||
|
||||
except ValueError:
|
||||
# no problem then, host only exists
|
||||
|
@ -266,7 +376,7 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
if result['user'] is not None:
|
||||
try:
|
||||
(result['user'], result['password']) = \
|
||||
re.split(r'[:\s]+', result['user'])[:2]
|
||||
re.split(r'[:]+', result['user'])[:2]
|
||||
|
||||
except ValueError:
|
||||
# no problem then, user only exists
|
||||
|
@ -275,7 +385,7 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
|
||||
try:
|
||||
(result['host'], result['port']) = \
|
||||
re.split(r'[\s:]+', result['host'])[:2]
|
||||
re.split(r'[:]+', result['host'])[:2]
|
||||
|
||||
except ValueError:
|
||||
# no problem then, user only exists
|
||||
|
@ -299,10 +409,10 @@ def parse_url(url, default_schema='http', verify_host=True):
|
|||
|
||||
# Re-assemble cleaned up version of the url
|
||||
result['url'] = '%s://' % result['schema']
|
||||
if compat_is_basestring(result['user']):
|
||||
if isinstance(result['user'], six.string_types):
|
||||
result['url'] += result['user']
|
||||
|
||||
if compat_is_basestring(result['password']):
|
||||
if isinstance(result['password'], six.string_types):
|
||||
result['url'] += ':%s@' % result['password']
|
||||
|
||||
else:
|
||||
|
@ -328,7 +438,7 @@ def parse_bool(arg, default=False):
|
|||
If the content could not be parsed, then the default is returned.
|
||||
"""
|
||||
|
||||
if compat_is_basestring(arg):
|
||||
if isinstance(arg, six.string_types):
|
||||
# no = no - False
|
||||
# of = short for off - False
|
||||
# 0 = int for False
|
||||
|
@ -358,6 +468,28 @@ def parse_bool(arg, default=False):
|
|||
return bool(arg)
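# Illustrative sketch: parse_bool() accepts the usual human spellings and
# falls back to the supplied default when the input can't be interpreted.
from apprise.utils import parse_bool

parse_bool('yes')            # True
parse_bool('off')            # False
parse_bool('bogus', False)   # not parseable -> default (False)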
|
||||
|
||||
|
||||
def split_urls(urls):
|
||||
"""
|
||||
Takes a string containing URLs separated by comma's and/or spaces and
|
||||
returns a list.
|
||||
"""
|
||||
|
||||
try:
|
||||
results = URL_DETECTION_RE.findall(urls)
|
||||
|
||||
except TypeError:
|
||||
results = []
|
||||
|
||||
if len(results) > 0 and results[len(results) - 1][-1] != urls[-1]:
|
||||
# we always want to save the end of the URL if we can; this handles
|
||||
# cases where there is actually a comma (,) at the end of a single URL
|
||||
# that would have otherwise got lost when our regex passed over it.
|
||||
results[len(results) - 1] += \
|
||||
re.match(r'.*?([\s,]+)?$', urls).group(1).rstrip()
|
||||
|
||||
return results
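# Illustrative sketch: split_urls() breaks a comma and/or space separated
# string of URLs into a list. The URLs are made up for illustration.
from apprise.utils import split_urls

split_urls('json://host1/, xml://host2/')
# -> ['json://host1/', 'xml://host2/']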
|
||||
|
||||
|
||||
def parse_list(*args):
|
||||
"""
|
||||
Take a string list and break it into a delimited
|
||||
|
@ -381,12 +513,16 @@ def parse_list(*args):
|
|||
|
||||
result = []
|
||||
for arg in args:
|
||||
if compat_is_basestring(arg):
|
||||
if isinstance(arg, six.string_types):
|
||||
result += re.split(STRING_DELIMITERS, arg)
|
||||
|
||||
elif isinstance(arg, (set, list, tuple)):
|
||||
result += parse_list(*arg)
|
||||
|
||||
elif arg is None:
|
||||
# Ignore
|
||||
continue
|
||||
|
||||
else:
|
||||
# Convert whatever it is to a string and work with it
|
||||
result += parse_list(str(arg))
|
||||
|
@ -398,3 +534,86 @@ def parse_list(*args):
|
|||
# a list, we need to change it into a list object to remain compatible with
|
||||
# both distribution types.
|
||||
return sorted([x for x in filter(bool, list(set(result)))])
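# Illustrative sketch: parse_list() accepts any mix of delimited strings and
# iterables and returns a sorted, de-duplicated list.
from apprise.utils import parse_list

parse_list('tagA, tagB', ['tagB', 'tagC'])
# -> ['tagA', 'tagB', 'tagC']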
|
||||
|
||||
|
||||
def is_exclusive_match(logic, data):
|
||||
"""
|
||||
|
||||
The data variable should always be a set of strings that the logic can be
|
||||
compared against. It should be a set. If it isn't already, then it will
|
||||
be converted as such. These identify the tags themselves.
|
||||
|
||||
Our logic should be a list as well:
|
||||
- top level entries are treated as an 'or'
|
||||
- second level (or more) entries are treated as 'and'
|
||||
|
||||
examples:
|
||||
logic="tagA, tagB" = tagA or tagB
|
||||
logic=['tagA', 'tagB'] = tagA or tagB
|
||||
logic=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB
|
||||
logic=[('tagB', 'tagC')] = tagB and tagC
|
||||
"""
|
||||
|
||||
if logic is None:
|
||||
# If there is no logic to apply then we're done early
|
||||
return True
|
||||
|
||||
elif isinstance(logic, six.string_types):
|
||||
# Update our logic to support our delimiters
|
||||
logic = set(parse_list(logic))
|
||||
|
||||
if not isinstance(logic, (list, tuple, set)):
|
||||
# garbage input
|
||||
return False
|
||||
|
||||
# using the data detected; determine if we'll allow the
|
||||
# notification to be sent or not
|
||||
matched = (len(logic) == 0)
|
||||
|
||||
# Every entry here will be or'ed with the next
|
||||
for entry in logic:
|
||||
if not isinstance(entry, (six.string_types, list, tuple, set)):
|
||||
# Garbage entry in our logic found
|
||||
return False
|
||||
|
||||
# treat these entries as though all elements found
|
||||
# must exist in the notification service
|
||||
entries = set(parse_list(entry))
|
||||
|
||||
if len(entries.intersection(data)) == len(entries):
|
||||
# our set contains all of the entries found
|
||||
# in our notification data set
|
||||
matched = True
|
||||
break
|
||||
|
||||
# else: keep looking
|
||||
|
||||
# Return True if we matched against our logic (or simply none was
|
||||
# specified).
|
||||
return matched
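# Illustrative sketch: the or/and tag logic documented above.
from apprise.utils import is_exclusive_match

is_exclusive_match(logic='tagA, tagB', data={'tagB'})                 # True  (or)
is_exclusive_match(logic=[('tagA', 'tagC')], data={'tagA'})           # False (and)
is_exclusive_match(logic=[('tagA', 'tagC')], data={'tagA', 'tagC'})   # True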
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def environ(*remove, **update):
|
||||
"""
|
||||
Temporarily updates the ``os.environ`` dictionary in-place.
|
||||
|
||||
The ``os.environ`` dictionary is updated in-place so that the modification
|
||||
is sure to work in all situations.
|
||||
|
||||
:param remove: Environment variable(s) to remove.
|
||||
:param update: Dictionary of environment variables and values to
|
||||
add/update.
|
||||
"""
|
||||
|
||||
# Create a backup of our environment for restoration purposes
|
||||
env_orig = os.environ.copy()
|
||||
|
||||
try:
|
||||
os.environ.update(update)
|
||||
[os.environ.pop(k, None) for k in remove]
|
||||
yield
|
||||
|
||||
finally:
|
||||
# Restore our snapshot
|
||||
os.environ = env_orig.copy()
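# Illustrative sketch: environ() temporarily adds/removes environment
# variables for the duration of a block and then restores the originals.
# The variable names are made up for illustration.
import os
from apprise.utils import environ

with environ('UNWANTED_VAR', WANTED_VAR='1'):
    assert os.environ.get('WANTED_VAR') == '1'
    assert 'UNWANTED_VAR' not in os.environ
# outside the block, os.environ is back to its original contents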
|
||||
|
|
|
@ -56,7 +56,7 @@ DEFAULT_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWeb"\
|
|||
"Kit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36"
|
||||
|
||||
|
||||
ENDPOINT_RE = re.compile(ur'(?uis)<form action="/subtitles/(.+)">.*?<input type="text"')
|
||||
ENDPOINT_RE = re.compile(ur'(?uis)<form.+?action="/subtitles/(.+)">.*?<input type="text"')
|
||||
|
||||
|
||||
class NewEndpoint(Exception):
|
||||
|
@ -64,24 +64,14 @@ class NewEndpoint(Exception):
|
|||
|
||||
|
||||
# utils
|
||||
def soup_for(url, session=None, user_agent=DEFAULT_USER_AGENT):
|
||||
def soup_for(url, data=None, session=None, user_agent=DEFAULT_USER_AGENT):
|
||||
url = re.sub("\s", "+", url)
|
||||
if not session:
|
||||
r = Request(url, data=None, headers=dict(HEADERS, **{"User-Agent": user_agent}))
|
||||
html = urlopen(r).read().decode("utf-8")
|
||||
else:
|
||||
ret = session.get(url)
|
||||
try:
|
||||
ret.raise_for_status()
|
||||
except requests.HTTPError, e:
|
||||
if e.response.status_code == 404:
|
||||
m = ENDPOINT_RE.search(ret.text)
|
||||
if m:
|
||||
try:
|
||||
raise NewEndpoint(m.group(1))
|
||||
except:
|
||||
pass
|
||||
raise
|
||||
ret = session.post(url, data=data)
|
||||
ret.raise_for_status()
|
||||
html = ret.text
|
||||
return BeautifulSoup(html, "html.parser")
|
||||
|
||||
|
@ -272,33 +262,22 @@ def get_first_film(soup, section, year=None, session=None):
|
|||
|
||||
def search(term, release=True, session=None, year=None, limit_to=SearchTypes.Exact, throttle=0):
|
||||
# note to subscene: if you actually start to randomize the endpoint, we'll have to query your server even more
|
||||
endpoints = ["searching", "search", "srch", "find"]
|
||||
|
||||
if release:
|
||||
endpoints = ["release"]
|
||||
endpoint = "release"
|
||||
else:
|
||||
endpoint = region.get("subscene_endpoint")
|
||||
if endpoint is not NO_VALUE and endpoint not in endpoints:
|
||||
endpoints.insert(0, endpoint)
|
||||
endpoint = region.get("subscene_endpoint2")
|
||||
if endpoint is NO_VALUE:
|
||||
ret = session.get(SITE_DOMAIN)
|
||||
time.sleep(throttle)
|
||||
m = ENDPOINT_RE.search(ret.text)
|
||||
if m:
|
||||
endpoint = m.group(1).strip()
|
||||
logger.debug("Switching main endpoint to %s", endpoint)
|
||||
region.set("subscene_endpoint2", endpoint)
|
||||
|
||||
soup = None
|
||||
for endpoint in endpoints:
|
||||
try:
|
||||
soup = soup_for("%s/subtitles/%s?q=%s" % (SITE_DOMAIN, endpoint, term),
|
||||
session=session)
|
||||
|
||||
except NewEndpoint, e:
|
||||
new_endpoint = e.message
|
||||
if new_endpoint not in endpoints:
|
||||
new_endpoint = new_endpoint.strip()
|
||||
logger.debug("Switching main endpoint to %s", new_endpoint)
|
||||
region.set("subscene_endpoint", new_endpoint)
|
||||
time.sleep(throttle)
|
||||
return search(term, release=release, session=session, year=year, limit_to=limit_to, throttle=throttle)
|
||||
else:
|
||||
region.delete("subscene_endpoint")
|
||||
raise Exception("New endpoint %s didn't work; exiting" % new_endpoint)
|
||||
break
|
||||
soup = soup_for("%s/subtitles/%s" % (SITE_DOMAIN, endpoint), data={"query": term},
|
||||
session=session)
|
||||
|
||||
if soup:
|
||||
if "Subtitle search by" in str(soup):
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
apprise=0.5.2
|
||||
apprise=0.7.8
|
||||
apscheduler=3.5.1
|
||||
babelfish=0.5.5
|
||||
backports.functools-lru-cache=1.5
|
||||
|
|
406
libs/yaml/__init__.py
Normal file
|
@ -0,0 +1,406 @@
|
|||
|
||||
from error import *
|
||||
|
||||
from tokens import *
|
||||
from events import *
|
||||
from nodes import *
|
||||
|
||||
from loader import *
|
||||
from dumper import *
|
||||
|
||||
__version__ = '5.1'
|
||||
|
||||
try:
|
||||
from cyaml import *
|
||||
__with_libyaml__ = True
|
||||
except ImportError:
|
||||
__with_libyaml__ = False
|
||||
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# Warnings control
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# 'Global' warnings state:
|
||||
_warnings_enabled = {
|
||||
'YAMLLoadWarning': True,
|
||||
}
|
||||
|
||||
# Get or set global warnings' state
|
||||
def warnings(settings=None):
|
||||
if settings is None:
|
||||
return _warnings_enabled
|
||||
|
||||
if type(settings) is dict:
|
||||
for key in settings:
|
||||
if key in _warnings_enabled:
|
||||
_warnings_enabled[key] = settings[key]
|
||||
|
||||
# Warn when load() is called without Loader=...
|
||||
class YAMLLoadWarning(RuntimeWarning):
|
||||
pass
|
||||
|
||||
def load_warning(method):
|
||||
if _warnings_enabled['YAMLLoadWarning'] is False:
|
||||
return
|
||||
|
||||
import warnings
|
||||
|
||||
message = (
|
||||
"calling yaml.%s() without Loader=... is deprecated, as the "
|
||||
"default Loader is unsafe. Please read "
|
||||
"https://msg.pyyaml.org/load for full details."
|
||||
) % method
|
||||
|
||||
warnings.warn(message, YAMLLoadWarning, stacklevel=3)
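# Illustrative sketch: disabling the deprecation warning shown above, and the
# preferred alternative of using safe_load() for untrusted input.
import yaml

yaml.warnings({'YAMLLoadWarning': False})   # silence the Loader warning globally
data = yaml.safe_load('key: value')         # resolves only basic YAML tags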
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
def scan(stream, Loader=Loader):
|
||||
"""
|
||||
Scan a YAML stream and produce scanning tokens.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_token():
|
||||
yield loader.get_token()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def parse(stream, Loader=Loader):
|
||||
"""
|
||||
Parse a YAML stream and produce parsing events.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_event():
|
||||
yield loader.get_event()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def compose(stream, Loader=Loader):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding representation tree.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
return loader.get_single_node()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def compose_all(stream, Loader=Loader):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding representation trees.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_node():
|
||||
yield loader.get_node()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def load(stream, Loader=None):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding Python object.
|
||||
"""
|
||||
if Loader is None:
|
||||
load_warning('load')
|
||||
Loader = FullLoader
|
||||
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
return loader.get_single_data()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def load_all(stream, Loader=None):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding Python objects.
|
||||
"""
|
||||
if Loader is None:
|
||||
load_warning('load_all')
|
||||
Loader = FullLoader
|
||||
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_data():
|
||||
yield loader.get_data()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def full_load(stream):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding Python object.
|
||||
|
||||
Resolve all tags except those known to be
|
||||
unsafe on untrusted input.
|
||||
"""
|
||||
return load(stream, FullLoader)
|
||||
|
||||
def full_load_all(stream):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding Python objects.
|
||||
|
||||
Resolve all tags except those known to be
|
||||
unsafe on untrusted input.
|
||||
"""
|
||||
return load_all(stream, FullLoader)
|
||||
|
||||
def safe_load(stream):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding Python object.
|
||||
|
||||
Resolve only basic YAML tags. This is known
|
||||
to be safe for untrusted input.
|
||||
"""
|
||||
return load(stream, SafeLoader)
|
||||
|
||||
def safe_load_all(stream):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding Python objects.
|
||||
|
||||
Resolve only basic YAML tags. This is known
|
||||
to be safe for untrusted input.
|
||||
"""
|
||||
return load_all(stream, SafeLoader)
|
||||
|
||||
def unsafe_load(stream):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding Python object.
|
||||
|
||||
Resolve all tags, even those known to be
|
||||
unsafe on untrusted input.
|
||||
"""
|
||||
return load(stream, UnsafeLoader)
|
||||
|
||||
def unsafe_load_all(stream):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding Python objects.
|
||||
|
||||
Resolve all tags, even those known to be
|
||||
unsafe on untrusted input.
|
||||
"""
|
||||
return load_all(stream, UnsafeLoader)
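A minimal usage sketch of the load family defined above (illustrative only, again assuming the vendored copy imports as `yaml`): safe_load resolves only basic tags, full_load/FullLoader resolves more but refuses arbitrary object construction, and unsafe_load keeps the old permissive behaviour.

import yaml

doc = "name: Bazarr\nlanguages: [en, fr]\n"

print(yaml.safe_load(doc))                     # {'name': 'Bazarr', 'languages': ['en', 'fr']}
print(yaml.load(doc, Loader=yaml.FullLoader))  # same result, via an explicit FullLoader
print(yaml.full_load(doc))                     # shorthand for the line above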
|
||||
|
||||
def emit(events, stream=None, Dumper=Dumper,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None):
|
||||
"""
|
||||
Emit YAML parsing events into a stream.
|
||||
If stream is None, return the produced string instead.
|
||||
"""
|
||||
getvalue = None
|
||||
if stream is None:
|
||||
from StringIO import StringIO
|
||||
stream = StringIO()
|
||||
getvalue = stream.getvalue
|
||||
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
|
||||
allow_unicode=allow_unicode, line_break=line_break)
|
||||
try:
|
||||
for event in events:
|
||||
dumper.emit(event)
|
||||
finally:
|
||||
dumper.dispose()
|
||||
if getvalue:
|
||||
return getvalue()
|
||||
|
||||
def serialize_all(nodes, stream=None, Dumper=Dumper,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding='utf-8', explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None):
|
||||
"""
|
||||
Serialize a sequence of representation trees into a YAML stream.
|
||||
If stream is None, return the produced string instead.
|
||||
"""
|
||||
getvalue = None
|
||||
if stream is None:
|
||||
if encoding is None:
|
||||
from StringIO import StringIO
|
||||
else:
|
||||
from cStringIO import StringIO
|
||||
stream = StringIO()
|
||||
getvalue = stream.getvalue
|
||||
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
|
||||
allow_unicode=allow_unicode, line_break=line_break,
|
||||
encoding=encoding, version=version, tags=tags,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end)
|
||||
try:
|
||||
dumper.open()
|
||||
for node in nodes:
|
||||
dumper.serialize(node)
|
||||
dumper.close()
|
||||
finally:
|
||||
dumper.dispose()
|
||||
if getvalue:
|
||||
return getvalue()
|
||||
|
||||
def serialize(node, stream=None, Dumper=Dumper, **kwds):
|
||||
"""
|
||||
Serialize a representation tree into a YAML stream.
|
||||
If stream is None, return the produced string instead.
|
||||
"""
|
||||
return serialize_all([node], stream, Dumper=Dumper, **kwds)
|
||||
|
||||
def dump_all(documents, stream=None, Dumper=Dumper,
|
||||
default_style=None, default_flow_style=False,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding='utf-8', explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None, sort_keys=True):
|
||||
"""
|
||||
Serialize a sequence of Python objects into a YAML stream.
|
||||
If stream is None, return the produced string instead.
|
||||
"""
|
||||
getvalue = None
|
||||
if stream is None:
|
||||
if encoding is None:
|
||||
from StringIO import StringIO
|
||||
else:
|
||||
from cStringIO import StringIO
|
||||
stream = StringIO()
|
||||
getvalue = stream.getvalue
|
||||
dumper = Dumper(stream, default_style=default_style,
|
||||
default_flow_style=default_flow_style,
|
||||
canonical=canonical, indent=indent, width=width,
|
||||
allow_unicode=allow_unicode, line_break=line_break,
|
||||
encoding=encoding, version=version, tags=tags,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys)
|
||||
try:
|
||||
dumper.open()
|
||||
for data in documents:
|
||||
dumper.represent(data)
|
||||
dumper.close()
|
||||
finally:
|
||||
dumper.dispose()
|
||||
if getvalue:
|
||||
return getvalue()
|
||||
|
||||
def dump(data, stream=None, Dumper=Dumper, **kwds):
|
||||
"""
|
||||
Serialize a Python object into a YAML stream.
|
||||
If stream is None, return the produced string instead.
|
||||
"""
|
||||
return dump_all([data], stream, Dumper=Dumper, **kwds)
|
||||
|
||||
def safe_dump_all(documents, stream=None, **kwds):
|
||||
"""
|
||||
Serialize a sequence of Python objects into a YAML stream.
|
||||
Produce only basic YAML tags.
|
||||
If stream is None, return the produced string instead.
|
||||
"""
|
||||
return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
|
||||
|
||||
def safe_dump(data, stream=None, **kwds):
|
||||
"""
|
||||
Serialize a Python object into a YAML stream.
|
||||
Produce only basic YAML tags.
|
||||
If stream is None, return the produced string instead.
|
||||
"""
|
||||
return dump_all([data], stream, Dumper=SafeDumper, **kwds)
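And the mirror image for the dump family, as an illustrative sketch (the nested keys are invented for the example):

import yaml

settings = {'general': {'debug': False, 'port': 6767}}
print(yaml.safe_dump(settings, default_flow_style=False))
# general:
#   debug: false
#   port: 6767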
|
||||
|
||||
def add_implicit_resolver(tag, regexp, first=None,
|
||||
Loader=Loader, Dumper=Dumper):
|
||||
"""
|
||||
Add an implicit scalar detector.
|
||||
If an implicit scalar value matches the given regexp,
|
||||
the corresponding tag is assigned to the scalar.
|
||||
first is a sequence of possible initial characters or None.
|
||||
"""
|
||||
Loader.add_implicit_resolver(tag, regexp, first)
|
||||
Dumper.add_implicit_resolver(tag, regexp, first)
|
||||
|
||||
def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
|
||||
"""
|
||||
Add a path based resolver for the given tag.
|
||||
A path is a list of keys that forms a path
|
||||
to a node in the representation tree.
|
||||
Keys can be string values, integers, or None.
|
||||
"""
|
||||
Loader.add_path_resolver(tag, path, kind)
|
||||
Dumper.add_path_resolver(tag, path, kind)
|
||||
|
||||
def add_constructor(tag, constructor, Loader=Loader):
|
||||
"""
|
||||
Add a constructor for the given tag.
|
||||
Constructor is a function that accepts a Loader instance
|
||||
and a node object and produces the corresponding Python object.
|
||||
"""
|
||||
Loader.add_constructor(tag, constructor)
|
||||
|
||||
def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
|
||||
"""
|
||||
Add a multi-constructor for the given tag prefix.
|
||||
Multi-constructor is called for a node if its tag starts with tag_prefix.
|
||||
Multi-constructor accepts a Loader instance, a tag suffix,
|
||||
and a node object and produces the corresponding Python object.
|
||||
"""
|
||||
Loader.add_multi_constructor(tag_prefix, multi_constructor)
|
||||
|
||||
def add_representer(data_type, representer, Dumper=Dumper):
|
||||
"""
|
||||
Add a representer for the given type.
|
||||
Representer is a function accepting a Dumper instance
|
||||
and an instance of the given data type
|
||||
and producing the corresponding representation node.
|
||||
"""
|
||||
Dumper.add_representer(data_type, representer)
|
||||
|
||||
def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
|
||||
"""
|
||||
Add a representer for the given type.
|
||||
Multi-representer is a function accepting a Dumper instance
|
||||
and an instance of the given data type or subtype
|
||||
and producing the corresponding representation node.
|
||||
"""
|
||||
Dumper.add_multi_representer(data_type, multi_representer)
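A hedged sketch of wiring a custom tag through add_representer/add_constructor; the Language class and the !lang tag are invented for the example and are not part of this change:

import yaml

class Language(object):
    def __init__(self, code):
        self.code = code

def represent_language(dumper, data):
    # Emit the object as a scalar carrying the custom !lang tag.
    return dumper.represent_scalar(u'!lang', data.code)

def construct_language(loader, node):
    return Language(loader.construct_scalar(node))

yaml.add_representer(Language, represent_language)    # registers on Dumper
yaml.add_constructor(u'!lang', construct_language)    # registers on Loader

print(yaml.dump(Language('en')))                       # emits something like: !lang en
print(yaml.load("!lang en", Loader=yaml.Loader).code)  # 'en'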
|
||||
|
||||
class YAMLObjectMetaclass(type):
|
||||
"""
|
||||
The metaclass for YAMLObject.
|
||||
"""
|
||||
def __init__(cls, name, bases, kwds):
|
||||
super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
|
||||
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
|
||||
cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
|
||||
cls.yaml_dumper.add_representer(cls, cls.to_yaml)
|
||||
|
||||
class YAMLObject(object):
|
||||
"""
|
||||
An object that can dump itself to a YAML stream
|
||||
and load itself from a YAML stream.
|
||||
"""
|
||||
|
||||
__metaclass__ = YAMLObjectMetaclass
|
||||
__slots__ = () # no direct instantiation, so allow immutable subclasses
|
||||
|
||||
yaml_loader = Loader
|
||||
yaml_dumper = Dumper
|
||||
|
||||
yaml_tag = None
|
||||
yaml_flow_style = None
|
||||
|
||||
def from_yaml(cls, loader, node):
|
||||
"""
|
||||
Convert a representation node to a Python object.
|
||||
"""
|
||||
return loader.construct_yaml_object(node, cls)
|
||||
from_yaml = classmethod(from_yaml)
|
||||
|
||||
def to_yaml(cls, dumper, data):
|
||||
"""
|
||||
Convert a Python object to a representation node.
|
||||
"""
|
||||
return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
|
||||
flow_style=cls.yaml_flow_style)
|
||||
to_yaml = classmethod(to_yaml)
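YAMLObject ties the two registrations above together via its metaclass; a small hypothetical subclass (not from the diff) shows the round trip:

import yaml

class Provider(yaml.YAMLObject):
    # Hypothetical example class; yaml_tag drives both loading and dumping.
    yaml_tag = u'!Provider'

    def __init__(self, name, enabled=True):
        self.name = name
        self.enabled = enabled

p = yaml.load("!Provider {name: subscene, enabled: false}", Loader=yaml.Loader)
print(p.name)        # subscene
print(yaml.dump(p))
# !Provider
# enabled: false
# name: subscene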
libs/yaml/composer.py (new file, 139 lines)
@@ -0,0 +1,139 @@
|
|||
|
||||
__all__ = ['Composer', 'ComposerError']
|
||||
|
||||
from error import MarkedYAMLError
|
||||
from events import *
|
||||
from nodes import *
|
||||
|
||||
class ComposerError(MarkedYAMLError):
|
||||
pass
|
||||
|
||||
class Composer(object):
|
||||
|
||||
def __init__(self):
|
||||
self.anchors = {}
|
||||
|
||||
def check_node(self):
|
||||
# Drop the STREAM-START event.
|
||||
if self.check_event(StreamStartEvent):
|
||||
self.get_event()
|
||||
|
||||
# If there are more documents available?
|
||||
return not self.check_event(StreamEndEvent)
|
||||
|
||||
def get_node(self):
|
||||
# Get the root node of the next document.
|
||||
if not self.check_event(StreamEndEvent):
|
||||
return self.compose_document()
|
||||
|
||||
def get_single_node(self):
|
||||
# Drop the STREAM-START event.
|
||||
self.get_event()
|
||||
|
||||
# Compose a document if the stream is not empty.
|
||||
document = None
|
||||
if not self.check_event(StreamEndEvent):
|
||||
document = self.compose_document()
|
||||
|
||||
# Ensure that the stream contains no more documents.
|
||||
if not self.check_event(StreamEndEvent):
|
||||
event = self.get_event()
|
||||
raise ComposerError("expected a single document in the stream",
|
||||
document.start_mark, "but found another document",
|
||||
event.start_mark)
|
||||
|
||||
# Drop the STREAM-END event.
|
||||
self.get_event()
|
||||
|
||||
return document
|
||||
|
||||
def compose_document(self):
|
||||
# Drop the DOCUMENT-START event.
|
||||
self.get_event()
|
||||
|
||||
# Compose the root node.
|
||||
node = self.compose_node(None, None)
|
||||
|
||||
# Drop the DOCUMENT-END event.
|
||||
self.get_event()
|
||||
|
||||
self.anchors = {}
|
||||
return node
|
||||
|
||||
def compose_node(self, parent, index):
|
||||
if self.check_event(AliasEvent):
|
||||
event = self.get_event()
|
||||
anchor = event.anchor
|
||||
if anchor not in self.anchors:
|
||||
raise ComposerError(None, None, "found undefined alias %r"
|
||||
% anchor.encode('utf-8'), event.start_mark)
|
||||
return self.anchors[anchor]
|
||||
event = self.peek_event()
|
||||
anchor = event.anchor
|
||||
if anchor is not None:
|
||||
if anchor in self.anchors:
|
||||
raise ComposerError("found duplicate anchor %r; first occurrence"
|
||||
% anchor.encode('utf-8'), self.anchors[anchor].start_mark,
|
||||
"second occurrence", event.start_mark)
|
||||
self.descend_resolver(parent, index)
|
||||
if self.check_event(ScalarEvent):
|
||||
node = self.compose_scalar_node(anchor)
|
||||
elif self.check_event(SequenceStartEvent):
|
||||
node = self.compose_sequence_node(anchor)
|
||||
elif self.check_event(MappingStartEvent):
|
||||
node = self.compose_mapping_node(anchor)
|
||||
self.ascend_resolver()
|
||||
return node
|
||||
|
||||
def compose_scalar_node(self, anchor):
|
||||
event = self.get_event()
|
||||
tag = event.tag
|
||||
if tag is None or tag == u'!':
|
||||
tag = self.resolve(ScalarNode, event.value, event.implicit)
|
||||
node = ScalarNode(tag, event.value,
|
||||
event.start_mark, event.end_mark, style=event.style)
|
||||
if anchor is not None:
|
||||
self.anchors[anchor] = node
|
||||
return node
|
||||
|
||||
def compose_sequence_node(self, anchor):
|
||||
start_event = self.get_event()
|
||||
tag = start_event.tag
|
||||
if tag is None or tag == u'!':
|
||||
tag = self.resolve(SequenceNode, None, start_event.implicit)
|
||||
node = SequenceNode(tag, [],
|
||||
start_event.start_mark, None,
|
||||
flow_style=start_event.flow_style)
|
||||
if anchor is not None:
|
||||
self.anchors[anchor] = node
|
||||
index = 0
|
||||
while not self.check_event(SequenceEndEvent):
|
||||
node.value.append(self.compose_node(node, index))
|
||||
index += 1
|
||||
end_event = self.get_event()
|
||||
node.end_mark = end_event.end_mark
|
||||
return node
|
||||
|
||||
def compose_mapping_node(self, anchor):
|
||||
start_event = self.get_event()
|
||||
tag = start_event.tag
|
||||
if tag is None or tag == u'!':
|
||||
tag = self.resolve(MappingNode, None, start_event.implicit)
|
||||
node = MappingNode(tag, [],
|
||||
start_event.start_mark, None,
|
||||
flow_style=start_event.flow_style)
|
||||
if anchor is not None:
|
||||
self.anchors[anchor] = node
|
||||
while not self.check_event(MappingEndEvent):
|
||||
#key_event = self.peek_event()
|
||||
item_key = self.compose_node(node, None)
|
||||
#if item_key in node.value:
|
||||
# raise ComposerError("while composing a mapping", start_event.start_mark,
|
||||
# "found duplicate key", key_event.start_mark)
|
||||
item_value = self.compose_node(node, item_key)
|
||||
#node.value[item_key] = item_value
|
||||
node.value.append((item_key, item_value))
|
||||
end_event = self.get_event()
|
||||
node.end_mark = end_event.end_mark
|
||||
return node
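To see the Composer's anchor handling in isolation, yaml.compose() returns the node tree before construction; a short illustrative sketch (the keys are invented):

import yaml

node = yaml.compose("defaults: &d {port: 6767}\nserver: *d\n")
print(type(node).__name__)                    # MappingNode
# The alias *d resolves to the very same node object the anchor produced.
print(node.value[0][1] is node.value[1][1])   # True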
libs/yaml/constructor.py (new file, 709 lines)
@@ -0,0 +1,709 @@
|
|||
|
||||
__all__ = [
|
||||
'BaseConstructor',
|
||||
'SafeConstructor',
|
||||
'FullConstructor',
|
||||
'UnsafeConstructor',
|
||||
'Constructor',
|
||||
'ConstructorError'
|
||||
]
|
||||
|
||||
from error import *
|
||||
from nodes import *
|
||||
|
||||
import datetime
|
||||
|
||||
import binascii, re, sys, types
|
||||
|
||||
class ConstructorError(MarkedYAMLError):
|
||||
pass
|
||||
|
||||
class BaseConstructor(object):
|
||||
|
||||
yaml_constructors = {}
|
||||
yaml_multi_constructors = {}
|
||||
|
||||
def __init__(self):
|
||||
self.constructed_objects = {}
|
||||
self.recursive_objects = {}
|
||||
self.state_generators = []
|
||||
self.deep_construct = False
|
||||
|
||||
def check_data(self):
|
||||
# If there are more documents available?
|
||||
return self.check_node()
|
||||
|
||||
def get_data(self):
|
||||
# Construct and return the next document.
|
||||
if self.check_node():
|
||||
return self.construct_document(self.get_node())
|
||||
|
||||
def get_single_data(self):
|
||||
# Ensure that the stream contains a single document and construct it.
|
||||
node = self.get_single_node()
|
||||
if node is not None:
|
||||
return self.construct_document(node)
|
||||
return None
|
||||
|
||||
def construct_document(self, node):
|
||||
data = self.construct_object(node)
|
||||
while self.state_generators:
|
||||
state_generators = self.state_generators
|
||||
self.state_generators = []
|
||||
for generator in state_generators:
|
||||
for dummy in generator:
|
||||
pass
|
||||
self.constructed_objects = {}
|
||||
self.recursive_objects = {}
|
||||
self.deep_construct = False
|
||||
return data
|
||||
|
||||
def construct_object(self, node, deep=False):
|
||||
if node in self.constructed_objects:
|
||||
return self.constructed_objects[node]
|
||||
if deep:
|
||||
old_deep = self.deep_construct
|
||||
self.deep_construct = True
|
||||
if node in self.recursive_objects:
|
||||
raise ConstructorError(None, None,
|
||||
"found unconstructable recursive node", node.start_mark)
|
||||
self.recursive_objects[node] = None
|
||||
constructor = None
|
||||
tag_suffix = None
|
||||
if node.tag in self.yaml_constructors:
|
||||
constructor = self.yaml_constructors[node.tag]
|
||||
else:
|
||||
for tag_prefix in self.yaml_multi_constructors:
|
||||
if node.tag.startswith(tag_prefix):
|
||||
tag_suffix = node.tag[len(tag_prefix):]
|
||||
constructor = self.yaml_multi_constructors[tag_prefix]
|
||||
break
|
||||
else:
|
||||
if None in self.yaml_multi_constructors:
|
||||
tag_suffix = node.tag
|
||||
constructor = self.yaml_multi_constructors[None]
|
||||
elif None in self.yaml_constructors:
|
||||
constructor = self.yaml_constructors[None]
|
||||
elif isinstance(node, ScalarNode):
|
||||
constructor = self.__class__.construct_scalar
|
||||
elif isinstance(node, SequenceNode):
|
||||
constructor = self.__class__.construct_sequence
|
||||
elif isinstance(node, MappingNode):
|
||||
constructor = self.__class__.construct_mapping
|
||||
if tag_suffix is None:
|
||||
data = constructor(self, node)
|
||||
else:
|
||||
data = constructor(self, tag_suffix, node)
|
||||
if isinstance(data, types.GeneratorType):
|
||||
generator = data
|
||||
data = generator.next()
|
||||
if self.deep_construct:
|
||||
for dummy in generator:
|
||||
pass
|
||||
else:
|
||||
self.state_generators.append(generator)
|
||||
self.constructed_objects[node] = data
|
||||
del self.recursive_objects[node]
|
||||
if deep:
|
||||
self.deep_construct = old_deep
|
||||
return data
|
||||
|
||||
def construct_scalar(self, node):
|
||||
if not isinstance(node, ScalarNode):
|
||||
raise ConstructorError(None, None,
|
||||
"expected a scalar node, but found %s" % node.id,
|
||||
node.start_mark)
|
||||
return node.value
|
||||
|
||||
def construct_sequence(self, node, deep=False):
|
||||
if not isinstance(node, SequenceNode):
|
||||
raise ConstructorError(None, None,
|
||||
"expected a sequence node, but found %s" % node.id,
|
||||
node.start_mark)
|
||||
return [self.construct_object(child, deep=deep)
|
||||
for child in node.value]
|
||||
|
||||
def construct_mapping(self, node, deep=False):
|
||||
if not isinstance(node, MappingNode):
|
||||
raise ConstructorError(None, None,
|
||||
"expected a mapping node, but found %s" % node.id,
|
||||
node.start_mark)
|
||||
mapping = {}
|
||||
for key_node, value_node in node.value:
|
||||
key = self.construct_object(key_node, deep=deep)
|
||||
try:
|
||||
hash(key)
|
||||
except TypeError, exc:
|
||||
raise ConstructorError("while constructing a mapping", node.start_mark,
|
||||
"found unacceptable key (%s)" % exc, key_node.start_mark)
|
||||
value = self.construct_object(value_node, deep=deep)
|
||||
mapping[key] = value
|
||||
return mapping
|
||||
|
||||
def construct_pairs(self, node, deep=False):
|
||||
if not isinstance(node, MappingNode):
|
||||
raise ConstructorError(None, None,
|
||||
"expected a mapping node, but found %s" % node.id,
|
||||
node.start_mark)
|
||||
pairs = []
|
||||
for key_node, value_node in node.value:
|
||||
key = self.construct_object(key_node, deep=deep)
|
||||
value = self.construct_object(value_node, deep=deep)
|
||||
pairs.append((key, value))
|
||||
return pairs
|
||||
|
||||
def add_constructor(cls, tag, constructor):
|
||||
if not 'yaml_constructors' in cls.__dict__:
|
||||
cls.yaml_constructors = cls.yaml_constructors.copy()
|
||||
cls.yaml_constructors[tag] = constructor
|
||||
add_constructor = classmethod(add_constructor)
|
||||
|
||||
def add_multi_constructor(cls, tag_prefix, multi_constructor):
|
||||
if not 'yaml_multi_constructors' in cls.__dict__:
|
||||
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
|
||||
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
|
||||
add_multi_constructor = classmethod(add_multi_constructor)
|
||||
|
||||
class SafeConstructor(BaseConstructor):
|
||||
|
||||
def construct_scalar(self, node):
|
||||
if isinstance(node, MappingNode):
|
||||
for key_node, value_node in node.value:
|
||||
if key_node.tag == u'tag:yaml.org,2002:value':
|
||||
return self.construct_scalar(value_node)
|
||||
return BaseConstructor.construct_scalar(self, node)
|
||||
|
||||
def flatten_mapping(self, node):
|
||||
merge = []
|
||||
index = 0
|
||||
while index < len(node.value):
|
||||
key_node, value_node = node.value[index]
|
||||
if key_node.tag == u'tag:yaml.org,2002:merge':
|
||||
del node.value[index]
|
||||
if isinstance(value_node, MappingNode):
|
||||
self.flatten_mapping(value_node)
|
||||
merge.extend(value_node.value)
|
||||
elif isinstance(value_node, SequenceNode):
|
||||
submerge = []
|
||||
for subnode in value_node.value:
|
||||
if not isinstance(subnode, MappingNode):
|
||||
raise ConstructorError("while constructing a mapping",
|
||||
node.start_mark,
|
||||
"expected a mapping for merging, but found %s"
|
||||
% subnode.id, subnode.start_mark)
|
||||
self.flatten_mapping(subnode)
|
||||
submerge.append(subnode.value)
|
||||
submerge.reverse()
|
||||
for value in submerge:
|
||||
merge.extend(value)
|
||||
else:
|
||||
raise ConstructorError("while constructing a mapping", node.start_mark,
|
||||
"expected a mapping or list of mappings for merging, but found %s"
|
||||
% value_node.id, value_node.start_mark)
|
||||
elif key_node.tag == u'tag:yaml.org,2002:value':
|
||||
key_node.tag = u'tag:yaml.org,2002:str'
|
||||
index += 1
|
||||
else:
|
||||
index += 1
|
||||
if merge:
|
||||
node.value = merge + node.value
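flatten_mapping is what implements YAML merge keys; a short illustrative document (keys made up) shows explicit entries overriding merged ones:

import yaml

doc = """
defaults: &defaults
  adaptive_searching: false
  use_embedded_subs: true
movies:
  <<: *defaults
  adaptive_searching: true
"""
print(yaml.safe_load(doc)['movies'])
# {'adaptive_searching': True, 'use_embedded_subs': True}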
|
||||
|
||||
def construct_mapping(self, node, deep=False):
|
||||
if isinstance(node, MappingNode):
|
||||
self.flatten_mapping(node)
|
||||
return BaseConstructor.construct_mapping(self, node, deep=deep)
|
||||
|
||||
def construct_yaml_null(self, node):
|
||||
self.construct_scalar(node)
|
||||
return None
|
||||
|
||||
bool_values = {
|
||||
u'yes': True,
|
||||
u'no': False,
|
||||
u'true': True,
|
||||
u'false': False,
|
||||
u'on': True,
|
||||
u'off': False,
|
||||
}
|
||||
|
||||
def construct_yaml_bool(self, node):
|
||||
value = self.construct_scalar(node)
|
||||
return self.bool_values[value.lower()]
|
||||
|
||||
def construct_yaml_int(self, node):
|
||||
value = str(self.construct_scalar(node))
|
||||
value = value.replace('_', '')
|
||||
sign = +1
|
||||
if value[0] == '-':
|
||||
sign = -1
|
||||
if value[0] in '+-':
|
||||
value = value[1:]
|
||||
if value == '0':
|
||||
return 0
|
||||
elif value.startswith('0b'):
|
||||
return sign*int(value[2:], 2)
|
||||
elif value.startswith('0x'):
|
||||
return sign*int(value[2:], 16)
|
||||
elif value[0] == '0':
|
||||
return sign*int(value, 8)
|
||||
elif ':' in value:
|
||||
digits = [int(part) for part in value.split(':')]
|
||||
digits.reverse()
|
||||
base = 1
|
||||
value = 0
|
||||
for digit in digits:
|
||||
value += digit*base
|
||||
base *= 60
|
||||
return sign*value
|
||||
else:
|
||||
return sign*int(value)
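The branches above cover the YAML 1.1 integer notations; a few illustrative inputs:

import yaml

print(yaml.safe_load("0x1A"))    # 26    (hexadecimal)
print(yaml.safe_load("0b1010"))  # 10    (binary)
print(yaml.safe_load("017"))     # 15    (leading zero means octal)
print(yaml.safe_load("1_000"))   # 1000  (underscores are stripped)
print(yaml.safe_load("1:30"))    # 90    (base-60 "sexagesimal" notation)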
|
||||
|
||||
inf_value = 1e300
|
||||
while inf_value != inf_value*inf_value:
|
||||
inf_value *= inf_value
|
||||
nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
|
||||
|
||||
def construct_yaml_float(self, node):
|
||||
value = str(self.construct_scalar(node))
|
||||
value = value.replace('_', '').lower()
|
||||
sign = +1
|
||||
if value[0] == '-':
|
||||
sign = -1
|
||||
if value[0] in '+-':
|
||||
value = value[1:]
|
||||
if value == '.inf':
|
||||
return sign*self.inf_value
|
||||
elif value == '.nan':
|
||||
return self.nan_value
|
||||
elif ':' in value:
|
||||
digits = [float(part) for part in value.split(':')]
|
||||
digits.reverse()
|
||||
base = 1
|
||||
value = 0.0
|
||||
for digit in digits:
|
||||
value += digit*base
|
||||
base *= 60
|
||||
return sign*value
|
||||
else:
|
||||
return sign*float(value)
|
||||
|
||||
def construct_yaml_binary(self, node):
|
||||
value = self.construct_scalar(node)
|
||||
try:
|
||||
return str(value).decode('base64')
|
||||
except (binascii.Error, UnicodeEncodeError), exc:
|
||||
raise ConstructorError(None, None,
|
||||
"failed to decode base64 data: %s" % exc, node.start_mark)
|
||||
|
||||
timestamp_regexp = re.compile(
|
||||
ur'''^(?P<year>[0-9][0-9][0-9][0-9])
|
||||
-(?P<month>[0-9][0-9]?)
|
||||
-(?P<day>[0-9][0-9]?)
|
||||
(?:(?:[Tt]|[ \t]+)
|
||||
(?P<hour>[0-9][0-9]?)
|
||||
:(?P<minute>[0-9][0-9])
|
||||
:(?P<second>[0-9][0-9])
|
||||
(?:\.(?P<fraction>[0-9]*))?
|
||||
(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
|
||||
(?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
|
||||
|
||||
def construct_yaml_timestamp(self, node):
|
||||
value = self.construct_scalar(node)
|
||||
match = self.timestamp_regexp.match(node.value)
|
||||
values = match.groupdict()
|
||||
year = int(values['year'])
|
||||
month = int(values['month'])
|
||||
day = int(values['day'])
|
||||
if not values['hour']:
|
||||
return datetime.date(year, month, day)
|
||||
hour = int(values['hour'])
|
||||
minute = int(values['minute'])
|
||||
second = int(values['second'])
|
||||
fraction = 0
|
||||
if values['fraction']:
|
||||
fraction = values['fraction'][:6]
|
||||
while len(fraction) < 6:
|
||||
fraction += '0'
|
||||
fraction = int(fraction)
|
||||
delta = None
|
||||
if values['tz_sign']:
|
||||
tz_hour = int(values['tz_hour'])
|
||||
tz_minute = int(values['tz_minute'] or 0)
|
||||
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
|
||||
if values['tz_sign'] == '-':
|
||||
delta = -delta
|
||||
data = datetime.datetime(year, month, day, hour, minute, second, fraction)
|
||||
if delta:
|
||||
data -= delta
|
||||
return data
|
||||
|
||||
def construct_yaml_omap(self, node):
|
||||
# Note: we do not check for duplicate keys, because it's too
|
||||
# CPU-expensive.
|
||||
omap = []
|
||||
yield omap
|
||||
if not isinstance(node, SequenceNode):
|
||||
raise ConstructorError("while constructing an ordered map", node.start_mark,
|
||||
"expected a sequence, but found %s" % node.id, node.start_mark)
|
||||
for subnode in node.value:
|
||||
if not isinstance(subnode, MappingNode):
|
||||
raise ConstructorError("while constructing an ordered map", node.start_mark,
|
||||
"expected a mapping of length 1, but found %s" % subnode.id,
|
||||
subnode.start_mark)
|
||||
if len(subnode.value) != 1:
|
||||
raise ConstructorError("while constructing an ordered map", node.start_mark,
|
||||
"expected a single mapping item, but found %d items" % len(subnode.value),
|
||||
subnode.start_mark)
|
||||
key_node, value_node = subnode.value[0]
|
||||
key = self.construct_object(key_node)
|
||||
value = self.construct_object(value_node)
|
||||
omap.append((key, value))
|
||||
|
||||
def construct_yaml_pairs(self, node):
|
||||
# Note: the same code as `construct_yaml_omap`.
|
||||
pairs = []
|
||||
yield pairs
|
||||
if not isinstance(node, SequenceNode):
|
||||
raise ConstructorError("while constructing pairs", node.start_mark,
|
||||
"expected a sequence, but found %s" % node.id, node.start_mark)
|
||||
for subnode in node.value:
|
||||
if not isinstance(subnode, MappingNode):
|
||||
raise ConstructorError("while constructing pairs", node.start_mark,
|
||||
"expected a mapping of length 1, but found %s" % subnode.id,
|
||||
subnode.start_mark)
|
||||
if len(subnode.value) != 1:
|
||||
raise ConstructorError("while constructing pairs", node.start_mark,
|
||||
"expected a single mapping item, but found %d items" % len(subnode.value),
|
||||
subnode.start_mark)
|
||||
key_node, value_node = subnode.value[0]
|
||||
key = self.construct_object(key_node)
|
||||
value = self.construct_object(value_node)
|
||||
pairs.append((key, value))
|
||||
|
||||
def construct_yaml_set(self, node):
|
||||
data = set()
|
||||
yield data
|
||||
value = self.construct_mapping(node)
|
||||
data.update(value)
|
||||
|
||||
def construct_yaml_str(self, node):
|
||||
value = self.construct_scalar(node)
|
||||
try:
|
||||
return value.encode('ascii')
|
||||
except UnicodeEncodeError:
|
||||
return value
|
||||
|
||||
def construct_yaml_seq(self, node):
|
||||
data = []
|
||||
yield data
|
||||
data.extend(self.construct_sequence(node))
|
||||
|
||||
def construct_yaml_map(self, node):
|
||||
data = {}
|
||||
yield data
|
||||
value = self.construct_mapping(node)
|
||||
data.update(value)
|
||||
|
||||
def construct_yaml_object(self, node, cls):
|
||||
data = cls.__new__(cls)
|
||||
yield data
|
||||
if hasattr(data, '__setstate__'):
|
||||
state = self.construct_mapping(node, deep=True)
|
||||
data.__setstate__(state)
|
||||
else:
|
||||
state = self.construct_mapping(node)
|
||||
data.__dict__.update(state)
|
||||
|
||||
def construct_undefined(self, node):
|
||||
raise ConstructorError(None, None,
|
||||
"could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
|
||||
node.start_mark)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:null',
|
||||
SafeConstructor.construct_yaml_null)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:bool',
|
||||
SafeConstructor.construct_yaml_bool)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:int',
|
||||
SafeConstructor.construct_yaml_int)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:float',
|
||||
SafeConstructor.construct_yaml_float)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:binary',
|
||||
SafeConstructor.construct_yaml_binary)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:timestamp',
|
||||
SafeConstructor.construct_yaml_timestamp)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:omap',
|
||||
SafeConstructor.construct_yaml_omap)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:pairs',
|
||||
SafeConstructor.construct_yaml_pairs)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:set',
|
||||
SafeConstructor.construct_yaml_set)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:str',
|
||||
SafeConstructor.construct_yaml_str)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:seq',
|
||||
SafeConstructor.construct_yaml_seq)
|
||||
|
||||
SafeConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:map',
|
||||
SafeConstructor.construct_yaml_map)
|
||||
|
||||
SafeConstructor.add_constructor(None,
|
||||
SafeConstructor.construct_undefined)
|
||||
|
||||
class FullConstructor(SafeConstructor):
|
||||
|
||||
def construct_python_str(self, node):
|
||||
return self.construct_scalar(node).encode('utf-8')
|
||||
|
||||
def construct_python_unicode(self, node):
|
||||
return self.construct_scalar(node)
|
||||
|
||||
def construct_python_long(self, node):
|
||||
return long(self.construct_yaml_int(node))
|
||||
|
||||
def construct_python_complex(self, node):
|
||||
return complex(self.construct_scalar(node))
|
||||
|
||||
def construct_python_tuple(self, node):
|
||||
return tuple(self.construct_sequence(node))
|
||||
|
||||
def find_python_module(self, name, mark, unsafe=False):
|
||||
if not name:
|
||||
raise ConstructorError("while constructing a Python module", mark,
|
||||
"expected non-empty name appended to the tag", mark)
|
||||
if unsafe:
|
||||
try:
|
||||
__import__(name)
|
||||
except ImportError, exc:
|
||||
raise ConstructorError("while constructing a Python module", mark,
|
||||
"cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
|
||||
if not name in sys.modules:
|
||||
raise ConstructorError("while constructing a Python module", mark,
|
||||
"module %r is not imported" % name.encode('utf-8'), mark)
|
||||
return sys.modules[name]
|
||||
|
||||
def find_python_name(self, name, mark, unsafe=False):
|
||||
if not name:
|
||||
raise ConstructorError("while constructing a Python object", mark,
|
||||
"expected non-empty name appended to the tag", mark)
|
||||
if u'.' in name:
|
||||
module_name, object_name = name.rsplit('.', 1)
|
||||
else:
|
||||
module_name = '__builtin__'
|
||||
object_name = name
|
||||
if unsafe:
|
||||
try:
|
||||
__import__(module_name)
|
||||
except ImportError, exc:
|
||||
raise ConstructorError("while constructing a Python object", mark,
|
||||
"cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
|
||||
if not module_name in sys.modules:
|
||||
raise ConstructorError("while constructing a Python object", mark,
|
||||
"module %r is not imported" % module_name.encode('utf-8'), mark)
|
||||
module = sys.modules[module_name]
|
||||
if not hasattr(module, object_name):
|
||||
raise ConstructorError("while constructing a Python object", mark,
|
||||
"cannot find %r in the module %r" % (object_name.encode('utf-8'),
|
||||
module.__name__), mark)
|
||||
return getattr(module, object_name)
|
||||
|
||||
def construct_python_name(self, suffix, node):
|
||||
value = self.construct_scalar(node)
|
||||
if value:
|
||||
raise ConstructorError("while constructing a Python name", node.start_mark,
|
||||
"expected the empty value, but found %r" % value.encode('utf-8'),
|
||||
node.start_mark)
|
||||
return self.find_python_name(suffix, node.start_mark)
|
||||
|
||||
def construct_python_module(self, suffix, node):
|
||||
value = self.construct_scalar(node)
|
||||
if value:
|
||||
raise ConstructorError("while constructing a Python module", node.start_mark,
|
||||
"expected the empty value, but found %r" % value.encode('utf-8'),
|
||||
node.start_mark)
|
||||
return self.find_python_module(suffix, node.start_mark)
|
||||
|
||||
class classobj: pass
|
||||
|
||||
def make_python_instance(self, suffix, node,
|
||||
args=None, kwds=None, newobj=False, unsafe=False):
|
||||
if not args:
|
||||
args = []
|
||||
if not kwds:
|
||||
kwds = {}
|
||||
cls = self.find_python_name(suffix, node.start_mark)
|
||||
if not (unsafe or isinstance(cls, type) or isinstance(cls, type(self.classobj))):
|
||||
raise ConstructorError("while constructing a Python instance", node.start_mark,
|
||||
"expected a class, but found %r" % type(cls),
|
||||
node.start_mark)
|
||||
if newobj and isinstance(cls, type(self.classobj)) \
|
||||
and not args and not kwds:
|
||||
instance = self.classobj()
|
||||
instance.__class__ = cls
|
||||
return instance
|
||||
elif newobj and isinstance(cls, type):
|
||||
return cls.__new__(cls, *args, **kwds)
|
||||
else:
|
||||
return cls(*args, **kwds)
|
||||
|
||||
def set_python_instance_state(self, instance, state):
|
||||
if hasattr(instance, '__setstate__'):
|
||||
instance.__setstate__(state)
|
||||
else:
|
||||
slotstate = {}
|
||||
if isinstance(state, tuple) and len(state) == 2:
|
||||
state, slotstate = state
|
||||
if hasattr(instance, '__dict__'):
|
||||
instance.__dict__.update(state)
|
||||
elif state:
|
||||
slotstate.update(state)
|
||||
for key, value in slotstate.items():
|
||||
setattr(object, key, value)
|
||||
|
||||
def construct_python_object(self, suffix, node):
|
||||
# Format:
|
||||
# !!python/object:module.name { ... state ... }
|
||||
instance = self.make_python_instance(suffix, node, newobj=True)
|
||||
yield instance
|
||||
deep = hasattr(instance, '__setstate__')
|
||||
state = self.construct_mapping(node, deep=deep)
|
||||
self.set_python_instance_state(instance, state)
|
||||
|
||||
def construct_python_object_apply(self, suffix, node, newobj=False):
|
||||
# Format:
|
||||
# !!python/object/apply # (or !!python/object/new)
|
||||
# args: [ ... arguments ... ]
|
||||
# kwds: { ... keywords ... }
|
||||
# state: ... state ...
|
||||
# listitems: [ ... listitems ... ]
|
||||
# dictitems: { ... dictitems ... }
|
||||
# or short format:
|
||||
# !!python/object/apply [ ... arguments ... ]
|
||||
# The difference between !!python/object/apply and !!python/object/new
|
||||
# is how an object is created, check make_python_instance for details.
|
||||
if isinstance(node, SequenceNode):
|
||||
args = self.construct_sequence(node, deep=True)
|
||||
kwds = {}
|
||||
state = {}
|
||||
listitems = []
|
||||
dictitems = {}
|
||||
else:
|
||||
value = self.construct_mapping(node, deep=True)
|
||||
args = value.get('args', [])
|
||||
kwds = value.get('kwds', {})
|
||||
state = value.get('state', {})
|
||||
listitems = value.get('listitems', [])
|
||||
dictitems = value.get('dictitems', {})
|
||||
instance = self.make_python_instance(suffix, node, args, kwds, newobj)
|
||||
if state:
|
||||
self.set_python_instance_state(instance, state)
|
||||
if listitems:
|
||||
instance.extend(listitems)
|
||||
if dictitems:
|
||||
for key in dictitems:
|
||||
instance[key] = dictitems[key]
|
||||
return instance
|
||||
|
||||
def construct_python_object_new(self, suffix, node):
|
||||
return self.construct_python_object_apply(suffix, node, newobj=True)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/none',
|
||||
FullConstructor.construct_yaml_null)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/bool',
|
||||
FullConstructor.construct_yaml_bool)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/str',
|
||||
FullConstructor.construct_python_str)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/unicode',
|
||||
FullConstructor.construct_python_unicode)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/int',
|
||||
FullConstructor.construct_yaml_int)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/long',
|
||||
FullConstructor.construct_python_long)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/float',
|
||||
FullConstructor.construct_yaml_float)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/complex',
|
||||
FullConstructor.construct_python_complex)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/list',
|
||||
FullConstructor.construct_yaml_seq)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/tuple',
|
||||
FullConstructor.construct_python_tuple)
|
||||
|
||||
FullConstructor.add_constructor(
|
||||
u'tag:yaml.org,2002:python/dict',
|
||||
FullConstructor.construct_yaml_map)
|
||||
|
||||
FullConstructor.add_multi_constructor(
|
||||
u'tag:yaml.org,2002:python/name:',
|
||||
FullConstructor.construct_python_name)
|
||||
|
||||
FullConstructor.add_multi_constructor(
|
||||
u'tag:yaml.org,2002:python/module:',
|
||||
FullConstructor.construct_python_module)
|
||||
|
||||
FullConstructor.add_multi_constructor(
|
||||
u'tag:yaml.org,2002:python/object:',
|
||||
FullConstructor.construct_python_object)
|
||||
|
||||
FullConstructor.add_multi_constructor(
|
||||
u'tag:yaml.org,2002:python/object/apply:',
|
||||
FullConstructor.construct_python_object_apply)
|
||||
|
||||
FullConstructor.add_multi_constructor(
|
||||
u'tag:yaml.org,2002:python/object/new:',
|
||||
FullConstructor.construct_python_object_new)
|
||||
|
||||
class UnsafeConstructor(FullConstructor):
|
||||
|
||||
def find_python_module(self, name, mark):
|
||||
return super(UnsafeConstructor, self).find_python_module(name, mark, unsafe=True)
|
||||
|
||||
def find_python_name(self, name, mark):
|
||||
return super(UnsafeConstructor, self).find_python_name(name, mark, unsafe=True)
|
||||
|
||||
def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False):
|
||||
return super(UnsafeConstructor, self).make_python_instance(
|
||||
suffix, node, args, kwds, newobj, unsafe=True)
|
||||
|
||||
# Constructor is same as UnsafeConstructor. Need to leave this in place in case
|
||||
# people have extended it directly.
|
||||
class Constructor(UnsafeConstructor):
|
||||
pass
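The practical difference between the Full and Unsafe constructors, sketched on a deliberately hostile document (illustration only; never run unsafe_load on untrusted input):

import yaml

doc = "!!python/object/apply:os.getcwd []"

try:
    yaml.full_load(doc)        # FullConstructor should reject calling arbitrary callables
except yaml.YAMLError as exc:
    print("blocked by FullLoader: %s" % exc)

print(yaml.unsafe_load(doc))   # UnsafeConstructor calls os.getcwd() and returns its result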
libs/yaml/cyaml.py (new file, 101 lines)
@@ -0,0 +1,101 @@
|
|||
|
||||
__all__ = [
|
||||
'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader',
|
||||
'CBaseDumper', 'CSafeDumper', 'CDumper'
|
||||
]
|
||||
|
||||
from _yaml import CParser, CEmitter
|
||||
|
||||
from constructor import *
|
||||
|
||||
from serializer import *
|
||||
from representer import *
|
||||
|
||||
from resolver import *
|
||||
|
||||
class CBaseLoader(CParser, BaseConstructor, BaseResolver):
|
||||
|
||||
def __init__(self, stream):
|
||||
CParser.__init__(self, stream)
|
||||
BaseConstructor.__init__(self)
|
||||
BaseResolver.__init__(self)
|
||||
|
||||
class CSafeLoader(CParser, SafeConstructor, Resolver):
|
||||
|
||||
def __init__(self, stream):
|
||||
CParser.__init__(self, stream)
|
||||
SafeConstructor.__init__(self)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class CFullLoader(CParser, FullConstructor, Resolver):
|
||||
|
||||
def __init__(self, stream):
|
||||
CParser.__init__(self, stream)
|
||||
FullConstructor.__init__(self)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class CUnsafeLoader(CParser, UnsafeConstructor, Resolver):
|
||||
|
||||
def __init__(self, stream):
|
||||
CParser.__init__(self, stream)
|
||||
UnsafeConstructor.__init__(self)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class CLoader(CParser, Constructor, Resolver):
|
||||
|
||||
def __init__(self, stream):
|
||||
CParser.__init__(self, stream)
|
||||
Constructor.__init__(self)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
|
||||
|
||||
def __init__(self, stream,
|
||||
default_style=None, default_flow_style=False,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding=None, explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None, sort_keys=True):
|
||||
CEmitter.__init__(self, stream, canonical=canonical,
|
||||
indent=indent, width=width, encoding=encoding,
|
||||
allow_unicode=allow_unicode, line_break=line_break,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end,
|
||||
version=version, tags=tags)
|
||||
Representer.__init__(self, default_style=default_style,
|
||||
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
|
||||
|
||||
def __init__(self, stream,
|
||||
default_style=None, default_flow_style=False,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding=None, explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None, sort_keys=True):
|
||||
CEmitter.__init__(self, stream, canonical=canonical,
|
||||
indent=indent, width=width, encoding=encoding,
|
||||
allow_unicode=allow_unicode, line_break=line_break,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end,
|
||||
version=version, tags=tags)
|
||||
SafeRepresenter.__init__(self, default_style=default_style,
|
||||
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class CDumper(CEmitter, Serializer, Representer, Resolver):
|
||||
|
||||
def __init__(self, stream,
|
||||
default_style=None, default_flow_style=False,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding=None, explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None, sort_keys=True):
|
||||
CEmitter.__init__(self, stream, canonical=canonical,
|
||||
indent=indent, width=width, encoding=encoding,
|
||||
allow_unicode=allow_unicode, line_break=line_break,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end,
|
||||
version=version, tags=tags)
|
||||
Representer.__init__(self, default_style=default_style,
|
||||
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
||||
Resolver.__init__(self)
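A common pattern with the C classes above is to fall back to the pure-Python ones when libyaml is not compiled in; a minimal sketch:

import yaml

if yaml.__with_libyaml__:
    LoaderCls, DumperCls = yaml.CSafeLoader, yaml.CSafeDumper
else:
    LoaderCls, DumperCls = yaml.SafeLoader, yaml.SafeDumper

data = yaml.load("port: 6767", Loader=LoaderCls)
text = yaml.dump(data, Dumper=DumperCls)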
libs/yaml/dumper.py (new file, 62 lines)
@@ -0,0 +1,62 @@
|
|||
|
||||
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
|
||||
|
||||
from emitter import *
|
||||
from serializer import *
|
||||
from representer import *
|
||||
from resolver import *
|
||||
|
||||
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
|
||||
|
||||
def __init__(self, stream,
|
||||
default_style=None, default_flow_style=False,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding=None, explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None, sort_keys=True):
|
||||
Emitter.__init__(self, stream, canonical=canonical,
|
||||
indent=indent, width=width,
|
||||
allow_unicode=allow_unicode, line_break=line_break)
|
||||
Serializer.__init__(self, encoding=encoding,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end,
|
||||
version=version, tags=tags)
|
||||
Representer.__init__(self, default_style=default_style,
|
||||
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
|
||||
|
||||
def __init__(self, stream,
|
||||
default_style=None, default_flow_style=False,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding=None, explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None, sort_keys=True):
|
||||
Emitter.__init__(self, stream, canonical=canonical,
|
||||
indent=indent, width=width,
|
||||
allow_unicode=allow_unicode, line_break=line_break)
|
||||
Serializer.__init__(self, encoding=encoding,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end,
|
||||
version=version, tags=tags)
|
||||
SafeRepresenter.__init__(self, default_style=default_style,
|
||||
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
||||
Resolver.__init__(self)
|
||||
|
||||
class Dumper(Emitter, Serializer, Representer, Resolver):
|
||||
|
||||
def __init__(self, stream,
|
||||
default_style=None, default_flow_style=False,
|
||||
canonical=None, indent=None, width=None,
|
||||
allow_unicode=None, line_break=None,
|
||||
encoding=None, explicit_start=None, explicit_end=None,
|
||||
version=None, tags=None, sort_keys=True):
|
||||
Emitter.__init__(self, stream, canonical=canonical,
|
||||
indent=indent, width=width,
|
||||
allow_unicode=allow_unicode, line_break=line_break)
|
||||
Serializer.__init__(self, encoding=encoding,
|
||||
explicit_start=explicit_start, explicit_end=explicit_end,
|
||||
version=version, tags=tags)
|
||||
Representer.__init__(self, default_style=default_style,
|
||||
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
||||
Resolver.__init__(self)
|
||||
|
libs/yaml/emitter.py (new file, 1144 lines)
File diff suppressed because it is too large

libs/yaml/error.py (new file, 75 lines)
@@ -0,0 +1,75 @@
|
|||
|
||||
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
|
||||
|
||||
class Mark(object):
|
||||
|
||||
def __init__(self, name, index, line, column, buffer, pointer):
|
||||
self.name = name
|
||||
self.index = index
|
||||
self.line = line
|
||||
self.column = column
|
||||
self.buffer = buffer
|
||||
self.pointer = pointer
|
||||
|
||||
def get_snippet(self, indent=4, max_length=75):
|
||||
if self.buffer is None:
|
||||
return None
|
||||
head = ''
|
||||
start = self.pointer
|
||||
while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029':
|
||||
start -= 1
|
||||
if self.pointer-start > max_length/2-1:
|
||||
head = ' ... '
|
||||
start += 5
|
||||
break
|
||||
tail = ''
|
||||
end = self.pointer
|
||||
while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029':
|
||||
end += 1
|
||||
if end-self.pointer > max_length/2-1:
|
||||
tail = ' ... '
|
||||
end -= 5
|
||||
break
|
||||
snippet = self.buffer[start:end].encode('utf-8')
|
||||
return ' '*indent + head + snippet + tail + '\n' \
|
||||
+ ' '*(indent+self.pointer-start+len(head)) + '^'
|
||||
|
||||
def __str__(self):
|
||||
snippet = self.get_snippet()
|
||||
where = " in \"%s\", line %d, column %d" \
|
||||
% (self.name, self.line+1, self.column+1)
|
||||
if snippet is not None:
|
||||
where += ":\n"+snippet
|
||||
return where
|
||||
|
||||
class YAMLError(Exception):
|
||||
pass
|
||||
|
||||
class MarkedYAMLError(YAMLError):
|
||||
|
||||
def __init__(self, context=None, context_mark=None,
|
||||
problem=None, problem_mark=None, note=None):
|
||||
self.context = context
|
||||
self.context_mark = context_mark
|
||||
self.problem = problem
|
||||
self.problem_mark = problem_mark
|
||||
self.note = note
|
||||
|
||||
def __str__(self):
|
||||
lines = []
|
||||
if self.context is not None:
|
||||
lines.append(self.context)
|
||||
if self.context_mark is not None \
|
||||
and (self.problem is None or self.problem_mark is None
|
||||
or self.context_mark.name != self.problem_mark.name
|
||||
or self.context_mark.line != self.problem_mark.line
|
||||
or self.context_mark.column != self.problem_mark.column):
|
||||
lines.append(str(self.context_mark))
|
||||
if self.problem is not None:
|
||||
lines.append(self.problem)
|
||||
if self.problem_mark is not None:
|
||||
lines.append(str(self.problem_mark))
|
||||
if self.note is not None:
|
||||
lines.append(self.note)
|
||||
return '\n'.join(lines)
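Mark and MarkedYAMLError are what make parse failures reportable with line/column positions; a short illustrative error handler:

import yaml

bad = "settings: {port: 6767"   # unterminated flow mapping

try:
    yaml.safe_load(bad)
except yaml.MarkedYAMLError as exc:
    if exc.problem_mark is not None:
        mark = exc.problem_mark
        print("YAML error at line %d, column %d: %s"
              % (mark.line + 1, mark.column + 1, exc.problem))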
libs/yaml/events.py (new file, 86 lines)
@@ -0,0 +1,86 @@
|
|||
|
||||
# Abstract classes.
|
||||
|
||||
class Event(object):
|
||||
def __init__(self, start_mark=None, end_mark=None):
|
||||
self.start_mark = start_mark
|
||||
self.end_mark = end_mark
|
||||
def __repr__(self):
|
||||
attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
|
||||
if hasattr(self, key)]
|
||||
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
|
||||
for key in attributes])
|
||||
return '%s(%s)' % (self.__class__.__name__, arguments)
|
||||
|
||||
class NodeEvent(Event):
|
||||
def __init__(self, anchor, start_mark=None, end_mark=None):
|
||||
self.anchor = anchor
|
||||
self.start_mark = start_mark
|
||||
self.end_mark = end_mark
|
||||
|
||||
class CollectionStartEvent(NodeEvent):
|
||||
def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
|
||||
flow_style=None):
|
||||
self.anchor = anchor
|
||||
self.tag = tag
|
||||
self.implicit = implicit
|
||||
self.start_mark = start_mark
|
||||
self.end_mark = end_mark
|
||||
self.flow_style = flow_style
|
||||
|
||||
class CollectionEndEvent(Event):
|
||||
pass
|
||||
|
||||
# Implementations.
|
||||
|
||||
class StreamStartEvent(Event):
|
||||
def __init__(self, start_mark=None, end_mark=None, encoding=None):
|
||||
self.start_mark = start_mark
|
||||
self.end_mark = end_mark
|
||||
self.encoding = encoding
|
||||
|
||||
class StreamEndEvent(Event):
|
||||
pass
|
||||
|
||||
class DocumentStartEvent(Event):
|
||||
def __init__(self, start_mark=None, end_mark=None,
|
||||
explicit=None, version=None, tags=None):
|
||||
self.start_mark = start_mark
|
||||
self.end_mark = end_mark
|
||||
self.explicit = explicit
|
||||
self.version = version
|
||||
self.tags = tags
|
||||
|
||||
class DocumentEndEvent(Event):
|
||||
def __init__(self, start_mark=None, end_mark=None,
|
||||
explicit=None):
|
||||
self.start_mark = start_mark
|
||||
self.end_mark = end_mark
|
||||
self.explicit = explicit
|
||||
|
||||
class AliasEvent(NodeEvent):
|
||||
pass
|
||||
|
||||
class ScalarEvent(NodeEvent):
|
||||
def __init__(self, anchor, tag, implicit, value,
|
||||
start_mark=None, end_mark=None, style=None):
|
||||
self.anchor = anchor
|
||||
self.tag = tag
|
||||
self.implicit = implicit
|
||||
self.value = value
|
||||
self.start_mark = start_mark
|
||||
self.end_mark = end_mark
|
||||
self.style = style
|
||||
|
||||
class SequenceStartEvent(CollectionStartEvent):
|
||||
pass
|
||||
|
||||
class SequenceEndEvent(CollectionEndEvent):
|
||||
pass
|
||||
|
||||
class MappingStartEvent(CollectionStartEvent):
|
||||
pass
|
||||
|
||||
class MappingEndEvent(CollectionEndEvent):
|
||||
pass
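The event classes above are what yaml.parse() yields; iterating them on a tiny document (illustrative) makes the stream structure visible:

import yaml

for event in yaml.parse("languages: [en, fr]"):
    print(type(event).__name__)
# StreamStartEvent, DocumentStartEvent, MappingStartEvent, ScalarEvent,
# SequenceStartEvent, ScalarEvent, ScalarEvent, SequenceEndEvent,
# MappingEndEvent, DocumentEndEvent, StreamEndEvent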
libs/yaml/loader.py (new file, 63 lines)
@@ -0,0 +1,63 @@
__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader']

from reader import *
from scanner import *
from parser import *
from composer import *
from constructor import *
from resolver import *

class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        BaseConstructor.__init__(self)
        BaseResolver.__init__(self)

class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        FullConstructor.__init__(self)
        Resolver.__init__(self)

class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        SafeConstructor.__init__(self)
        Resolver.__init__(self)

class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        Constructor.__init__(self)
        Resolver.__init__(self)

# UnsafeLoader is the same as Loader (which is and was always unsafe on
# untrusted input). Use of either Loader or UnsafeLoader should be rare, since
# FullLoad should be able to load almost all YAML safely. Loader is left intact
# to ensure backwards compatability.
class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver):

    def __init__(self, stream):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        Constructor.__init__(self)
        Resolver.__init__(self)
libs/yaml/nodes.py (new file, 49 lines)
@@ -0,0 +1,49 @@
|
|||
|
||||
class Node(object):
    def __init__(self, tag, value, start_mark, end_mark):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        value = self.value
        #if isinstance(value, list):
        #    if len(value) == 0:
        #        value = '<empty>'
        #    elif len(value) == 1:
        #        value = '<1 item>'
        #    else:
        #        value = '<%d items>' % len(value)
        #else:
        #    if len(value) > 75:
        #        value = repr(value[:70]+u' ... ')
        #    else:
        #        value = repr(value)
        value = repr(value)
        return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)

class ScalarNode(Node):
    id = 'scalar'
    def __init__(self, tag, value,
            start_mark=None, end_mark=None, style=None):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style

class CollectionNode(Node):
    def __init__(self, tag, value,
            start_mark=None, end_mark=None, flow_style=None):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.flow_style = flow_style

class SequenceNode(CollectionNode):
    id = 'sequence'

class MappingNode(CollectionNode):
    id = 'mapping'
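Editor's note: the composer turns a parsed document into a tree of these node objects. A hand-written sketch of what that looks like, assuming the upstream yaml.compose() helper (not part of this commit):

import yaml

root = yaml.compose("a: [1, 2]\n")   # root is a MappingNode
key, value = root.value[0]           # first (key_node, value_node) pair
print(root.tag)     # tag:yaml.org,2002:map
print(key.value)    # 'a'        (a ScalarNode)
print(value.id)     # 'sequence' (a SequenceNode of two int scalars)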
589  libs/yaml/parser.py  Normal file
@@ -0,0 +1,589 @@
# The following YAML grammar is LL(1) and is parsed by a recursive descent
|
||||
# parser.
|
||||
#
|
||||
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
|
||||
# implicit_document ::= block_node DOCUMENT-END*
|
||||
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
|
||||
# block_node_or_indentless_sequence ::=
|
||||
# ALIAS
|
||||
# | properties (block_content | indentless_block_sequence)?
|
||||
# | block_content
|
||||
# | indentless_block_sequence
|
||||
# block_node ::= ALIAS
|
||||
# | properties block_content?
|
||||
# | block_content
|
||||
# flow_node ::= ALIAS
|
||||
# | properties flow_content?
|
||||
# | flow_content
|
||||
# properties ::= TAG ANCHOR? | ANCHOR TAG?
|
||||
# block_content ::= block_collection | flow_collection | SCALAR
|
||||
# flow_content ::= flow_collection | SCALAR
|
||||
# block_collection ::= block_sequence | block_mapping
|
||||
# flow_collection ::= flow_sequence | flow_mapping
|
||||
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
|
||||
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
|
||||
# block_mapping ::= BLOCK-MAPPING_START
|
||||
# ((KEY block_node_or_indentless_sequence?)?
|
||||
# (VALUE block_node_or_indentless_sequence?)?)*
|
||||
# BLOCK-END
|
||||
# flow_sequence ::= FLOW-SEQUENCE-START
|
||||
# (flow_sequence_entry FLOW-ENTRY)*
|
||||
# flow_sequence_entry?
|
||||
# FLOW-SEQUENCE-END
|
||||
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
||||
# flow_mapping ::= FLOW-MAPPING-START
|
||||
# (flow_mapping_entry FLOW-ENTRY)*
|
||||
# flow_mapping_entry?
|
||||
# FLOW-MAPPING-END
|
||||
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
||||
#
|
||||
# FIRST sets:
|
||||
#
|
||||
# stream: { STREAM-START }
|
||||
# explicit_document: { DIRECTIVE DOCUMENT-START }
|
||||
# implicit_document: FIRST(block_node)
|
||||
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
|
||||
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
|
||||
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
|
||||
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
# block_sequence: { BLOCK-SEQUENCE-START }
|
||||
# block_mapping: { BLOCK-MAPPING-START }
|
||||
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
|
||||
# indentless_sequence: { ENTRY }
|
||||
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
# flow_sequence: { FLOW-SEQUENCE-START }
|
||||
# flow_mapping: { FLOW-MAPPING-START }
|
||||
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
|
||||
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
|
||||
|
||||
__all__ = ['Parser', 'ParserError']
|
||||
|
||||
from error import MarkedYAMLError
|
||||
from tokens import *
|
||||
from events import *
|
||||
from scanner import *
|
||||
|
||||
class ParserError(MarkedYAMLError):
|
||||
pass
|
||||
|
||||
class Parser(object):
|
||||
# Since writing a recursive-descendant parser is a straightforward task, we
|
||||
# do not give many comments here.
|
||||
|
||||
DEFAULT_TAGS = {
|
||||
u'!': u'!',
|
||||
u'!!': u'tag:yaml.org,2002:',
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
self.current_event = None
|
||||
self.yaml_version = None
|
||||
self.tag_handles = {}
|
||||
self.states = []
|
||||
self.marks = []
|
||||
self.state = self.parse_stream_start
|
||||
|
||||
def dispose(self):
|
||||
# Reset the state attributes (to clear self-references)
|
||||
self.states = []
|
||||
self.state = None
|
||||
|
||||
def check_event(self, *choices):
|
||||
# Check the type of the next event.
|
||||
if self.current_event is None:
|
||||
if self.state:
|
||||
self.current_event = self.state()
|
||||
if self.current_event is not None:
|
||||
if not choices:
|
||||
return True
|
||||
for choice in choices:
|
||||
if isinstance(self.current_event, choice):
|
||||
return True
|
||||
return False
|
||||
|
||||
def peek_event(self):
|
||||
# Get the next event.
|
||||
if self.current_event is None:
|
||||
if self.state:
|
||||
self.current_event = self.state()
|
||||
return self.current_event
|
||||
|
||||
def get_event(self):
|
||||
# Get the next event and proceed further.
|
||||
if self.current_event is None:
|
||||
if self.state:
|
||||
self.current_event = self.state()
|
||||
value = self.current_event
|
||||
self.current_event = None
|
||||
return value
|
||||
|
||||
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
|
||||
# implicit_document ::= block_node DOCUMENT-END*
|
||||
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
|
||||
|
||||
def parse_stream_start(self):
|
||||
|
||||
# Parse the stream start.
|
||||
token = self.get_token()
|
||||
event = StreamStartEvent(token.start_mark, token.end_mark,
|
||||
encoding=token.encoding)
|
||||
|
||||
# Prepare the next state.
|
||||
self.state = self.parse_implicit_document_start
|
||||
|
||||
return event
|
||||
|
||||
def parse_implicit_document_start(self):
|
||||
|
||||
# Parse an implicit document.
|
||||
if not self.check_token(DirectiveToken, DocumentStartToken,
|
||||
StreamEndToken):
|
||||
self.tag_handles = self.DEFAULT_TAGS
|
||||
token = self.peek_token()
|
||||
start_mark = end_mark = token.start_mark
|
||||
event = DocumentStartEvent(start_mark, end_mark,
|
||||
explicit=False)
|
||||
|
||||
# Prepare the next state.
|
||||
self.states.append(self.parse_document_end)
|
||||
self.state = self.parse_block_node
|
||||
|
||||
return event
|
||||
|
||||
else:
|
||||
return self.parse_document_start()
|
||||
|
||||
def parse_document_start(self):
|
||||
|
||||
# Parse any extra document end indicators.
|
||||
while self.check_token(DocumentEndToken):
|
||||
self.get_token()
|
||||
|
||||
# Parse an explicit document.
|
||||
if not self.check_token(StreamEndToken):
|
||||
token = self.peek_token()
|
||||
start_mark = token.start_mark
|
||||
version, tags = self.process_directives()
|
||||
if not self.check_token(DocumentStartToken):
|
||||
raise ParserError(None, None,
|
||||
"expected '<document start>', but found %r"
|
||||
% self.peek_token().id,
|
||||
self.peek_token().start_mark)
|
||||
token = self.get_token()
|
||||
end_mark = token.end_mark
|
||||
event = DocumentStartEvent(start_mark, end_mark,
|
||||
explicit=True, version=version, tags=tags)
|
||||
self.states.append(self.parse_document_end)
|
||||
self.state = self.parse_document_content
|
||||
else:
|
||||
# Parse the end of the stream.
|
||||
token = self.get_token()
|
||||
event = StreamEndEvent(token.start_mark, token.end_mark)
|
||||
assert not self.states
|
||||
assert not self.marks
|
||||
self.state = None
|
||||
return event
|
||||
|
||||
def parse_document_end(self):
|
||||
|
||||
# Parse the document end.
|
||||
token = self.peek_token()
|
||||
start_mark = end_mark = token.start_mark
|
||||
explicit = False
|
||||
if self.check_token(DocumentEndToken):
|
||||
token = self.get_token()
|
||||
end_mark = token.end_mark
|
||||
explicit = True
|
||||
event = DocumentEndEvent(start_mark, end_mark,
|
||||
explicit=explicit)
|
||||
|
||||
# Prepare the next state.
|
||||
self.state = self.parse_document_start
|
||||
|
||||
return event
|
||||
|
||||
def parse_document_content(self):
|
||||
if self.check_token(DirectiveToken,
|
||||
DocumentStartToken, DocumentEndToken, StreamEndToken):
|
||||
event = self.process_empty_scalar(self.peek_token().start_mark)
|
||||
self.state = self.states.pop()
|
||||
return event
|
||||
else:
|
||||
return self.parse_block_node()
|
||||
|
||||
def process_directives(self):
|
||||
self.yaml_version = None
|
||||
self.tag_handles = {}
|
||||
while self.check_token(DirectiveToken):
|
||||
token = self.get_token()
|
||||
if token.name == u'YAML':
|
||||
if self.yaml_version is not None:
|
||||
raise ParserError(None, None,
|
||||
"found duplicate YAML directive", token.start_mark)
|
||||
major, minor = token.value
|
||||
if major != 1:
|
||||
raise ParserError(None, None,
|
||||
"found incompatible YAML document (version 1.* is required)",
|
||||
token.start_mark)
|
||||
self.yaml_version = token.value
|
||||
elif token.name == u'TAG':
|
||||
handle, prefix = token.value
|
||||
if handle in self.tag_handles:
|
||||
raise ParserError(None, None,
|
||||
"duplicate tag handle %r" % handle.encode('utf-8'),
|
||||
token.start_mark)
|
||||
self.tag_handles[handle] = prefix
|
||||
if self.tag_handles:
|
||||
value = self.yaml_version, self.tag_handles.copy()
|
||||
else:
|
||||
value = self.yaml_version, None
|
||||
for key in self.DEFAULT_TAGS:
|
||||
if key not in self.tag_handles:
|
||||
self.tag_handles[key] = self.DEFAULT_TAGS[key]
|
||||
return value
|
||||
|
||||
# block_node_or_indentless_sequence ::= ALIAS
|
||||
# | properties (block_content | indentless_block_sequence)?
|
||||
# | block_content
|
||||
# | indentless_block_sequence
|
||||
# block_node ::= ALIAS
|
||||
# | properties block_content?
|
||||
# | block_content
|
||||
# flow_node ::= ALIAS
|
||||
# | properties flow_content?
|
||||
# | flow_content
|
||||
# properties ::= TAG ANCHOR? | ANCHOR TAG?
|
||||
# block_content ::= block_collection | flow_collection | SCALAR
|
||||
# flow_content ::= flow_collection | SCALAR
|
||||
# block_collection ::= block_sequence | block_mapping
|
||||
# flow_collection ::= flow_sequence | flow_mapping
|
||||
|
||||
def parse_block_node(self):
|
||||
return self.parse_node(block=True)
|
||||
|
||||
def parse_flow_node(self):
|
||||
return self.parse_node()
|
||||
|
||||
def parse_block_node_or_indentless_sequence(self):
|
||||
return self.parse_node(block=True, indentless_sequence=True)
|
||||
|
||||
def parse_node(self, block=False, indentless_sequence=False):
|
||||
if self.check_token(AliasToken):
|
||||
token = self.get_token()
|
||||
event = AliasEvent(token.value, token.start_mark, token.end_mark)
|
||||
self.state = self.states.pop()
|
||||
else:
|
||||
anchor = None
|
||||
tag = None
|
||||
start_mark = end_mark = tag_mark = None
|
||||
if self.check_token(AnchorToken):
|
||||
token = self.get_token()
|
||||
start_mark = token.start_mark
|
||||
end_mark = token.end_mark
|
||||
anchor = token.value
|
||||
if self.check_token(TagToken):
|
||||
token = self.get_token()
|
||||
tag_mark = token.start_mark
|
||||
end_mark = token.end_mark
|
||||
tag = token.value
|
||||
elif self.check_token(TagToken):
|
||||
token = self.get_token()
|
||||
start_mark = tag_mark = token.start_mark
|
||||
end_mark = token.end_mark
|
||||
tag = token.value
|
||||
if self.check_token(AnchorToken):
|
||||
token = self.get_token()
|
||||
end_mark = token.end_mark
|
||||
anchor = token.value
|
||||
if tag is not None:
|
||||
handle, suffix = tag
|
||||
if handle is not None:
|
||||
if handle not in self.tag_handles:
|
||||
raise ParserError("while parsing a node", start_mark,
|
||||
"found undefined tag handle %r" % handle.encode('utf-8'),
|
||||
tag_mark)
|
||||
tag = self.tag_handles[handle]+suffix
|
||||
else:
|
||||
tag = suffix
|
||||
#if tag == u'!':
|
||||
# raise ParserError("while parsing a node", start_mark,
|
||||
# "found non-specific tag '!'", tag_mark,
|
||||
# "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
|
||||
if start_mark is None:
|
||||
start_mark = end_mark = self.peek_token().start_mark
|
||||
event = None
|
||||
implicit = (tag is None or tag == u'!')
|
||||
if indentless_sequence and self.check_token(BlockEntryToken):
|
||||
end_mark = self.peek_token().end_mark
|
||||
event = SequenceStartEvent(anchor, tag, implicit,
|
||||
start_mark, end_mark)
|
||||
self.state = self.parse_indentless_sequence_entry
|
||||
else:
|
||||
if self.check_token(ScalarToken):
|
||||
token = self.get_token()
|
||||
end_mark = token.end_mark
|
||||
if (token.plain and tag is None) or tag == u'!':
|
||||
implicit = (True, False)
|
||||
elif tag is None:
|
||||
implicit = (False, True)
|
||||
else:
|
||||
implicit = (False, False)
|
||||
event = ScalarEvent(anchor, tag, implicit, token.value,
|
||||
start_mark, end_mark, style=token.style)
|
||||
self.state = self.states.pop()
|
||||
elif self.check_token(FlowSequenceStartToken):
|
||||
end_mark = self.peek_token().end_mark
|
||||
event = SequenceStartEvent(anchor, tag, implicit,
|
||||
start_mark, end_mark, flow_style=True)
|
||||
self.state = self.parse_flow_sequence_first_entry
|
||||
elif self.check_token(FlowMappingStartToken):
|
||||
end_mark = self.peek_token().end_mark
|
||||
event = MappingStartEvent(anchor, tag, implicit,
|
||||
start_mark, end_mark, flow_style=True)
|
||||
self.state = self.parse_flow_mapping_first_key
|
||||
elif block and self.check_token(BlockSequenceStartToken):
|
||||
end_mark = self.peek_token().start_mark
|
||||
event = SequenceStartEvent(anchor, tag, implicit,
|
||||
start_mark, end_mark, flow_style=False)
|
||||
self.state = self.parse_block_sequence_first_entry
|
||||
elif block and self.check_token(BlockMappingStartToken):
|
||||
end_mark = self.peek_token().start_mark
|
||||
event = MappingStartEvent(anchor, tag, implicit,
|
||||
start_mark, end_mark, flow_style=False)
|
||||
self.state = self.parse_block_mapping_first_key
|
||||
elif anchor is not None or tag is not None:
|
||||
# Empty scalars are allowed even if a tag or an anchor is
|
||||
# specified.
|
||||
event = ScalarEvent(anchor, tag, (implicit, False), u'',
|
||||
start_mark, end_mark)
|
||||
self.state = self.states.pop()
|
||||
else:
|
||||
if block:
|
||||
node = 'block'
|
||||
else:
|
||||
node = 'flow'
|
||||
token = self.peek_token()
|
||||
raise ParserError("while parsing a %s node" % node, start_mark,
|
||||
"expected the node content, but found %r" % token.id,
|
||||
token.start_mark)
|
||||
return event
|
||||
|
||||
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
|
||||
|
||||
def parse_block_sequence_first_entry(self):
|
||||
token = self.get_token()
|
||||
self.marks.append(token.start_mark)
|
||||
return self.parse_block_sequence_entry()
|
||||
|
||||
def parse_block_sequence_entry(self):
|
||||
if self.check_token(BlockEntryToken):
|
||||
token = self.get_token()
|
||||
if not self.check_token(BlockEntryToken, BlockEndToken):
|
||||
self.states.append(self.parse_block_sequence_entry)
|
||||
return self.parse_block_node()
|
||||
else:
|
||||
self.state = self.parse_block_sequence_entry
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
if not self.check_token(BlockEndToken):
|
||||
token = self.peek_token()
|
||||
raise ParserError("while parsing a block collection", self.marks[-1],
|
||||
"expected <block end>, but found %r" % token.id, token.start_mark)
|
||||
token = self.get_token()
|
||||
event = SequenceEndEvent(token.start_mark, token.end_mark)
|
||||
self.state = self.states.pop()
|
||||
self.marks.pop()
|
||||
return event
|
||||
|
||||
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
|
||||
|
||||
def parse_indentless_sequence_entry(self):
|
||||
if self.check_token(BlockEntryToken):
|
||||
token = self.get_token()
|
||||
if not self.check_token(BlockEntryToken,
|
||||
KeyToken, ValueToken, BlockEndToken):
|
||||
self.states.append(self.parse_indentless_sequence_entry)
|
||||
return self.parse_block_node()
|
||||
else:
|
||||
self.state = self.parse_indentless_sequence_entry
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
token = self.peek_token()
|
||||
event = SequenceEndEvent(token.start_mark, token.start_mark)
|
||||
self.state = self.states.pop()
|
||||
return event
|
||||
|
||||
# block_mapping ::= BLOCK-MAPPING_START
|
||||
# ((KEY block_node_or_indentless_sequence?)?
|
||||
# (VALUE block_node_or_indentless_sequence?)?)*
|
||||
# BLOCK-END
|
||||
|
||||
def parse_block_mapping_first_key(self):
|
||||
token = self.get_token()
|
||||
self.marks.append(token.start_mark)
|
||||
return self.parse_block_mapping_key()
|
||||
|
||||
def parse_block_mapping_key(self):
|
||||
if self.check_token(KeyToken):
|
||||
token = self.get_token()
|
||||
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
|
||||
self.states.append(self.parse_block_mapping_value)
|
||||
return self.parse_block_node_or_indentless_sequence()
|
||||
else:
|
||||
self.state = self.parse_block_mapping_value
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
if not self.check_token(BlockEndToken):
|
||||
token = self.peek_token()
|
||||
raise ParserError("while parsing a block mapping", self.marks[-1],
|
||||
"expected <block end>, but found %r" % token.id, token.start_mark)
|
||||
token = self.get_token()
|
||||
event = MappingEndEvent(token.start_mark, token.end_mark)
|
||||
self.state = self.states.pop()
|
||||
self.marks.pop()
|
||||
return event
|
||||
|
||||
def parse_block_mapping_value(self):
|
||||
if self.check_token(ValueToken):
|
||||
token = self.get_token()
|
||||
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
|
||||
self.states.append(self.parse_block_mapping_key)
|
||||
return self.parse_block_node_or_indentless_sequence()
|
||||
else:
|
||||
self.state = self.parse_block_mapping_key
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
else:
|
||||
self.state = self.parse_block_mapping_key
|
||||
token = self.peek_token()
|
||||
return self.process_empty_scalar(token.start_mark)
|
||||
|
||||
# flow_sequence ::= FLOW-SEQUENCE-START
|
||||
# (flow_sequence_entry FLOW-ENTRY)*
|
||||
# flow_sequence_entry?
|
||||
# FLOW-SEQUENCE-END
|
||||
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
||||
#
|
||||
# Note that while production rules for both flow_sequence_entry and
|
||||
# flow_mapping_entry are equal, their interpretations are different.
|
||||
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
|
||||
# generate an inline mapping (set syntax).
|
||||
|
||||
def parse_flow_sequence_first_entry(self):
|
||||
token = self.get_token()
|
||||
self.marks.append(token.start_mark)
|
||||
return self.parse_flow_sequence_entry(first=True)
|
||||
|
||||
def parse_flow_sequence_entry(self, first=False):
|
||||
if not self.check_token(FlowSequenceEndToken):
|
||||
if not first:
|
||||
if self.check_token(FlowEntryToken):
|
||||
self.get_token()
|
||||
else:
|
||||
token = self.peek_token()
|
||||
raise ParserError("while parsing a flow sequence", self.marks[-1],
|
||||
"expected ',' or ']', but got %r" % token.id, token.start_mark)
|
||||
|
||||
if self.check_token(KeyToken):
|
||||
token = self.peek_token()
|
||||
event = MappingStartEvent(None, None, True,
|
||||
token.start_mark, token.end_mark,
|
||||
flow_style=True)
|
||||
self.state = self.parse_flow_sequence_entry_mapping_key
|
||||
return event
|
||||
elif not self.check_token(FlowSequenceEndToken):
|
||||
self.states.append(self.parse_flow_sequence_entry)
|
||||
return self.parse_flow_node()
|
||||
token = self.get_token()
|
||||
event = SequenceEndEvent(token.start_mark, token.end_mark)
|
||||
self.state = self.states.pop()
|
||||
self.marks.pop()
|
||||
return event
|
||||
|
||||
def parse_flow_sequence_entry_mapping_key(self):
|
||||
token = self.get_token()
|
||||
if not self.check_token(ValueToken,
|
||||
FlowEntryToken, FlowSequenceEndToken):
|
||||
self.states.append(self.parse_flow_sequence_entry_mapping_value)
|
||||
return self.parse_flow_node()
|
||||
else:
|
||||
self.state = self.parse_flow_sequence_entry_mapping_value
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
|
||||
def parse_flow_sequence_entry_mapping_value(self):
|
||||
if self.check_token(ValueToken):
|
||||
token = self.get_token()
|
||||
if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
|
||||
self.states.append(self.parse_flow_sequence_entry_mapping_end)
|
||||
return self.parse_flow_node()
|
||||
else:
|
||||
self.state = self.parse_flow_sequence_entry_mapping_end
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
else:
|
||||
self.state = self.parse_flow_sequence_entry_mapping_end
|
||||
token = self.peek_token()
|
||||
return self.process_empty_scalar(token.start_mark)
|
||||
|
||||
def parse_flow_sequence_entry_mapping_end(self):
|
||||
self.state = self.parse_flow_sequence_entry
|
||||
token = self.peek_token()
|
||||
return MappingEndEvent(token.start_mark, token.start_mark)
|
||||
|
||||
# flow_mapping ::= FLOW-MAPPING-START
|
||||
# (flow_mapping_entry FLOW-ENTRY)*
|
||||
# flow_mapping_entry?
|
||||
# FLOW-MAPPING-END
|
||||
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
||||
|
||||
def parse_flow_mapping_first_key(self):
|
||||
token = self.get_token()
|
||||
self.marks.append(token.start_mark)
|
||||
return self.parse_flow_mapping_key(first=True)
|
||||
|
||||
def parse_flow_mapping_key(self, first=False):
|
||||
if not self.check_token(FlowMappingEndToken):
|
||||
if not first:
|
||||
if self.check_token(FlowEntryToken):
|
||||
self.get_token()
|
||||
else:
|
||||
token = self.peek_token()
|
||||
raise ParserError("while parsing a flow mapping", self.marks[-1],
|
||||
"expected ',' or '}', but got %r" % token.id, token.start_mark)
|
||||
if self.check_token(KeyToken):
|
||||
token = self.get_token()
|
||||
if not self.check_token(ValueToken,
|
||||
FlowEntryToken, FlowMappingEndToken):
|
||||
self.states.append(self.parse_flow_mapping_value)
|
||||
return self.parse_flow_node()
|
||||
else:
|
||||
self.state = self.parse_flow_mapping_value
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
elif not self.check_token(FlowMappingEndToken):
|
||||
self.states.append(self.parse_flow_mapping_empty_value)
|
||||
return self.parse_flow_node()
|
||||
token = self.get_token()
|
||||
event = MappingEndEvent(token.start_mark, token.end_mark)
|
||||
self.state = self.states.pop()
|
||||
self.marks.pop()
|
||||
return event
|
||||
|
||||
def parse_flow_mapping_value(self):
|
||||
if self.check_token(ValueToken):
|
||||
token = self.get_token()
|
||||
if not self.check_token(FlowEntryToken, FlowMappingEndToken):
|
||||
self.states.append(self.parse_flow_mapping_key)
|
||||
return self.parse_flow_node()
|
||||
else:
|
||||
self.state = self.parse_flow_mapping_key
|
||||
return self.process_empty_scalar(token.end_mark)
|
||||
else:
|
||||
self.state = self.parse_flow_mapping_key
|
||||
token = self.peek_token()
|
||||
return self.process_empty_scalar(token.start_mark)
|
||||
|
||||
def parse_flow_mapping_empty_value(self):
|
||||
self.state = self.parse_flow_mapping_key
|
||||
return self.process_empty_scalar(self.peek_token().start_mark)
|
||||
|
||||
def process_empty_scalar(self, mark):
|
||||
return ScalarEvent(None, None, (True, False), u'', mark, mark)
|
||||
|
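Editor's note: the parser above is event driven; check_event(), peek_event() and get_event() hand one event at a time to the composer. A rough sketch of observing that stream through the public yaml.parse() helper (an assumption about typical use in upstream PyYAML, not part of this commit):

import yaml

for event in yaml.parse("- one\n- two\n"):
    # Emits StreamStart, DocumentStart, SequenceStart,
    # two ScalarEvents, SequenceEnd, DocumentEnd, StreamEnd.
    print(type(event).__name__)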
188  libs/yaml/reader.py  Normal file
@@ -0,0 +1,188 @@
# This module contains abstractions for the input stream. You don't have to
|
||||
# looks further, there are no pretty code.
|
||||
#
|
||||
# We define two classes here.
|
||||
#
|
||||
# Mark(source, line, column)
|
||||
# It's just a record and its only use is producing nice error messages.
|
||||
# Parser does not use it for any other purposes.
|
||||
#
|
||||
# Reader(source, data)
|
||||
# Reader determines the encoding of `data` and converts it to unicode.
|
||||
# Reader provides the following methods and attributes:
|
||||
# reader.peek(length=1) - return the next `length` characters
|
||||
# reader.forward(length=1) - move the current position to `length` characters.
|
||||
# reader.index - the number of the current character.
|
||||
# reader.line, stream.column - the line and the column of the current character.
|
||||
|
||||
__all__ = ['Reader', 'ReaderError']
|
||||
|
||||
from error import YAMLError, Mark
|
||||
|
||||
import codecs, re, sys
|
||||
|
||||
has_ucs4 = sys.maxunicode > 0xffff
|
||||
|
||||
class ReaderError(YAMLError):
|
||||
|
||||
def __init__(self, name, position, character, encoding, reason):
|
||||
self.name = name
|
||||
self.character = character
|
||||
self.position = position
|
||||
self.encoding = encoding
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
if isinstance(self.character, str):
|
||||
return "'%s' codec can't decode byte #x%02x: %s\n" \
|
||||
" in \"%s\", position %d" \
|
||||
% (self.encoding, ord(self.character), self.reason,
|
||||
self.name, self.position)
|
||||
else:
|
||||
return "unacceptable character #x%04x: %s\n" \
|
||||
" in \"%s\", position %d" \
|
||||
% (self.character, self.reason,
|
||||
self.name, self.position)
|
||||
|
||||
class Reader(object):
|
||||
# Reader:
|
||||
# - determines the data encoding and converts it to unicode,
|
||||
# - checks if characters are in allowed range,
|
||||
# - adds '\0' to the end.
|
||||
|
||||
# Reader accepts
|
||||
# - a `str` object,
|
||||
# - a `unicode` object,
|
||||
# - a file-like object with its `read` method returning `str`,
|
||||
# - a file-like object with its `read` method returning `unicode`.
|
||||
|
||||
# Yeah, it's ugly and slow.
|
||||
|
||||
def __init__(self, stream):
|
||||
self.name = None
|
||||
self.stream = None
|
||||
self.stream_pointer = 0
|
||||
self.eof = True
|
||||
self.buffer = u''
|
||||
self.pointer = 0
|
||||
self.raw_buffer = None
|
||||
self.raw_decode = None
|
||||
self.encoding = None
|
||||
self.index = 0
|
||||
self.line = 0
|
||||
self.column = 0
|
||||
if isinstance(stream, unicode):
|
||||
self.name = "<unicode string>"
|
||||
self.check_printable(stream)
|
||||
self.buffer = stream+u'\0'
|
||||
elif isinstance(stream, str):
|
||||
self.name = "<string>"
|
||||
self.raw_buffer = stream
|
||||
self.determine_encoding()
|
||||
else:
|
||||
self.stream = stream
|
||||
self.name = getattr(stream, 'name', "<file>")
|
||||
self.eof = False
|
||||
self.raw_buffer = ''
|
||||
self.determine_encoding()
|
||||
|
||||
def peek(self, index=0):
|
||||
try:
|
||||
return self.buffer[self.pointer+index]
|
||||
except IndexError:
|
||||
self.update(index+1)
|
||||
return self.buffer[self.pointer+index]
|
||||
|
||||
def prefix(self, length=1):
|
||||
if self.pointer+length >= len(self.buffer):
|
||||
self.update(length)
|
||||
return self.buffer[self.pointer:self.pointer+length]
|
||||
|
||||
def forward(self, length=1):
|
||||
if self.pointer+length+1 >= len(self.buffer):
|
||||
self.update(length+1)
|
||||
while length:
|
||||
ch = self.buffer[self.pointer]
|
||||
self.pointer += 1
|
||||
self.index += 1
|
||||
if ch in u'\n\x85\u2028\u2029' \
|
||||
or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
|
||||
self.line += 1
|
||||
self.column = 0
|
||||
elif ch != u'\uFEFF':
|
||||
self.column += 1
|
||||
length -= 1
|
||||
|
||||
def get_mark(self):
|
||||
if self.stream is None:
|
||||
return Mark(self.name, self.index, self.line, self.column,
|
||||
self.buffer, self.pointer)
|
||||
else:
|
||||
return Mark(self.name, self.index, self.line, self.column,
|
||||
None, None)
|
||||
|
||||
def determine_encoding(self):
|
||||
while not self.eof and len(self.raw_buffer) < 2:
|
||||
self.update_raw()
|
||||
if not isinstance(self.raw_buffer, unicode):
|
||||
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
|
||||
self.raw_decode = codecs.utf_16_le_decode
|
||||
self.encoding = 'utf-16-le'
|
||||
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
|
||||
self.raw_decode = codecs.utf_16_be_decode
|
||||
self.encoding = 'utf-16-be'
|
||||
else:
|
||||
self.raw_decode = codecs.utf_8_decode
|
||||
self.encoding = 'utf-8'
|
||||
self.update(1)
|
||||
|
||||
if has_ucs4:
|
||||
NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
|
||||
else:
|
||||
NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
|
||||
def check_printable(self, data):
|
||||
match = self.NON_PRINTABLE.search(data)
|
||||
if match:
|
||||
character = match.group()
|
||||
position = self.index+(len(self.buffer)-self.pointer)+match.start()
|
||||
raise ReaderError(self.name, position, ord(character),
|
||||
'unicode', "special characters are not allowed")
|
||||
|
||||
def update(self, length):
|
||||
if self.raw_buffer is None:
|
||||
return
|
||||
self.buffer = self.buffer[self.pointer:]
|
||||
self.pointer = 0
|
||||
while len(self.buffer) < length:
|
||||
if not self.eof:
|
||||
self.update_raw()
|
||||
if self.raw_decode is not None:
|
||||
try:
|
||||
data, converted = self.raw_decode(self.raw_buffer,
|
||||
'strict', self.eof)
|
||||
except UnicodeDecodeError, exc:
|
||||
character = exc.object[exc.start]
|
||||
if self.stream is not None:
|
||||
position = self.stream_pointer-len(self.raw_buffer)+exc.start
|
||||
else:
|
||||
position = exc.start
|
||||
raise ReaderError(self.name, position, character,
|
||||
exc.encoding, exc.reason)
|
||||
else:
|
||||
data = self.raw_buffer
|
||||
converted = len(data)
|
||||
self.check_printable(data)
|
||||
self.buffer += data
|
||||
self.raw_buffer = self.raw_buffer[converted:]
|
||||
if self.eof:
|
||||
self.buffer += u'\0'
|
||||
self.raw_buffer = None
|
||||
break
|
||||
|
||||
def update_raw(self, size=1024):
|
||||
data = self.stream.read(size)
|
||||
if data:
|
||||
self.raw_buffer += data
|
||||
self.stream_pointer += len(data)
|
||||
else:
|
||||
self.eof = True
|
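Editor's note: the Reader's job is encoding detection plus peek/forward bookkeeping. A hypothetical probe against the class above (illustrative only; it assumes the package is importable as yaml):

from yaml.reader import Reader

r = Reader(u"s\u00e9ries: 12\n")   # unicode input skips BOM/encoding detection
print(r.peek())                    # 's' - current character, not yet consumed
r.forward(7)                       # advance past the key and the ':'
print((r.line, r.column))          # (0, 7)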
488  libs/yaml/representer.py  Normal file
@@ -0,0 +1,488 @@
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
|
||||
'RepresenterError']
|
||||
|
||||
from error import *
|
||||
from nodes import *
|
||||
|
||||
import datetime
|
||||
|
||||
import sys, copy_reg, types
|
||||
|
||||
class RepresenterError(YAMLError):
|
||||
pass
|
||||
|
||||
class BaseRepresenter(object):
|
||||
|
||||
yaml_representers = {}
|
||||
yaml_multi_representers = {}
|
||||
|
||||
def __init__(self, default_style=None, default_flow_style=False, sort_keys=True):
|
||||
self.default_style = default_style
|
||||
self.default_flow_style = default_flow_style
|
||||
self.sort_keys = sort_keys
|
||||
self.represented_objects = {}
|
||||
self.object_keeper = []
|
||||
self.alias_key = None
|
||||
|
||||
def represent(self, data):
|
||||
node = self.represent_data(data)
|
||||
self.serialize(node)
|
||||
self.represented_objects = {}
|
||||
self.object_keeper = []
|
||||
self.alias_key = None
|
||||
|
||||
def get_classobj_bases(self, cls):
|
||||
bases = [cls]
|
||||
for base in cls.__bases__:
|
||||
bases.extend(self.get_classobj_bases(base))
|
||||
return bases
|
||||
|
||||
def represent_data(self, data):
|
||||
if self.ignore_aliases(data):
|
||||
self.alias_key = None
|
||||
else:
|
||||
self.alias_key = id(data)
|
||||
if self.alias_key is not None:
|
||||
if self.alias_key in self.represented_objects:
|
||||
node = self.represented_objects[self.alias_key]
|
||||
#if node is None:
|
||||
# raise RepresenterError("recursive objects are not allowed: %r" % data)
|
||||
return node
|
||||
#self.represented_objects[alias_key] = None
|
||||
self.object_keeper.append(data)
|
||||
data_types = type(data).__mro__
|
||||
if type(data) is types.InstanceType:
|
||||
data_types = self.get_classobj_bases(data.__class__)+list(data_types)
|
||||
if data_types[0] in self.yaml_representers:
|
||||
node = self.yaml_representers[data_types[0]](self, data)
|
||||
else:
|
||||
for data_type in data_types:
|
||||
if data_type in self.yaml_multi_representers:
|
||||
node = self.yaml_multi_representers[data_type](self, data)
|
||||
break
|
||||
else:
|
||||
if None in self.yaml_multi_representers:
|
||||
node = self.yaml_multi_representers[None](self, data)
|
||||
elif None in self.yaml_representers:
|
||||
node = self.yaml_representers[None](self, data)
|
||||
else:
|
||||
node = ScalarNode(None, unicode(data))
|
||||
#if alias_key is not None:
|
||||
# self.represented_objects[alias_key] = node
|
||||
return node
|
||||
|
||||
def add_representer(cls, data_type, representer):
|
||||
if not 'yaml_representers' in cls.__dict__:
|
||||
cls.yaml_representers = cls.yaml_representers.copy()
|
||||
cls.yaml_representers[data_type] = representer
|
||||
add_representer = classmethod(add_representer)
|
||||
|
||||
def add_multi_representer(cls, data_type, representer):
|
||||
if not 'yaml_multi_representers' in cls.__dict__:
|
||||
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
|
||||
cls.yaml_multi_representers[data_type] = representer
|
||||
add_multi_representer = classmethod(add_multi_representer)
|
||||
|
||||
def represent_scalar(self, tag, value, style=None):
|
||||
if style is None:
|
||||
style = self.default_style
|
||||
node = ScalarNode(tag, value, style=style)
|
||||
if self.alias_key is not None:
|
||||
self.represented_objects[self.alias_key] = node
|
||||
return node
|
||||
|
||||
def represent_sequence(self, tag, sequence, flow_style=None):
|
||||
value = []
|
||||
node = SequenceNode(tag, value, flow_style=flow_style)
|
||||
if self.alias_key is not None:
|
||||
self.represented_objects[self.alias_key] = node
|
||||
best_style = True
|
||||
for item in sequence:
|
||||
node_item = self.represent_data(item)
|
||||
if not (isinstance(node_item, ScalarNode) and not node_item.style):
|
||||
best_style = False
|
||||
value.append(node_item)
|
||||
if flow_style is None:
|
||||
if self.default_flow_style is not None:
|
||||
node.flow_style = self.default_flow_style
|
||||
else:
|
||||
node.flow_style = best_style
|
||||
return node
|
||||
|
||||
def represent_mapping(self, tag, mapping, flow_style=None):
|
||||
value = []
|
||||
node = MappingNode(tag, value, flow_style=flow_style)
|
||||
if self.alias_key is not None:
|
||||
self.represented_objects[self.alias_key] = node
|
||||
best_style = True
|
||||
if hasattr(mapping, 'items'):
|
||||
mapping = mapping.items()
|
||||
if self.sort_keys:
|
||||
mapping.sort()
|
||||
for item_key, item_value in mapping:
|
||||
node_key = self.represent_data(item_key)
|
||||
node_value = self.represent_data(item_value)
|
||||
if not (isinstance(node_key, ScalarNode) and not node_key.style):
|
||||
best_style = False
|
||||
if not (isinstance(node_value, ScalarNode) and not node_value.style):
|
||||
best_style = False
|
||||
value.append((node_key, node_value))
|
||||
if flow_style is None:
|
||||
if self.default_flow_style is not None:
|
||||
node.flow_style = self.default_flow_style
|
||||
else:
|
||||
node.flow_style = best_style
|
||||
return node
|
||||
|
||||
def ignore_aliases(self, data):
|
||||
return False
|
||||
|
||||
class SafeRepresenter(BaseRepresenter):
|
||||
|
||||
def ignore_aliases(self, data):
|
||||
if data is None:
|
||||
return True
|
||||
if isinstance(data, tuple) and data == ():
|
||||
return True
|
||||
if isinstance(data, (str, unicode, bool, int, float)):
|
||||
return True
|
||||
|
||||
def represent_none(self, data):
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:null',
|
||||
u'null')
|
||||
|
||||
def represent_str(self, data):
|
||||
tag = None
|
||||
style = None
|
||||
try:
|
||||
data = unicode(data, 'ascii')
|
||||
tag = u'tag:yaml.org,2002:str'
|
||||
except UnicodeDecodeError:
|
||||
try:
|
||||
data = unicode(data, 'utf-8')
|
||||
tag = u'tag:yaml.org,2002:str'
|
||||
except UnicodeDecodeError:
|
||||
data = data.encode('base64')
|
||||
tag = u'tag:yaml.org,2002:binary'
|
||||
style = '|'
|
||||
return self.represent_scalar(tag, data, style=style)
|
||||
|
||||
def represent_unicode(self, data):
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:str', data)
|
||||
|
||||
def represent_bool(self, data):
|
||||
if data:
|
||||
value = u'true'
|
||||
else:
|
||||
value = u'false'
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:bool', value)
|
||||
|
||||
def represent_int(self, data):
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
|
||||
|
||||
def represent_long(self, data):
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
|
||||
|
||||
inf_value = 1e300
|
||||
while repr(inf_value) != repr(inf_value*inf_value):
|
||||
inf_value *= inf_value
|
||||
|
||||
def represent_float(self, data):
|
||||
if data != data or (data == 0.0 and data == 1.0):
|
||||
value = u'.nan'
|
||||
elif data == self.inf_value:
|
||||
value = u'.inf'
|
||||
elif data == -self.inf_value:
|
||||
value = u'-.inf'
|
||||
else:
|
||||
value = unicode(repr(data)).lower()
|
||||
# Note that in some cases `repr(data)` represents a float number
|
||||
# without the decimal parts. For instance:
|
||||
# >>> repr(1e17)
|
||||
# '1e17'
|
||||
# Unfortunately, this is not a valid float representation according
|
||||
# to the definition of the `!!float` tag. We fix this by adding
|
||||
# '.0' before the 'e' symbol.
|
||||
if u'.' not in value and u'e' in value:
|
||||
value = value.replace(u'e', u'.0e', 1)
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:float', value)
|
||||
|
||||
def represent_list(self, data):
|
||||
#pairs = (len(data) > 0 and isinstance(data, list))
|
||||
#if pairs:
|
||||
# for item in data:
|
||||
# if not isinstance(item, tuple) or len(item) != 2:
|
||||
# pairs = False
|
||||
# break
|
||||
#if not pairs:
|
||||
return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
|
||||
#value = []
|
||||
#for item_key, item_value in data:
|
||||
# value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
|
||||
# [(item_key, item_value)]))
|
||||
#return SequenceNode(u'tag:yaml.org,2002:pairs', value)
|
||||
|
||||
def represent_dict(self, data):
|
||||
return self.represent_mapping(u'tag:yaml.org,2002:map', data)
|
||||
|
||||
def represent_set(self, data):
|
||||
value = {}
|
||||
for key in data:
|
||||
value[key] = None
|
||||
return self.represent_mapping(u'tag:yaml.org,2002:set', value)
|
||||
|
||||
def represent_date(self, data):
|
||||
value = unicode(data.isoformat())
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
|
||||
|
||||
def represent_datetime(self, data):
|
||||
value = unicode(data.isoformat(' '))
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
|
||||
|
||||
def represent_yaml_object(self, tag, data, cls, flow_style=None):
|
||||
if hasattr(data, '__getstate__'):
|
||||
state = data.__getstate__()
|
||||
else:
|
||||
state = data.__dict__.copy()
|
||||
return self.represent_mapping(tag, state, flow_style=flow_style)
|
||||
|
||||
def represent_undefined(self, data):
|
||||
raise RepresenterError("cannot represent an object", data)
|
||||
|
||||
SafeRepresenter.add_representer(type(None),
|
||||
SafeRepresenter.represent_none)
|
||||
|
||||
SafeRepresenter.add_representer(str,
|
||||
SafeRepresenter.represent_str)
|
||||
|
||||
SafeRepresenter.add_representer(unicode,
|
||||
SafeRepresenter.represent_unicode)
|
||||
|
||||
SafeRepresenter.add_representer(bool,
|
||||
SafeRepresenter.represent_bool)
|
||||
|
||||
SafeRepresenter.add_representer(int,
|
||||
SafeRepresenter.represent_int)
|
||||
|
||||
SafeRepresenter.add_representer(long,
|
||||
SafeRepresenter.represent_long)
|
||||
|
||||
SafeRepresenter.add_representer(float,
|
||||
SafeRepresenter.represent_float)
|
||||
|
||||
SafeRepresenter.add_representer(list,
|
||||
SafeRepresenter.represent_list)
|
||||
|
||||
SafeRepresenter.add_representer(tuple,
|
||||
SafeRepresenter.represent_list)
|
||||
|
||||
SafeRepresenter.add_representer(dict,
|
||||
SafeRepresenter.represent_dict)
|
||||
|
||||
SafeRepresenter.add_representer(set,
|
||||
SafeRepresenter.represent_set)
|
||||
|
||||
SafeRepresenter.add_representer(datetime.date,
|
||||
SafeRepresenter.represent_date)
|
||||
|
||||
SafeRepresenter.add_representer(datetime.datetime,
|
||||
SafeRepresenter.represent_datetime)
|
||||
|
||||
SafeRepresenter.add_representer(None,
|
||||
SafeRepresenter.represent_undefined)
|
||||
|
||||
class Representer(SafeRepresenter):
|
||||
|
||||
def represent_str(self, data):
|
||||
tag = None
|
||||
style = None
|
||||
try:
|
||||
data = unicode(data, 'ascii')
|
||||
tag = u'tag:yaml.org,2002:str'
|
||||
except UnicodeDecodeError:
|
||||
try:
|
||||
data = unicode(data, 'utf-8')
|
||||
tag = u'tag:yaml.org,2002:python/str'
|
||||
except UnicodeDecodeError:
|
||||
data = data.encode('base64')
|
||||
tag = u'tag:yaml.org,2002:binary'
|
||||
style = '|'
|
||||
return self.represent_scalar(tag, data, style=style)
|
||||
|
||||
def represent_unicode(self, data):
|
||||
tag = None
|
||||
try:
|
||||
data.encode('ascii')
|
||||
tag = u'tag:yaml.org,2002:python/unicode'
|
||||
except UnicodeEncodeError:
|
||||
tag = u'tag:yaml.org,2002:str'
|
||||
return self.represent_scalar(tag, data)
|
||||
|
||||
def represent_long(self, data):
|
||||
tag = u'tag:yaml.org,2002:int'
|
||||
if int(data) is not data:
|
||||
tag = u'tag:yaml.org,2002:python/long'
|
||||
return self.represent_scalar(tag, unicode(data))
|
||||
|
||||
def represent_complex(self, data):
|
||||
if data.imag == 0.0:
|
||||
data = u'%r' % data.real
|
||||
elif data.real == 0.0:
|
||||
data = u'%rj' % data.imag
|
||||
elif data.imag > 0:
|
||||
data = u'%r+%rj' % (data.real, data.imag)
|
||||
else:
|
||||
data = u'%r%rj' % (data.real, data.imag)
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)
|
||||
|
||||
def represent_tuple(self, data):
|
||||
return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)
|
||||
|
||||
def represent_name(self, data):
|
||||
name = u'%s.%s' % (data.__module__, data.__name__)
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'')
|
||||
|
||||
def represent_module(self, data):
|
||||
return self.represent_scalar(
|
||||
u'tag:yaml.org,2002:python/module:'+data.__name__, u'')
|
||||
|
||||
def represent_instance(self, data):
|
||||
# For instances of classic classes, we use __getinitargs__ and
|
||||
# __getstate__ to serialize the data.
|
||||
|
||||
# If data.__getinitargs__ exists, the object must be reconstructed by
|
||||
# calling cls(**args), where args is a tuple returned by
|
||||
# __getinitargs__. Otherwise, the cls.__init__ method should never be
|
||||
# called and the class instance is created by instantiating a trivial
|
||||
# class and assigning to the instance's __class__ variable.
|
||||
|
||||
# If data.__getstate__ exists, it returns the state of the object.
|
||||
# Otherwise, the state of the object is data.__dict__.
|
||||
|
||||
# We produce either a !!python/object or !!python/object/new node.
|
||||
# If data.__getinitargs__ does not exist and state is a dictionary, we
|
||||
# produce a !!python/object node . Otherwise we produce a
|
||||
# !!python/object/new node.
|
||||
|
||||
cls = data.__class__
|
||||
class_name = u'%s.%s' % (cls.__module__, cls.__name__)
|
||||
args = None
|
||||
state = None
|
||||
if hasattr(data, '__getinitargs__'):
|
||||
args = list(data.__getinitargs__())
|
||||
if hasattr(data, '__getstate__'):
|
||||
state = data.__getstate__()
|
||||
else:
|
||||
state = data.__dict__
|
||||
if args is None and isinstance(state, dict):
|
||||
return self.represent_mapping(
|
||||
u'tag:yaml.org,2002:python/object:'+class_name, state)
|
||||
if isinstance(state, dict) and not state:
|
||||
return self.represent_sequence(
|
||||
u'tag:yaml.org,2002:python/object/new:'+class_name, args)
|
||||
value = {}
|
||||
if args:
|
||||
value['args'] = args
|
||||
value['state'] = state
|
||||
return self.represent_mapping(
|
||||
u'tag:yaml.org,2002:python/object/new:'+class_name, value)
|
||||
|
||||
def represent_object(self, data):
|
||||
# We use __reduce__ API to save the data. data.__reduce__ returns
|
||||
# a tuple of length 2-5:
|
||||
# (function, args, state, listitems, dictitems)
|
||||
|
||||
# For reconstructing, we calls function(*args), then set its state,
|
||||
# listitems, and dictitems if they are not None.
|
||||
|
||||
# A special case is when function.__name__ == '__newobj__'. In this
|
||||
# case we create the object with args[0].__new__(*args).
|
||||
|
||||
# Another special case is when __reduce__ returns a string - we don't
|
||||
# support it.
|
||||
|
||||
# We produce a !!python/object, !!python/object/new or
|
||||
# !!python/object/apply node.
|
||||
|
||||
cls = type(data)
|
||||
if cls in copy_reg.dispatch_table:
|
||||
reduce = copy_reg.dispatch_table[cls](data)
|
||||
elif hasattr(data, '__reduce_ex__'):
|
||||
reduce = data.__reduce_ex__(2)
|
||||
elif hasattr(data, '__reduce__'):
|
||||
reduce = data.__reduce__()
|
||||
else:
|
||||
raise RepresenterError("cannot represent an object", data)
|
||||
reduce = (list(reduce)+[None]*5)[:5]
|
||||
function, args, state, listitems, dictitems = reduce
|
||||
args = list(args)
|
||||
if state is None:
|
||||
state = {}
|
||||
if listitems is not None:
|
||||
listitems = list(listitems)
|
||||
if dictitems is not None:
|
||||
dictitems = dict(dictitems)
|
||||
if function.__name__ == '__newobj__':
|
||||
function = args[0]
|
||||
args = args[1:]
|
||||
tag = u'tag:yaml.org,2002:python/object/new:'
|
||||
newobj = True
|
||||
else:
|
||||
tag = u'tag:yaml.org,2002:python/object/apply:'
|
||||
newobj = False
|
||||
function_name = u'%s.%s' % (function.__module__, function.__name__)
|
||||
if not args and not listitems and not dictitems \
|
||||
and isinstance(state, dict) and newobj:
|
||||
return self.represent_mapping(
|
||||
u'tag:yaml.org,2002:python/object:'+function_name, state)
|
||||
if not listitems and not dictitems \
|
||||
and isinstance(state, dict) and not state:
|
||||
return self.represent_sequence(tag+function_name, args)
|
||||
value = {}
|
||||
if args:
|
||||
value['args'] = args
|
||||
if state or not isinstance(state, dict):
|
||||
value['state'] = state
|
||||
if listitems:
|
||||
value['listitems'] = listitems
|
||||
if dictitems:
|
||||
value['dictitems'] = dictitems
|
||||
return self.represent_mapping(tag+function_name, value)
|
||||
|
||||
Representer.add_representer(str,
|
||||
Representer.represent_str)
|
||||
|
||||
Representer.add_representer(unicode,
|
||||
Representer.represent_unicode)
|
||||
|
||||
Representer.add_representer(long,
|
||||
Representer.represent_long)
|
||||
|
||||
Representer.add_representer(complex,
|
||||
Representer.represent_complex)
|
||||
|
||||
Representer.add_representer(tuple,
|
||||
Representer.represent_tuple)
|
||||
|
||||
Representer.add_representer(type,
|
||||
Representer.represent_name)
|
||||
|
||||
Representer.add_representer(types.ClassType,
|
||||
Representer.represent_name)
|
||||
|
||||
Representer.add_representer(types.FunctionType,
|
||||
Representer.represent_name)
|
||||
|
||||
Representer.add_representer(types.BuiltinFunctionType,
|
||||
Representer.represent_name)
|
||||
|
||||
Representer.add_representer(types.ModuleType,
|
||||
Representer.represent_module)
|
||||
|
||||
Representer.add_multi_representer(types.InstanceType,
|
||||
Representer.represent_instance)
|
||||
|
||||
Representer.add_multi_representer(object,
|
||||
Representer.represent_object)
|
||||
|
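Editor's note: add_representer() is the extension point used above for every built-in type; registering a representer for an application class works the same way. A hedged sketch (the Point class and the expected output are illustrative, not from this commit):

import yaml

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

def represent_point(dumper, data):
    # Emit a mapping tagged !point instead of the generic python/object form.
    return dumper.represent_mapping(u'!point', {'x': data.x, 'y': data.y})

yaml.add_representer(Point, represent_point)
print(yaml.dump(Point(1, 2)))   # roughly "!point {x: 1, y: 2}"; flow style may differ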
227  libs/yaml/resolver.py  Normal file
@@ -0,0 +1,227 @@
__all__ = ['BaseResolver', 'Resolver']
|
||||
|
||||
from error import *
|
||||
from nodes import *
|
||||
|
||||
import re
|
||||
|
||||
class ResolverError(YAMLError):
|
||||
pass
|
||||
|
||||
class BaseResolver(object):
|
||||
|
||||
DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
|
||||
DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
|
||||
DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
|
||||
|
||||
yaml_implicit_resolvers = {}
|
||||
yaml_path_resolvers = {}
|
||||
|
||||
def __init__(self):
|
||||
self.resolver_exact_paths = []
|
||||
self.resolver_prefix_paths = []
|
||||
|
||||
def add_implicit_resolver(cls, tag, regexp, first):
|
||||
if not 'yaml_implicit_resolvers' in cls.__dict__:
|
||||
implicit_resolvers = {}
|
||||
for key in cls.yaml_implicit_resolvers:
|
||||
implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
|
||||
cls.yaml_implicit_resolvers = implicit_resolvers
|
||||
if first is None:
|
||||
first = [None]
|
||||
for ch in first:
|
||||
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
|
||||
add_implicit_resolver = classmethod(add_implicit_resolver)
|
||||
|
||||
def add_path_resolver(cls, tag, path, kind=None):
|
||||
# Note: `add_path_resolver` is experimental. The API could be changed.
|
||||
# `new_path` is a pattern that is matched against the path from the
|
||||
# root to the node that is being considered. `node_path` elements are
|
||||
# tuples `(node_check, index_check)`. `node_check` is a node class:
|
||||
# `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
|
||||
# matches any kind of a node. `index_check` could be `None`, a boolean
|
||||
# value, a string value, or a number. `None` and `False` match against
|
||||
# any _value_ of sequence and mapping nodes. `True` matches against
|
||||
# any _key_ of a mapping node. A string `index_check` matches against
|
||||
# a mapping value that corresponds to a scalar key which content is
|
||||
# equal to the `index_check` value. An integer `index_check` matches
|
||||
# against a sequence value with the index equal to `index_check`.
|
||||
if not 'yaml_path_resolvers' in cls.__dict__:
|
||||
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
|
||||
new_path = []
|
||||
for element in path:
|
||||
if isinstance(element, (list, tuple)):
|
||||
if len(element) == 2:
|
||||
node_check, index_check = element
|
||||
elif len(element) == 1:
|
||||
node_check = element[0]
|
||||
index_check = True
|
||||
else:
|
||||
raise ResolverError("Invalid path element: %s" % element)
|
||||
else:
|
||||
node_check = None
|
||||
index_check = element
|
||||
if node_check is str:
|
||||
node_check = ScalarNode
|
||||
elif node_check is list:
|
||||
node_check = SequenceNode
|
||||
elif node_check is dict:
|
||||
node_check = MappingNode
|
||||
elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
|
||||
and not isinstance(node_check, basestring) \
|
||||
and node_check is not None:
|
||||
raise ResolverError("Invalid node checker: %s" % node_check)
|
||||
if not isinstance(index_check, (basestring, int)) \
|
||||
and index_check is not None:
|
||||
raise ResolverError("Invalid index checker: %s" % index_check)
|
||||
new_path.append((node_check, index_check))
|
||||
if kind is str:
|
||||
kind = ScalarNode
|
||||
elif kind is list:
|
||||
kind = SequenceNode
|
||||
elif kind is dict:
|
||||
kind = MappingNode
|
||||
elif kind not in [ScalarNode, SequenceNode, MappingNode] \
|
||||
and kind is not None:
|
||||
raise ResolverError("Invalid node kind: %s" % kind)
|
||||
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
|
||||
add_path_resolver = classmethod(add_path_resolver)
|
||||
|
||||
def descend_resolver(self, current_node, current_index):
|
||||
if not self.yaml_path_resolvers:
|
||||
return
|
||||
exact_paths = {}
|
||||
prefix_paths = []
|
||||
if current_node:
|
||||
depth = len(self.resolver_prefix_paths)
|
||||
for path, kind in self.resolver_prefix_paths[-1]:
|
||||
if self.check_resolver_prefix(depth, path, kind,
|
||||
current_node, current_index):
|
||||
if len(path) > depth:
|
||||
prefix_paths.append((path, kind))
|
||||
else:
|
||||
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
|
||||
else:
|
||||
for path, kind in self.yaml_path_resolvers:
|
||||
if not path:
|
||||
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
|
||||
else:
|
||||
prefix_paths.append((path, kind))
|
||||
self.resolver_exact_paths.append(exact_paths)
|
||||
self.resolver_prefix_paths.append(prefix_paths)
|
||||
|
||||
def ascend_resolver(self):
|
||||
if not self.yaml_path_resolvers:
|
||||
return
|
||||
self.resolver_exact_paths.pop()
|
||||
self.resolver_prefix_paths.pop()
|
||||
|
||||
def check_resolver_prefix(self, depth, path, kind,
|
||||
current_node, current_index):
|
||||
node_check, index_check = path[depth-1]
|
||||
if isinstance(node_check, basestring):
|
||||
if current_node.tag != node_check:
|
||||
return
|
||||
elif node_check is not None:
|
||||
if not isinstance(current_node, node_check):
|
||||
return
|
||||
if index_check is True and current_index is not None:
|
||||
return
|
||||
if (index_check is False or index_check is None) \
|
||||
and current_index is None:
|
||||
return
|
||||
if isinstance(index_check, basestring):
|
||||
if not (isinstance(current_index, ScalarNode)
|
||||
and index_check == current_index.value):
|
||||
return
|
||||
elif isinstance(index_check, int) and not isinstance(index_check, bool):
|
||||
if index_check != current_index:
|
||||
return
|
||||
return True
|
||||
|
||||
def resolve(self, kind, value, implicit):
|
||||
if kind is ScalarNode and implicit[0]:
|
||||
if value == u'':
|
||||
resolvers = self.yaml_implicit_resolvers.get(u'', [])
|
||||
else:
|
||||
resolvers = self.yaml_implicit_resolvers.get(value[0], [])
|
||||
resolvers += self.yaml_implicit_resolvers.get(None, [])
|
||||
for tag, regexp in resolvers:
|
||||
if regexp.match(value):
|
||||
return tag
|
||||
implicit = implicit[1]
|
||||
if self.yaml_path_resolvers:
|
||||
exact_paths = self.resolver_exact_paths[-1]
|
||||
if kind in exact_paths:
|
||||
return exact_paths[kind]
|
||||
if None in exact_paths:
|
||||
return exact_paths[None]
|
||||
if kind is ScalarNode:
|
||||
return self.DEFAULT_SCALAR_TAG
|
||||
elif kind is SequenceNode:
|
||||
return self.DEFAULT_SEQUENCE_TAG
|
||||
elif kind is MappingNode:
|
||||
return self.DEFAULT_MAPPING_TAG
|
||||
|
||||
class Resolver(BaseResolver):
|
||||
pass
|
||||
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:bool',
|
||||
re.compile(ur'''^(?:yes|Yes|YES|no|No|NO
|
||||
|true|True|TRUE|false|False|FALSE
|
||||
|on|On|ON|off|Off|OFF)$''', re.X),
|
||||
list(u'yYnNtTfFoO'))
|
||||
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:float',
|
||||
re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
|
||||
|\.[0-9_]+(?:[eE][-+][0-9]+)?
|
||||
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
|
||||
|[-+]?\.(?:inf|Inf|INF)
|
||||
|\.(?:nan|NaN|NAN))$''', re.X),
|
||||
list(u'-+0123456789.'))
|
||||
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:int',
|
||||
re.compile(ur'''^(?:[-+]?0b[0-1_]+
|
||||
|[-+]?0[0-7_]+
|
||||
|[-+]?(?:0|[1-9][0-9_]*)
|
||||
|[-+]?0x[0-9a-fA-F_]+
|
||||
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
|
||||
list(u'-+0123456789'))
|
||||
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:merge',
|
||||
re.compile(ur'^(?:<<)$'),
|
||||
[u'<'])
|
||||
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:null',
|
||||
re.compile(ur'''^(?: ~
|
||||
|null|Null|NULL
|
||||
| )$''', re.X),
|
||||
[u'~', u'n', u'N', u''])
|
||||
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:timestamp',
|
||||
re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|
||||
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
|
||||
(?:[Tt]|[ \t]+)[0-9][0-9]?
|
||||
:[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
|
||||
(?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
|
||||
list(u'0123456789'))
|
||||
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:value',
|
||||
re.compile(ur'^(?:=)$'),
|
||||
[u'='])
|
||||
|
||||
# The following resolver is only for documentation purposes. It cannot work
|
||||
# because plain scalars cannot start with '!', '&', or '*'.
|
||||
Resolver.add_implicit_resolver(
|
||||
u'tag:yaml.org,2002:yaml',
|
||||
re.compile(ur'^(?:!|&|\*)$'),
|
||||
list(u'!&*'))
|
||||
|
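Editor's note: these implicit resolvers are what decide that a plain scalar like 12 is an int while on is a bool. A small check against the class above (a sketch, assuming the vendored package imports as yaml):

from yaml.nodes import ScalarNode
from yaml.resolver import Resolver

r = Resolver()
# (True, False) means "plain scalar, try implicit resolution".
print(r.resolve(ScalarNode, u'12',    (True, False)))   # tag:yaml.org,2002:int
print(r.resolve(ScalarNode, u'on',    (True, False)))   # tag:yaml.org,2002:bool
print(r.resolve(ScalarNode, u'hello', (True, False)))   # tag:yaml.org,2002:str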
1444  libs/yaml/scanner.py  Normal file
File diff suppressed because it is too large
libs/yaml/serializer.py (new file, 111 lines)
@@ -0,0 +1,111 @@

__all__ = ['Serializer', 'SerializerError']

from error import YAMLError
from events import *
from nodes import *

class SerializerError(YAMLError):
    pass

class Serializer(object):

    ANCHOR_TEMPLATE = u'id%03d'

    def __init__(self, encoding=None,
            explicit_start=None, explicit_end=None, version=None, tags=None):
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        self.use_version = version
        self.use_tags = tags
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0
        self.closed = None

    def open(self):
        if self.closed is None:
            self.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

    def close(self):
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif not self.closed:
            self.emit(StreamEndEvent())
            self.closed = True

    #def __del__(self):
    #    self.close()

    def serialize(self, node):
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif self.closed:
            raise SerializerError("serializer is closed")
        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
            version=self.use_version, tags=self.use_tags))
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            self.anchors[node] = None
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        self.last_anchor_id += 1
        return self.ANCHOR_TEMPLATE % self.last_anchor_id

    def serialize_node(self, node, parent, index):
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            self.emit(AliasEvent(alias))
        else:
            self.serialized_nodes[node] = True
            self.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolve(ScalarNode, node.value, (False, True))
                implicit = (node.tag == detected_tag), (node.tag == default_tag)
                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
                    style=node.style))
            elif isinstance(node, SequenceNode):
                implicit = (node.tag
                            == self.resolve(SequenceNode, node.value, True))
                self.emit(SequenceStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emit(SequenceEndEvent())
            elif isinstance(node, MappingNode):
                implicit = (node.tag
                            == self.resolve(MappingNode, node.value, True))
                self.emit(MappingStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emit(MappingEndEvent())
            self.ascend_resolver()
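For context, a minimal usage sketch (not part of the commit, assuming PyYAML is importable as yaml): this Serializer is mixed into the standard Dumper alongside the Emitter and Resolver, and the high-level yaml.serialize() helper drives it.

# Minimal sketch, not part of this commit: pushing a composed node graph
# back through Serializer + Emitter via the high-level helpers.
import yaml

node = yaml.compose("foo: [1, 2, 3]")  # Composer returns the root MappingNode
text = yaml.serialize(node)            # open() / serialize() / close() under the hood
print(text)                            # roughly: foo: [1, 2, 3]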
libs/yaml/tokens.py (new file, 104 lines)
@@ -0,0 +1,104 @@

class Token(object):
    def __init__(self, start_mark, end_mark):
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        attributes = [key for key in self.__dict__
                if not key.endswith('_mark')]
        attributes.sort()
        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
                for key in attributes])
        return '%s(%s)' % (self.__class__.__name__, arguments)

#class BOMToken(Token):
#    id = '<byte order mark>'

class DirectiveToken(Token):
    id = '<directive>'
    def __init__(self, name, value, start_mark, end_mark):
        self.name = name
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class DocumentStartToken(Token):
    id = '<document start>'

class DocumentEndToken(Token):
    id = '<document end>'

class StreamStartToken(Token):
    id = '<stream start>'
    def __init__(self, start_mark=None, end_mark=None,
            encoding=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.encoding = encoding

class StreamEndToken(Token):
    id = '<stream end>'

class BlockSequenceStartToken(Token):
    id = '<block sequence start>'

class BlockMappingStartToken(Token):
    id = '<block mapping start>'

class BlockEndToken(Token):
    id = '<block end>'

class FlowSequenceStartToken(Token):
    id = '['

class FlowMappingStartToken(Token):
    id = '{'

class FlowSequenceEndToken(Token):
    id = ']'

class FlowMappingEndToken(Token):
    id = '}'

class KeyToken(Token):
    id = '?'

class ValueToken(Token):
    id = ':'

class BlockEntryToken(Token):
    id = '-'

class FlowEntryToken(Token):
    id = ','

class AliasToken(Token):
    id = '<alias>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class AnchorToken(Token):
    id = '<anchor>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class TagToken(Token):
    id = '<tag>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class ScalarToken(Token):
    id = '<scalar>'
    def __init__(self, value, plain, start_mark, end_mark, style=None):
        self.value = value
        self.plain = plain
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style
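For reference, a minimal sketch (not part of the commit, assuming PyYAML is importable as yaml): these Token classes are what the scanner produces, and yaml.scan() exposes the stream of them directly.

# Minimal sketch, not part of this commit: printing the token stream for a
# small document; the output format comes from Token.__repr__ above.
import yaml

for token in yaml.scan("key: [1, 2]"):
    print(token)
# Prints roughly: StreamStartToken(encoding=None), BlockMappingStartToken(),
# KeyToken(), ScalarToken(plain=True, style=None, value='key'), ValueToken(),
# FlowSequenceStartToken(), ... , BlockEndToken(), StreamEndToken()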
@@ -140,9 +140,11 @@
    if ($('#settings_proxy_type').val() === "None") {
        $('.proxy_option').hide();
        $('#settings_form').form('remove rule', 'settings_proxy_url', 'empty');
        $('#settings_form').form('remove rule', 'settings_proxy_port', 'empty');
        $('#settings_form').form('remove rule', 'settings_proxy_port', 'integer[1..65535]');
    } else {
        $('#settings_form').form('add rule', 'settings_proxy_url', {rules: [{type : 'empty', prompt : '"General / Proxy settings / Hostname" must have a value'}]});
        $('#settings_form').form('add rule', 'settings_proxy_port', {rules: [{type : 'empty', prompt : '"General / Proxy settings / Port" must have a value'}]});
        $('#settings_form').form('add rule', 'settings_proxy_port', {rules: [{type : 'integer[1..65535]', prompt : '"General / Proxy settings / Port" must be an integer between 1 and 65535'}]});
    }

@@ -151,12 +153,14 @@
    if ($('#settings_proxy_type').val() === "None") {
        $('.proxy_option').hide();
        $('#settings_form').form('remove rule', 'settings_proxy_url', 'empty');
        $('#settings_form').form('remove rule', 'settings_proxy_port', 'empty');
        $('#settings_form').form('remove rule', 'settings_proxy_port', 'integer[1..65535]');
        $('.form').form('validate form');
        $('#loader').removeClass('active');
    } else {
        $('.proxy_option').show();
        $('#settings_form').form('add rule', 'settings_proxy_url', {rules: [{type : 'empty', prompt : '"General / Proxy settings / Hostname" must have a value'}]});
        $('#settings_form').form('add rule', 'settings_proxy_port', {rules: [{type : 'empty', prompt : '"General / Proxy settings / Port" must have a value'}]});
        $('#settings_form').form('add rule', 'settings_proxy_port', {rules: [{type : 'integer[1..65535]', prompt : '"General / Proxy settings / Port" must be an integer between 1 and 65535'}]});
        $('.form').form('validate form');
        $('#loader').removeClass('active');
@@ -236,6 +236,25 @@
                </div>
            </div>
        </div>

        <div class="middle aligned row">
            <div class="right aligned four wide column">
                <label>Encode subtitles to UTF8</label>
            </div>
            <div class="one wide column">
                <div id="settings_utf8_encode" class="ui toggle checkbox" data-utf8encode={{ settings.general.getboolean('utf8_encode') }}>
                    <input name="settings_general_utf8_encode" type="checkbox">
                    <label></label>
                </div>
            </div>
            <div class="collapsed column">
                <div class="collapsed center aligned column">
                    <div class="ui basic icon" data-tooltip="Re-encode downloaded subtitles to UTF8. Should be left enabled in most cases." data-inverted="">
                        <i class="help circle large icon"></i>
                    </div>
                </div>
            </div>
        </div>
    </div>
</div>
@@ -569,6 +588,12 @@
            $("#settings_multithreading").checkbox('uncheck');
        }

        if ($('#settings_utf8_encode').data("utf8encode") === "True") {
            $("#settings_utf8_encode").checkbox('check');
        } else {
            $("#settings_utf8_encode").checkbox('uncheck');
        }

        if (($('#settings_subfolder').val() !== "relative") && ($('#settings_subfolder').val() !== "absolute")) {
            $('.subfolder').hide();
        }