Merge remote-tracking branch 'remotes/origin/development'
|
@@ -6,11 +6,8 @@ ENV PYTHONIOENCODING="UTF-8"
|
|||
VOLUME /tv
|
||||
VOLUME /movies
|
||||
|
||||
RUN apk add --update git py-pip jpeg-dev && \
|
||||
apk add --update --virtual build-dependencies build-base python-dev libffi-dev zlib-dev && \
|
||||
git clone https://github.com/morpheus65535/bazarr.git /bazarr && \
|
||||
pip install -r /bazarr/requirements.txt && \
|
||||
apk del --purge build-dependencies
|
||||
RUN apk add --update git py-pip && \
|
||||
git clone https://github.com/morpheus65535/bazarr.git /bazarr
|
||||
|
||||
VOLUME /bazarr/data
|
||||
|
||||
|
|
|
@@ -1 +1,17 @@
|
|||
Dockerfile
|
||||
FROM lsiobase/alpine.python:3.7
|
||||
|
||||
# set python to use utf-8 rather than ascii.
|
||||
ENV PYTHONIOENCODING="UTF-8"
|
||||
|
||||
VOLUME /tv
|
||||
VOLUME /movies
|
||||
|
||||
RUN apk add --update git py-pip && \
|
||||
git clone https://github.com/morpheus65535/bazarr.git /bazarr
|
||||
|
||||
VOLUME /bazarr/data
|
||||
|
||||
EXPOSE 6767
|
||||
|
||||
COPY root/ /
|
||||
CMD ["python", "/bazarr/bazarr.py"]
|
||||
|
|
|
@@ -8,11 +8,8 @@ ADD qemu-aarch64-static /usr/bin
|
|||
VOLUME /tv
|
||||
VOLUME /movies
|
||||
|
||||
RUN apk add --update git py-pip jpeg-dev && \
|
||||
apk add --update --virtual build-dependencies build-base python-dev libffi-dev zlib-dev && \
|
||||
git clone https://github.com/morpheus65535/bazarr.git /bazarr && \
|
||||
pip install -r /bazarr/requirements.txt && \
|
||||
apk del --purge build-dependencies
|
||||
RUN apk add --update git py-pip && \
|
||||
git clone https://github.com/morpheus65535/bazarr.git /bazarr
|
||||
|
||||
VOLUME /bazarr/data
|
||||
|
||||
|
|
|
@@ -8,11 +8,8 @@ ADD qemu-arm-static /usr/bin
|
|||
VOLUME /tv
|
||||
VOLUME /movies
|
||||
|
||||
RUN apk add --update git py-pip jpeg-dev && \
|
||||
apk add --update --virtual build-dependencies build-base python-dev libffi-dev zlib-dev && \
|
||||
git clone https://github.com/morpheus65535/bazarr.git /bazarr && \
|
||||
pip install -r /bazarr/requirements.txt && \
|
||||
apk del --purge build-dependencies
|
||||
RUN apk add --update git py-pip && \
|
||||
git clone https://github.com/morpheus65535/bazarr.git /bazarr
|
||||
|
||||
VOLUME /bazarr/data
|
||||
|
||||
|
|
|
@@ -60,7 +60,7 @@ def get_general_settings():
|
|||
if cfg.has_option('general', 'minimum_score'):
|
||||
minimum_score = cfg.get('general', 'minimum_score')
|
||||
else:
|
||||
minimum_score = '100'
|
||||
minimum_score = '90'
|
||||
|
||||
if cfg.has_option('general', 'use_scenename'):
|
||||
use_scenename = cfg.getboolean('general', 'use_scenename')
|
||||
|
@@ -130,7 +130,7 @@ def get_general_settings():
|
|||
if cfg.has_option('general', 'minimum_score_movie'):
|
||||
minimum_score_movie = cfg.get('general', 'minimum_score_movie')
|
||||
else:
|
||||
minimum_score_movie = '100'
|
||||
minimum_score_movie = '70'
|
||||
|
||||
if cfg.has_option('general', 'use_embedded_subs'):
|
||||
use_embedded_subs = cfg.getboolean('general', 'use_embedded_subs')
|
||||
|
@@ -156,7 +156,7 @@ def get_general_settings():
|
|||
branch = 'master'
|
||||
auto_update = True
|
||||
single_language = False
|
||||
minimum_score = '100'
|
||||
minimum_score = '90'
|
||||
use_scenename = False
|
||||
use_postprocessing = False
|
||||
postprocessing_cmd = False
|
||||
|
@@ -170,7 +170,7 @@ def get_general_settings():
|
|||
movie_default_language = []
|
||||
movie_default_hi = False
|
||||
page_size = '25'
|
||||
minimum_score_movie = '100'
|
||||
minimum_score_movie = '70'
|
||||
use_embedded_subs = False
|
||||
only_monitored = False
|
||||
adaptive_searching = False
|
||||
|
@@ -187,10 +187,10 @@ def get_auth_settings():
|
|||
pass
|
||||
|
||||
if cfg.has_section('auth'):
|
||||
if cfg.has_option('auth', 'enabled'):
|
||||
enabled = cfg.getboolean('auth', 'enabled')
|
||||
if cfg.has_option('auth', 'type'):
|
||||
type = cfg.get('auth', 'type')
|
||||
else:
|
||||
enabled = False
|
||||
type = None
|
||||
|
||||
if cfg.has_option('auth', 'username'):
|
||||
username = cfg.get('auth', 'username')
|
||||
|
@@ -202,11 +202,60 @@ def get_auth_settings():
|
|||
else:
|
||||
password = ''
|
||||
else:
|
||||
enabled = False
|
||||
type = None
|
||||
username = ''
|
||||
password = ''
|
||||
|
||||
return [enabled, username, password]
|
||||
return [type, username, password]
|
||||
|
||||
|
||||
def get_proxy_settings():
|
||||
cfg = ConfigParser()
|
||||
try:
|
||||
with open(config_file, 'r') as f:
|
||||
cfg.read_file(f)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if cfg.has_section('proxy'):
|
||||
if cfg.has_option('proxy', 'type'):
|
||||
proxy_type = cfg.get('proxy', 'type')
|
||||
else:
|
||||
proxy_type = 'None'
|
||||
|
||||
if cfg.has_option('proxy', 'url'):
|
||||
url = cfg.get('proxy', 'url')
|
||||
else:
|
||||
url = ''
|
||||
|
||||
if cfg.has_option('proxy', 'port'):
|
||||
port = cfg.get('proxy', 'port')
|
||||
else:
|
||||
port = ''
|
||||
|
||||
if cfg.has_option('proxy', 'username'):
|
||||
username = cfg.get('proxy', 'username')
|
||||
else:
|
||||
username = ''
|
||||
|
||||
if cfg.has_option('proxy', 'password'):
|
||||
password = cfg.get('proxy', 'password')
|
||||
else:
|
||||
password = ''
|
||||
|
||||
if cfg.has_option('proxy', 'exclude'):
|
||||
exclude = cfg.get('proxy', 'exclude')
|
||||
else:
|
||||
exclude = 'localhost,127.0.0.1'
|
||||
else:
|
||||
proxy_type = 'None'
|
||||
url = ''
|
||||
port = ''
|
||||
username = ''
|
||||
password = ''
|
||||
exclude = 'localhost,127.0.0.1'
|
||||
|
||||
return [proxy_type, url, port, username, password, exclude]
|
||||
|
||||
|
||||
def get_sonarr_settings():
|
||||
|
|
|
@@ -42,7 +42,7 @@ def download_subtitle(path, language, hi, providers, providers_auth, sceneName,
|
|||
lang_obj = Language(language)
|
||||
|
||||
try:
|
||||
if sceneName == "None" or use_scenename is False:
|
||||
if sceneName is None or use_scenename is False:
|
||||
used_sceneName = False
|
||||
video = scan_video(path)
|
||||
else:
|
||||
|
@@ -189,7 +189,7 @@ def manual_download_subtitle(path, language, hi, subtitle, provider, providers_a
|
|||
lang_obj = Language(language)
|
||||
|
||||
try:
|
||||
if sceneName == "None" or use_scenename is False:
|
||||
if sceneName is None or use_scenename is False:
|
||||
used_sceneName = False
|
||||
video = scan_video(path)
|
||||
else:
|
||||
|
|
47
init.py
|
@@ -27,7 +27,7 @@ if os.path.exists(os.path.join(config_dir, 'log')) is False:
|
|||
|
||||
config_file = os.path.normpath(os.path.join(config_dir, 'config/config.ini'))
|
||||
|
||||
# if os.path.exists(os.path.join(config_dir, 'db/bazarr.db')) is True and os.path.exists(config_file) is False:
|
||||
cfg = ConfigParser()
|
||||
try:
|
||||
# Open database connection
|
||||
db = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'data/db/bazarr.db'), timeout=30)
|
||||
|
@@ -51,8 +51,6 @@ try:
|
|||
# Close database connection
|
||||
db.close()
|
||||
|
||||
cfg = ConfigParser()
|
||||
|
||||
section = 'general'
|
||||
|
||||
if not cfg.has_section(section):
|
||||
|
@@ -158,13 +156,25 @@ except sqlite3.OperationalError:
|
|||
cfg.set(section, 'use_embedded_subs', "False")
|
||||
cfg.set(section, 'only_monitored', "False")
|
||||
cfg.set(section, 'adaptive_searching', "False")
|
||||
|
||||
section = 'proxy'
|
||||
|
||||
if not cfg.has_section(section):
|
||||
cfg.add_section(section)
|
||||
|
||||
cfg.set(section, 'type', "None")
|
||||
cfg.set(section, 'url', "")
|
||||
cfg.set(section, 'port', "")
|
||||
cfg.set(section, 'username', "")
|
||||
cfg.set(section, 'password', "")
|
||||
cfg.set(section, 'exclude', "localhost,127.0.0.1")
|
||||
|
||||
section = 'auth'
|
||||
|
||||
if not cfg.has_section(section):
|
||||
cfg.add_section(section)
|
||||
|
||||
cfg.set(section, 'enabled', "False")
|
||||
cfg.set(section, 'type', "None")
|
||||
cfg.set(section, 'username', "")
|
||||
cfg.set(section, 'password', "")
|
||||
|
||||
|
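For reference, the two new sections written above amount to a config.ini fragment like the one produced by this standalone sketch (Python 3 configparser shown for illustration only; bazarr's own ConfigParser import and the real config_file path differ):

from configparser import ConfigParser

cfg = ConfigParser()

# Mirror the [proxy] defaults added in this hunk.
cfg.add_section('proxy')
for key, value in [('type', 'None'), ('url', ''), ('port', ''),
                   ('username', ''), ('password', ''),
                   ('exclude', 'localhost,127.0.0.1')]:
    cfg.set('proxy', key, value)

# Mirror the [auth] defaults; note that 'enabled' is written here but removed
# again by the "Remove unused settings" block added further down.
cfg.add_section('auth')
for key, value in [('enabled', 'False'), ('type', 'None'),
                   ('username', ''), ('password', '')]:
    cfg.set('auth', key, value)

with open('config.ini', 'w') as configfile:  # illustrative path only
    cfg.write(configfile)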
@@ -218,3 +228,32 @@ try:
|
|||
logging.info('Database created successfully')
|
||||
except:
|
||||
pass
|
||||
|
||||
# Remove unused settings
|
||||
try:
|
||||
with open(config_file, 'r') as f:
|
||||
cfg.read_file(f)
|
||||
except Exception:
|
||||
pass
|
||||
cfg.remove_option('auth', 'enabled')
|
||||
with open(config_file, 'w+') as configfile:
|
||||
cfg.write(configfile)
|
||||
|
||||
from cork import Cork
|
||||
import time
|
||||
if os.path.exists(os.path.normpath(os.path.join(config_dir, 'config/users.json'))) is False:
|
||||
cork = Cork(os.path.normpath(os.path.join(config_dir, 'config')), initialize=True)
|
||||
|
||||
cork._store.roles[''] = 100
|
||||
cork._store.save_roles()
|
||||
|
||||
tstamp = str(time.time())
|
||||
username = password = ''
|
||||
cork._store.users[username] = {
|
||||
'role': '',
|
||||
'hash': cork._hash(username, password),
|
||||
'email_addr': username,
|
||||
'desc': username,
|
||||
'creation_date': tstamp
|
||||
}
|
||||
cork._store.save_users()
|
||||
|
|
284
libs/apprise/Apprise.py
Normal file
|
@@ -0,0 +1,284 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Apprise Core
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with apprise. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import re
|
||||
import logging
|
||||
from markdown import markdown
|
||||
|
||||
from .common import NotifyType
|
||||
from .common import NotifyFormat
|
||||
from .utils import parse_list
|
||||
from .utils import compat_is_basestring
|
||||
|
||||
from .AppriseAsset import AppriseAsset
|
||||
|
||||
from . import NotifyBase
|
||||
from . import plugins
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Build a list of supported plugins
|
||||
SCHEMA_MAP = {}
|
||||
|
||||
# Used for attempting to acquire the schema if the URL can't be parsed.
|
||||
GET_SCHEMA_RE = re.compile(r'\s*(?P<schema>[a-z0-9]{3,9})://.*$', re.I)
|
||||
|
||||
|
||||
# Load our Lookup Matrix
|
||||
def __load_matrix():
|
||||
"""
|
||||
Dynamically load our schema map; this allows us to gracefully
|
||||
skip over plugins we simply don't have the dependencies for.
|
||||
|
||||
"""
|
||||
# to add its mapping to our hash table
|
||||
for entry in dir(plugins):
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(plugins, entry)
|
||||
|
||||
# Load protocol(s) if defined
|
||||
proto = getattr(plugin, 'protocol', None)
|
||||
if compat_is_basestring(proto):
|
||||
if proto not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[proto] = plugin
|
||||
|
||||
elif isinstance(proto, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in proto:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
# Load secure protocol(s) if defined
|
||||
protos = getattr(plugin, 'secure_protocol', None)
|
||||
if compat_is_basestring(protos):
|
||||
if protos not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[protos] = plugin
|
||||
|
||||
if isinstance(protos, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in protos:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
|
||||
# Dynamically build our module
|
||||
__load_matrix()
|
||||
|
||||
|
||||
class Apprise(object):
|
||||
"""
|
||||
Our Notification Manager
|
||||
|
||||
"""
|
||||
def __init__(self, servers=None, asset=None):
|
||||
"""
|
||||
Loads a set of server urls while applying the Asset() module to each
|
||||
if specified.
|
||||
|
||||
If no asset is provided, then the default asset is used.
|
||||
|
||||
"""
|
||||
|
||||
# Initialize a server list of URLs
|
||||
self.servers = list()
|
||||
|
||||
# Assigns a central asset object that will later be passed into each
|
||||
# notification plugin. Assets contain information such as the local
|
||||
# directory images can be found in. It can also identify remote
|
||||
# URL paths that contain the images you want to present to the end
|
||||
# user. If no asset is specified, then the default one is used.
|
||||
self.asset = asset
|
||||
if asset is None:
|
||||
# Load our default configuration
|
||||
self.asset = AppriseAsset()
|
||||
|
||||
if servers:
|
||||
self.add(servers)
|
||||
|
||||
@staticmethod
|
||||
def instantiate(url, asset=None, suppress_exceptions=True):
|
||||
"""
|
||||
Returns the instance of an instantiated plugin based on the provided
|
||||
Server URL. If the url fails to be parsed, then None is returned.
|
||||
|
||||
"""
|
||||
# swap hash (#) tag values with their html version
|
||||
# This is useful for accepting channels (as arguments to pushbullet)
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our plugins
|
||||
# to determine if they can make a better interpretation of a URL
|
||||
# geared for them anyway.
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
logger.error('%s is an unparseable server url.' % url)
|
||||
return None
|
||||
|
||||
# Update the schema
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in SCHEMA_MAP:
|
||||
logger.error(
|
||||
'{0} is not a supported server type (url={1}).'.format(
|
||||
schema,
|
||||
_url,
|
||||
)
|
||||
)
|
||||
return None
|
||||
|
||||
# Parse our url details
|
||||
# the server object is a dictionary containing all of the information
|
||||
# parsed from our URL
|
||||
results = SCHEMA_MAP[schema].parse_url(_url)
|
||||
|
||||
if not results:
|
||||
# Failed to parse the server URL
|
||||
logger.error('Could not parse URL: %s' % url)
|
||||
return None
|
||||
|
||||
if suppress_exceptions:
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information
|
||||
plugin = SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
except:
|
||||
# the arguments are invalid or can not be used.
|
||||
logger.error('Could not load URL: %s' % url)
|
||||
return None
|
||||
|
||||
else:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information but don't wrap it in a try catch
|
||||
plugin = SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
# Save our asset
|
||||
if asset:
|
||||
plugin.asset = asset
|
||||
|
||||
return plugin
|
||||
|
||||
def add(self, servers, asset=None):
|
||||
"""
|
||||
Adds one or more server URLs into our list.
|
||||
|
||||
"""
|
||||
|
||||
# Initialize our return status
|
||||
return_status = True
|
||||
|
||||
if asset is None:
|
||||
# prepare default asset
|
||||
asset = self.asset
|
||||
|
||||
if isinstance(servers, NotifyBase):
|
||||
# Go ahead and just add our plugin into our list
|
||||
self.servers.append(servers)
|
||||
return True
|
||||
|
||||
servers = parse_list(servers)
|
||||
for _server in servers:
|
||||
|
||||
# Instantiate ourselves an object; this function throws or
|
||||
# returns None if it fails
|
||||
instance = Apprise.instantiate(_server, asset=asset)
|
||||
if not instance:
|
||||
return_status = False
|
||||
logging.error(
|
||||
"Failed to load notification url: {}".format(_server),
|
||||
)
|
||||
continue
|
||||
|
||||
# Add our initialized plugin to our server listings
|
||||
self.servers.append(instance)
|
||||
|
||||
# Return our status
|
||||
return return_status
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Empties our server list
|
||||
|
||||
"""
|
||||
self.servers[:] = []
|
||||
|
||||
def notify(self, title, body, notify_type=NotifyType.INFO,
|
||||
body_format=None):
|
||||
"""
|
||||
Send a notification to all of the plugins previously loaded.
|
||||
|
||||
If the body_format specified is NotifyFormat.MARKDOWN, it will
|
||||
be converted to HTML if the Notification type expects this.
|
||||
|
||||
"""
|
||||
|
||||
# Initialize our return result
|
||||
status = len(self.servers) > 0
|
||||
|
||||
if not (title or body):
|
||||
return False
|
||||
|
||||
# Tracks conversions
|
||||
conversion_map = dict()
|
||||
|
||||
# Iterate over our loaded plugins
|
||||
for server in self.servers:
|
||||
if server.notify_format not in conversion_map:
|
||||
if body_format == NotifyFormat.MARKDOWN and \
|
||||
server.notify_format == NotifyFormat.HTML:
|
||||
|
||||
# Apply Markdown
|
||||
conversion_map[server.notify_format] = markdown(body)
|
||||
|
||||
else:
|
||||
# Store entry directly
|
||||
conversion_map[server.notify_format] = body
|
||||
|
||||
try:
|
||||
# Send notification
|
||||
if not server.notify(
|
||||
title=title,
|
||||
body=conversion_map[server.notify_format],
|
||||
notify_type=notify_type):
|
||||
|
||||
# Toggle our return status flag
|
||||
status = False
|
||||
|
||||
except TypeError:
|
||||
# These are our internally thrown notifications
|
||||
# TODO: Change this to a custom one such as AppriseNotifyError
|
||||
status = False
|
||||
|
||||
except Exception:
|
||||
# A catch all so we don't have to abort early
|
||||
# just because one of our plugins has a bug in it.
|
||||
logging.exception("Notification Exception")
|
||||
status = False
|
||||
|
||||
return status
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of servers loaded
|
||||
"""
|
||||
return len(self.servers)
|
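Taken on its own, the class above already defines the public API the rest of this commit relies on; a minimal usage sketch (not part of the commit, assuming the vendored libs/ directory is importable as apprise; the notification URL is a placeholder):

from apprise import Apprise

# Build the notification manager and register one or more server URLs.
a = Apprise()
a.add('discord://WEBHOOK_ID/WEBHOOK_TOKEN')  # placeholder credentials

# Send the same title/body to every successfully loaded server;
# notify() returns False if any plugin fails to deliver.
a.notify(title='Bazarr', body='Subtitles downloaded.')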
242
libs/apprise/AppriseAsset.py
Normal file
|
@@ -0,0 +1,242 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Apprise Asset
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
|
||||
from os.path import join
|
||||
from os.path import dirname
|
||||
from os.path import isfile
|
||||
from os.path import abspath
|
||||
from .common import NotifyType
|
||||
|
||||
|
||||
class AppriseAsset(object):
|
||||
"""
|
||||
Provides a supplementary class that can be used to provide extra
|
||||
information and details that can be used by Apprise such as providing
|
||||
an alternate location to where images/icons can be found and the
|
||||
URL masks.
|
||||
|
||||
"""
|
||||
# Application Identifier
|
||||
app_id = 'Apprise'
|
||||
|
||||
# Application Description
|
||||
app_desc = 'Apprise Notifications'
|
||||
|
||||
# Provider URL
|
||||
app_url = 'https://github.com/caronc/apprise'
|
||||
|
||||
# A Simple Mapping of Colors; For every NOTIFY_TYPE identified,
|
||||
# there should be a mapping to its color here:
|
||||
html_notify_map = {
|
||||
NotifyType.INFO: '#3AA3E3',
|
||||
NotifyType.SUCCESS: '#3AA337',
|
||||
NotifyType.FAILURE: '#A32037',
|
||||
NotifyType.WARNING: '#CACF29',
|
||||
}
|
||||
|
||||
# The default color to return if a mapping isn't found in our table above
|
||||
default_html_color = '#888888'
|
||||
|
||||
# The default image extension to use
|
||||
default_extension = '.png'
|
||||
|
||||
# The default theme
|
||||
theme = 'default'
|
||||
|
||||
# Image URL Mask
|
||||
image_url_mask = \
|
||||
'http://nuxref.com/apprise/themes/{THEME}/' \
|
||||
'apprise-{TYPE}-{XY}{EXTENSION}'
|
||||
|
||||
# Application Logo
|
||||
image_url_logo = \
|
||||
'http://nuxref.com/apprise/themes/{THEME}/apprise-logo.png'
|
||||
|
||||
# Image Path Mask
|
||||
image_path_mask = abspath(join(
|
||||
dirname(__file__),
|
||||
'assets',
|
||||
'themes',
|
||||
'{THEME}',
|
||||
'apprise-{TYPE}-{XY}{EXTENSION}',
|
||||
))
|
||||
|
||||
def __init__(self, theme='default', image_path_mask=None,
|
||||
image_url_mask=None, default_extension=None):
|
||||
"""
|
||||
Asset Initialization
|
||||
|
||||
"""
|
||||
if theme:
|
||||
self.theme = theme
|
||||
|
||||
if image_path_mask is not None:
|
||||
self.image_path_mask = image_path_mask
|
||||
|
||||
if image_url_mask is not None:
|
||||
self.image_url_mask = image_url_mask
|
||||
|
||||
if default_extension is not None:
|
||||
self.default_extension = default_extension
|
||||
|
||||
def color(self, notify_type, color_type=None):
|
||||
"""
|
||||
Returns an HTML mapped color based on passed in notify type
|
||||
|
||||
if color_type is:
|
||||
None then a standard hex string is returned as
|
||||
a string format ('#000000').
|
||||
|
||||
int then the integer representation is returned
|
||||
tuple then the red, green, blue is returned in a tuple
|
||||
|
||||
"""
|
||||
|
||||
# Attempt to get the type, otherwise return a default grey
|
||||
# if we couldn't look up the entry
|
||||
color = self.html_notify_map.get(notify_type, self.default_html_color)
|
||||
if color_type is None:
|
||||
# This is the default return type
|
||||
return color
|
||||
|
||||
elif color_type is int:
|
||||
# Convert the color to integer
|
||||
return AppriseAsset.hex_to_int(color)
|
||||
|
||||
# The only other type is tuple
|
||||
elif color_type is tuple:
|
||||
return AppriseAsset.hex_to_rgb(color)
|
||||
|
||||
# Unsupported type
|
||||
raise ValueError(
|
||||
'AppriseAsset html_color(): An invalid color_type was specified.')
|
||||
|
||||
def image_url(self, notify_type, image_size, logo=False, extension=None):
|
||||
"""
|
||||
Apply our mask to our image URL
|
||||
|
||||
if logo is set to True, then the logo_url is used instead
|
||||
|
||||
"""
|
||||
|
||||
url_mask = self.image_url_logo if logo else self.image_url_mask
|
||||
if not url_mask:
|
||||
# No image to return
|
||||
return None
|
||||
|
||||
if extension is None:
|
||||
extension = self.default_extension
|
||||
|
||||
re_map = {
|
||||
'{THEME}': self.theme if self.theme else '',
|
||||
'{TYPE}': notify_type,
|
||||
'{XY}': image_size,
|
||||
'{EXTENSION}': extension,
|
||||
}
|
||||
|
||||
# Iterate over above list and store content accordingly
|
||||
re_table = re.compile(
|
||||
r'(' + '|'.join(re_map.keys()) + r')',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
return re_table.sub(lambda x: re_map[x.group()], url_mask)
|
||||
|
||||
def image_path(self, notify_type, image_size, must_exist=True,
|
||||
extension=None):
|
||||
"""
|
||||
Apply our mask to our image file path
|
||||
|
||||
"""
|
||||
|
||||
if not self.image_path_mask:
|
||||
# No image to return
|
||||
return None
|
||||
|
||||
if extension is None:
|
||||
extension = self.default_extension
|
||||
|
||||
re_map = {
|
||||
'{THEME}': self.theme if self.theme else '',
|
||||
'{TYPE}': notify_type,
|
||||
'{XY}': image_size,
|
||||
'{EXTENSION}': extension,
|
||||
}
|
||||
|
||||
# Iterate over above list and store content accordingly
|
||||
re_table = re.compile(
|
||||
r'(' + '|'.join(re_map.keys()) + r')',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Acquire our path
|
||||
path = re_table.sub(lambda x: re_map[x.group()], self.image_path_mask)
|
||||
if must_exist and not isfile(path):
|
||||
return None
|
||||
|
||||
# Return what we parsed
|
||||
return path
|
||||
|
||||
def image_raw(self, notify_type, image_size, extension=None):
|
||||
"""
|
||||
Returns the raw image if it can (otherwise the function returns None)
|
||||
|
||||
"""
|
||||
|
||||
path = self.image_path(
|
||||
notify_type=notify_type,
|
||||
image_size=image_size,
|
||||
extension=extension,
|
||||
)
|
||||
if path:
|
||||
try:
|
||||
with open(path, 'rb') as fd:
|
||||
return fd.read()
|
||||
|
||||
except (OSError, IOError):
|
||||
# We can't access the file
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def hex_to_rgb(value):
|
||||
"""
|
||||
Takes a hex string (such as #00ff00) and returns a tuple in the form
|
||||
of (red, green, blue)
|
||||
|
||||
eg: #00ff00 becomes: (0, 255, 0)
|
||||
|
||||
"""
|
||||
value = value.lstrip('#')
|
||||
lv = len(value)
|
||||
return tuple(int(value[i:i + lv // 3], 16)
|
||||
for i in range(0, lv, lv // 3))
|
||||
|
||||
@staticmethod
|
||||
def hex_to_int(value):
|
||||
"""
|
||||
Takes a hex string (such as #00ff00) and returns its integer
|
||||
equivalent
|
||||
|
||||
eg: #00000f becomes : 15
|
||||
|
||||
"""
|
||||
return int(value.lstrip('#'), 16)
|
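The static converters at the bottom of this class are easy to sanity-check from an interpreter (an illustrative sketch, not part of the commit; assumes libs/ is importable as apprise):

from apprise import AppriseAsset
from apprise import NotifyType

AppriseAsset.hex_to_rgb('#00ff00')      # (0, 255, 0)
AppriseAsset.hex_to_int('#00000f')      # 15
AppriseAsset().color(NotifyType.INFO)   # '#3AA3E3', from html_notify_map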
48
libs/apprise/__init__.py
Normal file
|
@@ -0,0 +1,48 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# base class for easier library inclusion
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
__title__ = 'apprise'
|
||||
__version__ = '0.5.0'
|
||||
__author__ = 'Chris Caron <lead2gold@gmail.com>'
|
||||
__license__ = 'GPLv3'
|
||||
__copywrite__ = 'Copyright 2017-2018 Chris Caron <lead2gold@gmail.com>'
|
||||
|
||||
from .common import NotifyType
|
||||
from .common import NOTIFY_TYPES
|
||||
from .common import NotifyImageSize
|
||||
from .common import NOTIFY_IMAGE_SIZES
|
||||
from .common import NotifyFormat
|
||||
from .common import NOTIFY_FORMATS
|
||||
from .plugins.NotifyBase import NotifyBase
|
||||
|
||||
from .Apprise import Apprise
|
||||
from .AppriseAsset import AppriseAsset
|
||||
|
||||
# Set default logging handler to avoid "No handler found" warnings.
|
||||
import logging
|
||||
from logging import NullHandler
|
||||
logging.getLogger(__name__).addHandler(NullHandler())
|
||||
|
||||
__all__ = [
|
||||
# Core
|
||||
'Apprise', 'AppriseAsset', 'NotifyBase',
|
||||
|
||||
# Reference
|
||||
'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'NOTIFY_TYPES',
|
||||
'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS',
|
||||
]
|
22
libs/apprise/assets/NotifyXML-1.0.xsd
Normal file
|
@@ -0,0 +1,22 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<xs:schema elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
<xs:element name="Notification">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="Version" type="xs:string" />
|
||||
<xs:element name="MessageType" type="xs:string" />
|
||||
<xs:simpleType>
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="success" />
|
||||
<xs:enumeration value="failure" />
|
||||
<xs:enumeration value="info" />
|
||||
<xs:enumeration value="warning" />
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
</xs:element>
|
||||
<xs:element name="Subject" type="xs:string" />
|
||||
<xs:element name="Message" type="xs:string" />
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:schema>
|
BIN  libs/apprise/assets/themes/default/apprise-failure-128x128.ico  (new file, 88 KiB)
BIN  libs/apprise/assets/themes/default/apprise-failure-128x128.png  (new file, 16 KiB)
BIN  libs/apprise/assets/themes/default/apprise-failure-256x256.png  (new file, 41 KiB)
BIN  libs/apprise/assets/themes/default/apprise-failure-32x32.png  (new file, 2.4 KiB)
BIN  libs/apprise/assets/themes/default/apprise-failure-72x72.png  (new file, 7.5 KiB)
BIN  libs/apprise/assets/themes/default/apprise-info-128x128.ico  (new file, 88 KiB)
BIN  libs/apprise/assets/themes/default/apprise-info-128x128.png  (new file, 16 KiB)
BIN  libs/apprise/assets/themes/default/apprise-info-256x256.png  (new file, 42 KiB)
BIN  libs/apprise/assets/themes/default/apprise-info-32x32.png  (new file, 2.5 KiB)
BIN  libs/apprise/assets/themes/default/apprise-info-72x72.png  (new file, 7.7 KiB)
BIN  libs/apprise/assets/themes/default/apprise-logo.png  (new file, 42 KiB)
BIN  libs/apprise/assets/themes/default/apprise-success-128x128.ico  (new file, 88 KiB)
BIN  libs/apprise/assets/themes/default/apprise-success-128x128.png  (new file, 17 KiB)
BIN  libs/apprise/assets/themes/default/apprise-success-256x256.png  (new file, 47 KiB)
BIN  libs/apprise/assets/themes/default/apprise-success-32x32.png  (new file, 2.5 KiB)
BIN  libs/apprise/assets/themes/default/apprise-success-72x72.png  (new file, 7.8 KiB)
BIN  libs/apprise/assets/themes/default/apprise-warning-128x128.ico  (new file, 88 KiB)
BIN  libs/apprise/assets/themes/default/apprise-warning-128x128.png  (new file, 16 KiB)
BIN  libs/apprise/assets/themes/default/apprise-warning-256x256.png  (new file, 42 KiB)
BIN  libs/apprise/assets/themes/default/apprise-warning-32x32.png  (new file, 2.4 KiB)
BIN  libs/apprise/assets/themes/default/apprise-warning-72x72.png  (new file, 7.7 KiB)
107
libs/apprise/cli.py
Normal file
|
@@ -0,0 +1,107 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Apprise CLI Tool
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with apprise. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import click
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from . import NotifyType
|
||||
from . import Apprise
|
||||
from . import AppriseAsset
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger('apprise.plugins.NotifyBase')
|
||||
|
||||
# Defines our click context settings adding -h to the additional options that
|
||||
# can be specified to get the help menu to come up
|
||||
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
|
||||
|
||||
|
||||
def print_help_msg(command):
|
||||
"""
|
||||
Prints help message when -h or --help is specified.
|
||||
|
||||
"""
|
||||
with click.Context(command) as ctx:
|
||||
click.echo(command.get_help(ctx))
|
||||
|
||||
|
||||
@click.command(context_settings=CONTEXT_SETTINGS)
|
||||
@click.option('--title', '-t', default=None, type=str,
|
||||
help='Specify the message title.')
|
||||
@click.option('--body', '-b', default=None, type=str,
|
||||
help='Specify the message body.')
|
||||
@click.option('--notification-type', '-n', default=NotifyType.INFO, type=str,
|
||||
metavar='TYPE', help='Specify the message type (default=info).')
|
||||
@click.option('--theme', '-T', default='default', type=str,
|
||||
help='Specify the default theme.')
|
||||
@click.option('-v', '--verbose', count=True)
|
||||
@click.argument('urls', nargs=-1,
|
||||
metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',)
|
||||
def main(title, body, urls, notification_type, theme, verbose):
|
||||
"""
|
||||
Send a notification to all of the specified servers identified by their
|
||||
URLs, using the content provided in the title, body and notification-type.
|
||||
|
||||
"""
|
||||
# Note: Click ignores the return values of functions it wraps. If you
|
||||
# want to return a specific error code, you must call sys.exit()
|
||||
# as you will see below.
|
||||
|
||||
# Logging
|
||||
ch = logging.StreamHandler(sys.stdout)
|
||||
if verbose > 2:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
elif verbose == 1:
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
else:
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
if not urls:
|
||||
logger.error('You must specify at least one server URL.')
|
||||
print_help_msg(main)
|
||||
sys.exit(1)
|
||||
|
||||
# Prepare our asset
|
||||
asset = AppriseAsset(theme=theme)
|
||||
|
||||
# Create our object
|
||||
a = Apprise(asset=asset)
|
||||
|
||||
# Load our inventory up
|
||||
for url in urls:
|
||||
a.add(url)
|
||||
|
||||
if body is None:
|
||||
# if no body was specified, then read from STDIN
|
||||
body = click.get_text_stream('stdin').read()
|
||||
|
||||
# now print it out
|
||||
if a.notify(title=title, body=body, notify_type=notification_type):
|
||||
sys.exit(0)
|
||||
sys.exit(1)
|
72
libs/apprise/common.py
Normal file
|
@@ -0,0 +1,72 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Base Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
|
||||
class NotifyType(object):
|
||||
"""
|
||||
A simple mapping of notification types most commonly used with
|
||||
all types of logging and notification services.
|
||||
"""
|
||||
INFO = 'info'
|
||||
SUCCESS = 'success'
|
||||
FAILURE = 'failure'
|
||||
WARNING = 'warning'
|
||||
|
||||
|
||||
NOTIFY_TYPES = (
|
||||
NotifyType.INFO,
|
||||
NotifyType.SUCCESS,
|
||||
NotifyType.FAILURE,
|
||||
NotifyType.WARNING,
|
||||
)
|
||||
|
||||
|
||||
class NotifyImageSize(object):
|
||||
"""
|
||||
A list of pre-defined image sizes to make it easier to work with defined
|
||||
plugins.
|
||||
"""
|
||||
XY_32 = '32x32'
|
||||
XY_72 = '72x72'
|
||||
XY_128 = '128x128'
|
||||
XY_256 = '256x256'
|
||||
|
||||
|
||||
NOTIFY_IMAGE_SIZES = (
|
||||
NotifyImageSize.XY_32,
|
||||
NotifyImageSize.XY_72,
|
||||
NotifyImageSize.XY_128,
|
||||
NotifyImageSize.XY_256,
|
||||
)
|
||||
|
||||
|
||||
class NotifyFormat(object):
|
||||
"""
|
||||
A list of pre-defined text message formats that can be passed via the
|
||||
apprise library.
|
||||
"""
|
||||
TEXT = 'text'
|
||||
HTML = 'html'
|
||||
MARKDOWN = 'markdown'
|
||||
|
||||
|
||||
NOTIFY_FORMATS = (
|
||||
NotifyFormat.TEXT,
|
||||
NotifyFormat.HTML,
|
||||
NotifyFormat.MARKDOWN,
|
||||
)
|
384
libs/apprise/plugins/NotifyBase.py
Normal file
|
@@ -0,0 +1,384 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Base Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import logging
|
||||
from time import sleep
|
||||
try:
|
||||
# Python 2.7
|
||||
from urllib import unquote as _unquote
|
||||
from urllib import quote as _quote
|
||||
from urllib import urlencode as _urlencode
|
||||
|
||||
except ImportError:
|
||||
# Python 3.x
|
||||
from urllib.parse import unquote as _unquote
|
||||
from urllib.parse import quote as _quote
|
||||
from urllib.parse import urlencode as _urlencode
|
||||
|
||||
from ..utils import parse_url
|
||||
from ..utils import parse_bool
|
||||
from ..utils import is_hostname
|
||||
from ..common import NOTIFY_IMAGE_SIZES
|
||||
from ..common import NOTIFY_TYPES
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NOTIFY_FORMATS
|
||||
|
||||
from ..AppriseAsset import AppriseAsset
|
||||
|
||||
# use sax first because it's faster
|
||||
from xml.sax.saxutils import escape as sax_escape
|
||||
|
||||
|
||||
def _escape(text):
|
||||
"""
|
||||
saxutil escape tool
|
||||
"""
|
||||
return sax_escape(text, {"'": "&apos;", "\"": "&quot;"})
|
||||
|
||||
|
||||
HTTP_ERROR_MAP = {
|
||||
400: 'Bad Request - Unsupported Parameters.',
|
||||
401: 'Verification Failed.',
|
||||
404: 'Page not found.',
|
||||
405: 'Method not allowed.',
|
||||
500: 'Internal server error.',
|
||||
503: 'Servers are overloaded.',
|
||||
}
|
||||
|
||||
# HTML New Line Delimiter
|
||||
NOTIFY_NEWLINE = '\r\n'
|
||||
|
||||
# Used to break a path list into parts
|
||||
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Regular expression retrieved from:
|
||||
# http://www.regular-expressions.info/email.html
|
||||
IS_EMAIL_RE = re.compile(
|
||||
r"(?P<userid>[a-z0-9$%+=_~-]+"
|
||||
r"(?:\.[a-z0-9$%+=_~-]+)"
|
||||
r"*)@(?P<domain>(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+"
|
||||
r"[a-z0-9](?:[a-z0-9-]*"
|
||||
r"[a-z0-9]))?",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
|
||||
class NotifyBase(object):
|
||||
"""
|
||||
This is the base class for all notification services
|
||||
"""
|
||||
|
||||
# The default simple (insecure) protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocol:// (in this example they would specify 'protocol')
|
||||
protocol = ''
|
||||
|
||||
# The default secure protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocols:// (in this example they would specify 'protocols')
|
||||
# This value can be the same as the defined protocol.
|
||||
secure_protocol = ''
|
||||
|
||||
# Most servers do not like more than 1 request per 5 seconds, so 5.5 gives
|
||||
# us a safe play range...
|
||||
throttle_attempt = 5.5
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = None
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 32768
|
||||
|
||||
# Defines the maximum allowable characters in the title
|
||||
title_maxlen = 250
|
||||
|
||||
# Default Notify Format
|
||||
notify_format = NotifyFormat.TEXT
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize some general logging and common server arguments that will
|
||||
keep things consistent when working with the notifiers that will
|
||||
inherit this class.
|
||||
|
||||
"""
|
||||
|
||||
# Prepare our Assets
|
||||
self.asset = AppriseAsset()
|
||||
|
||||
# Certificate Verification (for SSL calls); default to being enabled
|
||||
self.verify_certificate = kwargs.get('verify', True)
|
||||
|
||||
# Secure Mode
|
||||
self.secure = kwargs.get('secure', False)
|
||||
|
||||
self.host = kwargs.get('host', '')
|
||||
self.port = kwargs.get('port')
|
||||
if self.port:
|
||||
try:
|
||||
self.port = int(self.port)
|
||||
|
||||
except (TypeError, ValueError):
|
||||
self.port = None
|
||||
|
||||
self.user = kwargs.get('user')
|
||||
self.password = kwargs.get('password')
|
||||
|
||||
if 'notify_format' in kwargs:
|
||||
# Store the specified notify_format if specified
|
||||
notify_format = kwargs.get('notify_format')
|
||||
if notify_format.lower() not in NOTIFY_FORMATS:
|
||||
self.logger.error(
|
||||
'Invalid notification format %s' % notify_format,
|
||||
)
|
||||
raise TypeError(
|
||||
'Invalid notification format %s' % notify_format,
|
||||
)
|
||||
# Provide override
|
||||
self.notify_format = notify_format
|
||||
|
||||
def throttle(self, throttle_time=None):
|
||||
"""
|
||||
A common throttle control
|
||||
"""
|
||||
self.logger.debug('Throttling...')
|
||||
|
||||
throttle_time = throttle_time \
|
||||
if throttle_time is not None else self.throttle_attempt
|
||||
|
||||
# Perform throttle
|
||||
if throttle_time > 0:
|
||||
sleep(throttle_time)
|
||||
|
||||
return
|
||||
|
||||
def image_url(self, notify_type, logo=False, extension=None):
|
||||
"""
|
||||
Returns Image URL if possible
|
||||
"""
|
||||
|
||||
if not self.image_size:
|
||||
return None
|
||||
|
||||
if notify_type not in NOTIFY_TYPES:
|
||||
return None
|
||||
|
||||
return self.asset.image_url(
|
||||
notify_type=notify_type,
|
||||
image_size=self.image_size,
|
||||
logo=logo,
|
||||
extension=extension,
|
||||
)
|
||||
|
||||
def image_path(self, notify_type, extension=None):
|
||||
"""
|
||||
Returns the path of the image if it can
|
||||
"""
|
||||
if not self.image_size:
|
||||
return None
|
||||
|
||||
if notify_type not in NOTIFY_TYPES:
|
||||
return None
|
||||
|
||||
return self.asset.image_path(
|
||||
notify_type=notify_type,
|
||||
image_size=self.image_size,
|
||||
extension=extension,
|
||||
)
|
||||
|
||||
def image_raw(self, notify_type, extension=None):
|
||||
"""
|
||||
Returns the raw image if it can
|
||||
"""
|
||||
if not self.image_size:
|
||||
return None
|
||||
|
||||
if notify_type not in NOTIFY_TYPES:
|
||||
return None
|
||||
|
||||
return self.asset.image_raw(
|
||||
notify_type=notify_type,
|
||||
image_size=self.image_size,
|
||||
extension=extension,
|
||||
)
|
||||
|
||||
def color(self, notify_type, color_type=None):
|
||||
"""
|
||||
Returns the html color (hex code) associated with the notify_type
|
||||
"""
|
||||
if notify_type not in NOTIFY_TYPES:
|
||||
return None
|
||||
|
||||
return self.asset.color(
|
||||
notify_type=notify_type,
|
||||
color_type=color_type,
|
||||
)
|
||||
|
||||
@property
|
||||
def app_id(self):
|
||||
return self.asset.app_id
|
||||
|
||||
@property
|
||||
def app_desc(self):
|
||||
return self.asset.app_desc
|
||||
|
||||
@property
|
||||
def app_url(self):
|
||||
return self.asset.app_url
|
||||
|
||||
@staticmethod
|
||||
def escape_html(html, convert_new_lines=False, whitespace=True):
|
||||
"""
|
||||
Takes html text as input and escapes it so that it won't
|
||||
conflict with any xml/html wrapping characters.
|
||||
"""
|
||||
escaped = _escape(html)
|
||||
|
||||
if whitespace:
|
||||
# Tidy up whitespace too
|
||||
escaped = escaped\
|
||||
.replace(u'\t', u'&emsp;')\
|
||||
.replace(u'  ', u' &nbsp;')
|
||||
|
||||
if convert_new_lines:
|
||||
return escaped.replace(u'\n', u'&lt;br/&gt;')
|
||||
|
||||
return escaped
|
||||
|
||||
@staticmethod
|
||||
def unquote(content, encoding='utf-8', errors='replace'):
|
||||
"""
|
||||
common unquote function
|
||||
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _unquote(content, encoding=encoding, errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _unquote(content)
|
||||
|
||||
@staticmethod
|
||||
def quote(content, safe='/', encoding=None, errors=None):
|
||||
"""
|
||||
common quote function
|
||||
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _quote(content, safe=safe, encoding=encoding, errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _quote(content, safe=safe)
|
||||
|
||||
@staticmethod
|
||||
def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
|
||||
"""
|
||||
common urlencode function
|
||||
|
||||
"""
|
||||
try:
|
||||
# Python v3.x
|
||||
return _urlencode(
|
||||
query, doseq=doseq, safe=safe, encoding=encoding,
|
||||
errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _urlencode(query)
|
||||
|
||||
@staticmethod
|
||||
def split_path(path, unquote=True):
|
||||
"""
|
||||
Splits a URL up into a list object.
|
||||
|
||||
"""
|
||||
if unquote:
|
||||
return PATHSPLIT_LIST_DELIM.split(
|
||||
NotifyBase.unquote(path).lstrip('/'))
|
||||
return PATHSPLIT_LIST_DELIM.split(path.lstrip('/'))
|
||||
|
||||
@staticmethod
|
||||
def is_email(address):
|
||||
"""
|
||||
Returns True if specified entry is an email address
|
||||
|
||||
"""
|
||||
return IS_EMAIL_RE.match(address) is not None
|
||||
|
||||
@staticmethod
|
||||
def is_hostname(hostname):
|
||||
"""
|
||||
Returns True if specified entry is a hostname
|
||||
|
||||
"""
|
||||
return is_hostname(hostname)
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url, verify_host=True):
|
||||
"""
|
||||
Parses the URL and returns it broken apart into a dictionary.
|
||||
|
||||
"""
|
||||
results = parse_url(
|
||||
url, default_schema='unknown', verify_host=verify_host)
|
||||
|
||||
if not results:
|
||||
# We're done; we failed to parse our url
|
||||
return results
|
||||
|
||||
# if our URL ends with an 's', then assume our secure flag is set.
|
||||
results['secure'] = (results['schema'][-1] == 's')
|
||||
|
||||
# Support SSL Certificate 'verify' keyword. Default to being enabled
|
||||
results['verify'] = verify_host
|
||||
|
||||
if 'verify' in results['qsd']:
|
||||
results['verify'] = parse_bool(
|
||||
results['qsd'].get('verify', True))
|
||||
|
||||
# Allow overriding the default format
|
||||
if 'format' in results['qsd']:
|
||||
results['format'] = results['qsd'].get('format')
|
||||
if results['format'] not in NOTIFY_FORMATS:
|
||||
NotifyBase.logger.warning(
|
||||
'Unsupported format specified {}'.format(
|
||||
results['format']))
|
||||
del results['format']
|
||||
|
||||
# Password overrides
|
||||
if 'pass' in results['qsd']:
|
||||
results['password'] = results['qsd']['pass']
|
||||
|
||||
# User overrides
|
||||
if 'user' in results['qsd']:
|
||||
results['user'] = results['qsd']['user']
|
||||
|
||||
return results
|
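The static helpers above can likewise be exercised in isolation (illustrative values only, assuming libs/ is on the import path as apprise):

from apprise import NotifyBase

NotifyBase.is_email('user@example.com')   # True
NotifyBase.split_path('/a/b/c')           # ['a', 'b', 'c']
NotifyBase.quote('hello world')           # 'hello%20world'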
291
libs/apprise/plugins/NotifyBoxcar.py
Normal file
|
@@ -0,0 +1,291 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Boxcar Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
from json import dumps
|
||||
import requests
|
||||
import re
|
||||
from time import time
|
||||
import hmac
|
||||
from hashlib import sha1
|
||||
try:
|
||||
from urlparse import urlparse
|
||||
|
||||
except ImportError:
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Default to sending to all devices if nothing is specified
|
||||
DEFAULT_TAG = '@all'
|
||||
|
||||
# The tags value is a structure containing an array of strings defining the
|
||||
# list of tagged devices that the notification need to be send to, and a
|
||||
# boolean operator (‘and’ / ‘or’) that defines the criteria to match devices
|
||||
# against those tags.
|
||||
IS_TAG = re.compile(r'^[@](?P<name>[A-Z0-9]{1,63})$', re.I)
|
||||
|
||||
# Device tokens are only referenced when developing.
|
||||
# it's not likely you'll send a message directly to a device, but
|
||||
# if you do; this plugin supports it.
|
||||
IS_DEVICETOKEN = re.compile(r'^[A-Z0-9]{64}$', re.I)
|
||||
|
||||
# Both an access key and a secret key are created and assigned to each project
|
||||
# you create on the boxcar website
|
||||
VALIDATE_ACCESS = re.compile(r'[A-Z0-9_-]{64}', re.I)
|
||||
VALIDATE_SECRET = re.compile(r'[A-Z0-9_-]{64}', re.I)
|
||||
|
||||
# Used to break apart list of potential tags by their delimiter
|
||||
# into a usable list.
|
||||
TAGS_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
|
||||
class NotifyBoxcar(NotifyBase):
|
||||
"""
|
||||
A wrapper for Boxcar Notifications
|
||||
"""
|
||||
|
||||
# All boxcar notifications are secure
|
||||
secure_protocol = 'boxcar'
|
||||
|
||||
# Boxcar URL
|
||||
notify_url = 'https://boxcar-api.io/api/push/'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 10000
|
||||
|
||||
def __init__(self, access, secret, recipients=None, **kwargs):
|
||||
"""
|
||||
Initialize Boxcar Object
|
||||
"""
|
||||
super(NotifyBoxcar, self).__init__(**kwargs)
|
||||
|
||||
# Initialize tag list
|
||||
self.tags = list()
|
||||
|
||||
# Initialize device_token list
|
||||
self.device_tokens = list()
|
||||
|
||||
try:
|
||||
# Access Key (associated with project)
|
||||
self.access = access.strip()
|
||||
|
||||
except AttributeError:
|
||||
self.logger.warning(
|
||||
'The specified access key is invalid.',
|
||||
)
|
||||
raise TypeError(
|
||||
'The specified access key is invalid.',
|
||||
)
|
||||
|
||||
try:
|
||||
# Secret Key (associated with project)
|
||||
self.secret = secret.strip()
|
||||
|
||||
except AttributeError:
|
||||
self.logger.warning(
|
||||
'The specified secret key is invalid.',
|
||||
)
|
||||
raise TypeError(
|
||||
'The specified secret key is invalid.',
|
||||
)
|
||||
|
||||
if not VALIDATE_ACCESS.match(self.access):
|
||||
self.logger.warning(
|
||||
'The access key specified (%s) is invalid.' % self.access,
|
||||
)
|
||||
raise TypeError(
|
||||
'The access key specified (%s) is invalid.' % self.access,
|
||||
)
|
||||
|
||||
if not VALIDATE_SECRET.match(self.secret):
|
||||
self.logger.warning(
|
||||
'The secret key specified (%s) is invalid.' % self.secret,
|
||||
)
|
||||
raise TypeError(
|
||||
'The secret key specified (%s) is invalid.' % self.secret,
|
||||
)
|
||||
|
||||
if not recipients:
|
||||
self.tags.append(DEFAULT_TAG)
|
||||
recipients = []
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
recipients = [x for x in filter(bool, TAGS_LIST_DELIM.split(
|
||||
recipients,
|
||||
))]
|
||||
|
||||
# Validate recipients and drop bad ones:
|
||||
for recipient in recipients:
|
||||
if IS_TAG.match(recipient):
|
||||
# store valid tag/alias
|
||||
self.tags.append(IS_TAG.match(recipient).group('name'))
|
||||
|
||||
elif IS_DEVICETOKEN.match(recipient):
|
||||
# store valid device
|
||||
self.device_tokens.append(recipient)
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Dropped invalid tag/alias/device_token '
|
||||
'(%s) specified.' % recipient,
|
||||
)
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Boxcar Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
# prepare Boxcar Object
|
||||
payload = {
|
||||
'aps': {
|
||||
'badge': 'auto',
|
||||
'alert': '',
|
||||
},
|
||||
'expires': str(int(time() + 30)),
|
||||
}
|
||||
|
||||
if title:
|
||||
payload['aps']['@title'] = title
|
||||
|
||||
if body:
|
||||
payload['aps']['alert'] = body
|
||||
|
||||
if self.tags:
|
||||
payload['tags'] = {'or': self.tags}
|
||||
|
||||
if self.device_tokens:
|
||||
payload['device_tokens'] = self.device_tokens
|
||||
|
||||
# Source picture should be <= 450 DP wide, ~2:1 aspect.
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
# Set our image
|
||||
payload['@img'] = image_url
|
||||
|
||||
# Acquire our hostname
|
||||
host = urlparse(self.notify_url).hostname
|
||||
|
||||
# Calculate signature.
|
||||
str_to_sign = "%s\n%s\n%s\n%s" % (
|
||||
"POST", host, "/api/push", dumps(payload))
|
||||
|
||||
h = hmac.new(
|
||||
bytearray(self.secret, 'utf-8'),
|
||||
bytearray(str_to_sign, 'utf-8'),
|
||||
sha1,
|
||||
)
|
||||
|
||||
params = self.urlencode({
|
||||
"publishkey": self.access,
|
||||
"signature": h.hexdigest(),
|
||||
})
|
||||
|
||||
notify_url = '%s?%s' % (self.notify_url, params)
|
||||
self.logger.debug('Boxcar POST URL: %s (cert_verify=%r)' % (
|
||||
notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Boxcar Payload: %s' % str(payload))
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
# Boxcar returns 201 (Created) when successful
|
||||
if r.status_code != requests.codes.created:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Boxcar notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Boxcar '
|
||||
'notification to %s.' % (host))
|
||||
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns it broken apart into a dictionary.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url, verify_host=False)
|
||||
|
||||
if not results:
|
||||
# We're done early
|
||||
return None
|
||||
|
||||
# The first token is stored in the hostname
|
||||
access = results['host']
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
secret = NotifyBase.split_path(results['fullpath'])[0]
|
||||
|
||||
# Our recipients
|
||||
recipients = ','.join(
|
||||
NotifyBase.split_path(results['fullpath'])[1:])
|
||||
|
||||
if not (access and secret):
|
||||
# If we did not receive an access and/or secret code
|
||||
# then we're done
|
||||
return None
|
||||
|
||||
# Store our required content
|
||||
results['recipients'] = recipients if recipients else None
|
||||
results['access'] = access
|
||||
results['secret'] = secret
|
||||
|
||||
return results
|
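Putting parse_url() together with the validation in __init__, the URL form this plugin expects is boxcar://ACCESS_KEY/SECRET_KEY with optional @tag or device-token path components appended; a sketch with placeholder 64-character keys (not real credentials, not part of the commit):

from apprise import Apprise

a = Apprise()
# The access key becomes the URL host, the secret key is the first path
# component, and anything after that is treated as a tag or device token.
a.add('boxcar://' + ('A' * 64) + '/' + ('B' * 64) + '/@family')
a.notify(title='Bazarr', body='Subtitles downloaded.')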
292
libs/apprise/plugins/NotifyDiscord.py
Normal file
|
@@ -0,0 +1,292 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Discord Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
# For this to work correctly you need to create a webhook. To do this just
|
||||
# click on the little gear icon next to the channel you're part of. From
|
||||
# here you'll be able to access the Webhooks menu and create a new one.
|
||||
#
|
||||
# When you've completed, you'll get a URL that looks a little like this:
|
||||
# https://discordapp.com/api/webhooks/417429632418316298/\
|
||||
# JHZ7lQml277CDHmQKMHI8qBe7bk2ZwO5UKjCiOAF7711o33MyqU344Qpgv7YTpadV_js
|
||||
#
|
||||
# Simplified, it looks like this:
|
||||
# https://discordapp.com/api/webhooks/WEBHOOK_ID/WEBHOOK_TOKEN
|
||||
#
|
||||
# This plugin will simply work using the url of:
|
||||
# discord://WEBHOOK_ID/WEBHOOK_TOKEN
|
||||
#
|
||||
# API Documentation on Webhooks:
|
||||
# - https://discordapp.com/developers/docs/resources/webhook
|
||||
#
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyFormat
|
||||
from ..utils import parse_bool
|
||||
|
||||
|
||||
class NotifyDiscord(NotifyBase):
|
||||
"""
|
||||
A wrapper to Discord Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'discord'
|
||||
|
||||
# Discord Webhook
|
||||
notify_url = 'https://discordapp.com/api/webhooks'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_256
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 2000
|
||||
|
||||
# Default Notify Format
|
||||
notify_format = NotifyFormat.MARKDOWN
|
||||
|
||||
def __init__(self, webhook_id, webhook_token, tts=False, avatar=True,
|
||||
footer=False, thumbnail=True, **kwargs):
|
||||
"""
|
||||
Initialize Discord Object
|
||||
|
||||
"""
|
||||
super(NotifyDiscord, self).__init__(**kwargs)
|
||||
|
||||
if not webhook_id:
|
||||
raise TypeError(
|
||||
'An invalid Client ID was specified.'
|
||||
)
|
||||
|
||||
if not webhook_token:
|
||||
raise TypeError(
|
||||
'An invalid Webhook Token was specified.'
|
||||
)
|
||||
|
||||
# Store our data
|
||||
self.webhook_id = webhook_id
|
||||
self.webhook_token = webhook_token
|
||||
|
||||
# Text To Speech
|
||||
self.tts = tts
|
||||
|
||||
# Over-ride Avatar Icon
|
||||
self.avatar = avatar
|
||||
|
||||
# Place a footer icon
|
||||
self.footer = footer
|
||||
|
||||
# Place a thumbnail image inline with the message body
|
||||
self.thumbnail = thumbnail
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Discord Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'multipart/form-data',
|
||||
}
|
||||
|
||||
# Prepare JSON Object
|
||||
payload = {
|
||||
# Text-To-Speech
|
||||
'tts': self.tts,
|
||||
|
||||
# If Text-To-Speech is set to True, then we do not want to wait
|
||||
# for the whole message before continuing. Otherwise, we wait
|
||||
'wait': self.tts is False,
|
||||
|
||||
# Our color associated with our notification
|
||||
'color': self.color(notify_type, int),
|
||||
|
||||
'embeds': [{
|
||||
'provider': {
|
||||
'name': self.app_id,
|
||||
'url': self.app_url,
|
||||
},
|
||||
'title': title,
|
||||
'type': 'rich',
|
||||
'description': body,
|
||||
}]
|
||||
}
|
||||
|
||||
if self.notify_format == NotifyFormat.MARKDOWN:
|
||||
fields = self.extract_markdown_sections(body)
|
||||
|
||||
if len(fields) > 0:
|
||||
# Apply our additional parsing for a better presentation
|
||||
|
||||
# Swap first entry for description
|
||||
payload['embeds'][0]['description'] = \
|
||||
fields[0].get('name') + fields[0].get('value')
|
||||
payload['embeds'][0]['fields'] = fields[1:]
|
||||
|
||||
if self.footer:
|
||||
logo_url = self.image_url(notify_type, logo=True)
|
||||
payload['embeds'][0]['footer'] = {
|
||||
'text': self.app_desc,
|
||||
}
|
||||
if logo_url:
|
||||
payload['embeds'][0]['footer']['icon_url'] = logo_url
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
if self.thumbnail:
|
||||
payload['embeds'][0]['thumbnail'] = {
|
||||
'url': image_url,
|
||||
'height': 256,
|
||||
'width': 256,
|
||||
}
|
||||
|
||||
if self.avatar:
|
||||
payload['avatar_url'] = image_url
|
||||
|
||||
if self.user:
|
||||
# Optionally override the default username of the webhook
|
||||
payload['username'] = self.user
|
||||
|
||||
# Construct Notify URL
|
||||
notify_url = '{0}/{1}/{2}'.format(
|
||||
self.notify_url,
|
||||
self.webhook_id,
|
||||
self.webhook_token,
|
||||
)
|
||||
|
||||
self.logger.debug('Discord POST URL: %s (cert_verify=%r)' % (
|
||||
notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Discord Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code not in (
|
||||
requests.codes.ok, requests.codes.no_content):
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Discord notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Discord '
|
||||
'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
Syntax:
|
||||
discord://webhook_id/webhook_token
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Store our webhook ID
|
||||
webhook_id = results['host']
|
||||
|
||||
# Now fetch our tokens
|
||||
try:
|
||||
webhook_token = [x for x in filter(bool, NotifyBase.split_path(
|
||||
results['fullpath']))][0]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
webhook_token = None
|
||||
|
||||
results['webhook_id'] = webhook_id
|
||||
results['webhook_token'] = webhook_token
|
||||
|
||||
# Text To Speech
|
||||
results['tts'] = parse_bool(results['qsd'].get('tts', False))
|
||||
|
||||
# Use Footer
|
||||
results['footer'] = parse_bool(results['qsd'].get('footer', False))
|
||||
|
||||
# Update Avatar Icon
|
||||
results['avatar'] = parse_bool(results['qsd'].get('avatar', True))
|
||||
|
||||
# Use Thumbnail
|
||||
results['thumbnail'] = \
|
||||
parse_bool(results['qsd'].get('thumbnail', True))
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def extract_markdown_sections(markdown):
|
||||
"""
|
||||
Takes a string in a markdown type format and extracts
|
||||
the headers and their corresponding sections into individual
|
||||
fields that get passed as an embed entry to Discord.
|
||||
|
||||
"""
|
||||
regex = re.compile(
|
||||
r'\s*#+\s*(?P<name>[^#\n]+)([ \r\t\v#]*)'
|
||||
r'(?P<value>(.+?)(\n(?!\s#))|\s*$)', flags=re.S)
|
||||
|
||||
common = regex.finditer(markdown)
|
||||
fields = list()
|
||||
for el in common:
|
||||
d = el.groupdict()
|
||||
|
||||
fields.append({
|
||||
'name': d.get('name', '').strip(),
|
||||
'value': '```md\n' + d.get('value', '').strip() + '\n```'
|
||||
})
|
||||
|
||||
return fields
|
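A quick illustration (not part of the diff) of what extract_markdown_sections() above produces: each "# Header" section becomes one embed field, with its body wrapped in an ```md fence. The import assumes the bundled libs/ directory above is on sys.path.

# Assumes libs/ (the vendored apprise package above) is importable.
from apprise.plugins.NotifyDiscord import NotifyDiscord

body = (
    "# Status\n"
    "All systems nominal.\n"
    "# Details\n"
    "Nothing to report.\n"
)
fields = NotifyDiscord.extract_markdown_sections(body)
# Expected result:
# [{'name': 'Status',  'value': '```md\nAll systems nominal.\n```'},
#  {'name': 'Details', 'value': '```md\nNothing to report.\n```'}]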
370
libs/apprise/plugins/NotifyEmail.py
Normal file
|
@ -0,0 +1,370 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Email Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
|
||||
from datetime import datetime
|
||||
import smtplib
|
||||
from socket import error as SocketError
|
||||
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyFormat
|
||||
|
||||
|
||||
class WebBaseLogin(object):
|
||||
"""
|
||||
This class is just used in conjunction with the default emailers
to best formulate a login using the data detected
|
||||
"""
|
||||
# User Login must be Email Based
|
||||
EMAIL = 'Email'
|
||||
|
||||
# User Login must UserID Based
|
||||
USERID = 'UserID'
|
||||
|
||||
|
||||
# To attempt to make this script stupid proof, if we detect an email address
# that is part of this table, we can pre-use a lot more defaults if they
# aren't otherwise specified on the user's input (a short matching sketch
# follows the table below).
|
||||
WEBBASE_LOOKUP_TABLE = (
|
||||
# Google GMail
|
||||
(
|
||||
'Google Mail',
|
||||
re.compile(r'^(?P<id>[^@]+)@(?P<domain>gmail\.com)$', re.I),
|
||||
{
|
||||
'port': 587,
|
||||
'smtp_host': 'smtp.gmail.com',
|
||||
'secure': True,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
||||
# Pronto Mail
|
||||
(
|
||||
'Pronto Mail',
|
||||
re.compile(r'^(?P<id>[^@]+)@(?P<domain>prontomail\.com)$', re.I),
|
||||
{
|
||||
'port': 465,
|
||||
'smtp_host': 'secure.emailsrvr.com',
|
||||
'secure': True,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
||||
# Microsoft Hotmail
|
||||
(
|
||||
'Microsoft Hotmail',
|
||||
re.compile(r'^(?P<id>[^@]+)@(?P<domain>(hotmail|live)\.com)$', re.I),
|
||||
{
|
||||
'port': 587,
|
||||
'smtp_host': 'smtp.live.com',
|
||||
'secure': True,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
||||
# Yahoo Mail
|
||||
(
|
||||
'Yahoo Mail',
|
||||
re.compile(r'^(?P<id>[^@]+)@(?P<domain>yahoo\.(ca|com))$', re.I),
|
||||
{
|
||||
'port': 465,
|
||||
'smtp_host': 'smtp.mail.yahoo.com',
|
||||
'secure': True,
|
||||
'login_type': (WebBaseLogin.EMAIL, )
|
||||
},
|
||||
),
|
||||
|
||||
# Catch All
|
||||
(
|
||||
'Custom',
|
||||
re.compile(r'^(?P<id>[^@]+)@(?P<domain>.+)$', re.I),
|
||||
{
|
||||
# Setting smtp_host to None is a way of
|
||||
# auto-detecting it based on other parameters
|
||||
# specified. There is no reason to ever modify
|
||||
# this Catch All
|
||||
'smtp_host': None,
|
||||
},
|
||||
),
|
||||
)
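# As referenced above, a short sketch of how this table is consumed: the
# from address is matched against each entry's regular expression in turn and
# the first hit supplies the SMTP defaults. lookup_defaults() is an
# illustrative helper name, not part of this diff.
def lookup_defaults(from_addr):
    for name, pattern, defaults in WEBBASE_LOOKUP_TABLE:
        if pattern.match(from_addr):
            return name, defaults
    # The 'Custom' catch-all above matches any address, so we never get here
    # for a syntactically valid email.
    return None, {}

# lookup_defaults('someone@gmail.com')
# -> ('Google Mail', {'port': 587, 'smtp_host': 'smtp.gmail.com',
#                     'secure': True, 'login_type': (WebBaseLogin.EMAIL,)})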
|
||||
|
||||
|
||||
class NotifyEmail(NotifyBase):
|
||||
"""
|
||||
A wrapper to Email Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default simple (insecure) protocol
|
||||
protocol = 'mailto'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'mailtos'
|
||||
|
||||
# Default Non-Encryption Port
|
||||
default_port = 25
|
||||
|
||||
# Default Secure Port
|
||||
default_secure_port = 587
|
||||
|
||||
# Default SMTP Timeout (in seconds)
|
||||
connect_timeout = 15
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize Email Object
|
||||
"""
|
||||
super(NotifyEmail, self).__init__(**kwargs)
|
||||
|
||||
# Handle SMTP vs SMTPS (Secure vs UnSecure)
|
||||
if not self.port:
|
||||
if self.secure:
|
||||
self.port = self.default_secure_port
|
||||
|
||||
else:
|
||||
self.port = self.default_port
|
||||
|
||||
# Email SMTP Server Timeout
|
||||
try:
|
||||
self.timeout = int(kwargs.get('timeout', self.connect_timeout))
|
||||
|
||||
except (ValueError, TypeError):
|
||||
self.timeout = self.connect_timeout
|
||||
|
||||
# Now we want to construct the To and From email
|
||||
# addresses from the URL provided
|
||||
self.from_name = kwargs.get('name', None)
|
||||
self.from_addr = kwargs.get('from', None)
|
||||
self.to_addr = kwargs.get('to', self.from_addr)
|
||||
|
||||
if not NotifyBase.is_email(self.from_addr):
|
||||
# Parse Source domain based on from_addr
|
||||
raise TypeError('Invalid ~From~ email format: %s' % self.from_addr)
|
||||
|
||||
if not NotifyBase.is_email(self.to_addr):
|
||||
raise TypeError('Invalid ~To~ email format: %s' % self.to_addr)
|
||||
|
||||
# Now detect the SMTP Server
|
||||
self.smtp_host = kwargs.get('smtp_host', '')
|
||||
|
||||
# Apply any defaults based on certain known configurations
|
||||
self.NotifyEmailDefaults()
|
||||
|
||||
return
|
||||
|
||||
def NotifyEmailDefaults(self):
|
||||
"""
|
||||
A function that prefills defaults based on the email
|
||||
it was provided.
|
||||
"""
|
||||
|
||||
if self.smtp_host:
|
||||
# SMTP Server was explicitly specified, therefore it is assumed
|
||||
# the caller knows what he's doing and is intentionally
|
||||
# over-riding any smarts to be applied
|
||||
return
|
||||
|
||||
for i in range(len(WEBBASE_LOOKUP_TABLE)): # pragma: no branch
|
||||
self.logger.debug('Scanning %s against %s' % (
|
||||
self.to_addr, WEBBASE_LOOKUP_TABLE[i][0]
|
||||
))
|
||||
match = WEBBASE_LOOKUP_TABLE[i][1].match(self.from_addr)
|
||||
if match:
|
||||
self.logger.info(
|
||||
'Applying %s Defaults' %
|
||||
WEBBASE_LOOKUP_TABLE[i][0],
|
||||
)
|
||||
self.port = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
.get('port', self.port)
|
||||
self.secure = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
.get('secure', self.secure)
|
||||
|
||||
self.smtp_host = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
.get('smtp_host', self.smtp_host)
|
||||
|
||||
if self.smtp_host is None:
|
||||
# Detect Server if possible
|
||||
self.smtp_host = re.split(r'[\s@]+', self.from_addr)[-1]
|
||||
|
||||
# Adjust email login based on the defined
|
||||
# usertype
|
||||
login_type = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
.get('login_type', [])
|
||||
|
||||
if NotifyBase.is_email(self.user) and \
|
||||
WebBaseLogin.EMAIL not in login_type:
|
||||
# Email specified but login type
|
||||
# not supported; switch it to user id
|
||||
self.user = match.group('id')
|
||||
|
||||
elif WebBaseLogin.USERID not in login_type:
|
||||
# user specified but login type
|
||||
# not supported; switch it to email
|
||||
self.user = '%s@%s' % (self.user, self.host)
|
||||
|
||||
break
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
"""
|
||||
Perform Email Notification
|
||||
"""
|
||||
|
||||
from_name = self.from_name
|
||||
if not from_name:
|
||||
from_name = self.app_desc
|
||||
|
||||
self.logger.debug('Email From: %s <%s>' % (
|
||||
self.from_addr, from_name))
|
||||
self.logger.debug('Email To: %s' % (self.to_addr))
|
||||
self.logger.debug('Login ID: %s' % (self.user))
|
||||
self.logger.debug('Delivery: %s:%d' % (self.smtp_host, self.port))
|
||||
|
||||
# Prepare Email Message
|
||||
if self.notify_format == NotifyFormat.HTML:
|
||||
email = MIMEText(body, 'html')
|
||||
email['Content-Type'] = 'text/html'
|
||||
|
||||
else:
|
||||
email = MIMEText(body, 'text')
|
||||
email['Content-Type'] = 'text/plain'
|
||||
|
||||
email['Subject'] = title
|
||||
email['From'] = '%s <%s>' % (from_name, self.from_addr)
|
||||
email['To'] = self.to_addr
|
||||
email['Date'] = datetime.utcnow()\
|
||||
.strftime("%a, %d %b %Y %H:%M:%S +0000")
|
||||
email['X-Application'] = self.app_id
|
||||
|
||||
# bind the socket variable to the current namespace
|
||||
socket = None
|
||||
try:
|
||||
self.logger.debug('Connecting to remote SMTP server...')
|
||||
socket = smtplib.SMTP(
|
||||
self.smtp_host,
|
||||
self.port,
|
||||
None,
|
||||
timeout=self.timeout,
|
||||
)
|
||||
|
||||
if self.secure:
|
||||
# Handle Secure Connections
|
||||
self.logger.debug('Securing connection with TLS...')
|
||||
socket.starttls()
|
||||
|
||||
if self.user and self.password:
|
||||
# Apply login credentials
|
||||
self.logger.debug('Applying user credentials...')
|
||||
socket.login(self.user, self.password)
|
||||
|
||||
# Send the email
|
||||
socket.sendmail(self.from_addr, self.to_addr, email.as_string())
|
||||
|
||||
self.logger.info('Sent Email notification to "%s".' % (
|
||||
self.to_addr,
|
||||
))
|
||||
|
||||
except (SocketError, smtplib.SMTPException, RuntimeError) as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Email '
|
||||
'notification to %s.' % self.smtp_host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
finally:
|
||||
# Gracefully terminate the connection with the server
|
||||
if socket is not None: # pragma: no branch
|
||||
socket.quit()
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# Default Format is HTML
|
||||
results['notify_format'] = NotifyFormat.HTML
|
||||
|
||||
to_addr = ''
|
||||
from_addr = ''
|
||||
smtp_host = ''
|
||||
|
||||
if 'format' in results['qsd'] and len(results['qsd']['format']):
|
||||
# Extract email format (Text/Html)
|
||||
format = NotifyBase.unquote(results['qsd']['format']).lower()
|
||||
if len(format) > 0 and format[0] == 't':
|
||||
results['notify_format'] = NotifyFormat.TEXT
|
||||
|
||||
# Attempt to detect 'from' email address
|
||||
if 'from' in results['qsd'] and len(results['qsd']['from']):
|
||||
from_addr = NotifyBase.unquote(results['qsd']['from'])
|
||||
|
||||
else:
|
||||
# get 'To' email address
|
||||
from_addr = '%s@%s' % (
|
||||
re.split(
|
||||
r'[\s@]+', NotifyBase.unquote(results['user']))[0],
|
||||
results.get('host', '')
|
||||
)
|
||||
# Let's be clever and attempt to make the from
|
||||
# address an email based on the to address
|
||||
from_addr = '%s@%s' % (
|
||||
re.split(r'[\s@]+', from_addr)[0],
|
||||
re.split(r'[\s@]+', from_addr)[-1],
|
||||
)
|
||||
|
||||
# Attempt to detect 'to' email address
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
to_addr = NotifyBase.unquote(results['qsd']['to']).strip()
|
||||
|
||||
if not to_addr:
|
||||
# Send to ourselves if not otherwise specified to do so
|
||||
to_addr = from_addr
|
||||
|
||||
if 'name' in results['qsd'] and len(results['qsd']['name']):
|
||||
# Extract from name to associate with from address
|
||||
results['name'] = NotifyBase.unquote(results['qsd']['name'])
|
||||
|
||||
if 'timeout' in results['qsd'] and len(results['qsd']['timeout']):
|
||||
# Extract the timeout to associate with smtp server
|
||||
results['timeout'] = results['qsd']['timeout']
|
||||
|
||||
# Store SMTP Host if specified
|
||||
if 'smtp' in results['qsd'] and len(results['qsd']['smtp']):
|
||||
# Extract the smtp server
|
||||
smtp_host = NotifyBase.unquote(results['qsd']['smtp'])
|
||||
|
||||
results['to'] = to_addr
|
||||
results['from'] = from_addr
|
||||
results['smtp_host'] = smtp_host
|
||||
|
||||
return results
|
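A hedged usage sketch for the parse_url() above: how a mailto(s):// URL is expected to break apart. The credentials and addresses are placeholders, and the import assumes the bundled libs/ directory above is on sys.path.

# Assumes libs/ (the vendored apprise package above) is importable;
# credentials are placeholders.
from apprise.plugins.NotifyEmail import NotifyEmail

results = NotifyEmail.parse_url(
    'mailtos://user:app-password@gmail.com?to=someone@example.com&format=text')
# Expected highlights:
#   results['from']          -> 'user@gmail.com' (derived from login + host)
#   results['to']            -> 'someone@example.com'
#   results['notify_format'] -> NotifyFormat.TEXT (because of format=text)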
577
libs/apprise/plugins/NotifyEmby.py
Normal file
|
@ -0,0 +1,577 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Emby Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
# For this plugin to work correctly, the Emby server must be set up to allow
|
||||
# for remote connections.
|
||||
|
||||
# Emby Docker configuration: https://hub.docker.com/r/emby/embyserver/
|
||||
# Authentication: https://github.com/MediaBrowser/Emby/wiki/Authentication
|
||||
# Notifications: https://github.com/MediaBrowser/Emby/wiki/Remote-control
|
||||
import requests
|
||||
import hashlib
|
||||
from json import dumps
|
||||
from json import loads
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..utils import parse_bool
|
||||
from .. import __version__ as VERSION
|
||||
|
||||
|
||||
class NotifyEmby(NotifyBase):
|
||||
"""
|
||||
A wrapper for Emby Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'emby'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'embys'
|
||||
|
||||
# Emby uses the http protocol with JSON requests
|
||||
emby_default_port = 8096
|
||||
|
||||
# By default Emby requires you to provide it a device id
|
||||
# The following was just a random uuid4 generated one. There
|
||||
# is no real reason to change this, but hey; that's what open
|
||||
# source is for right?
|
||||
emby_device_id = '48df9504-6843-49be-9f2d-a685e25a0bc8'
|
||||
|
||||
# The Emby message timeout; basically it is how long our message should be
# displayed for. The value is in milliseconds
|
||||
emby_message_timeout_ms = 60000
|
||||
|
||||
def __init__(self, modal=False, **kwargs):
|
||||
"""
|
||||
Initialize Emby Object
|
||||
|
||||
"""
|
||||
super(NotifyEmby, self).__init__(**kwargs)
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
# Our access token does not get created until we first
|
||||
# authenticate with our Emby server. The same goes for the
|
||||
# user id below.
|
||||
self.access_token = None
|
||||
self.user_id = None
|
||||
|
||||
# Whether or not our popup dialog is a timed notification
|
||||
# or a modal type box (requires an Okay acknowledgement)
|
||||
self.modal = modal
|
||||
|
||||
if not self.user:
|
||||
# Token was None
|
||||
self.logger.warning('No Username was specified.')
|
||||
raise TypeError('No Username was specified.')
|
||||
|
||||
return
|
||||
|
||||
def login(self, **kwargs):
|
||||
"""
|
||||
Creates our authentication token and prepares our header
|
||||
|
||||
"""
|
||||
|
||||
if self.is_authenticated:
|
||||
# Log out first before we log back in
|
||||
self.logout()
|
||||
|
||||
# Prepare our login url
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if self.port:
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += '/Users/AuthenticateByName'
|
||||
|
||||
# Initialize our payload
|
||||
payload = {
|
||||
'Username': self.user
|
||||
}
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
'X-Emby-Authorization': self.emby_auth_header,
|
||||
}
|
||||
|
||||
if self.password:
|
||||
# Source: https://github.com/MediaBrowser/Emby/wiki/Authentication
|
||||
# We require the following during our authentication
|
||||
# pw - password in plain text
|
||||
# password - password in Sha1
|
||||
# passwordMd5 - password in MD5
|
||||
payload['pw'] = self.password
|
||||
|
||||
password_md5 = hashlib.md5()
|
||||
password_md5.update(self.password.encode('utf-8'))
|
||||
payload['passwordMd5'] = password_md5.hexdigest()
|
||||
|
||||
password_sha1 = hashlib.sha1()
|
||||
password_sha1.update(self.password.encode('utf-8'))
|
||||
payload['password'] = password_sha1.hexdigest()
|
||||
|
||||
else:
|
||||
# Backwards compatibility
|
||||
payload['password'] = ''
|
||||
payload['passwordMd5'] = ''
|
||||
|
||||
# April 1st, 2018 and newer requirement:
|
||||
payload['pw'] = ''
|
||||
|
||||
self.logger.debug(
|
||||
'Emby login() POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate))
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
headers=headers,
|
||||
data=dumps(payload),
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred authenticating a user with Emby '
|
||||
'at %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
# Load our results
|
||||
try:
|
||||
results = loads(r.content)
|
||||
|
||||
except ValueError:
|
||||
# A string like '' would cause this; basically the content
|
||||
# that was provided was not a JSON string. We can stop here
|
||||
return False
|
||||
|
||||
# Acquire our Access Token
|
||||
self.access_token = results.get('AccessToken')
|
||||
|
||||
# Acquire our UserId. It can be in one (or both) of the
|
||||
# following locations in the response:
|
||||
# {
|
||||
# 'User': {
|
||||
# ...
|
||||
# 'Id': 'the_user_id_can_be_here',
|
||||
# ...
|
||||
# },
|
||||
# 'Id': 'the_user_id_can_be_found_here_too',
|
||||
# }
|
||||
#
|
||||
# The below just safely covers both grounds.
|
||||
self.user_id = results.get('Id')
|
||||
if not self.user_id:
|
||||
if 'User' in results:
|
||||
self.user_id = results['User'].get('Id')
|
||||
|
||||
# No user was found matching the specified
|
||||
return self.is_authenticated
|
||||
|
||||
def sessions(self, user_controlled=True):
|
||||
"""
|
||||
Acquire our Session Identifiers and store them in a dictionary
|
||||
indexed by the session id itself.
|
||||
|
||||
"""
|
||||
# A single session might look like this:
|
||||
# {
|
||||
# u'AdditionalUsers': [],
|
||||
# u'ApplicationVersion': u'3.3.1.0',
|
||||
# u'Client': u'Emby Mobile',
|
||||
# u'DeviceId': u'00c901e90ae814c00f81c75ae06a1c8a4381f45b',
|
||||
# u'DeviceName': u'Firefox',
|
||||
# u'Id': u'e37151ea06d7eb636639fded5a80f223',
|
||||
# u'LastActivityDate': u'2018-03-04T21:29:02.5590200Z',
|
||||
# u'PlayState': {
|
||||
# u'CanSeek': False,
|
||||
# u'IsMuted': False,
|
||||
# u'IsPaused': False,
|
||||
# u'RepeatMode': u'RepeatNone',
|
||||
# },
|
||||
# u'PlayableMediaTypes': [u'Audio', u'Video'],
|
||||
# u'RemoteEndPoint': u'172.17.0.1',
|
||||
# u'ServerId': u'4470e977ea704a08b264628c24127d43',
|
||||
# u'SupportedCommands': [
|
||||
# u'MoveUp',
|
||||
# u'MoveDown',
|
||||
# u'MoveLeft',
|
||||
# u'MoveRight',
|
||||
# u'PageUp',
|
||||
# u'PageDown',
|
||||
# u'PreviousLetter',
|
||||
# u'NextLetter',
|
||||
# u'ToggleOsd',
|
||||
# u'ToggleContextMenu',
|
||||
# u'Select',
|
||||
# u'Back',
|
||||
# u'SendKey',
|
||||
# u'SendString',
|
||||
# u'GoHome',
|
||||
# u'GoToSettings',
|
||||
# u'VolumeUp',
|
||||
# u'VolumeDown',
|
||||
# u'Mute',
|
||||
# u'Unmute',
|
||||
# u'ToggleMute',
|
||||
# u'SetVolume',
|
||||
# u'SetAudioStreamIndex',
|
||||
# u'SetSubtitleStreamIndex',
|
||||
# u'DisplayContent',
|
||||
# u'GoToSearch',
|
||||
# u'DisplayMessage',
|
||||
# u'SetRepeatMode',
|
||||
# u'ChannelUp',
|
||||
# u'ChannelDown',
|
||||
# u'PlayMediaSource',
|
||||
# ],
|
||||
# u'SupportsRemoteControl': True,
|
||||
# u'UserId': u'6f98d12cb10f48209ee282787daf7af6',
|
||||
# u'UserName': u'l2g'
|
||||
# }
|
||||
|
||||
# Prepare a dict() object to control our sessions; the keys are
|
||||
# the sessions while the details associated with the session
|
||||
# are stored inside.
|
||||
sessions = dict()
|
||||
|
||||
if not self.is_authenticated and not self.login():
|
||||
# Authenticate if we aren't already
|
||||
return sessions
|
||||
|
||||
# Prepare our login url
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if self.port:
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += '/Sessions'
|
||||
|
||||
if user_controlled is True:
|
||||
# Only return sessions that can be managed by the current Emby
|
||||
# user.
|
||||
url += '?ControllableByUserId=%s' % self.user_id
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
'X-Emby-Authorization': self.emby_auth_header,
|
||||
'X-MediaBrowser-Token': self.access_token,
|
||||
}
|
||||
|
||||
self.logger.debug(
|
||||
'Emby session() GET URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate))
|
||||
|
||||
try:
|
||||
r = requests.get(
|
||||
url,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to acquire session for user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to acquire session for user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
|
||||
# Return; we're done
|
||||
return sessions
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred querying Emby '
|
||||
'for session information at %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return sessions
|
||||
|
||||
# Load our results
|
||||
try:
|
||||
results = loads(r.content)
|
||||
|
||||
except ValueError:
|
||||
# A string like '' would cause this; basically the content
|
||||
# that was provided was not a JSON string. There is nothing
|
||||
# more we can do at this point
|
||||
return sessions
|
||||
|
||||
for entry in results:
|
||||
session = entry.get('Id')
|
||||
if session:
|
||||
sessions[session] = entry
|
||||
|
||||
return sessions
|
||||
|
||||
def logout(self, **kwargs):
|
||||
"""
|
||||
Logs out of an already-authenticated session
|
||||
|
||||
"""
|
||||
if not self.is_authenticated:
|
||||
# We're not authenticated; there is nothing to do
|
||||
return True
|
||||
|
||||
# Prepare our login url
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if self.port:
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += '/Sessions/Logout'
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
'X-Emby-Authorization': self.emby_auth_header,
|
||||
'X-MediaBrowser-Token': self.access_token,
|
||||
}
|
||||
|
||||
self.logger.debug(
|
||||
'Emby logout() POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code not in (
|
||||
# We're already logged out
|
||||
requests.codes.unauthorized,
|
||||
# The below show up if we were 'just' logged out
|
||||
requests.codes.ok,
|
||||
requests.codes.no_content):
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to logoff user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to logoff user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred querying Emby '
|
||||
'to logoff user %s at %s.' % (self.user, self.host))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
# We logged out successfully if we reached here
|
||||
|
||||
# Reset our variables
|
||||
self.access_token = None
|
||||
self.user_id = None
|
||||
return True
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Emby Notification
|
||||
"""
|
||||
if not self.is_authenticated and not self.login():
|
||||
# Authenticate if we aren't already
|
||||
return False
|
||||
|
||||
# Acquire our list of sessions
|
||||
sessions = self.sessions().keys()
|
||||
if not sessions:
|
||||
self.logger.warning('There were no Emby sessions to notify.')
|
||||
# We don't need to fail; there really is no one to notify
|
||||
return True
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if self.port:
|
||||
url += ':%d' % self.port
|
||||
|
||||
# Append our remaining path
|
||||
url += '/Sessions/%s/Message'
|
||||
|
||||
# Prepare Emby Object
|
||||
payload = {
|
||||
'Header': title,
|
||||
'Text': body,
|
||||
}
|
||||
|
||||
if not self.modal:
|
||||
payload['TimeoutMs'] = self.emby_message_timeout_ms
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
'X-Emby-Authorization': self.emby_auth_header,
|
||||
'X-MediaBrowser-Token': self.access_token,
|
||||
}
|
||||
|
||||
# Track whether or not we had a failure.
|
||||
has_error = False
|
||||
|
||||
for session in sessions:
|
||||
# Update our session
|
||||
session_url = url % session
|
||||
|
||||
self.logger.debug('Emby POST URL: %s (cert_verify=%r)' % (
|
||||
session_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Emby Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
session_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code not in (
|
||||
requests.codes.ok,
|
||||
requests.codes.no_content):
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification '
|
||||
'(error=%s).' % (r.status_code))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Emby notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Emby '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
@property
|
||||
def is_authenticated(self):
|
||||
"""
|
||||
Returns True if we're authenticated and False if not.
|
||||
|
||||
"""
|
||||
return True if self.access_token and self.user_id else False
|
||||
|
||||
@property
|
||||
def emby_auth_header(self):
|
||||
"""
|
||||
Generates the X-Emby-Authorization header response based on whether
|
||||
we're authenticated or not.
|
||||
|
||||
"""
|
||||
# Specific to Emby
|
||||
header_args = [
|
||||
('MediaBrowser Client', self.app_id),
|
||||
('Device', self.app_id),
|
||||
('DeviceId', self.emby_device_id),
|
||||
('Version', str(VERSION)),
|
||||
]
|
||||
|
||||
if self.user_id:
|
||||
# Append UserId variable if we're authenticated
|
||||
header_args.append(('UserId', self.user))
|
||||
|
||||
return ', '.join(['%s="%s"' % (k, v) for k, v in header_args])
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
if not results:
|
||||
# We're done early
|
||||
return results
|
||||
|
||||
# Assign Default Emby Port
|
||||
if not results['port']:
|
||||
results['port'] = NotifyEmby.emby_default_port
|
||||
|
||||
# Modal type popup (default False)
|
||||
results['modal'] = parse_bool(results['qsd'].get('modal', False))
|
||||
|
||||
return results
|
||||
|
||||
def __del__(self):
|
||||
"""
|
||||
Destructor
|
||||
"""
|
||||
self.logout()
|
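A small sketch (not part of the diff) of the three password fields that login() above submits to /Users/AuthenticateByName, per the Emby authentication notes: the plain-text value plus its SHA1 and MD5 hex digests. The helper name and sample password are illustrative.

import hashlib

def emby_password_fields(password):
    # Mirrors the payload assembled in NotifyEmby.login() above.
    return {
        'pw': password,
        'password': hashlib.sha1(password.encode('utf-8')).hexdigest(),
        'passwordMd5': hashlib.md5(password.encode('utf-8')).hexdigest(),
    }

# emby_password_fields('hunter2') -> {'pw': 'hunter2',
#                                     'password': '<40-char SHA1 hex digest>',
#                                     'passwordMd5': '<32-char MD5 hex digest>'}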
130
libs/apprise/plugins/NotifyFaast.py
Normal file
|
@ -0,0 +1,130 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Faast Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
|
||||
class NotifyFaast(NotifyBase):
|
||||
"""
|
||||
A wrapper for Faast Notifications
|
||||
"""
|
||||
|
||||
# The default protocol (this is secure for faast)
|
||||
protocol = 'faast'
|
||||
|
||||
# Faast uses the http protocol with JSON requests
|
||||
notify_url = 'https://www.appnotifications.com/account/notifications.json'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
def __init__(self, authtoken, **kwargs):
|
||||
"""
|
||||
Initialize Faast Object
|
||||
"""
|
||||
super(NotifyFaast, self).__init__(**kwargs)
|
||||
|
||||
self.authtoken = authtoken
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Faast Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'multipart/form-data'
|
||||
}
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'user_credentials': self.authtoken,
|
||||
'title': title,
|
||||
'message': body,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
payload['icon_url'] = image_url
|
||||
|
||||
self.logger.debug('Faast POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Faast Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Faast notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Faast notification.',
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# Store our authtoken using the host
|
||||
results['authtoken'] = results['host']
|
||||
|
||||
return results
|
255
libs/apprise/plugins/NotifyGrowl/NotifyGrowl.py
Normal file
|
@ -0,0 +1,255 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Growl Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
|
||||
from .gntp import notifier
|
||||
from .gntp import errors
|
||||
from ..NotifyBase import NotifyBase
|
||||
from ...common import NotifyImageSize
|
||||
|
||||
|
||||
# Priorities
|
||||
class GrowlPriority(object):
|
||||
LOW = -2
|
||||
MODERATE = -1
|
||||
NORMAL = 0
|
||||
HIGH = 1
|
||||
EMERGENCY = 2
|
||||
|
||||
|
||||
GROWL_PRIORITIES = (
|
||||
GrowlPriority.LOW,
|
||||
GrowlPriority.MODERATE,
|
||||
GrowlPriority.NORMAL,
|
||||
GrowlPriority.HIGH,
|
||||
GrowlPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
GROWL_NOTIFICATION_TYPE = "New Messages"
|
||||
|
||||
|
||||
class NotifyGrowl(NotifyBase):
|
||||
"""
|
||||
A wrapper to Growl Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'growl'
|
||||
|
||||
# Default Growl Port
|
||||
default_port = 23053
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
def __init__(self, priority=None, version=2, **kwargs):
|
||||
"""
|
||||
Initialize Growl Object
|
||||
"""
|
||||
super(NotifyGrowl, self).__init__(**kwargs)
|
||||
|
||||
if not self.port:
|
||||
self.port = self.default_port
|
||||
|
||||
# The Priority of the message
|
||||
if priority not in GROWL_PRIORITIES:
|
||||
self.priority = GrowlPriority.NORMAL
|
||||
|
||||
else:
|
||||
self.priority = priority
|
||||
|
||||
# Always default the sticky flag to False
|
||||
self.sticky = False
|
||||
|
||||
# Store Version
|
||||
self.version = version
|
||||
|
||||
payload = {
|
||||
'applicationName': self.app_id,
|
||||
'notifications': [GROWL_NOTIFICATION_TYPE, ],
|
||||
'defaultNotifications': [GROWL_NOTIFICATION_TYPE, ],
|
||||
'hostname': self.host,
|
||||
'port': self.port,
|
||||
}
|
||||
|
||||
if self.password is not None:
|
||||
payload['password'] = self.password
|
||||
|
||||
self.logger.debug('Growl Registration Payload: %s' % str(payload))
|
||||
self.growl = notifier.GrowlNotifier(**payload)
|
||||
|
||||
try:
|
||||
self.growl.register()
|
||||
self.logger.debug(
|
||||
'Growl server registration completed successfully.'
|
||||
)
|
||||
|
||||
except errors.NetworkError:
|
||||
self.logger.warning(
|
||||
'A network error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
raise TypeError(
|
||||
'A network error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
|
||||
except errors.AuthError:
|
||||
self.logger.warning(
|
||||
'An authentication error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
raise TypeError(
|
||||
'An authentication error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
|
||||
except errors.UnsupportedError:
|
||||
self.logger.warning(
|
||||
'An unsupported error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
raise TypeError(
|
||||
'An unsupported error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Growl Notification
|
||||
"""
|
||||
|
||||
# Limit results to just the first 2 lines otherwise there is just too
# much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
|
||||
icon = None
|
||||
if self.version >= 2:
|
||||
# URL Based
|
||||
icon = self.image_url(notify_type)
|
||||
|
||||
else:
|
||||
# Raw
|
||||
icon = self.image_raw(notify_type)
|
||||
|
||||
payload = {
|
||||
'noteType': GROWL_NOTIFICATION_TYPE,
|
||||
'title': title,
|
||||
'description': body,
|
||||
'icon': icon is not None,
|
||||
'sticky': False,
|
||||
'priority': self.priority,
|
||||
}
|
||||
self.logger.debug('Growl Payload: %s' % str(payload))
|
||||
|
||||
# Update icon of payload to be raw data; this is intentionally done
# here after we emit the debug message above (so we don't try to
# print the binary contents of an image)
|
||||
payload['icon'] = icon
|
||||
|
||||
try:
|
||||
response = self.growl.notify(**payload)
|
||||
if not isinstance(response, bool):
|
||||
self.logger.warning(
|
||||
'Growl notification failed to send with response: %s' %
|
||||
str(response),
|
||||
)
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Growl notification.')
|
||||
|
||||
except errors.BaseError as e:
|
||||
# Since Growl servers listen for UDP broadcasts, it's possible
|
||||
# that you will never get to this part of the code since there is
|
||||
# no acknowledgement as to whether it accepted what was sent to it
|
||||
# or not.
|
||||
|
||||
# However, if the host/server is unavailable, you will get to this
|
||||
# point of the code.
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Growl '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Growl Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
version = None
|
||||
if 'version' in results['qsd'] and len(results['qsd']['version']):
|
||||
# Allow the user to specify the version of the protocol to use.
|
||||
try:
|
||||
version = int(
|
||||
NotifyBase.unquote(
|
||||
results['qsd']['version']).strip().split('.')[0])
|
||||
|
||||
except (AttributeError, IndexError, TypeError, ValueError):
|
||||
NotifyBase.logger.warning(
|
||||
'An invalid Growl version of "%s" was specified and will '
|
||||
'be ignored.' % results['qsd']['version']
|
||||
)
|
||||
pass
|
||||
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': GrowlPriority.LOW,
|
||||
'-2': GrowlPriority.LOW,
|
||||
'm': GrowlPriority.MODERATE,
|
||||
'-1': GrowlPriority.MODERATE,
|
||||
'n': GrowlPriority.NORMAL,
|
||||
'0': GrowlPriority.NORMAL,
|
||||
'h': GrowlPriority.HIGH,
|
||||
'1': GrowlPriority.HIGH,
|
||||
'e': GrowlPriority.EMERGENCY,
|
||||
'2': GrowlPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
_map[results['qsd']['priority'][0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
# Because of the URL formatting, the password is actually where the
|
||||
# username field is. For this reason, we just perform this small hack
|
||||
# to make it (the URL) conform correctly. The following strips out the
|
||||
# existing password entry (if exists) so that it can be swapped with
|
||||
# the new one we specify.
|
||||
if results.get('password', None) is None:
|
||||
results['password'] = results.get('user', None)
|
||||
|
||||
if version:
|
||||
results['version'] = version
|
||||
|
||||
return results
|
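An illustrative check (not part of the diff) of the ?priority= shortcut handling in parse_url() above: only the first character of the value is consulted, so 'h', 'high' and '1' all resolve to GrowlPriority.HIGH. The host and password are placeholders, and the import assumes the bundled libs/ directory above is on sys.path.

# Assumes libs/ (the vendored apprise package above) is importable;
# host/password are placeholders.
from apprise.plugins.NotifyGrowl.NotifyGrowl import NotifyGrowl, GrowlPriority

for url in ('growl://secret@growl.local?priority=h',
            'growl://secret@growl.local?priority=high',
            'growl://secret@growl.local?priority=1'):
    results = NotifyGrowl.parse_url(url)
    assert results['priority'] == GrowlPriority.HIGH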
21
libs/apprise/plugins/NotifyGrowl/__init__.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
#
# This file is part of apprise.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.

from . import NotifyGrowl

__all__ = [
    'NotifyGrowl',
]
|
0
libs/apprise/plugins/NotifyGrowl/gntp/__init__.py
Normal file
141
libs/apprise/plugins/NotifyGrowl/gntp/cli.py
Normal file
|
@ -0,0 +1,141 @@
|
|||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser, OptionGroup
|
||||
|
||||
from .notifier import GrowlNotifier
|
||||
from .shim import RawConfigParser
|
||||
from .version import __version__
|
||||
|
||||
DEFAULT_CONFIG = os.path.expanduser('~/.gntp')
|
||||
|
||||
config = RawConfigParser({
|
||||
'hostname': 'localhost',
|
||||
'password': None,
|
||||
'port': 23053,
|
||||
})
|
||||
config.read([DEFAULT_CONFIG])
|
||||
if not config.has_section('gntp'):
|
||||
config.add_section('gntp')
|
||||
|
||||
|
||||
class ClientParser(OptionParser):
|
||||
def __init__(self):
|
||||
OptionParser.__init__(self, version="%%prog %s" % __version__)
|
||||
|
||||
group = OptionGroup(self, "Network Options")
|
||||
group.add_option("-H", "--host",
|
||||
dest="host", default=config.get('gntp', 'hostname'),
|
||||
help="Specify a hostname to which to send a remote notification. [%default]")
|
||||
group.add_option("--port",
|
||||
dest="port", default=config.getint('gntp', 'port'), type="int",
|
||||
help="port to listen on [%default]")
|
||||
group.add_option("-P", "--password",
|
||||
dest='password', default=config.get('gntp', 'password'),
|
||||
help="Network password")
|
||||
self.add_option_group(group)
|
||||
|
||||
group = OptionGroup(self, "Notification Options")
|
||||
group.add_option("-n", "--name",
|
||||
dest="app", default='Python GNTP Test Client',
|
||||
help="Set the name of the application [%default]")
|
||||
group.add_option("-s", "--sticky",
|
||||
dest='sticky', default=False, action="store_true",
|
||||
help="Make the notification sticky [%default]")
|
||||
group.add_option("--image",
|
||||
dest="icon", default=None,
|
||||
help="Icon for notification (URL or /path/to/file)")
|
||||
group.add_option("-m", "--message",
|
||||
dest="message", default=None,
|
||||
help="Sets the message instead of using stdin")
|
||||
group.add_option("-p", "--priority",
|
||||
dest="priority", default=0, type="int",
|
||||
help="-2 to 2 [%default]")
|
||||
group.add_option("-d", "--identifier",
|
||||
dest="identifier",
|
||||
help="Identifier for coalescing")
|
||||
group.add_option("-t", "--title",
|
||||
dest="title", default=None,
|
||||
help="Set the title of the notification [%default]")
|
||||
group.add_option("-N", "--notification",
|
||||
dest="name", default='Notification',
|
||||
help="Set the notification name [%default]")
|
||||
group.add_option("--callback",
|
||||
dest="callback",
|
||||
help="URL callback")
|
||||
self.add_option_group(group)
|
||||
|
||||
# Extra Options
|
||||
self.add_option('-v', '--verbose',
|
||||
dest='verbose', default=0, action='count',
|
||||
help="Verbosity levels")
|
||||
|
||||
def parse_args(self, args=None, values=None):
|
||||
values, args = OptionParser.parse_args(self, args, values)
|
||||
|
||||
if values.message is None:
|
||||
print('Enter a message followed by Ctrl-D')
|
||||
try:
|
||||
message = sys.stdin.read()
|
||||
except KeyboardInterrupt:
|
||||
exit()
|
||||
else:
|
||||
message = values.message
|
||||
|
||||
if values.title is None:
|
||||
values.title = ' '.join(args)
|
||||
|
||||
# If we still have an empty title, use the
|
||||
# first bit of the message as the title
|
||||
if values.title == '':
|
||||
values.title = message[:20]
|
||||
|
||||
values.verbose = logging.WARNING - values.verbose * 10
|
||||
|
||||
return values, message
|
||||
|
||||
|
||||
def main():
|
||||
(options, message) = ClientParser().parse_args()
|
||||
logging.basicConfig(level=options.verbose)
|
||||
if not os.path.exists(DEFAULT_CONFIG):
|
||||
logging.info('No config read found at %s', DEFAULT_CONFIG)
|
||||
|
||||
growl = GrowlNotifier(
|
||||
applicationName=options.app,
|
||||
notifications=[options.name],
|
||||
defaultNotifications=[options.name],
|
||||
hostname=options.host,
|
||||
password=options.password,
|
||||
port=options.port,
|
||||
)
|
||||
result = growl.register()
|
||||
if result is not True:
|
||||
exit(result)
|
||||
|
||||
# This would likely be better placed within the growl notifier
|
||||
# class but until I make _checkIcon smarter this is "easier"
|
||||
if options.icon is not None and not options.icon.startswith('http'):
|
||||
logging.info('Loading image %s', options.icon)
|
||||
f = open(options.icon)
|
||||
options.icon = f.read()
|
||||
f.close()
|
||||
|
||||
result = growl.notify(
|
||||
noteType=options.name,
|
||||
title=options.title,
|
||||
description=message,
|
||||
icon=options.icon,
|
||||
sticky=options.sticky,
|
||||
priority=options.priority,
|
||||
callback=options.callback,
|
||||
identifier=options.identifier,
|
||||
)
|
||||
if result is not True:
|
||||
exit(result)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
77
libs/apprise/plugins/NotifyGrowl/gntp/config.py
Normal file
|
@ -0,0 +1,77 @@
|
|||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
"""
|
||||
The gntp.config module is provided as an extended GrowlNotifier object that takes
|
||||
advantage of the ConfigParser module to allow us to set up some default values
|
||||
(such as hostname, password, and port) in a more global way to be shared among
|
||||
programs using gntp
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
|
||||
from . import notifier
|
||||
from . import shim
|
||||
|
||||
__all__ = [
|
||||
'mini',
|
||||
'GrowlNotifier'
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GrowlNotifier(notifier.GrowlNotifier):
|
||||
"""
|
||||
ConfigParser enhanced GrowlNotifier object
|
||||
|
||||
For right now, we are only interested in letting users override certain
|
||||
values from ~/.gntp
|
||||
|
||||
::
|
||||
|
||||
[gntp]
|
||||
hostname = ?
|
||||
password = ?
|
||||
port = ?
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
config = shim.RawConfigParser({
|
||||
'hostname': kwargs.get('hostname', 'localhost'),
|
||||
'password': kwargs.get('password'),
|
||||
'port': kwargs.get('port', 23053),
|
||||
})
|
||||
|
||||
config.read([os.path.expanduser('~/.gntp')])
|
||||
|
||||
# If the file does not exist, then there will be no gntp section defined
|
||||
# and the config.get() lines below will get confused. Since we are not
|
||||
# saving the config, it should be safe to just add it here so the
|
||||
# code below doesn't complain
|
||||
if not config.has_section('gntp'):
|
||||
logger.info('Error reading ~/.gntp config file')
|
||||
config.add_section('gntp')
|
||||
|
||||
kwargs['password'] = config.get('gntp', 'password')
|
||||
kwargs['hostname'] = config.get('gntp', 'hostname')
|
||||
kwargs['port'] = config.getint('gntp', 'port')
|
||||
|
||||
super(GrowlNotifier, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
def mini(description, **kwargs):
|
||||
"""Single notification function
|
||||
|
||||
Simple notification function in one line. Has only one required parameter
|
||||
and attempts to use reasonable defaults for everything else
|
||||
:param string description: Notification message
|
||||
"""
|
||||
kwargs['notifierFactory'] = GrowlNotifier
|
||||
notifier.mini(description, **kwargs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# If we're running this module directly we're likely running it as a test
|
||||
# so extra debugging is useful
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
mini('Testing mini notification')
|
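A minimal usage sketch for the module above: the import path assumes the standalone gntp package layout (in this changeset the module is vendored under libs/apprise/plugins/NotifyGrowl/gntp), a GNTP-capable server is assumed to be listening on the configured host, and the application/notification names are placeholders. Any [gntp] values found in ~/.gntp (hostname, password, port) take precedence over the kwargs.

from gntp.config import GrowlNotifier, mini  # import path is an assumption

growl = GrowlNotifier(
    applicationName='Example App',
    notifications=['New Messages'],
)
if growl.register() is True:
    growl.notify(
        noteType='New Messages',
        title='Hello',
        description='Sent through the ConfigParser-backed GrowlNotifier',
    )

# Or the one-line helper, which registers and notifies in a single call:
mini('Sent through gntp.config.mini')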
511
libs/apprise/plugins/NotifyGrowl/gntp/core.py
Normal file
|
@ -0,0 +1,511 @@
|
|||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
import hashlib
|
||||
import re
|
||||
import time
|
||||
|
||||
from . import shim
|
||||
from . import errors as errors
|
||||
|
||||
__all__ = [
|
||||
'GNTPRegister',
|
||||
'GNTPNotice',
|
||||
'GNTPSubscribe',
|
||||
'GNTPOK',
|
||||
'GNTPError',
|
||||
'parse_gntp',
|
||||
]
|
||||
|
||||
#GNTP/<version> <messagetype> <encryptionAlgorithmID>[:<ivValue>][ <keyHashAlgorithmID>:<keyHash>.<salt>]
|
||||
GNTP_INFO_LINE = re.compile(
|
||||
r'GNTP/(?P<version>\d+\.\d+) (?P<messagetype>REGISTER|NOTIFY|SUBSCRIBE|\-OK|\-ERROR)' +
|
||||
r' (?P<encryptionAlgorithmID>[A-Z0-9]+(:(?P<ivValue>[A-F0-9]+))?) ?' +
|
||||
r'((?P<keyHashAlgorithmID>[A-Z0-9]+):(?P<keyHash>[A-F0-9]+).(?P<salt>[A-F0-9]+))?\r\n',
|
||||
re.IGNORECASE
|
||||
)
|
||||
|
||||
GNTP_INFO_LINE_SHORT = re.compile(
|
||||
r'GNTP/(?P<version>\d+\.\d+) (?P<messagetype>REGISTER|NOTIFY|SUBSCRIBE|\-OK|\-ERROR)',
|
||||
re.IGNORECASE
|
||||
)
|
||||
|
||||
GNTP_HEADER = re.compile(r'([\w-]+):(.+)')
|
||||
|
||||
GNTP_EOL = shim.b('\r\n')
|
||||
GNTP_SEP = shim.b(': ')
|
||||
|
||||
|
||||
class _GNTPBuffer(shim.StringIO):
|
||||
"""GNTP Buffer class"""
|
||||
def writeln(self, value=None):
|
||||
if value:
|
||||
self.write(shim.b(value))
|
||||
self.write(GNTP_EOL)
|
||||
|
||||
def writeheader(self, key, value):
|
||||
if not isinstance(value, str):
|
||||
value = str(value)
|
||||
self.write(shim.b(key))
|
||||
self.write(GNTP_SEP)
|
||||
self.write(shim.b(value))
|
||||
self.write(GNTP_EOL)
|
||||
|
||||
|
||||
class _GNTPBase(object):
|
||||
"""Base initilization
|
||||
|
||||
:param string messagetype: GNTP Message type
|
||||
:param string version: GNTP Protocol version
|
||||
:param string encryption: Encryption protocol
|
||||
"""
|
||||
def __init__(self, messagetype=None, version='1.0', encryption=None):
|
||||
self.info = {
|
||||
'version': version,
|
||||
'messagetype': messagetype,
|
||||
'encryptionAlgorithmID': encryption
|
||||
}
|
||||
self.hash_algo = {
|
||||
'MD5': hashlib.md5,
|
||||
'SHA1': hashlib.sha1,
|
||||
'SHA256': hashlib.sha256,
|
||||
'SHA512': hashlib.sha512,
|
||||
}
|
||||
self.headers = {}
|
||||
self.resources = {}
|
||||
|
||||
def __str__(self):
|
||||
return self.encode()
|
||||
|
||||
def _parse_info(self, data):
|
||||
"""Parse the first line of a GNTP message to get security and other info values
|
||||
|
||||
:param string data: GNTP Message
|
||||
:return dict: Parsed GNTP Info line
|
||||
"""
|
||||
|
||||
match = GNTP_INFO_LINE.match(data)
|
||||
|
||||
if not match:
|
||||
raise errors.ParseError('ERROR_PARSING_INFO_LINE')
|
||||
|
||||
info = match.groupdict()
|
||||
if info['encryptionAlgorithmID'] == 'NONE':
|
||||
info['encryptionAlgorithmID'] = None
|
||||
|
||||
return info
|
||||
|
||||
def set_password(self, password, encryptAlgo='MD5'):
|
||||
"""Set a password for a GNTP Message
|
||||
|
||||
:param string password: Null to clear password
|
||||
:param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512
|
||||
"""
|
||||
if not password:
|
||||
self.info['encryptionAlgorithmID'] = None
|
||||
self.info['keyHashAlgorithm'] = None
|
||||
return
|
||||
|
||||
self.password = shim.b(password)
|
||||
self.encryptAlgo = encryptAlgo.upper()
|
||||
|
||||
if self.encryptAlgo not in self.hash_algo:
|
||||
raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo)
|
||||
|
||||
hashfunction = self.hash_algo.get(self.encryptAlgo)
|
||||
|
||||
password = password.encode('utf8')
|
||||
seed = time.ctime().encode('utf8')
|
||||
salt = hashfunction(seed).hexdigest()
|
||||
saltHash = hashfunction(seed).digest()
|
||||
keyBasis = password + saltHash
|
||||
key = hashfunction(keyBasis).digest()
|
||||
keyHash = hashfunction(key).hexdigest()
|
||||
|
||||
self.info['keyHashAlgorithmID'] = self.encryptAlgo
|
||||
self.info['keyHash'] = keyHash.upper()
|
||||
self.info['salt'] = salt.upper()
|
||||
|
||||
def _decode_hex(self, value):
|
||||
"""Helper function to decode hex string to `proper` hex string
|
||||
|
||||
:param string value: Human readable hex string
|
||||
:return string: Decoded string
|
||||
"""
|
||||
result = ''
|
||||
for i in range(0, len(value), 2):
|
||||
tmp = int(value[i:i + 2], 16)
|
||||
result += chr(tmp)
|
||||
return result
|
||||
|
||||
def _decode_binary(self, rawIdentifier, identifier):
|
||||
rawIdentifier += '\r\n\r\n'
|
||||
dataLength = int(identifier['Length'])
|
||||
pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier)
|
||||
pointerEnd = pointerStart + dataLength
|
||||
data = self.raw[pointerStart:pointerEnd]
|
||||
if not len(data) == dataLength:
|
||||
raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Received %s' % (dataLength, len(data)))
|
||||
return data
|
||||
|
||||
def _validate_password(self, password):
|
||||
"""Validate GNTP Message against stored password"""
|
||||
self.password = password
|
||||
if password is None:
|
||||
raise errors.AuthError('Missing password')
|
||||
keyHash = self.info.get('keyHash', None)
|
||||
if keyHash is None and self.password is None:
|
||||
return True
|
||||
if keyHash is None:
|
||||
raise errors.AuthError('Invalid keyHash')
|
||||
if self.password is None:
|
||||
raise errors.AuthError('Missing password')
|
||||
|
||||
keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5')
|
||||
|
||||
password = self.password.encode('utf8')
|
||||
saltHash = self._decode_hex(self.info['salt'])
|
||||
|
||||
keyBasis = password + saltHash
|
||||
self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest()
|
||||
keyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest()
|
||||
|
||||
if not keyHash.upper() == self.info['keyHash'].upper():
|
||||
raise errors.AuthError('Invalid Hash')
|
||||
return True
|
||||
|
||||
def validate(self):
|
||||
"""Verify required headers"""
|
||||
for header in self._requiredHeaders:
|
||||
if not self.headers.get(header, False):
|
||||
raise errors.ParseError('Missing Notification Header: ' + header)
|
||||
|
||||
def _format_info(self):
|
||||
"""Generate info line for GNTP Message
|
||||
|
||||
:return string:
|
||||
"""
|
||||
info = 'GNTP/%s %s' % (
|
||||
self.info.get('version'),
|
||||
self.info.get('messagetype'),
|
||||
)
|
||||
if self.info.get('encryptionAlgorithmID', None):
|
||||
info += ' %s:%s' % (
|
||||
self.info.get('encryptionAlgorithmID'),
|
||||
self.info.get('ivValue'),
|
||||
)
|
||||
else:
|
||||
info += ' NONE'
|
||||
|
||||
if self.info.get('keyHashAlgorithmID', None):
|
||||
info += ' %s:%s.%s' % (
|
||||
self.info.get('keyHashAlgorithmID'),
|
||||
self.info.get('keyHash'),
|
||||
self.info.get('salt')
|
||||
)
|
||||
|
||||
return info
|
||||
|
||||
def _parse_dict(self, data):
|
||||
"""Helper function to parse blocks of GNTP headers into a dictionary
|
||||
|
||||
:param string data:
|
||||
:return dict: Dictionary of parsed GNTP Headers
|
||||
"""
|
||||
d = {}
|
||||
for line in data.split('\r\n'):
|
||||
match = GNTP_HEADER.match(line)
|
||||
if not match:
|
||||
continue
|
||||
|
||||
key = match.group(1).strip()
|
||||
val = match.group(2).strip()
|
||||
d[key] = val
|
||||
return d
|
||||
|
||||
def add_header(self, key, value):
|
||||
self.headers[key] = value
|
||||
|
||||
def add_resource(self, data):
|
||||
"""Add binary resource
|
||||
|
||||
:param string data: Binary Data
|
||||
"""
|
||||
data = shim.b(data)
|
||||
identifier = hashlib.md5(data).hexdigest()
|
||||
self.resources[identifier] = data
|
||||
return 'x-growl-resource://%s' % identifier
|
||||
|
||||
def decode(self, data, password=None):
|
||||
"""Decode GNTP Message
|
||||
|
||||
:param string data:
|
||||
"""
|
||||
self.password = password
|
||||
self.raw = shim.u(data)
|
||||
parts = self.raw.split('\r\n\r\n')
|
||||
self.info = self._parse_info(self.raw)
|
||||
self.headers = self._parse_dict(parts[0])
|
||||
|
||||
def encode(self):
|
||||
"""Encode a generic GNTP Message
|
||||
|
||||
:return string: GNTP Message ready to be sent. Returned as a byte string
|
||||
"""
|
||||
|
||||
buff = _GNTPBuffer()
|
||||
|
||||
buff.writeln(self._format_info())
|
||||
|
||||
#Headers
|
||||
for k, v in self.headers.items():
|
||||
buff.writeheader(k, v)
|
||||
buff.writeln()
|
||||
|
||||
#Resources
|
||||
for resource, data in self.resources.items():
|
||||
buff.writeheader('Identifier', resource)
|
||||
buff.writeheader('Length', len(data))
|
||||
buff.writeln()
|
||||
buff.write(data)
|
||||
buff.writeln()
|
||||
buff.writeln()
|
||||
|
||||
return buff.getvalue()
|
||||
|
||||
|
||||
class GNTPRegister(_GNTPBase):
|
||||
"""Represents a GNTP Registration Command
|
||||
|
||||
:param string data: (Optional) See decode()
|
||||
:param string password: (Optional) Password to use while encoding/decoding messages
|
||||
"""
|
||||
_requiredHeaders = [
|
||||
'Application-Name',
|
||||
'Notifications-Count'
|
||||
]
|
||||
_requiredNotificationHeaders = ['Notification-Name']
|
||||
|
||||
def __init__(self, data=None, password=None):
|
||||
_GNTPBase.__init__(self, 'REGISTER')
|
||||
self.notifications = []
|
||||
|
||||
if data:
|
||||
self.decode(data, password)
|
||||
else:
|
||||
self.set_password(password)
|
||||
self.add_header('Application-Name', 'pygntp')
|
||||
self.add_header('Notifications-Count', 0)
|
||||
|
||||
def validate(self):
|
||||
'''Validate required headers and validate notification headers'''
|
||||
for header in self._requiredHeaders:
|
||||
if not self.headers.get(header, False):
|
||||
raise errors.ParseError('Missing Registration Header: ' + header)
|
||||
for notice in self.notifications:
|
||||
for header in self._requiredNotificationHeaders:
|
||||
if not notice.get(header, False):
|
||||
raise errors.ParseError('Missing Notification Header: ' + header)
|
||||
|
||||
def decode(self, data, password):
|
||||
"""Decode existing GNTP Registration message
|
||||
|
||||
:param string data: Message to decode
|
||||
"""
|
||||
self.raw = shim.u(data)
|
||||
parts = self.raw.split('\r\n\r\n')
|
||||
self.info = self._parse_info(self.raw)
|
||||
self._validate_password(password)
|
||||
self.headers = self._parse_dict(parts[0])
|
||||
|
||||
for i, part in enumerate(parts):
|
||||
if i == 0:
|
||||
continue # Skip Header
|
||||
if part.strip() == '':
|
||||
continue
|
||||
notice = self._parse_dict(part)
|
||||
if notice.get('Notification-Name', False):
|
||||
self.notifications.append(notice)
|
||||
elif notice.get('Identifier', False):
|
||||
notice['Data'] = self._decode_binary(part, notice)
|
||||
#open('register.png','wblol').write(notice['Data'])
|
||||
self.resources[notice.get('Identifier')] = notice
|
||||
|
||||
def add_notification(self, name, enabled=True):
|
||||
"""Add new Notification to Registration message
|
||||
|
||||
:param string name: Notification Name
|
||||
:param boolean enabled: Enable this notification by default
|
||||
"""
|
||||
notice = {}
|
||||
notice['Notification-Name'] = name
|
||||
notice['Notification-Enabled'] = enabled
|
||||
|
||||
self.notifications.append(notice)
|
||||
self.add_header('Notifications-Count', len(self.notifications))
|
||||
|
||||
def encode(self):
|
||||
"""Encode a GNTP Registration Message
|
||||
|
||||
:return string: Encoded GNTP Registration message. Returned as a byte string
|
||||
"""
|
||||
|
||||
buff = _GNTPBuffer()
|
||||
|
||||
buff.writeln(self._format_info())
|
||||
|
||||
#Headers
|
||||
for k, v in self.headers.items():
|
||||
buff.writeheader(k, v)
|
||||
buff.writeln()
|
||||
|
||||
#Notifications
|
||||
if len(self.notifications) > 0:
|
||||
for notice in self.notifications:
|
||||
for k, v in notice.items():
|
||||
buff.writeheader(k, v)
|
||||
buff.writeln()
|
||||
|
||||
#Resources
|
||||
for resource, data in self.resources.items():
|
||||
buff.writeheader('Identifier', resource)
|
||||
buff.writeheader('Length', len(data))
|
||||
buff.writeln()
|
||||
buff.write(data)
|
||||
buff.writeln()
|
||||
buff.writeln()
|
||||
|
||||
return buff.getvalue()
|
||||
|
||||
|
||||
class GNTPNotice(_GNTPBase):
|
||||
"""Represents a GNTP Notification Command
|
||||
|
||||
:param string data: (Optional) See decode()
|
||||
:param string app: (Optional) Set Application-Name
|
||||
:param string name: (Optional) Set Notification-Name
|
||||
:param string title: (Optional) Set Notification Title
|
||||
:param string password: (Optional) Password to use while encoding/decoding messages
|
||||
"""
|
||||
_requiredHeaders = [
|
||||
'Application-Name',
|
||||
'Notification-Name',
|
||||
'Notification-Title'
|
||||
]
|
||||
|
||||
def __init__(self, data=None, app=None, name=None, title=None, password=None):
|
||||
_GNTPBase.__init__(self, 'NOTIFY')
|
||||
|
||||
if data:
|
||||
self.decode(data, password)
|
||||
else:
|
||||
self.set_password(password)
|
||||
if app:
|
||||
self.add_header('Application-Name', app)
|
||||
if name:
|
||||
self.add_header('Notification-Name', name)
|
||||
if title:
|
||||
self.add_header('Notification-Title', title)
|
||||
|
||||
def decode(self, data, password):
|
||||
"""Decode existing GNTP Notification message
|
||||
|
||||
:param string data: Message to decode.
|
||||
"""
|
||||
self.raw = shim.u(data)
|
||||
parts = self.raw.split('\r\n\r\n')
|
||||
self.info = self._parse_info(self.raw)
|
||||
self._validate_password(password)
|
||||
self.headers = self._parse_dict(parts[0])
|
||||
|
||||
for i, part in enumerate(parts):
|
||||
if i == 0:
|
||||
continue # Skip Header
|
||||
if part.strip() == '':
|
||||
continue
|
||||
notice = self._parse_dict(part)
|
||||
if notice.get('Identifier', False):
|
||||
notice['Data'] = self._decode_binary(part, notice)
|
||||
#open('notice.png','wblol').write(notice['Data'])
|
||||
self.resources[notice.get('Identifier')] = notice
|
||||
|
||||
|
||||
class GNTPSubscribe(_GNTPBase):
|
||||
"""Represents a GNTP Subscribe Command
|
||||
|
||||
:param string data: (Optional) See decode()
|
||||
:param string password: (Optional) Password to use while encoding/decoding messages
|
||||
"""
|
||||
_requiredHeaders = [
|
||||
'Subscriber-ID',
|
||||
'Subscriber-Name',
|
||||
]
|
||||
|
||||
def __init__(self, data=None, password=None):
|
||||
_GNTPBase.__init__(self, 'SUBSCRIBE')
|
||||
if data:
|
||||
self.decode(data, password)
|
||||
else:
|
||||
self.set_password(password)
|
||||
|
||||
|
||||
class GNTPOK(_GNTPBase):
|
||||
"""Represents a GNTP OK Response
|
||||
|
||||
:param string data: (Optional) See _GNTPResponse.decode()
|
||||
:param string action: (Optional) Set type of action the OK Response is for
|
||||
"""
|
||||
_requiredHeaders = ['Response-Action']
|
||||
|
||||
def __init__(self, data=None, action=None):
|
||||
_GNTPBase.__init__(self, '-OK')
|
||||
if data:
|
||||
self.decode(data)
|
||||
if action:
|
||||
self.add_header('Response-Action', action)
|
||||
|
||||
|
||||
class GNTPError(_GNTPBase):
|
||||
"""Represents a GNTP Error response
|
||||
|
||||
:param string data: (Optional) See _GNTPResponse.decode()
|
||||
:param string errorcode: (Optional) Error code
|
||||
:param string errordesc: (Optional) Error Description
|
||||
"""
|
||||
_requiredHeaders = ['Error-Code', 'Error-Description']
|
||||
|
||||
def __init__(self, data=None, errorcode=None, errordesc=None):
|
||||
_GNTPBase.__init__(self, '-ERROR')
|
||||
if data:
|
||||
self.decode(data)
|
||||
if errorcode:
|
||||
self.add_header('Error-Code', errorcode)
|
||||
self.add_header('Error-Description', errordesc)
|
||||
|
||||
def error(self):
|
||||
return (self.headers.get('Error-Code', None),
|
||||
self.headers.get('Error-Description', None))
|
||||
|
||||
|
||||
def parse_gntp(data, password=None):
|
||||
"""Attempt to parse a message as a GNTP message
|
||||
|
||||
:param string data: Message to be parsed
|
||||
:param string password: Optional password to be used to verify the message
|
||||
"""
|
||||
data = shim.u(data)
|
||||
match = GNTP_INFO_LINE_SHORT.match(data)
|
||||
if not match:
|
||||
raise errors.ParseError('INVALID_GNTP_INFO')
|
||||
info = match.groupdict()
|
||||
if info['messagetype'] == 'REGISTER':
|
||||
return GNTPRegister(data, password=password)
|
||||
elif info['messagetype'] == 'NOTIFY':
|
||||
return GNTPNotice(data, password=password)
|
||||
elif info['messagetype'] == 'SUBSCRIBE':
|
||||
return GNTPSubscribe(data, password=password)
|
||||
elif info['messagetype'] == '-OK':
|
||||
return GNTPOK(data)
|
||||
elif info['messagetype'] == '-ERROR':
|
||||
return GNTPError(data)
|
||||
raise errors.ParseError('INVALID_GNTP_MESSAGE')
|
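A short sketch of the message classes above (import path assumed; values are placeholders): build a REGISTER packet, key it with a password, encode it to bytes for the socket, and parse a server response with parse_gntp().

from gntp import core  # import path is an assumption

register = core.GNTPRegister()
register.add_header('Application-Name', 'Example App')
register.add_notification('New Messages', enabled=True)
register.set_password('secret', 'SHA256')  # puts keyHashAlgorithmID:keyHash.salt on the info line

wire = register.encode()
# wire is a byte string starting with b'GNTP/1.0 REGISTER NONE SHA256:<hash>.<salt>\r\n'

# Responses arrive as -OK / -ERROR packets and can be parsed the same way:
reply = core.parse_gntp(b'GNTP/1.0 -OK NONE\r\nResponse-Action: REGISTER\r\n\r\n')
print(reply.headers['Response-Action'])  # -> REGISTER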
25
libs/apprise/plugins/NotifyGrowl/gntp/errors.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
class BaseError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ParseError(BaseError):
|
||||
errorcode = 500
|
||||
errordesc = 'Error parsing the message'
|
||||
|
||||
|
||||
class AuthError(BaseError):
|
||||
errorcode = 400
|
||||
errordesc = 'Error with authorization'
|
||||
|
||||
|
||||
class UnsupportedError(BaseError):
|
||||
errorcode = 500
|
||||
errordesc = 'Currently unsupported by gntp.py'
|
||||
|
||||
|
||||
class NetworkError(BaseError):
|
||||
errorcode = 500
|
||||
errordesc = "Error connecting to growl server"
|
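These exception classes double as the error codes and descriptions used when building a -ERROR reply; a quick sketch (import path assumed):

from gntp import core, errors  # import path is an assumption

try:
    core.parse_gntp('this is not a GNTP message\r\n')
except errors.ParseError as exc:
    # errorcode / errordesc are class attributes (500 / 'Error parsing the message')
    reply = core.GNTPError(errorcode=exc.errorcode, errordesc=exc.errordesc)
    print(reply.encode())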
265
libs/apprise/plugins/NotifyGrowl/gntp/notifier.py
Normal file
|
@ -0,0 +1,265 @@
|
|||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
"""
|
||||
The gntp.notifier module is provided as a simple way to send notifications
|
||||
using GNTP
|
||||
|
||||
.. note::
|
||||
This class is intended to mostly mirror the older Python bindings such
|
||||
that you should be able to replace instances of the old bindings with
|
||||
this class.
|
||||
`Original Python bindings <http://code.google.com/p/growl/source/browse/Bindings/python/Growl.py>`_
|
||||
|
||||
"""
|
||||
import logging
|
||||
import platform
|
||||
import socket
|
||||
import sys
|
||||
|
||||
from .version import __version__
|
||||
from . import core
|
||||
from . import errors as errors
|
||||
from . import shim
|
||||
|
||||
__all__ = [
|
||||
'mini',
|
||||
'GrowlNotifier',
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GrowlNotifier(object):
|
||||
"""Helper class to simplfy sending Growl messages
|
||||
|
||||
:param string applicationName: Sending application name
|
||||
:param list notifications: List of valid notifications
|
||||
:param list defaultNotifications: List of notifications that should be enabled
|
||||
by default
|
||||
:param string applicationIcon: Icon URL
|
||||
:param string hostname: Remote host
|
||||
:param integer port: Remote port
|
||||
"""
|
||||
|
||||
passwordHash = 'MD5'
|
||||
socketTimeout = 3
|
||||
|
||||
def __init__(self, applicationName='Python GNTP', notifications=[],
|
||||
defaultNotifications=None, applicationIcon=None, hostname='localhost',
|
||||
password=None, port=23053):
|
||||
|
||||
self.applicationName = applicationName
|
||||
self.notifications = list(notifications)
|
||||
if defaultNotifications:
|
||||
self.defaultNotifications = list(defaultNotifications)
|
||||
else:
|
||||
self.defaultNotifications = self.notifications
|
||||
self.applicationIcon = applicationIcon
|
||||
|
||||
self.password = password
|
||||
self.hostname = hostname
|
||||
self.port = int(port)
|
||||
|
||||
def _checkIcon(self, data):
|
||||
'''
|
||||
Check the icon to see if it's valid
|
||||
|
||||
If it's a simple URL icon, then we return True. If it's a data icon
|
||||
then we return False
|
||||
'''
|
||||
logger.info('Checking icon')
|
||||
return shim.u(data).startswith('http')
|
||||
|
||||
def register(self):
|
||||
"""Send GNTP Registration
|
||||
|
||||
.. warning::
|
||||
Before sending notifications to Growl, you need to have
|
||||
sent a registration message at least once
|
||||
"""
|
||||
logger.info('Sending registration to %s:%s', self.hostname, self.port)
|
||||
register = core.GNTPRegister()
|
||||
register.add_header('Application-Name', self.applicationName)
|
||||
for notification in self.notifications:
|
||||
enabled = notification in self.defaultNotifications
|
||||
register.add_notification(notification, enabled)
|
||||
if self.applicationIcon:
|
||||
if self._checkIcon(self.applicationIcon):
|
||||
register.add_header('Application-Icon', self.applicationIcon)
|
||||
else:
|
||||
resource = register.add_resource(self.applicationIcon)
|
||||
register.add_header('Application-Icon', resource)
|
||||
if self.password:
|
||||
register.set_password(self.password, self.passwordHash)
|
||||
self.add_origin_info(register)
|
||||
self.register_hook(register)
|
||||
return self._send('register', register)
|
||||
|
||||
def notify(self, noteType, title, description, icon=None, sticky=False,
|
||||
priority=None, callback=None, identifier=None, custom={}):
|
||||
"""Send a GNTP notifications
|
||||
|
||||
.. warning::
|
||||
Must have registered with growl beforehand or messages will be ignored
|
||||
|
||||
:param string noteType: One of the notification names registered earlier
|
||||
:param string title: Notification title (usually displayed on the notification)
|
||||
:param string description: The main content of the notification
|
||||
:param string icon: Icon URL path
|
||||
:param boolean sticky: Sticky notification
|
||||
:param integer priority: Message priority level from -2 to 2
|
||||
:param string callback: URL callback
|
||||
:param dict custom: Custom attributes. Key names should be prefixed with X-
|
||||
according to the spec but this is not enforced by this class
|
||||
|
||||
.. warning::
|
||||
For now, only URL callbacks are supported. In the future, the
|
||||
callback argument will also support a function
|
||||
"""
|
||||
logger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port)
|
||||
assert noteType in self.notifications
|
||||
notice = core.GNTPNotice()
|
||||
notice.add_header('Application-Name', self.applicationName)
|
||||
notice.add_header('Notification-Name', noteType)
|
||||
notice.add_header('Notification-Title', title)
|
||||
if self.password:
|
||||
notice.set_password(self.password, self.passwordHash)
|
||||
if sticky:
|
||||
notice.add_header('Notification-Sticky', sticky)
|
||||
if priority:
|
||||
notice.add_header('Notification-Priority', priority)
|
||||
if icon:
|
||||
if self._checkIcon(icon):
|
||||
notice.add_header('Notification-Icon', icon)
|
||||
else:
|
||||
resource = notice.add_resource(icon)
|
||||
notice.add_header('Notification-Icon', resource)
|
||||
|
||||
if description:
|
||||
notice.add_header('Notification-Text', description)
|
||||
if callback:
|
||||
notice.add_header('Notification-Callback-Target', callback)
|
||||
if identifier:
|
||||
notice.add_header('Notification-Coalescing-ID', identifier)
|
||||
|
||||
for key in custom:
|
||||
notice.add_header(key, custom[key])
|
||||
|
||||
self.add_origin_info(notice)
|
||||
self.notify_hook(notice)
|
||||
|
||||
return self._send('notify', notice)
|
||||
|
||||
def subscribe(self, id, name, port):
|
||||
"""Send a Subscribe request to a remote machine"""
|
||||
sub = core.GNTPSubscribe()
|
||||
sub.add_header('Subscriber-ID', id)
|
||||
sub.add_header('Subscriber-Name', name)
|
||||
sub.add_header('Subscriber-Port', port)
|
||||
if self.password:
|
||||
sub.set_password(self.password, self.passwordHash)
|
||||
|
||||
self.add_origin_info(sub)
|
||||
self.subscribe_hook(sub)
|
||||
|
||||
return self._send('subscribe', sub)
|
||||
|
||||
def add_origin_info(self, packet):
|
||||
"""Add optional Origin headers to message"""
|
||||
packet.add_header('Origin-Machine-Name', platform.node())
|
||||
packet.add_header('Origin-Software-Name', 'gntp.py')
|
||||
packet.add_header('Origin-Software-Version', __version__)
|
||||
packet.add_header('Origin-Platform-Name', platform.system())
|
||||
packet.add_header('Origin-Platform-Version', platform.platform())
|
||||
|
||||
def register_hook(self, packet):
|
||||
pass
|
||||
|
||||
def notify_hook(self, packet):
|
||||
pass
|
||||
|
||||
def subscribe_hook(self, packet):
|
||||
pass
|
||||
|
||||
def _send(self, messagetype, packet):
|
||||
"""Send the GNTP Packet"""
|
||||
|
||||
packet.validate()
|
||||
data = packet.encode()
|
||||
|
||||
logger.debug('To : %s:%s <%s>\n%s', self.hostname, self.port, packet.__class__, data)
|
||||
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
s.settimeout(self.socketTimeout)
|
||||
try:
|
||||
s.connect((self.hostname, self.port))
|
||||
s.send(data)
|
||||
recv_data = s.recv(1024)
|
||||
while not recv_data.endswith(shim.b("\r\n\r\n")):
|
||||
recv_data += s.recv(1024)
|
||||
except socket.error:
|
||||
# Python2.5 and Python3 compatible exception
|
||||
exc = sys.exc_info()[1]
|
||||
raise errors.NetworkError(exc)
|
||||
|
||||
response = core.parse_gntp(recv_data)
|
||||
s.close()
|
||||
|
||||
logger.debug('From : %s:%s <%s>\n%s', self.hostname, self.port, response.__class__, response)
|
||||
|
||||
if type(response) == core.GNTPOK:
|
||||
return True
|
||||
logger.error('Invalid response: %s', response.error())
|
||||
return response.error()
|
||||
|
||||
|
||||
def mini(description, applicationName='PythonMini', noteType="Message",
|
||||
title="Mini Message", applicationIcon=None, hostname='localhost',
|
||||
password=None, port=23053, sticky=False, priority=None,
|
||||
callback=None, notificationIcon=None, identifier=None,
|
||||
notifierFactory=GrowlNotifier):
|
||||
"""Single notification function
|
||||
|
||||
Simple notification function in one line. Has only one required parameter
|
||||
and attempts to use reasonable defaults for everything else
|
||||
:param string description: Notification message
|
||||
|
||||
.. warning::
|
||||
For now, only URL callbacks are supported. In the future, the
|
||||
callback argument will also support a function
|
||||
"""
|
||||
try:
|
||||
growl = notifierFactory(
|
||||
applicationName=applicationName,
|
||||
notifications=[noteType],
|
||||
defaultNotifications=[noteType],
|
||||
applicationIcon=applicationIcon,
|
||||
hostname=hostname,
|
||||
password=password,
|
||||
port=port,
|
||||
)
|
||||
result = growl.register()
|
||||
if result is not True:
|
||||
return result
|
||||
|
||||
return growl.notify(
|
||||
noteType=noteType,
|
||||
title=title,
|
||||
description=description,
|
||||
icon=notificationIcon,
|
||||
sticky=sticky,
|
||||
priority=priority,
|
||||
callback=callback,
|
||||
identifier=identifier,
|
||||
)
|
||||
except Exception:
|
||||
# We want the "mini" function to be simple and swallow Exceptions
|
||||
# in order to be less invasive
|
||||
logger.exception("Growl error")
|
||||
|
||||
if __name__ == '__main__':
|
||||
# If we're running this module directly we're likely running it as a test
|
||||
# so extra debugging is useful
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
mini('Testing mini notification')
|
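A usage sketch for GrowlNotifier above, assuming the standalone gntp package layout and a Growl/GNTP service reachable on localhost:23053 (application and notification names are placeholders):

from gntp.notifier import GrowlNotifier  # import path is an assumption

growl = GrowlNotifier(
    applicationName='Example App',
    notifications=['New Messages'],
    defaultNotifications=['New Messages'],
    hostname='localhost',
    port=23053,
)

# Registration must succeed at least once before notify() is honoured.
if growl.register() is True:
    growl.notify(
        noteType='New Messages',
        title='Hello',
        description='Sent through gntp.notifier.GrowlNotifier',
        sticky=False,
        priority=1,
    )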
45
libs/apprise/plugins/NotifyGrowl/gntp/shim.py
Normal file
|
@ -0,0 +1,45 @@
|
|||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
"""
|
||||
Python2.5 and Python3.3 compatibility shim
|
||||
|
||||
Heavily inspired by the "six" library.
|
||||
https://pypi.python.org/pypi/six
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
if isinstance(s, bytes):
|
||||
return s
|
||||
return s.encode('utf8', 'replace')
|
||||
|
||||
def u(s):
|
||||
if isinstance(s, bytes):
|
||||
return s.decode('utf8', 'replace')
|
||||
return s
|
||||
|
||||
from io import BytesIO as StringIO
|
||||
from configparser import RawConfigParser
|
||||
else:
|
||||
def b(s):
|
||||
if isinstance(s, unicode):
|
||||
return s.encode('utf8', 'replace')
|
||||
return s
|
||||
|
||||
def u(s):
|
||||
if isinstance(s, unicode):
|
||||
return s
|
||||
if isinstance(s, int):
|
||||
s = str(s)
|
||||
return unicode(s, "utf8", "replace")
|
||||
|
||||
from StringIO import StringIO
|
||||
from ConfigParser import RawConfigParser
|
||||
|
||||
b.__doc__ = "Ensure we have a byte string"
|
||||
u.__doc__ = "Ensure we have a unicode string"
|
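The shim keeps byte/text handling uniform across Python 2 and 3; a quick sketch of the intended round-trip (import path assumed):

from gntp import shim  # import path is an assumption

payload = shim.b('Notification-Title: Hello')  # always a byte string for the socket
text = shim.u(payload)                         # always a text string for parsing/logging

assert isinstance(payload, bytes)
assert text == 'Notification-Title: Hello'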
4
libs/apprise/plugins/NotifyGrowl/gntp/version.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
__version__ = '1.0.2'
|
217
libs/apprise/plugins/NotifyIFTTT.py
Normal file
|
@ -0,0 +1,217 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# IFTTT (If-This-Then-That)
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# For this plugin to work, you need to add the Maker applet to your profile
|
||||
# Simply visit https://ifttt.com/search and search for 'Webhooks'
|
||||
# Or if you're signed in, click here: https://ifttt.com/maker_webhooks
|
||||
# and click 'Connect'
|
||||
#
|
||||
# You'll want to visit the settings of this Applet and pay attention to the
|
||||
# URL. For example, it might look like this:
|
||||
# https://maker.ifttt.com/use/a3nHB7gA9TfBQSqJAHklod
|
||||
#
|
||||
# In the above example a3nHB7gA9TfBQSqJAHklod becomes your {apikey}
|
||||
# You will need this to make this notification work correctly
|
||||
#
|
||||
# For each event you create you will assign it a name (this will be known as
|
||||
# the {event} when building your URL).
|
||||
import requests
|
||||
|
||||
from json import dumps
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
|
||||
|
||||
class NotifyIFTTT(NotifyBase):
|
||||
"""
|
||||
A wrapper for IFTTT Notifications
|
||||
|
||||
"""
|
||||
|
||||
# Even though you'll add 'Ingredients' as {{ Value1 }} to your Applets,
|
||||
# you must use their lowercase value in the HTTP POST.
|
||||
ifttt_default_key_prefix = 'value'
|
||||
|
||||
# The default IFTTT Key to use when mapping the title text to the IFTTT
|
||||
# event. The idea here is if someone wants to over-ride the default and
|
||||
# change it to another Ingredient Name (in 2018, you were limited to have
|
||||
# value1, value2, and value3).
|
||||
ifttt_default_title_key = 'value1'
|
||||
|
||||
# The default IFTTT Key to use when mapping the body text to the IFTTT
|
||||
# event. The idea here is if someone wants to over-ride the default and
|
||||
# change it to another Ingredient Name (in 2018, you were limited to have
|
||||
# value1, value2, and value3).
|
||||
ifttt_default_body_key = 'value2'
|
||||
|
||||
# The default IFTTT Key to use when mapping the body text to the IFTTT
|
||||
# event. The idea here is if someone wants to over-ride the default and
|
||||
# change it to another Ingredient Name (in 2018, you were limited to have
|
||||
# value1, value2, and value3).
|
||||
ifttt_default_type_key = 'value3'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'ifttt'
|
||||
|
||||
# IFTTT uses the http protocol with JSON requests
|
||||
notify_url = 'https://maker.ifttt.com/trigger/{event}/with/key/{apikey}'
|
||||
|
||||
def __init__(self, apikey, event, event_args=None, **kwargs):
|
||||
"""
|
||||
Initialize IFTTT Object
|
||||
|
||||
"""
|
||||
super(NotifyIFTTT, self).__init__(**kwargs)
|
||||
|
||||
if not apikey:
|
||||
raise TypeError('You must specify the Webhooks apikey.')
|
||||
|
||||
if not event:
|
||||
raise TypeError('You must specify the Event you wish to trigger.')
|
||||
|
||||
# Store our APIKey
|
||||
self.apikey = apikey
|
||||
|
||||
# Store our Event we wish to trigger
|
||||
self.event = event
|
||||
|
||||
if isinstance(event_args, dict):
|
||||
# Make a copy of the arguments so that they can't change
|
||||
# outside of this plugin
|
||||
self.event_args = event_args.copy()
|
||||
|
||||
else:
|
||||
# Force a dictionary
|
||||
self.event_args = dict()
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform IFTTT Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
self.ifttt_default_title_key: title,
|
||||
self.ifttt_default_body_key: body,
|
||||
self.ifttt_default_type_key: notify_type,
|
||||
}
|
||||
|
||||
# Update our payload using any other event_args specified
|
||||
payload.update(self.event_args)
|
||||
|
||||
# Eliminate empty fields; users wishing to cancel the use of the
|
||||
# self.ifttt_default_ entries can preset these keys to being
|
||||
# empty so that they get caught here and removed.
|
||||
payload = {x: y for x, y in payload.items() if y}
|
||||
|
||||
# URL to transmit content via
|
||||
url = self.notify_url.format(
|
||||
apikey=self.apikey,
|
||||
event=self.event,
|
||||
)
|
||||
|
||||
self.logger.debug('IFTTT POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('IFTTT Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response status: %r" % r.status_code)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response headers: %r" % r.headers)
|
||||
self.logger.debug(
|
||||
u"IFTTT HTTP response body: %r" % r.content)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
self.event,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT:%s '
|
||||
'notification (error=%s).' % (
|
||||
self.event,
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.content)
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent IFTTT notification to Event %s.' % self.event)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending IFTTT:%s ' % (
|
||||
self.event) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Our Event
|
||||
results['event'] = results['host']
|
||||
|
||||
# Our API Key
|
||||
results['apikey'] = results['user']
|
||||
|
||||
# Store ValueX entries based on each entry past the host
|
||||
results['event_args'] = {
|
||||
'{0}{1}'.format(NotifyIFTTT.ifttt_default_key_prefix, n + 1):
|
||||
NotifyBase.unquote(x)
|
||||
for n, x in enumerate(
|
||||
NotifyBase.split_path(results['fullpath'])) if x}
|
||||
|
||||
# Allow users to set key=val parameters to specify more types
|
||||
# of payload options
|
||||
results['event_args'].update(
|
||||
{k: NotifyBase.unquote(v)
|
||||
for k, v in results['qsd'].items()})
|
||||
|
||||
return results
|
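A sketch of how a URL of the shape ifttt://{apikey}@{event}/... (as read by parse_url() above) turns into a Maker webhook call; the event name and ingredient values are hypothetical, and the apikey is the example one from the comments at the top of the file:

apikey = 'a3nHB7gA9TfBQSqJAHklod'   # example key from the header comments
event = 'my_event'                  # hypothetical event name (URL host portion)
event_args = {'value1': 'ingredient-one', 'value3': 'custom'}  # from path / query string

# notify() defaults value1/value2/value3 to title/body/notify_type, then lets
# event_args override (or blank out) those entries:
payload = {'value1': 'My Title', 'value2': 'My Body', 'value3': 'info'}
payload.update(event_args)
payload = {k: v for k, v in payload.items() if v}

url = 'https://maker.ifttt.com/trigger/{event}/with/key/{apikey}'.format(
    apikey=apikey, event=event)
# -> https://maker.ifttt.com/trigger/my_event/with/key/a3nHB7gA9TfBQSqJAHklod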
131
libs/apprise/plugins/NotifyJSON.py
Normal file
|
@ -0,0 +1,131 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# JSON Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
|
||||
class NotifyJSON(NotifyBase):
|
||||
"""
|
||||
A wrapper for JSON Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'json'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'jsons'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize JSON Object
|
||||
"""
|
||||
super(NotifyJSON, self).__init__(**kwargs)
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not compat_is_basestring(self.fullpath):
|
||||
self.fullpath = '/'
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform JSON Notification
|
||||
"""
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
# Version: Major.Minor, Major is only updated if the entire
|
||||
# schema is changed. If just adding new items (or removing
|
||||
# old ones), only increment the Minor!
|
||||
'version': '1.0',
|
||||
'title': title,
|
||||
'message': body,
|
||||
'type': notify_type,
|
||||
}
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
auth = None
|
||||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if isinstance(self.port, int):
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += self.fullpath
|
||||
|
||||
self.logger.debug('JSON POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('JSON Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification '
|
||||
'(error=%s).' % (r.status_code))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent JSON notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending JSON '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
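A sketch of the request this plugin emits, mirroring notify() above; the host, port and path are placeholders (json:// selects http, jsons:// selects https):

schema, host, port, fullpath = 'https', 'example.com', 8443, '/webhook'

payload = {
    'version': '1.0',       # schema version carried in every message
    'title': 'My Title',
    'message': 'My Body',
    'type': 'info',
}

url = '%s://%s' % (schema, host)
if isinstance(port, int):
    url += ':%d' % port
url += fullpath
# -> https://example.com:8443/webhook, which receives the JSON document above as a POST body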
240
libs/apprise/plugins/NotifyJoin.py
Normal file
|
@ -0,0 +1,240 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Join Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
# Join URL: http://joaoapps.com/join/
|
||||
# To use this plugin, you need to first access (make sure your browser allows
|
||||
# popups): https://joinjoaomgcd.appspot.com/
|
||||
#
|
||||
# To register you just need to allow it to connect to your Google Profile but
|
||||
# the good news is it doesn't ask for anything too personal.
|
||||
#
|
||||
# You can download the app for your phone here:
|
||||
# https://play.google.com/store/apps/details?id=com.joaomgcd.join
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Token required as part of the API request
|
||||
VALIDATE_APIKEY = re.compile(r'[A-Za-z0-9]{32}')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
JOIN_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
JOIN_HTTP_ERROR_MAP.update({
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
|
||||
# Used to break path apart into list of devices
|
||||
DEVICE_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Used to detect a device
|
||||
IS_DEVICE_RE = re.compile(r'([A-Za-z0-9]{32})')
|
||||
|
||||
# Used to detect a device
|
||||
IS_GROUP_RE = re.compile(
|
||||
r'(group\.)?(?P<name>(all|android|chrome|windows10|phone|tablet|pc))',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Image Support (72x72)
|
||||
JOIN_IMAGE_XY = NotifyImageSize.XY_72
|
||||
|
||||
|
||||
class NotifyJoin(NotifyBase):
|
||||
"""
|
||||
A wrapper for Join Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'join'
|
||||
|
||||
# Join uses the http protocol with JSON requests
|
||||
notify_url = \
|
||||
'https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
def __init__(self, apikey, devices, **kwargs):
|
||||
"""
|
||||
Initialize Join Object
|
||||
"""
|
||||
super(NotifyJoin, self).__init__(**kwargs)
|
||||
|
||||
if not VALIDATE_APIKEY.match(apikey.strip()):
|
||||
self.logger.warning(
|
||||
'The first API Token specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
|
||||
raise TypeError(
|
||||
'The first API Token specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
|
||||
# The token associated with the account
|
||||
self.apikey = apikey.strip()
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
self.devices = [x for x in filter(bool, DEVICE_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
|
||||
elif isinstance(devices, (set, tuple, list)):
|
||||
self.devices = devices
|
||||
|
||||
else:
|
||||
self.devices = list()
|
||||
|
||||
if len(self.devices) == 0:
|
||||
# Default to everyone
|
||||
self.devices.append('group.all')
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Join Notification
|
||||
"""
|
||||
|
||||
try:
|
||||
# Limit results to just the first 2 lines; otherwise
|
||||
# there is just too much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
|
||||
except (AttributeError, TypeError):
|
||||
# body was None or not of a type string
|
||||
body = ''
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
return_status = True
|
||||
|
||||
# Create a copy of the devices list
|
||||
devices = list(self.devices)
|
||||
while len(devices):
|
||||
device = devices.pop(0)
|
||||
group_re = IS_GROUP_RE.match(device)
|
||||
if group_re:
|
||||
device = 'group.%s' % group_re.group('name').lower()
|
||||
|
||||
elif not IS_DEVICE_RE.match(device):
|
||||
self.logger.warning(
|
||||
"The specified device/group '%s' is invalid; skipping." % (
|
||||
device,
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
url_args = {
|
||||
'apikey': self.apikey,
|
||||
'deviceId': device,
|
||||
'title': title,
|
||||
'text': body,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
url_args['icon'] = image_url
|
||||
|
||||
# prepare payload
|
||||
payload = {}
|
||||
|
||||
# Prepare the URL
|
||||
url = '%s?%s' % (self.notify_url, NotifyBase.urlencode(url_args))
|
||||
|
||||
self.logger.debug('Join POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Join Payload: %s' % str(payload))
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Join:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
device,
|
||||
JOIN_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Join:%s '
|
||||
'notification (error=%s).' % (
|
||||
device,
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
return_status = False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Join notification to %s.' % device)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Join:%s '
|
||||
'notification.' % device
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return_status = False
|
||||
|
||||
if len(devices):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
return return_status
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
devices = ' '.join(
|
||||
filter(bool, NotifyBase.split_path(results['fullpath'])))
|
||||
|
||||
results['apikey'] = results['host']
|
||||
results['devices'] = devices
|
||||
|
||||
return results
|
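A standalone sketch of the device/group normalisation performed in notify() above, re-declaring the two patterns from the top of this file (the device list itself is hypothetical):

import re

DEVICE_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
IS_GROUP_RE = re.compile(
    r'(group\.)?(?P<name>(all|android|chrome|windows10|phone|tablet|pc))',
    re.IGNORECASE,
)

devices = DEVICE_LIST_DELIM.split('Android,group.pc PHONE')
normalized = []
for device in devices:
    group = IS_GROUP_RE.match(device)
    if group:
        # bare group names get the 'group.' prefix, exactly as in notify()
        normalized.append('group.%s' % group.group('name').lower())

print(normalized)   # -> ['group.android', 'group.pc', 'group.phone']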
190
libs/apprise/plugins/NotifyMatterMost.py
Normal file
|
@ -0,0 +1,190 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# MatterMost Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
# Some Reference Locations:
|
||||
# - https://docs.mattermost.com/developer/webhooks-incoming.html
|
||||
# - https://docs.mattermost.com/administration/config-settings.html
|
||||
|
||||
# Used to validate Authorization Token
|
||||
VALIDATE_AUTHTOKEN = re.compile(r'[A-Za-z0-9]{24,32}')
|
||||
|
||||
|
||||
class NotifyMatterMost(NotifyBase):
|
||||
"""
|
||||
A wrapper for MatterMost Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'mmost'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'mmosts'
|
||||
|
||||
# The default Mattermost port
|
||||
default_port = 8065
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 4000
|
||||
|
||||
def __init__(self, authtoken, channel=None, **kwargs):
|
||||
"""
|
||||
Initialize MatterMost Object
|
||||
"""
|
||||
super(NotifyMatterMost, self).__init__(**kwargs)
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
# Our API Key
|
||||
self.authtoken = authtoken
|
||||
|
||||
# Validate authtoken
|
||||
if not authtoken:
|
||||
self.logger.warning(
|
||||
'Missing MatterMost Authorization Token.'
|
||||
)
|
||||
raise TypeError(
|
||||
'Missing MatterMost Authorization Token.'
|
||||
)
|
||||
|
||||
if not VALIDATE_AUTHTOKEN.match(authtoken):
|
||||
self.logger.warning(
|
||||
'Invalid MatterMost Authorization Token Specified.'
|
||||
)
|
||||
raise TypeError(
|
||||
'Invalid MatterMost Authorization Token Specified.'
|
||||
)
|
||||
|
||||
# A Channel (optional)
|
||||
self.channel = channel
|
||||
|
||||
if not self.port:
|
||||
self.port = self.default_port
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform MatterMost Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'text': '###### %s\n%s' % (title, body),
|
||||
'icon_url': self.image_url(notify_type),
|
||||
}
|
||||
|
||||
if self.user:
|
||||
payload['username'] = self.user
|
||||
|
||||
else:
|
||||
payload['username'] = self.app_id
|
||||
|
||||
if self.channel:
|
||||
payload['channel'] = self.channel
|
||||
|
||||
url = '%s://%s:%d' % (self.schema, self.host, self.port)
|
||||
url += '/hooks/%s' % self.authtoken
|
||||
|
||||
self.logger.debug('MatterMost POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('MatterMost Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send MatterMost notification:'
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send MatterMost notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
else:
|
||||
self.logger.info('Sent MatterMost notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending MatterMost '
|
||||
'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
authtoken = NotifyBase.split_path(results['fullpath'])[0]
|
||||
|
||||
channel = None
|
||||
if 'channel' in results['qsd'] and len(results['qsd']['channel']):
|
||||
# Allow the user to specify the channel to post to
|
||||
channel = NotifyBase.unquote(results['qsd']['channel']).strip()
|
||||
|
||||
results['authtoken'] = authtoken
|
||||
results['channel'] = channel
|
||||
|
||||
return results
|
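A sketch of the webhook URL this plugin assembles (see notify() above); the hostname and token are placeholders, and the default port 8065 applies when none is given:

schema, host, port = 'http', 'mattermost.example.com', 8065   # mmosts:// would give https
authtoken = 'A' * 26                                          # placeholder 24-32 char incoming-webhook token

url = '%s://%s:%d' % (schema, host, port)
url += '/hooks/%s' % authtoken
# -> http://mattermost.example.com:8065/hooks/AAAA... (26 chars)
# The JSON body carries 'text' (title + body), 'icon_url', 'username' and,
# when supplied, 'channel'.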
224
libs/apprise/plugins/NotifyProwl.py
Normal file
|
@ -0,0 +1,224 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Prowl Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
|
||||
# Used to validate API Key
|
||||
VALIDATE_APIKEY = re.compile(r'[A-Za-z0-9]{40}')
|
||||
|
||||
# Used to validate Provider Key
|
||||
VALIDATE_PROVIDERKEY = re.compile(r'[A-Za-z0-9]{40}')
|
||||
|
||||
|
||||
# Priorities
|
||||
class ProwlPriority(object):
|
||||
LOW = -2
|
||||
MODERATE = -1
|
||||
NORMAL = 0
|
||||
HIGH = 1
|
||||
EMERGENCY = 2
|
||||
|
||||
|
||||
PROWL_PRIORITIES = (
|
||||
ProwlPriority.LOW,
|
||||
ProwlPriority.MODERATE,
|
||||
ProwlPriority.NORMAL,
|
||||
ProwlPriority.HIGH,
|
||||
ProwlPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PROWL_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PROWL_HTTP_ERROR_MAP.update({
|
||||
406: 'IP address has exceeded API limit',
|
||||
409: 'Request not approved.',
|
||||
})
|
||||
|
||||
|
||||
class NotifyProwl(NotifyBase):
|
||||
"""
|
||||
A wrapper for Prowl Notifications
|
||||
"""
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'prowl'
|
||||
|
||||
# Prowl uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.prowlapp.com/publicapi/add'
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 10000
|
||||
|
||||
# Defines the maximum allowable characters in the title
|
||||
title_maxlen = 1024
|
||||
|
||||
def __init__(self, apikey, providerkey=None, priority=None, **kwargs):
|
||||
"""
|
||||
Initialize Prowl Object
|
||||
"""
|
||||
super(NotifyProwl, self).__init__(**kwargs)
|
||||
|
||||
if priority not in PROWL_PRIORITIES:
|
||||
self.priority = ProwlPriority.NORMAL
|
||||
|
||||
else:
|
||||
self.priority = priority
|
||||
|
||||
if not VALIDATE_APIKEY.match(apikey):
|
||||
self.logger.warning(
|
||||
'The API key specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
raise TypeError(
|
||||
'The API key specified (%s) is invalid.' % apikey,
|
||||
)
|
||||
|
||||
# Store the API key
|
||||
self.apikey = apikey
|
||||
|
||||
# Store the provider key (if specified)
|
||||
if providerkey:
|
||||
if not VALIDATE_PROVIDERKEY.match(providerkey):
|
||||
self.logger.warning(
|
||||
'The Provider key specified (%s) '
|
||||
'is invalid.' % providerkey)
|
||||
|
||||
raise TypeError(
|
||||
'The Provider key specified (%s) '
|
||||
'is invalid.' % providerkey)
|
||||
|
||||
# Store the Provider Key
|
||||
self.providerkey = providerkey
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
"""
|
||||
Perform Prowl Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-type': "application/x-www-form-urlencoded",
|
||||
}
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'apikey': self.apikey,
|
||||
'application': self.app_id,
|
||||
'event': title,
|
||||
'description': body,
|
||||
'priority': self.priority,
|
||||
}
|
||||
|
||||
if self.providerkey:
|
||||
payload['providerkey'] = self.providerkey
|
||||
|
||||
self.logger.debug('Prowl POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Prowl Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification: '
|
||||
'%s (error=%s).' % (
|
||||
PROWL_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Prowl notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Prowl notification.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# optionally find the provider key
|
||||
try:
|
||||
providerkey = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][0]
|
||||
|
||||
except (AttributeError, IndexError):
|
||||
providerkey = None
|
||||
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': ProwlPriority.LOW,
|
||||
'-2': ProwlPriority.LOW,
|
||||
'm': ProwlPriority.MODERATE,
|
||||
'-1': ProwlPriority.MODERATE,
|
||||
'n': ProwlPriority.NORMAL,
|
||||
'0': ProwlPriority.NORMAL,
|
||||
'h': ProwlPriority.HIGH,
|
||||
'1': ProwlPriority.HIGH,
|
||||
'e': ProwlPriority.EMERGENCY,
|
||||
'2': ProwlPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
_map[results['qsd']['priority'][0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
results['apikey'] = results['host']
|
||||
results['providerkey'] = providerkey
|
||||
|
||||
return results
|
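Not part of the diff: a minimal sketch of how the parse_url() mapping above would be consumed, assuming NotifyProwl from this module is importable; the 40-character keys are placeholders.
# Hypothetical example keys; Prowl API/provider keys are 40 alphanumerics.
apikey = 'a' * 40
providerkey = 'b' * 40
results = NotifyProwl.parse_url('prowl://%s/%s?priority=h' % (apikey, providerkey))
# results['apikey']      == apikey               (taken from the URL host)
# results['providerkey'] == providerkey          (first path component)
# results['priority']    == ProwlPriority.HIGH   ('h' via the _map lookup)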
libs/apprise/plugins/NotifyPushBullet.py
|
@ -0,0 +1,188 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# PushBullet Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from .NotifyBase import IS_EMAIL_RE
|
||||
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Flag used as a placeholder to sending to all devices
|
||||
PUSHBULLET_SEND_TO_ALL = 'ALL_DEVICES'
|
||||
|
||||
# Used to break apart list of potential recipients by their delimiter
|
||||
# into a usable list.
|
||||
RECIPIENTS_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHBULLET_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHBULLET_HTTP_ERROR_MAP.update({
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
|
||||
|
||||
class NotifyPushBullet(NotifyBase):
|
||||
"""
|
||||
A wrapper for PushBullet Notifications
|
||||
"""
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'pbul'
|
||||
|
||||
# PushBullet uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.pushbullet.com/v2/pushes'
|
||||
|
||||
def __init__(self, accesstoken, recipients=None, **kwargs):
|
||||
"""
|
||||
Initialize PushBullet Object
|
||||
"""
|
||||
super(NotifyPushBullet, self).__init__(**kwargs)
|
||||
|
||||
self.accesstoken = accesstoken
|
||||
if compat_is_basestring(recipients):
|
||||
self.recipients = [x for x in filter(
|
||||
bool, RECIPIENTS_LIST_DELIM.split(recipients))]
|
||||
|
||||
elif isinstance(recipients, (set, tuple, list)):
|
||||
self.recipients = recipients
|
||||
|
||||
else:
|
||||
self.recipients = list()
|
||||
|
||||
if len(self.recipients) == 0:
|
||||
self.recipients = (PUSHBULLET_SEND_TO_ALL, )
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
"""
|
||||
Perform PushBullet Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
auth = (self.accesstoken, '')
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Create a copy of the recipients list
|
||||
recipients = list(self.recipients)
|
||||
while len(recipients):
|
||||
recipient = recipients.pop(0)
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'type': 'note',
|
||||
'title': title,
|
||||
'body': body,
|
||||
}
|
||||
|
||||
if recipient is PUSHBULLET_SEND_TO_ALL:
|
||||
# Send to all
|
||||
pass
|
||||
|
||||
elif IS_EMAIL_RE.match(recipient):
|
||||
payload['email'] = recipient
|
||||
self.logger.debug(
|
||||
"Recipient '%s' is an email address" % recipient)
|
||||
|
||||
elif recipient[0] == '#':
|
||||
payload['channel_tag'] = recipient[1:]
|
||||
self.logger.debug("Recipient '%s' is a channel" % recipient)
|
||||
|
||||
else:
|
||||
payload['device_iden'] = recipient
|
||||
self.logger.debug(
|
||||
"Recipient '%s' is a device" % recipient)
|
||||
|
||||
self.logger.debug('PushBullet POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('PushBullet Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to '
|
||||
'"%s": %s (error=%s).' % (
|
||||
recipient,
|
||||
PUSHBULLET_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to '
|
||||
'"%s" (error=%s).' % (recipient, r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
has_error = True
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent PushBullet notification to "%s".' % (recipient))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending PushBullet '
|
||||
'notification to "%s".' % (recipient),
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
if len(recipients):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
return not has_error
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
recipients = NotifyBase.unquote(results['fullpath'])
|
||||
|
||||
results['accesstoken'] = results['host']
|
||||
results['recipients'] = recipients
|
||||
|
||||
return results
|
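Not part of the diff: a short sketch of the recipient routing rules implemented in __init__() and notify() above; the access token and recipient values are placeholders, and NotifyBase's default keyword arguments are assumed.
p = NotifyPushBullet(accesstoken='o.AbCdEf123456',
                     recipients='user@example.com, #mychannel, ujxPklLhvyK')
# p.recipients -> ['user@example.com', '#mychannel', 'ujxPklLhvyK']
# 'user@example.com' -> payload['email']        (matches IS_EMAIL_RE)
# '#mychannel'       -> payload['channel_tag']  (leading '#' stripped)
# 'ujxPklLhvyK'      -> payload['device_iden']  (anything else)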
libs/apprise/plugins/NotifyPushalot.py
|
@ -0,0 +1,152 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Pushalot Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHALOT_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHALOT_HTTP_ERROR_MAP.update({
|
||||
406: 'Message throttle limit hit.',
|
||||
410: 'AuthorizedToken is no longer valid.',
|
||||
})
|
||||
|
||||
# Used to validate Authorization Token
|
||||
VALIDATE_AUTHTOKEN = re.compile(r'[A-Za-z0-9]{32}')
|
||||
|
||||
|
||||
class NotifyPushalot(NotifyBase):
|
||||
"""
|
||||
A wrapper for Pushalot Notifications
|
||||
"""
|
||||
|
||||
# The default protocol is always secured
|
||||
secure_protocol = 'palot'
|
||||
|
||||
# Pushalot uses the http protocol with JSON requests
|
||||
notify_url = 'https://pushalot.com/api/sendmessage'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
def __init__(self, authtoken, is_important=False, **kwargs):
|
||||
"""
|
||||
Initialize Pushalot Object
|
||||
"""
|
||||
super(NotifyPushalot, self).__init__(**kwargs)
|
||||
|
||||
# Is Important Flag
|
||||
self.is_important = is_important
|
||||
|
||||
self.authtoken = authtoken
|
||||
# Validate authtoken
|
||||
if not VALIDATE_AUTHTOKEN.match(authtoken):
|
||||
self.logger.warning(
|
||||
'Invalid Pushalot Authorization Token Specified.'
|
||||
)
|
||||
raise TypeError(
|
||||
'Invalid Pushalot Authorization Token Specified.'
|
||||
)
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Pushalot Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'AuthorizationToken': self.authtoken,
|
||||
'IsImportant': self.is_important,
|
||||
'Title': title,
|
||||
'Body': body,
|
||||
'Source': self.app_id,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
payload['Image'] = image_url
|
||||
|
||||
self.logger.debug('Pushalot POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Pushalot Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushalot notification: '
|
||||
'%s (error=%s).' % (
|
||||
PUSHALOT_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushalot notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Pushalot notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Pushalot notification.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
results['authtoken'] = results['host']
|
||||
|
||||
return results
|
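Not part of the diff: a tiny sketch of the palot:// URL handling above; the 32-character token is a placeholder.
results = NotifyPushalot.parse_url('palot://%s' % ('0' * 32))
# results['authtoken'] == '0' * 32   (taken from the URL host)
# notify() then POSTs a JSON body of the form:
# {"AuthorizationToken": ..., "IsImportant": false, "Title": ..., "Body": ..., "Source": ...}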
libs/apprise/plugins/NotifyPushjet/NotifyPushjet.py
|
@ -0,0 +1,76 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Pushjet Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
import re
|
||||
from .pushjet import errors
|
||||
from .pushjet import pushjet
|
||||
|
||||
from ..NotifyBase import NotifyBase
|
||||
|
||||
PUBLIC_KEY_RE = re.compile(
|
||||
r'^[a-z0-9]{4}-[a-z0-9]{6}-[a-z0-9]{12}-[a-z0-9]{5}-[a-z0-9]{9}$', re.I)
|
||||
|
||||
SECRET_KEY_RE = re.compile(r'^[a-z0-9]{32}$', re.I)
|
||||
|
||||
|
||||
class NotifyPushjet(NotifyBase):
|
||||
"""
|
||||
A wrapper for Pushjet Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'pjet'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'pjets'
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize Pushjet Object
|
||||
"""
|
||||
super(NotifyPushjet, self).__init__(**kwargs)
|
||||
|
||||
def notify(self, title, body, notify_type):
|
||||
"""
|
||||
Perform Pushjet Notification
|
||||
"""
|
||||
try:
|
||||
if self.user and self.host:
|
||||
server = "http://"
|
||||
if self.secure:
|
||||
server = "https://"
|
||||
|
||||
server += self.host
|
||||
if self.port:
|
||||
server += ":" + str(self.port)
|
||||
|
||||
api = pushjet.Api(server)
|
||||
service = api.Service(secret_key=self.user)
|
||||
|
||||
else:
|
||||
api = pushjet.Api(pushjet.DEFAULT_API_URL)
|
||||
service = api.Service(secret_key=self.host)
|
||||
|
||||
service.send(body, title)
|
||||
self.logger.info('Sent Pushjet notification.')
|
||||
|
||||
except (errors.PushjetError, ValueError) as e:
|
||||
self.logger.warning('Failed to send Pushjet notification.')
|
||||
self.logger.debug('Pushjet Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
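Not part of the diff: a rough sketch of the two URL layouts notify() distinguishes above, using the pushjet module imported by this file; the secret key is a placeholder and a reachable API instance is assumed.
# pjet://{secretkey}              -> public pushjet.io API; the secret key is the URL host
# pjets://{secretkey}@host:port   -> self-hosted instance;  the secret key is the URL user
api = pushjet.Api(pushjet.DEFAULT_API_URL)
api.Service(secret_key='0' * 32).send('message body', 'optional title')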
libs/apprise/plugins/NotifyPushjet/__init__.py
|
@ -0,0 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
from . import NotifyPushjet
|
||||
|
||||
__all__ = [
|
||||
'NotifyPushjet',
|
||||
]
|
libs/apprise/plugins/NotifyPushjet/pushjet/__init__.py
|
@ -0,0 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""A Python API for Pushjet. Send notifications to your phone from Python scripts!"""
|
||||
|
||||
from .pushjet import Service, Device, Subscription, Message, Api
|
||||
from .errors import PushjetError, AccessError, NonexistentError, SubscriptionError, RequestError, ServerError
|
libs/apprise/plugins/NotifyPushjet/pushjet/errors.py
|
@ -0,0 +1,48 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
from requests import RequestException
|
||||
|
||||
import sys
|
||||
if sys.version_info[0] < 3:
|
||||
# This is built into Python 3.
|
||||
class ConnectionError(Exception):
|
||||
pass
|
||||
|
||||
class PushjetError(Exception):
|
||||
"""All the errors inherit from this. Therefore, ``except PushjetError`` catches all errors."""
|
||||
|
||||
class AccessError(PushjetError):
|
||||
"""Raised when a secret key is missing for a service method that needs one."""
|
||||
|
||||
class NonexistentError(PushjetError):
|
||||
"""Raised when an attempt to access a nonexistent service is made."""
|
||||
|
||||
class SubscriptionError(PushjetError):
|
||||
"""Raised when an attempt to subscribe to a service that's already subscribed to,
|
||||
or to unsubscribe from a service that isn't subscribed to, is made."""
|
||||
|
||||
class RequestError(PushjetError, ConnectionError):
|
||||
"""Raised if something goes wrong in the connection to the API server.
|
||||
Inherits from ``ConnectionError`` on Python 3, and can therefore be caught
|
||||
with ``except ConnectionError`` there.
|
||||
|
||||
:ivar requests_exception: The underlying `requests <http://docs.python-requests.org>`__
|
||||
exception. Access this if you want to handle different HTTP request errors in different ways.
|
||||
"""
|
||||
|
||||
def __str__(self):
|
||||
return "requests.{error}: {description}".format(
|
||||
error=self.requests_exception.__class__.__name__,
|
||||
description=str(self.requests_exception)
|
||||
)
|
||||
|
||||
def __init__(self, requests_exception):
|
||||
self.requests_exception = requests_exception
|
||||
|
||||
class ServerError(PushjetError):
|
||||
"""Raised if the API server has an error while processing your request.
|
||||
This getting raised means there's a bug in the server! If you manage to
|
||||
track down what caused it, you can `open an issue on Pushjet's GitHub page
|
||||
<https://github.com/Pushjet/Pushjet-Server-Api/issues>`__.
|
||||
"""
|
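Not part of the diff: a short usage sketch of the hierarchy above; since every class derives from PushjetError, one except clause is enough, and the more specific RequestError is caught first. The `service` object is assumed to exist.
try:
    service.send('hello')                              # any pushjet call
except RequestError as e:
    print('network problem: %s' % e.requests_exception)
except PushjetError as e:
    print('pushjet error: %s' % e)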
libs/apprise/plugins/NotifyPushjet/pushjet/pushjet.py
|
@ -0,0 +1,317 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import requests
|
||||
from functools import partial
|
||||
|
||||
from .utilities import (
|
||||
NoNoneDict,
|
||||
requires_secret_key, with_api_bound,
|
||||
is_valid_uuid, is_valid_public_key, is_valid_secret_key, repr_format
|
||||
)
|
||||
from .errors import NonexistentError, SubscriptionError, RequestError, ServerError
|
||||
|
||||
import sys
|
||||
if sys.version_info[0] >= 3:
|
||||
from urllib.parse import urljoin
|
||||
unicode_type = str
|
||||
else:
|
||||
from urlparse import urljoin
|
||||
unicode_type = unicode
|
||||
|
||||
DEFAULT_API_URL = 'https://api.pushjet.io/'
|
||||
|
||||
class PushjetModel(object):
|
||||
_api = None # This is filled in later.
|
||||
|
||||
class Service(PushjetModel):
|
||||
"""A Pushjet service to send messages through. To receive messages, devices
|
||||
subscribe to these.
|
||||
|
||||
:param secret_key: The service's API key for write access. If provided,
|
||||
:func:`~pushjet.Service.send`, :func:`~pushjet.Service.edit`, and
|
||||
:func:`~pushjet.Service.delete` become available.
|
||||
Either this or the public key parameter must be present.
|
||||
:param public_key: The service's public API key for read access only.
|
||||
Either this or the secret key parameter must be present.
|
||||
|
||||
:ivar name: The name of the service.
|
||||
:ivar icon_url: The URL to the service's icon. May be ``None``.
|
||||
:ivar created: When the service was created, as seconds from epoch.
|
||||
:ivar secret_key: The service's secret API key, or ``None`` if the service is read-only.
|
||||
:ivar public_key: The service's public API key, to be used when subscribing to the service.
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return "<Pushjet Service: \"{}\">".format(repr_format(self.name))
|
||||
|
||||
def __init__(self, secret_key=None, public_key=None):
|
||||
if secret_key is None and public_key is None:
|
||||
raise ValueError("Either a secret key or public key "
|
||||
"must be provided.")
|
||||
elif secret_key and not is_valid_secret_key(secret_key):
|
||||
raise ValueError("Invalid secret key provided.")
|
||||
elif public_key and not is_valid_public_key(public_key):
|
||||
raise ValueError("Invalid public key provided.")
|
||||
self.secret_key = unicode_type(secret_key) if secret_key else None
|
||||
self.public_key = unicode_type(public_key) if public_key else None
|
||||
self.refresh()
|
||||
|
||||
def _request(self, endpoint, method, is_secret, params=None, data=None):
|
||||
params = params or {}
|
||||
if is_secret:
|
||||
params['secret'] = self.secret_key
|
||||
else:
|
||||
params['service'] = self.public_key
|
||||
return self._api._request(endpoint, method, params, data)
|
||||
|
||||
@requires_secret_key
|
||||
def send(self, message, title=None, link=None, importance=None):
|
||||
"""Send a message to the service's subscribers.
|
||||
|
||||
:param message: The message body to be sent.
|
||||
:param title: (optional) The message's title. Messages can be without title.
|
||||
:param link: (optional) An URL to be sent with the message.
|
||||
:param importance: (optional) The priority level of the message. May be
|
||||
a number between 1 and 5, where 1 is least important and 5 is most.
|
||||
"""
|
||||
data = NoNoneDict({
|
||||
'message': message,
|
||||
'title': title,
|
||||
'link': link,
|
||||
'level': importance
|
||||
})
|
||||
self._request('message', 'POST', is_secret=True, data=data)
|
||||
|
||||
@requires_secret_key
|
||||
def edit(self, name=None, icon_url=None):
|
||||
"""Edit the service's attributes.
|
||||
|
||||
:param name: (optional) A new name to give the service.
|
||||
:param icon_url: (optional) A new URL to use as the service's icon URL.
|
||||
Set to an empty string to remove the service's icon entirely.
|
||||
"""
|
||||
data = NoNoneDict({
|
||||
'name': name,
|
||||
'icon': icon_url
|
||||
})
|
||||
if not data:
|
||||
return
|
||||
self._request('service', 'PATCH', is_secret=True, data=data)
|
||||
self.name = unicode_type(name)
|
||||
self.icon_url = unicode_type(icon_url)
|
||||
|
||||
@requires_secret_key
|
||||
def delete(self):
|
||||
"""Delete the service. Irreversible."""
|
||||
self._request('service', 'DELETE', is_secret=True)
|
||||
|
||||
def _update_from_data(self, data):
|
||||
self.name = data['name']
|
||||
self.icon_url = data['icon'] or None
|
||||
self.created = data['created']
|
||||
self.public_key = data['public']
|
||||
self.secret_key = data.get('secret', getattr(self, 'secret_key', None))
|
||||
|
||||
def refresh(self):
|
||||
"""Refresh the server's information, in case it could be edited from elsewhere.
|
||||
|
||||
:raises: :exc:`~pushjet.NonexistentError` if the service was deleted before refreshing.
|
||||
"""
|
||||
key_name = 'public'
|
||||
secret = False
|
||||
if self.secret_key is not None:
|
||||
key_name = 'secret'
|
||||
secret = True
|
||||
|
||||
status, response = self._request('service', 'GET', is_secret=secret)
|
||||
if status == requests.codes.NOT_FOUND:
|
||||
raise NonexistentError("A service with the provided {} key "
|
||||
"does not exist (anymore, at least).".format(key_name))
|
||||
self._update_from_data(response['service'])
|
||||
|
||||
@classmethod
|
||||
def _from_data(cls, data):
|
||||
# This might be a no-no, but I see little alternative if
|
||||
# different constructors with different parameters are needed,
|
||||
# *and* a default __init__ constructor should be present.
|
||||
# This, along with the subclassing for custom API URLs, may
|
||||
# very well be one of those pieces of code you look back at
|
||||
# years down the line - or maybe just a couple of weeks - and say
|
||||
# "what the heck was I thinking"? I assure you, though, future me.
|
||||
# This was the most reasonable thing to get the API + argspecs I wanted.
|
||||
obj = cls.__new__(cls)
|
||||
obj._update_from_data(data)
|
||||
return obj
|
||||
|
||||
@classmethod
|
||||
def create(cls, name, icon_url=None):
|
||||
"""Create a new service.
|
||||
|
||||
:param name: The name of the new service.
|
||||
:param icon_url: (optional) A URL to an image to be used as the service's icon.
|
||||
:return: The newly-created :class:`~pushjet.Service`.
|
||||
"""
|
||||
data = NoNoneDict({
|
||||
'name': name,
|
||||
'icon': icon_url
|
||||
})
|
||||
_, response = cls._api._request('service', 'POST', data=data)
|
||||
return cls._from_data(response['service'])
|
||||
|
||||
class Device(PushjetModel):
|
||||
"""The "receiver" for messages. Subscribes to services and receives any
|
||||
messages they send.
|
||||
|
||||
:param uuid: The device's unique ID as a UUID. Does not need to be registered
|
||||
before using it. A UUID can be generated with ``uuid.uuid4()``, for example.
|
||||
:ivar uuid: The UUID the device was initialized with.
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return "<Pushjet Device: {}>".format(self.uuid)
|
||||
|
||||
def __init__(self, uuid):
|
||||
uuid = unicode_type(uuid)
|
||||
if not is_valid_uuid(uuid):
|
||||
raise ValueError("Invalid UUID provided. Try uuid.uuid4().")
|
||||
self.uuid = unicode_type(uuid)
|
||||
|
||||
def _request(self, endpoint, method, params=None, data=None):
|
||||
params = (params or {})
|
||||
params['uuid'] = self.uuid
|
||||
return self._api._request(endpoint, method, params, data)
|
||||
|
||||
def subscribe(self, service):
|
||||
"""Subscribe the device to a service.
|
||||
|
||||
:param service: The service to subscribe to. May be a public key or a :class:`~pushjet.Service`.
|
||||
:return: The :class:`~pushjet.Service` subscribed to.
|
||||
|
||||
:raises: :exc:`~pushjet.NonexistentError` if the provided service does not exist.
|
||||
:raises: :exc:`~pushjet.SubscriptionError` if the provided service is already subscribed to.
|
||||
"""
|
||||
data = {}
|
||||
data['service'] = service.public_key if isinstance(service, Service) else service
|
||||
status, response = self._request('subscription', 'POST', data=data)
|
||||
if status == requests.codes.CONFLICT:
|
||||
raise SubscriptionError("The device is already subscribed to that service.")
|
||||
elif status == requests.codes.NOT_FOUND:
|
||||
raise NonexistentError("A service with the provided public key "
|
||||
"does not exist (anymore, at least).")
|
||||
return self._api.Service._from_data(response['service'])
|
||||
|
||||
def unsubscribe(self, service):
|
||||
"""Unsubscribe the device from a service.
|
||||
|
||||
:param service: The service to unsubscribe from. May be a public key or a :class:`~pushjet.Service`.
|
||||
:raises: :exc:`~pushjet.NonexistentError` if the provided service does not exist.
|
||||
:raises: :exc:`~pushjet.SubscriptionError` if the provided service isn't subscribed to.
|
||||
"""
|
||||
data = {}
|
||||
data['service'] = service.public_key if isinstance(service, Service) else service
|
||||
status, _ = self._request('subscription', 'DELETE', data=data)
|
||||
if status == requests.codes.CONFLICT:
|
||||
raise SubscriptionError("The device is not subscribed to that service.")
|
||||
elif status == requests.codes.NOT_FOUND:
|
||||
raise NonexistentError("A service with the provided public key "
|
||||
"does not exist (anymore, at least).")
|
||||
|
||||
def get_subscriptions(self):
|
||||
"""Get all the subscriptions the device has.
|
||||
|
||||
:return: A list of :class:`~pushjet.Subscription`.
|
||||
"""
|
||||
_, response = self._request('subscription', 'GET')
|
||||
subscriptions = []
|
||||
for subscription_dict in response['subscriptions']:
|
||||
subscriptions.append(Subscription(subscription_dict))
|
||||
return subscriptions
|
||||
|
||||
def get_messages(self):
|
||||
"""Get all new (that is, as of yet unretrieved) messages.
|
||||
|
||||
:return: A list of :class:`~pushjet.Message`.
|
||||
"""
|
||||
_, response = self._request('message', 'GET')
|
||||
messages = []
|
||||
for message_dict in response['messages']:
|
||||
messages.append(Message(message_dict))
|
||||
return messages
|
||||
|
||||
class Subscription(object):
|
||||
"""A subscription to a service, with the metadata that entails.
|
||||
|
||||
:ivar service: The service the subscription is to, as a :class:`~pushjet.Service`.
|
||||
:ivar time_subscribed: When the subscription was made, as seconds from epoch.
|
||||
:ivar last_checked: When the device last retrieved messages from the subscription,
|
||||
as seconds from epoch.
|
||||
:ivar device_uuid: The UUID of the device that owns the subscription.
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return "<Pushjet Subscription to service \"{}\">".format(repr_format(self.service.name))
|
||||
|
||||
def __init__(self, subscription_dict):
|
||||
self.service = Service._from_data(subscription_dict['service'])
|
||||
self.time_subscribed = subscription_dict['timestamp']
|
||||
self.last_checked = subscription_dict['timestamp_checked']
|
||||
self.device_uuid = subscription_dict['uuid'] # Not sure this is needed, but...
|
||||
|
||||
class Message(object):
|
||||
"""A message received from a service.
|
||||
|
||||
:ivar message: The message body.
|
||||
:ivar title: The message title. May be ``None``.
|
||||
:ivar link: The URL the message links to. May be ``None``.
|
||||
:ivar time_sent: When the message was sent, as seconds from epoch.
|
||||
:ivar importance: The message's priority level between 1 and 5, where 1 is
|
||||
least important and 5 is most.
|
||||
:ivar service: The :class:`~pushjet.Service` that sent the message.
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return "<Pushjet Message: \"{}\">".format(repr_format(self.title or self.message))
|
||||
|
||||
def __init__(self, message_dict):
|
||||
self.message = message_dict['message']
|
||||
self.title = message_dict['title'] or None
|
||||
self.link = message_dict['link'] or None
|
||||
self.time_sent = message_dict['timestamp']
|
||||
self.importance = message_dict['level']
|
||||
self.service = Service._from_data(message_dict['service'])
|
||||
|
||||
class Api(object):
|
||||
"""An API with a custom URL. Use this if you're connecting to a self-hosted
|
||||
Pushjet API instance, or a non-standard one in general.
|
||||
|
||||
:param url: The URL to the API instance.
|
||||
:ivar url: The URL to the API instance, as supplied.
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return "<Pushjet Api: {}>".format(self.url).encode(sys.stdout.encoding, errors='replace')
|
||||
|
||||
def __init__(self, url):
|
||||
self.url = unicode_type(url)
|
||||
self.Service = with_api_bound(Service, self)
|
||||
self.Device = with_api_bound(Device, self)
|
||||
|
||||
def _request(self, endpoint, method, params=None, data=None):
|
||||
url = urljoin(self.url, endpoint)
|
||||
try:
|
||||
r = requests.request(method, url, params=params, data=data)
|
||||
except requests.RequestException as e:
|
||||
raise RequestError(e)
|
||||
status = r.status_code
|
||||
if status == requests.codes.INTERNAL_SERVER_ERROR:
|
||||
raise ServerError(
|
||||
"An error occurred in the server while processing your request. "
|
||||
"This should probably be reported to: "
|
||||
"https://github.com/Pushjet/Pushjet-Server-Api/issues"
|
||||
)
|
||||
try:
|
||||
response = r.json()
|
||||
except ValueError:
|
||||
response = {}
|
||||
return status, response
|
||||
|
64
libs/apprise/plugins/NotifyPushjet/pushjet/utilities.py
Normal file
|
@ -0,0 +1,64 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import re
|
||||
import sys
|
||||
from decorator import decorator
|
||||
from .errors import AccessError
|
||||
|
||||
# Help class(...es? Nah. Just singular for now.)
|
||||
|
||||
class NoNoneDict(dict):
|
||||
"""A dict that ignores values that are None. Not completely API-compatible
|
||||
with dict, but contains all that's needed.
|
||||
"""
|
||||
def __repr__(self):
|
||||
return "NoNoneDict({dict})".format(dict=dict.__repr__(self))
|
||||
|
||||
def __init__(self, initial={}):
|
||||
self.update(initial)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if value is not None:
|
||||
dict.__setitem__(self, key, value)
|
||||
|
||||
def update(self, data):
|
||||
for key, value in data.items():
|
||||
self[key] = value
|
||||
|
||||
# Decorators / factories
|
||||
|
||||
@decorator
|
||||
def requires_secret_key(func, self, *args, **kwargs):
|
||||
"""Raise an error if the method is called without a secret key."""
|
||||
if self.secret_key is None:
|
||||
raise AccessError("The Service doesn't have a secret "
|
||||
"key provided, and therefore lacks write permission.")
|
||||
return func(self, *args, **kwargs)
|
||||
|
||||
def with_api_bound(cls, api):
|
||||
new_cls = type(cls.__name__, (cls,), {
|
||||
'_api': api,
|
||||
'__doc__': (
|
||||
"Create a :class:`~pushjet.{name}` bound to the API. "
|
||||
"See :class:`pushjet.{name}` for documentation."
|
||||
).format(name=cls.__name__)
|
||||
})
|
||||
return new_cls
|
||||
|
||||
# Helper functions
|
||||
|
||||
UUID_RE = re.compile(r'^[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}$')
|
||||
PUBLIC_KEY_RE = re.compile(r'^[A-Za-z0-9]{4}-[A-Za-z0-9]{6}-[A-Za-z0-9]{12}-[A-Za-z0-9]{5}-[A-Za-z0-9]{9}$')
|
||||
SECRET_KEY_RE = re.compile(r'^[A-Za-z0-9]{32}$')
|
||||
|
||||
is_valid_uuid = lambda s: UUID_RE.match(s) is not None
|
||||
is_valid_public_key = lambda s: PUBLIC_KEY_RE.match(s) is not None
|
||||
is_valid_secret_key = lambda s: SECRET_KEY_RE.match(s) is not None
|
||||
|
||||
def repr_format(s):
|
||||
s = s.replace('\n', ' ').replace('\r', '')
|
||||
original_length = len(s)
|
||||
s = s[:30]
|
||||
s += '...' if len(s) != original_length else ''
|
||||
s = s.encode(sys.stdout.encoding, errors='replace')
|
||||
return s
|
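Not part of the diff: a tiny illustration of NoNoneDict, which the client above uses to keep optional, unset fields out of request payloads.
payload = NoNoneDict({'message': 'hi', 'title': None, 'link': None, 'level': 3})
print(payload)   # NoNoneDict({'message': 'hi', 'level': 3}) - None values are dropped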
libs/apprise/plugins/NotifyPushover.py
|
@ -0,0 +1,264 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Pushover Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from ..utils import compat_is_basestring
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
|
||||
# Flag used as a placeholder to sending to all devices
|
||||
PUSHOVER_SEND_TO_ALL = 'ALL_DEVICES'
|
||||
|
||||
# Used to validate API Key
|
||||
VALIDATE_TOKEN = re.compile(r'^[a-z0-9]{30}$', re.I)
|
||||
|
||||
# Used to detect a User and/or Group
|
||||
VALIDATE_USERGROUP = re.compile(r'^[a-z0-9]{30}$', re.I)
|
||||
|
||||
# Used to detect a Device
|
||||
VALIDATE_DEVICE = re.compile(r'^[a-z0-9_]{1,25}$', re.I)
|
||||
|
||||
|
||||
# Priorities
|
||||
class PushoverPriority(object):
|
||||
LOW = -2
|
||||
MODERATE = -1
|
||||
NORMAL = 0
|
||||
HIGH = 1
|
||||
EMERGENCY = 2
|
||||
|
||||
|
||||
PUSHOVER_PRIORITIES = (
|
||||
PushoverPriority.LOW,
|
||||
PushoverPriority.MODERATE,
|
||||
PushoverPriority.NORMAL,
|
||||
PushoverPriority.HIGH,
|
||||
PushoverPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
# Used to break path apart into list of devices
|
||||
DEVICE_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHOVER_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHOVER_HTTP_ERROR_MAP.update({
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
|
||||
|
||||
class NotifyPushover(NotifyBase):
|
||||
"""
|
||||
A wrapper for Pushover Notifications
|
||||
"""
|
||||
|
||||
# All pushover requests are secure
|
||||
secure_protocol = 'pover'
|
||||
|
||||
# Pushover uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.pushover.net/1/messages.json'
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 512
|
||||
|
||||
def __init__(self, token, devices=None, priority=None, **kwargs):
|
||||
"""
|
||||
Initialize Pushover Object
|
||||
"""
|
||||
super(NotifyPushover, self).__init__(**kwargs)
|
||||
|
||||
try:
|
||||
# The token associated with the account
|
||||
self.token = token.strip()
|
||||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
self.logger.warning('No API Token was specified.')
|
||||
raise TypeError('No API Token was specified.')
|
||||
|
||||
if not VALIDATE_TOKEN.match(self.token):
|
||||
self.logger.warning(
|
||||
'The API Token specified (%s) is invalid.' % token,
|
||||
)
|
||||
raise TypeError(
|
||||
'The API Token specified (%s) is invalid.' % token,
|
||||
)
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
self.devices = [x for x in filter(bool, DEVICE_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
|
||||
elif isinstance(devices, (set, tuple, list)):
|
||||
self.devices = devices
|
||||
|
||||
else:
|
||||
self.devices = list()
|
||||
|
||||
if len(self.devices) == 0:
|
||||
self.devices = (PUSHOVER_SEND_TO_ALL, )
|
||||
|
||||
# The Priority of the message
|
||||
if priority not in PUSHOVER_PRIORITIES:
|
||||
self.priority = PushoverPriority.NORMAL
|
||||
|
||||
else:
|
||||
self.priority = priority
|
||||
|
||||
if not self.user:
|
||||
self.logger.warning('No user was specified.')
|
||||
raise TypeError('No user was specified.')
|
||||
|
||||
if not VALIDATE_USERGROUP.match(self.user):
|
||||
self.logger.warning(
|
||||
'The user/group specified (%s) is invalid.' % self.user,
|
||||
)
|
||||
raise TypeError(
|
||||
'The user/group specified (%s) is invalid.' % self.user,
|
||||
)
|
||||
|
||||
def notify(self, title, body, **kwargs):
|
||||
"""
|
||||
Perform Pushover Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/x-www-form-urlencoded'
|
||||
}
|
||||
auth = (self.token, '')
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Create a copy of the devices list
|
||||
devices = list(self.devices)
|
||||
while len(devices):
|
||||
device = devices.pop(0)
|
||||
|
||||
if VALIDATE_DEVICE.match(device) is None:
|
||||
self.logger.warning(
|
||||
'The device specified (%s) is invalid.' % device,
|
||||
)
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'token': self.token,
|
||||
'user': self.user,
|
||||
'priority': str(self.priority),
|
||||
'title': title,
|
||||
'message': body,
|
||||
'device': device,
|
||||
}
|
||||
|
||||
self.logger.debug('Pushover POST URL: %s (cert_verify=%r)' % (
|
||||
self.notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Pushover Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
self.notify_url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
device,
|
||||
PUSHOVER_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover:%s '
|
||||
'notification (error=%s).' % (
|
||||
device,
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
has_error = True
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent Pushover notification to %s.' % device)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Pushover:%s ' % (
|
||||
device) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
if len(devices):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
return not has_error
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
devices = NotifyBase.unquote(results['fullpath'])
|
||||
|
||||
if 'priority' in results['qsd'] and len(results['qsd']['priority']):
|
||||
_map = {
|
||||
'l': PushoverPriority.LOW,
|
||||
'-2': PushoverPriority.LOW,
|
||||
'm': PushoverPriority.MODERATE,
|
||||
'-1': PushoverPriority.MODERATE,
|
||||
'n': PushoverPriority.NORMAL,
|
||||
'0': PushoverPriority.NORMAL,
|
||||
'h': PushoverPriority.HIGH,
|
||||
'1': PushoverPriority.HIGH,
|
||||
'e': PushoverPriority.EMERGENCY,
|
||||
'2': PushoverPriority.EMERGENCY,
|
||||
}
|
||||
try:
|
||||
results['priority'] = \
|
||||
_map[results['qsd']['priority'][0].lower()]
|
||||
|
||||
except KeyError:
|
||||
# No priority was set
|
||||
pass
|
||||
|
||||
results['token'] = results['host']
|
||||
results['devices'] = devices
|
||||
|
||||
return results
|
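Not part of the diff: a sketch of how parse_url() above maps a pover:// URL; the 30-character keys and device names are placeholders, and the 'user' entry is assumed to be populated by NotifyBase.parse_url().
userkey, apptoken = 'u' * 30, 'a' * 30        # hypothetical Pushover user key and app token
results = NotifyPushover.parse_url(
    'pover://%s@%s/phone/tablet?priority=e' % (userkey, apptoken))
# results['token']    == apptoken              (URL host)
# results['devices']  == '/phone/tablet'       (split later by DEVICE_LIST_DELIM)
# results['priority'] == PushoverPriority.EMERGENCY
# results['user']     == userkey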
libs/apprise/plugins/NotifyRocketChat.py
|
@ -0,0 +1,344 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Notify Rocket.Chat Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import loads
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
IS_CHANNEL = re.compile(r'^#(?P<name>[A-Za-z0-9]+)$')
|
||||
IS_ROOM_ID = re.compile(r'^(?P<name>[A-Za-z0-9]+)$')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
RC_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
RC_HTTP_ERROR_MAP.update({
|
||||
400: 'Channel/RoomId is in the wrong format, or missing from server.',
|
||||
401: 'Authentication tokens provided are invalid or missing.',
|
||||
})
|
||||
|
||||
# Used to break apart list of potential tags by their delimiter
|
||||
# into a usable list.
|
||||
LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
|
||||
class NotifyRocketChat(NotifyBase):
|
||||
"""
|
||||
A wrapper for Notify Rocket.Chat Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'rocket'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'rockets'
|
||||
|
||||
# Defines the maximum allowable characters in the title
|
||||
title_maxlen = 200
|
||||
|
||||
def __init__(self, recipients=None, **kwargs):
|
||||
"""
|
||||
Initialize Notify Rocket.Chat Object
|
||||
"""
|
||||
super(NotifyRocketChat, self).__init__(**kwargs)
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
# Prepare our URL
|
||||
self.api_url = '%s://%s' % (self.schema, self.host)
|
||||
|
||||
if isinstance(self.port, int):
|
||||
self.api_url += ':%d' % self.port
|
||||
|
||||
self.api_url += '/api/v1/'
|
||||
|
||||
# Initialize channels list
|
||||
self.channels = list()
|
||||
|
||||
# Initialize room list
|
||||
self.rooms = list()
|
||||
|
||||
if recipients is None:
|
||||
recipients = []
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
recipients = [x for x in filter(bool, LIST_DELIM.split(
|
||||
recipients,
|
||||
))]
|
||||
|
||||
elif not isinstance(recipients, (set, tuple, list)):
|
||||
recipients = []
|
||||
|
||||
# Validate recipients and drop bad ones:
|
||||
for recipient in recipients:
|
||||
result = IS_CHANNEL.match(recipient)
|
||||
if result:
|
||||
# store valid device
|
||||
self.channels.append(result.group('name'))
|
||||
continue
|
||||
|
||||
result = IS_ROOM_ID.match(recipient)
|
||||
if result:
|
||||
# store valid room
|
||||
self.rooms.append(result.group('name'))
|
||||
continue
|
||||
|
||||
self.logger.warning(
|
||||
'Dropped invalid channel/room ' +
|
||||
'(%s) specified.' % recipient,
|
||||
)
|
||||
|
||||
if len(self.rooms) == 0 and len(self.channels) == 0:
|
||||
raise TypeError(
|
||||
'No Rocket.Chat room and/or channels specified to notify.'
|
||||
)
|
||||
|
||||
# Used to track token headers upon authentication (if successful)
|
||||
self.headers = {}
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
wrapper to send_notification since we can alert more than one channel
|
||||
"""
|
||||
|
||||
# Track whether we authenticated okay
|
||||
|
||||
if not self.login():
|
||||
return False
|
||||
|
||||
# Prepare our message
|
||||
text = '*%s*\r\n%s' % (title.replace('*', '\\*'), body)
|
||||
|
||||
# Initialize our error tracking
|
||||
has_error = False
|
||||
|
||||
# Create a copy of our rooms and channels to notify against
|
||||
channels = list(self.channels)
|
||||
rooms = list(self.rooms)
|
||||
|
||||
while len(channels) > 0:
|
||||
# Get Channel
|
||||
channel = channels.pop(0)
|
||||
|
||||
if not self.send_notification(
|
||||
{
|
||||
'text': text,
|
||||
'channel': channel,
|
||||
}, notify_type=notify_type, **kwargs):
|
||||
|
||||
# toggle flag
|
||||
has_error = True
|
||||
|
||||
if len(channels) + len(rooms) > 0:
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
# Send all our defined room id's
|
||||
while len(rooms):
|
||||
# Get Room
|
||||
room = rooms.pop(0)
|
||||
|
||||
if not self.send_notification(
|
||||
{
|
||||
'text': text,
|
||||
'roomId': room,
|
||||
}, notify_type=notify_type, **kwargs):
|
||||
|
||||
# toggle flag
|
||||
has_error = True
|
||||
|
||||
if len(rooms) > 0:
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
# logout
|
||||
self.logout()
|
||||
|
||||
return not has_error
|
||||
|
||||
def send_notification(self, payload, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Notify Rocket.Chat Notification
|
||||
"""
|
||||
|
||||
self.logger.debug('Rocket.Chat POST URL: %s (cert_verify=%r)' % (
|
||||
self.api_url + 'chat.postMessage', self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Rocket.Chat Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
self.api_url + 'chat.postMessage',
|
||||
data=payload,
|
||||
headers=self.headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat notification: ' +
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat notification ' +
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.debug('Rocket.Chat Server Response: %s.' % r.text)
|
||||
self.logger.info('Sent Rocket.Chat notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Rocket.Chat ' +
|
||||
'notification.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def login(self):
|
||||
"""
|
||||
login to our server
|
||||
|
||||
"""
|
||||
payload = {
|
||||
'username': self.user,
|
||||
'password': self.password,
|
||||
}
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
self.api_url + 'login',
|
||||
data=payload,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate with Rocket.Chat server: ' +
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate with Rocket.Chat server ' +
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.debug('Rocket.Chat authentication successful')
|
||||
response = loads(r.text)
|
||||
if response.get('status') != "success":
|
||||
self.logger.warning(
|
||||
'Could not authenticate with Rocket.Chat server.')
|
||||
return False
|
||||
|
||||
# Set our headers for further communication
|
||||
self.headers['X-Auth-Token'] = response.get(
|
||||
'data', {'authToken': None}).get('authToken')
|
||||
self.headers['X-User-Id'] = response.get(
|
||||
'data', {'userId': None}).get('userId')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred authenticating to the ' +
|
||||
'Rocket.Chat server.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def logout(self):
|
||||
"""
|
||||
logout of our server
|
||||
"""
|
||||
try:
|
||||
r = requests.post(
|
||||
self.api_url + 'logout',
|
||||
headers=self.headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to log off Rocket.Chat server: ' +
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to log off Rocket.Chat server ' +
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.debug(
|
||||
'Rocket.Chat log off successful; response %s.' % (
|
||||
r.text))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred logging off the ' +
|
||||
'Rocket.Chat server')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
results['recipients'] = NotifyBase.unquote(results['fullpath'])
|
||||
|
||||
return results
|
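Not part of the diff: a small sketch of how the recipients string is classified by __init__() above (a leading '#' marks a channel, any other alphanumeric entry is treated as a room id); the credentials are placeholders and NotifyBase is assumed to accept the usual user/password/host keywords the code above relies on.
obj = NotifyRocketChat(user='botuser', password='secret', host='chat.example.com',
                       recipients='#general GENERAL 163dnqokd2abc')
# obj.channels -> ['general']                   (IS_CHANNEL strips the leading '#')
# obj.rooms    -> ['GENERAL', '163dnqokd2abc']  (IS_ROOM_ID matches plain ids)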
libs/apprise/plugins/NotifySlack.py
|
@ -0,0 +1,319 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Slack Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
# To use this plugin, you need to first access https://api.slack.com
|
||||
# Specifically https://my.slack.com/services/new/incoming-webhook/
|
||||
# to create a new incoming webhook for your account. You'll need to
|
||||
# follow the wizard to pre-determine the channel(s) you want your
|
||||
# message to broadcast to, and when you're complete, you will
|
||||
# receive a URL that looks something like this:
|
||||
# https://hooks.slack.com/services/T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ
|
||||
#                                  ^         ^         ^
|
||||
#                                  |         |         |
|
||||
#  These are important <-----------^---------^---------^
|
||||
#
|
||||
#
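# For example, with the (hypothetical) tokens shown above, the corresponding
# apprise URL would look roughly like:
#   slack://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#general
# where the three path components become token_a, token_b and token_c below
# and the trailing entry names a channel to notify.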
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
from time import time
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Token required as part of the API request
|
||||
# /AAAAAAAAA/........./........................
|
||||
VALIDATE_TOKEN_A = re.compile(r'[A-Z0-9]{9}')
|
||||
|
||||
# Token required as part of the API request
|
||||
# /........./BBBBBBBBB/........................
|
||||
VALIDATE_TOKEN_B = re.compile(r'[A-Z0-9]{9}')
|
||||
|
||||
# Token required as part of the API request
|
||||
# /........./........./CCCCCCCCCCCCCCCCCCCCCCCC
|
||||
VALIDATE_TOKEN_C = re.compile(r'[A-Za-z0-9]{24}')
|
||||
|
||||
# Default User
|
||||
SLACK_DEFAULT_USER = 'apprise'
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
SLACK_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
SLACK_HTTP_ERROR_MAP.update({
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
|
||||
# Used to break path apart into list of channels
|
||||
CHANNEL_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
|
||||
|
||||
# Used to detect a channel
|
||||
IS_CHANNEL_RE = re.compile(r'[+#@]?([A-Z0-9_]{1,32})', re.I)
|
||||
|
||||
|
||||
class NotifySlack(NotifyBase):
|
||||
"""
|
||||
A wrapper for Slack Notifications
|
||||
"""
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'slack'
|
||||
|
||||
# Slack uses the http protocol with JSON requests
|
||||
notify_url = 'https://hooks.slack.com/services'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_72
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
def __init__(self, token_a, token_b, token_c, channels, **kwargs):
|
||||
"""
|
||||
Initialize Slack Object
|
||||
"""
|
||||
super(NotifySlack, self).__init__(**kwargs)
|
||||
|
||||
if not VALIDATE_TOKEN_A.match(token_a.strip()):
|
||||
self.logger.warning(
|
||||
'The first API Token specified (%s) is invalid.' % token_a,
|
||||
)
|
||||
raise TypeError(
|
||||
'The first API Token specified (%s) is invalid.' % token_a,
|
||||
)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_a = token_a.strip()
|
||||
|
||||
if not VALIDATE_TOKEN_B.match(token_b.strip()):
|
||||
self.logger.warning(
|
||||
'The second API Token specified (%s) is invalid.' % token_b,
|
||||
)
|
||||
raise TypeError(
|
||||
'The second API Token specified (%s) is invalid.' % token_b,
|
||||
)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_b = token_b.strip()
|
||||
|
||||
if not VALIDATE_TOKEN_C.match(token_c.strip()):
|
||||
self.logger.warning(
|
||||
'The third API Token specified (%s) is invalid.' % token_c,
|
||||
)
|
||||
raise TypeError(
|
||||
'The third API Token specified (%s) is invalid.' % token_c,
|
||||
)
|
||||
|
||||
# The token associated with the account
|
||||
self.token_c = token_c.strip()
|
||||
|
||||
if not self.user:
|
||||
self.logger.warning(
|
||||
'No user was specified; using %s.' % SLACK_DEFAULT_USER)
|
||||
self.user = SLACK_DEFAULT_USER
|
||||
|
||||
if compat_is_basestring(channels):
|
||||
self.channels = [x for x in filter(bool, CHANNEL_LIST_DELIM.split(
|
||||
channels,
|
||||
))]
|
||||
|
||||
elif isinstance(channels, (set, tuple, list)):
|
||||
self.channels = channels
|
||||
|
||||
else:
|
||||
self.channels = list()
|
||||
|
||||
if len(self.channels) == 0:
|
||||
self.logger.warning('No channel(s) were specified.')
|
||||
raise TypeError('No channel(s) were specified.')
|
||||
|
||||
# Formatting requirements are defined here:
|
||||
# https://api.slack.com/docs/message-formatting
|
||||
self._re_formatting_map = {
|
||||
# New lines must become the string version
|
||||
r'\r\*\n': '\\n',
|
||||
# Escape other special characters
|
||||
r'&': '&amp;',
r'<': '&lt;',
r'>': '&gt;',
|
||||
}
|
||||
|
||||
# Iterate over above list and store content accordingly
|
||||
self._re_formatting_rules = re.compile(
|
||||
r'(' + '|'.join(self._re_formatting_map.keys()) + r')',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Slack Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
notify_okay = True
|
||||
|
||||
# Perform Formatting
|
||||
title = self._re_formatting_rules.sub( # pragma: no branch
|
||||
lambda x: self._re_formatting_map[x.group()], title,
|
||||
)
|
||||
body = self._re_formatting_rules.sub( # pragma: no branch
|
||||
lambda x: self._re_formatting_map[x.group()], body,
|
||||
)
|
||||
url = '%s/%s/%s/%s' % (
|
||||
self.notify_url,
|
||||
self.token_a,
|
||||
self.token_b,
|
||||
self.token_c,
|
||||
)
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
|
||||
# Create a copy of the channel list
|
||||
channels = list(self.channels)
|
||||
while len(channels):
|
||||
channel = channels.pop(0)
|
||||
if not IS_CHANNEL_RE.match(channel):
|
||||
self.logger.warning(
|
||||
"The specified channel '%s' is invalid; skipping." % (
|
||||
channel,
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
if len(channel) > 1 and channel[0] == '+':
|
||||
# Treat as encoded id if prefixed with a +
|
||||
_channel = channel[1:]
|
||||
|
||||
elif len(channel) > 1 and channel[0] == '@':
|
||||
# Treat @ value 'as is'
|
||||
_channel = channel
|
||||
|
||||
else:
|
||||
# Prefix with channel hash tag
|
||||
_channel = '#%s' % channel
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'channel': _channel,
|
||||
'username': self.user,
|
||||
# Use Markdown language
|
||||
'mrkdwn': True,
|
||||
'attachments': [{
|
||||
'title': title,
|
||||
'text': body,
|
||||
'color': self.color(notify_type),
|
||||
# Time
|
||||
'ts': time(),
|
||||
'footer': self.app_id,
|
||||
}],
|
||||
}
|
||||
|
||||
if image_url:
|
||||
payload['attachments'][0]['footer_icon'] = image_url
|
||||
|
||||
self.logger.debug('Slack POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Slack Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Slack:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
channel,
|
||||
SLACK_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Slack:%s '
|
||||
'notification (error=%s).' % (
|
||||
channel,
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
notify_okay = False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Slack notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Slack:%s ' % (
|
||||
channel) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
notify_okay = False
|
||||
|
||||
if len(channels):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
return notify_okay
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# The first token is stored in the hostname
|
||||
token_a = results['host']
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
token_b, token_c = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][0:2]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
# We're done
|
||||
return None
|
||||
|
||||
channels = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][2:]
|
||||
|
||||
results['token_a'] = token_a
|
||||
results['token_b'] = token_b
|
||||
results['token_c'] = token_c
|
||||
results['channels'] = channels
|
||||
|
||||
return results
|
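# A minimal usage sketch for the plugin above, assuming the bundled 'apprise'
# package is importable; the tokens and channel are the placeholder values
# from the header comments, not real credentials.
import apprise

notifier = apprise.Apprise()
notifier.add('slack://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/general')
notifier.notify(title='Bazarr', body='Subtitles downloaded')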
libs/apprise/plugins/NotifyStride.py
@ -0,0 +1,238 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Stride Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
# When you sign up with stride.com, they'll ask if you want to join a channel
|
||||
# or create your own.
|
||||
#
|
||||
# Once you get set up, you'll have the option of creating a channel.
|
||||
#
|
||||
# Now you'll want to connect apprise up. To do this, you need to go to
|
||||
# the App Manager and choose to 'Connect your own app'. It will get you
|
||||
# to provide a 'token name' which can be whatever you want. Call it
|
||||
# 'Apprise' if you want (it really doesn't matter) and then click the
|
||||
# 'Create' button.
|
||||
#
|
||||
# When it completes it will generate a token that looks something like:
|
||||
# HQFtq4pF8rKFOlKTm9Th
|
||||
#
|
||||
# This will become your AUTH_TOKEN
|
||||
#
|
||||
# It will also provide you a conversation URL that might look like:
|
||||
# https://api.atlassian.com/site/ce171c45-79ae-4fec-a73d-5a4b7a322872/\
|
||||
# conversation/a54a80b3-eaad-4564-9a3a-f6653bcfb100/message
|
||||
#
|
||||
# Simplified, it looks like this:
|
||||
# https://api.atlassian.com/site/CLOUD_ID/conversation/CONVO_ID/message
|
||||
#
|
||||
# This plugin will simply work using the url of:
|
||||
# stride://AUTH_TOKEN/CLOUD_ID/CONVO_ID
|
||||
#
|
||||
import requests
|
||||
import re
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
# A Simple UUID4 checker
|
||||
IS_VALID_TOKEN = re.compile(
|
||||
r'([0-9a-f]{8})-*([0-9a-f]{4})-*(4[0-9a-f]{3})-*'
|
||||
r'([89ab][0-9a-f]{3})-*([0-9a-f]{12})', re.I)
|
||||
|
||||
|
||||
class NotifyStride(NotifyBase):
|
||||
"""
|
||||
A wrapper to Stride Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'stride'
|
||||
|
||||
# Stride Webhook
|
||||
notify_url = 'https://api.atlassian.com/site/{cloud_id}/' \
|
||||
'conversation/{convo_id}/message'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_256
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 2000
|
||||
|
||||
def __init__(self, auth_token, cloud_id, convo_id, **kwargs):
|
||||
"""
|
||||
Initialize Stride Object
|
||||
|
||||
"""
|
||||
super(NotifyStride, self).__init__(**kwargs)
|
||||
|
||||
if not auth_token:
|
||||
raise TypeError(
|
||||
'An invalid Authorization token was specified.'
|
||||
)
|
||||
|
||||
if not cloud_id:
|
||||
raise TypeError('No Cloud ID was specified.')
|
||||
|
||||
cloud_id_re = IS_VALID_TOKEN.match(cloud_id)
|
||||
if cloud_id_re is None:
|
||||
raise TypeError('The specified Cloud ID is not a valid UUID.')
|
||||
|
||||
if not convo_id:
|
||||
raise TypeError('No Conversation ID was specified.')
|
||||
|
||||
convo_id_re = IS_VALID_TOKEN.match(convo_id)
|
||||
if convo_id_re is None:
|
||||
raise TypeError(
|
||||
'The specified Conversation ID is not a valid UUID.')
|
||||
|
||||
# Store our validated token
|
||||
self.cloud_id = '{0}-{1}-{2}-{3}-{4}'.format(
|
||||
cloud_id_re.group(0),
|
||||
cloud_id_re.group(1),
|
||||
cloud_id_re.group(2),
|
||||
cloud_id_re.group(3),
|
||||
cloud_id_re.group(4),
|
||||
)
|
||||
|
||||
# Store our validated token
|
||||
self.convo_id = '{0}-{1}-{2}-{3}-{4}'.format(
|
||||
convo_id_re.group(0),
|
||||
convo_id_re.group(1),
|
||||
convo_id_re.group(2),
|
||||
convo_id_re.group(3),
|
||||
convo_id_re.group(4),
|
||||
)
|
||||
|
||||
self.auth_token = auth_token
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Stride Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Authorization': 'Bearer {auth_token}'.format(
|
||||
auth_token=self.auth_token),
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# Prepare JSON Object
|
||||
payload = {
|
||||
"body": {
|
||||
"version": 1,
|
||||
"type": "doc",
|
||||
"content": [{
|
||||
"type": "paragraph",
|
||||
"content": [{
|
||||
"type": "text",
|
||||
"text": body,
|
||||
}],
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
# Construct Notify URL
|
||||
notify_url = self.notify_url.format(
|
||||
cloud_id=self.cloud_id,
|
||||
convo_id=self.convo_id,
|
||||
)
|
||||
|
||||
self.logger.debug('Stride POST URL: %s (cert_verify=%r)' % (
|
||||
notify_url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Stride Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
notify_url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code not in (
|
||||
requests.codes.ok, requests.codes.no_content):
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Stride notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Stride notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Stride notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Stride '
|
||||
'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
Syntax:
|
||||
stride://auth_token/cloud_id/convo_id
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Store our Authentication Token
|
||||
auth_token = results['host']
|
||||
|
||||
# Now fetch our tokens
|
||||
try:
|
||||
(ta, tb) = [x for x in filter(bool, NotifyBase.split_path(
|
||||
results['fullpath']))][0:2]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
ta = None
|
||||
tb = None
|
||||
|
||||
results['cloud_id'] = ta
|
||||
results['convo_id'] = tb
|
||||
results['auth_token'] = auth_token
|
||||
|
||||
return results
|
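# A small sketch of how the stride:// form documented above is decomposed;
# the token and UUIDs are the sample values from the comments, not real ones.
results = NotifyStride.parse_url(
    'stride://HQFtq4pF8rKFOlKTm9Th/'
    'ce171c45-79ae-4fec-a73d-5a4b7a322872/'
    'a54a80b3-eaad-4564-9a3a-f6653bcfb100')
# results now carries 'auth_token', 'cloud_id' and 'convo_id', which is the
# keyword set NotifyStride(**results) expects.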
libs/apprise/plugins/NotifyTelegram.py
@ -0,0 +1,537 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Telegram Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
# To use this plugin, you need to first access https://api.telegram.org
|
||||
# You need to create a bot and acquire its Token Identifier (bot_token)
|
||||
#
|
||||
# Basically you need to create a chat with a user called the 'BotFather'
|
||||
# and type: /newbot
|
||||
#
|
||||
# Then follow through the wizard; it will provide you with an API key
# that looks like this: 123456789:alphanumeric_characters
|
||||
#
|
||||
# Each chat a bot joins will have a chat_id associated with it.
|
||||
# You will need this value as well to send the notification.
|
||||
#
|
||||
# Log into the webpage version of the site if you like by accessing:
|
||||
# https://web.telegram.org
|
||||
#
|
||||
# You can check to see if your entry is working using:
|
||||
# https://api.telegram.org/botAPI_KEY/getMe
|
||||
#
|
||||
# Pay attention to the word 'bot' that must be present in front of your
|
||||
# api key that the BotFather gave you.
|
||||
#
|
||||
# For example, a url might look like this:
|
||||
# https://api.telegram.org/bot123456789:alphanumeric_characters/getMe
|
||||
#
|
||||
# Development API Reference::
|
||||
# - https://core.telegram.org/bots/api
|
||||
import requests
|
||||
import re
|
||||
|
||||
from os.path import basename
|
||||
|
||||
from json import loads
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
from ..utils import parse_bool
|
||||
|
||||
TELEGRAM_IMAGE_XY = NotifyImageSize.XY_256
|
||||
|
||||
# Token required as part of the API request
|
||||
# allow the word 'bot' in front
|
||||
VALIDATE_BOT_TOKEN = re.compile(
|
||||
r'^(bot)?(?P<key>[0-9]+:[a-z0-9_-]+)/*$',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Chat ID is required
|
||||
# If the Chat ID is positive, then it's addressed to a single person
|
||||
# If the Chat ID is negative, then it's targeting a group
|
||||
IS_CHAT_ID_RE = re.compile(
|
||||
r'^(@*(?P<idno>-?[0-9]{1,32})|(?P<name>[a-z_-][a-z0-9_-]+))$',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Used to break path apart into list of chat identifiers
|
||||
CHAT_ID_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
|
||||
|
||||
|
||||
class NotifyTelegram(NotifyBase):
|
||||
"""
|
||||
A wrapper for Telegram Notifications
|
||||
"""
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'tgram'
|
||||
|
||||
# Telegram uses the http protocol with JSON requests
|
||||
notify_url = 'https://api.telegram.org/bot'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_256
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 4096
|
||||
|
||||
def __init__(self, bot_token, chat_ids, detect_bot_owner=True,
|
||||
include_image=True, **kwargs):
|
||||
"""
|
||||
Initialize Telegram Object
|
||||
"""
|
||||
super(NotifyTelegram, self).__init__(**kwargs)
|
||||
|
||||
try:
|
||||
self.bot_token = bot_token.strip()
|
||||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
self.logger.warning('No Bot Token was specified.')
|
||||
raise TypeError('No Bot Token was specified.')
|
||||
|
||||
result = VALIDATE_BOT_TOKEN.match(self.bot_token)
|
||||
if not result:
|
||||
raise TypeError(
|
||||
'The Bot Token specified (%s) is invalid.' % bot_token,
|
||||
)
|
||||
|
||||
# Store our Bot Token
|
||||
self.bot_token = result.group('key')
|
||||
|
||||
if compat_is_basestring(chat_ids):
|
||||
self.chat_ids = [x for x in filter(bool, CHAT_ID_LIST_DELIM.split(
|
||||
chat_ids,
|
||||
))]
|
||||
|
||||
elif isinstance(chat_ids, (set, tuple, list)):
|
||||
self.chat_ids = list(chat_ids)
|
||||
|
||||
else:
|
||||
self.chat_ids = list()
|
||||
|
||||
if self.user:
|
||||
# Treat this as a channel too
|
||||
self.chat_ids.append(self.user)
|
||||
|
||||
if len(self.chat_ids) == 0 and detect_bot_owner:
|
||||
_id = self.detect_bot_owner()
|
||||
if _id:
|
||||
# Store our id
|
||||
self.chat_ids = [str(_id)]
|
||||
|
||||
if len(self.chat_ids) == 0:
|
||||
self.logger.warning('No chat_id(s) were specified.')
|
||||
raise TypeError('No chat_id(s) were specified.')
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
self.include_image = include_image
|
||||
|
||||
def send_image(self, chat_id, notify_type):
|
||||
"""
|
||||
Sends a sticker based on the specified notify type
|
||||
|
||||
"""
|
||||
|
||||
# The URL; we do not set headers because the api doesn't seem to like
|
||||
# when we set one.
|
||||
url = '%s%s/%s' % (
|
||||
self.notify_url,
|
||||
self.bot_token,
|
||||
'sendPhoto'
|
||||
)
|
||||
|
||||
path = self.image_path(notify_type)
|
||||
if not path:
|
||||
# No image to send
|
||||
self.logger.debug(
|
||||
'Telegram Image does not exist for %s' % (
|
||||
notify_type))
|
||||
return None
|
||||
|
||||
files = {'photo': (basename(path), open(path, 'rb'))}
|
||||
|
||||
payload = {
|
||||
'chat_id': chat_id,
|
||||
}
|
||||
|
||||
self.logger.debug(
|
||||
'Telegram Image POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate))
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
files=files,
|
||||
data=payload,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to post Telegram Image: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram Image. (error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
return False
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A connection error occurred posting Telegram Image.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def detect_bot_owner(self):
|
||||
"""
|
||||
Takes a bot and attempts to detect its chat id from that
|
||||
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
url = '%s%s/%s' % (
|
||||
self.notify_url,
|
||||
self.bot_token,
|
||||
'getUpdates'
|
||||
)
|
||||
|
||||
self.logger.debug(
|
||||
'Telegram User Detection POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate))
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
|
||||
try:
|
||||
# Try to get the error message if we can:
|
||||
error_msg = loads(r.content)['description']
|
||||
|
||||
except:
|
||||
error_msg = None
|
||||
|
||||
try:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user: (%s) %s.' % (
|
||||
r.status_code, error_msg))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user. (error=%s).' % (
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
return 0
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A connection error occurred detecting Telegram User.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return 0
|
||||
|
||||
# A Response might look something like this:
|
||||
# {
|
||||
# "ok":true,
|
||||
# "result":[{
|
||||
# "update_id":645421321,
|
||||
# "message":{
|
||||
# "message_id":1,
|
||||
# "from":{
|
||||
# "id":532389719,
|
||||
# "is_bot":false,
|
||||
# "first_name":"Chris",
|
||||
# "language_code":"en-US"
|
||||
# },
|
||||
# "chat":{
|
||||
# "id":532389719,
|
||||
# "first_name":"Chris",
|
||||
# "type":"private"
|
||||
# },
|
||||
# "date":1519694394,
|
||||
# "text":"/start",
|
||||
# "entities":[{"offset":0,"length":6,"type":"bot_command"}]}}]
|
||||
|
||||
# Load our response and attempt to fetch our userid
|
||||
response = loads(r.content)
|
||||
if 'ok' in response and response['ok'] is True:
|
||||
start = re.compile(r'^\s*\/start', re.I)
|
||||
for _msg in iter(response['result']):
|
||||
# Find /start
|
||||
if not start.search(_msg['message']['text']):
|
||||
continue
|
||||
|
||||
_id = _msg['message']['from'].get('id', 0)
|
||||
_user = _msg['message']['from'].get('first_name')
|
||||
self.logger.info('Detected telegram user %s (userid=%d)' % (
|
||||
_user, _id))
|
||||
# Return our detected userid
|
||||
return _id
|
||||
|
||||
self.logger.warning(
|
||||
'Could not detect bot owner. Is it running (/start)?')
|
||||
|
||||
return 0
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Telegram Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
url = '%s%s/%s' % (
|
||||
self.notify_url,
|
||||
self.bot_token,
|
||||
'sendMessage'
|
||||
)
|
||||
|
||||
payload = {}
|
||||
|
||||
# HTML Spaces (&nbsp;) and tabs (&emsp;) aren't supported
# See https://core.telegram.org/bots/api#html-style
title = re.sub('&nbsp;?', ' ', title, re.I)
body = re.sub('&nbsp;?', ' ', body, re.I)
# Tabs become 3 spaces
title = re.sub('&emsp;?', '   ', title, re.I)
body = re.sub('&emsp;?', '   ', body, re.I)
|
||||
|
||||
# HTML
|
||||
title = NotifyBase.escape_html(title, whitespace=False)
|
||||
body = NotifyBase.escape_html(body, whitespace=False)
|
||||
|
||||
payload['parse_mode'] = 'HTML'
|
||||
|
||||
payload['text'] = '<b>%s</b>\r\n%s' % (
|
||||
title,
|
||||
body,
|
||||
)
|
||||
|
||||
# Create a copy of the chat_ids list
|
||||
chat_ids = list(self.chat_ids)
|
||||
while len(chat_ids):
|
||||
chat_id = chat_ids.pop(0)
|
||||
chat_id = IS_CHAT_ID_RE.match(chat_id)
|
||||
if not chat_id:
|
||||
self.logger.warning(
|
||||
"The specified chat_id '%s' is invalid; skipping." % (
|
||||
chat_id,
|
||||
)
|
||||
)
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
if chat_id.group('name') is not None:
|
||||
# Name
|
||||
payload['chat_id'] = '@%s' % chat_id.group('name')
|
||||
|
||||
else:
|
||||
# ID
|
||||
payload['chat_id'] = int(chat_id.group('idno'))
|
||||
|
||||
if self.include_image is True:
|
||||
# Send an image
|
||||
if self.send_image(
|
||||
payload['chat_id'], notify_type) is not None:
|
||||
# We sent a post (whether we were successful or not)
|
||||
# we still hit the remote server... just throttle
|
||||
# before our next hit server query
|
||||
self.throttle()
|
||||
|
||||
self.logger.debug('Telegram POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Telegram Payload: %s' % str(payload))
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
|
||||
try:
|
||||
# Try to get the error message if we can:
|
||||
error_msg = loads(r.content)['description']
|
||||
|
||||
except:
|
||||
error_msg = None
|
||||
|
||||
try:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification: (%s) %s.' % (
|
||||
payload['chat_id'],
|
||||
r.status_code, error_msg))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
payload['chat_id'],
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification (error=%s).' % (
|
||||
payload['chat_id'], r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Telegram notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A connection error occurred sending Telegram:%s ' % (
|
||||
payload['chat_id']) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
finally:
|
||||
if len(chat_ids):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
return not has_error
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
# This is a dirty hack; but it's the only work around to
|
||||
# tgram:// messages since the bot_token has a colon in it.
|
||||
# It invalidates a normal URL.
|
||||
|
||||
# This hack searches for this bogus URL and corrects it
|
||||
# so we can properly load it further down. The other
|
||||
# alternative is to ask users to actually change the colon
|
||||
# into a slash (which will work too), but it's more likely
|
||||
# to cause confusion... So this is the next best thing
|
||||
try:
|
||||
tgram = re.match(
|
||||
r'(?P<protocol>%s://)(bot)?(?P<prefix>([a-z0-9_-]+)'
|
||||
r'(:[a-z0-9_-]+)?@)?(?P<btoken_a>[0-9]+):+'
|
||||
r'(?P<remaining>.*)$' % NotifyTelegram.secure_protocol,
|
||||
url, re.I)
|
||||
|
||||
except (TypeError, AttributeError):
|
||||
# url is bad; force tgram to be None
|
||||
tgram = None
|
||||
|
||||
if not tgram:
|
||||
# Content is simply not parseable
|
||||
return None
|
||||
|
||||
if tgram.group('prefix'):
|
||||
# Try again
|
||||
results = NotifyBase.parse_url(
|
||||
'%s%s%s/%s' % (
|
||||
tgram.group('protocol'),
|
||||
tgram.group('prefix'),
|
||||
tgram.group('btoken_a'),
|
||||
tgram.group('remaining'),
|
||||
),
|
||||
)
|
||||
|
||||
else:
|
||||
# Try again
|
||||
results = NotifyBase.parse_url(
|
||||
'%s%s/%s' % (
|
||||
tgram.group('protocol'),
|
||||
tgram.group('btoken_a'),
|
||||
tgram.group('remaining'),
|
||||
),
|
||||
)
|
||||
|
||||
# The first token is stored in the hostname
|
||||
bot_token_a = results['host']
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
bot_token_b = [x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][0]
|
||||
|
||||
bot_token = '%s:%s' % (bot_token_a, bot_token_b)
|
||||
|
||||
chat_ids = ','.join(
|
||||
[x for x in filter(
|
||||
bool, NotifyBase.split_path(results['fullpath']))][1:])
|
||||
|
||||
# Store our bot token
|
||||
results['bot_token'] = bot_token
|
||||
|
||||
# Store our chat ids
|
||||
results['chat_ids'] = chat_ids
|
||||
|
||||
# Include images with our message
|
||||
results['include_image'] = \
|
||||
parse_bool(results['qsd'].get('image', False))
|
||||
|
||||
return results
|
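# A sketch of the colon handling that the parse_url() hack above provides;
# the bot token and chat id are made-up values.
results = NotifyTelegram.parse_url('tgram://123456789:AAbbCCddEEffGG/236963241/')
# results['bot_token'] -> '123456789:AAbbCCddEEffGG'
# results['chat_ids']  -> '236963241'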
libs/apprise/plugins/NotifyToasty.py
@ -0,0 +1,171 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (Super) Toasty Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Used to break apart list of potential devices by their delimiter
|
||||
# into a usable list.
|
||||
DEVICES_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
|
||||
class NotifyToasty(NotifyBase):
|
||||
"""
|
||||
A wrapper for Toasty Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'toasty'
|
||||
|
||||
# Toasty uses the http protocol with JSON requests
|
||||
notify_url = 'http://api.supertoasty.com/notify/'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
def __init__(self, devices, **kwargs):
|
||||
"""
|
||||
Initialize Toasty Object
|
||||
"""
|
||||
super(NotifyToasty, self).__init__(**kwargs)
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
self.devices = [x for x in filter(bool, DEVICES_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
|
||||
elif isinstance(devices, (set, tuple, list)):
|
||||
self.devices = devices
|
||||
|
||||
else:
|
||||
self.devices = list()
|
||||
|
||||
if len(self.devices) == 0:
|
||||
raise TypeError('You must specify at least 1 device.')
|
||||
|
||||
if not self.user:
|
||||
raise TypeError('You must specify a username.')
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Toasty Notification
|
||||
"""
|
||||
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'multipart/form-data',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Create a copy of the devices list
|
||||
devices = list(self.devices)
|
||||
while len(devices):
|
||||
device = devices.pop(0)
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'sender': NotifyBase.quote(self.user),
|
||||
'title': NotifyBase.quote(title),
|
||||
'text': NotifyBase.quote(body),
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
payload['image'] = image_url
|
||||
|
||||
# URL to transmit content via
|
||||
url = '%s%s' % (self.notify_url, device)
|
||||
|
||||
self.logger.debug('Toasty POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Toasty Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.get(
|
||||
url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Toasty:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
device,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Toasty:%s '
|
||||
'notification (error=%s).' % (
|
||||
device,
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
|
||||
# Return; we're done
|
||||
has_error = True
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent Toasty notification to %s.' % device)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Toasty:%s ' % (
|
||||
device) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
has_error = True
|
||||
|
||||
if len(devices):
|
||||
# Prevent thrashing requests
|
||||
self.throttle()
|
||||
|
||||
return not has_error
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
devices = NotifyBase.unquote(results['fullpath'])
|
||||
|
||||
# Store our devices
|
||||
results['devices'] = '%s/%s' % (results['host'], devices)
|
||||
|
||||
return results
|
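# Sketch of the toasty:// form consumed above: the text before '@' becomes the
# sender, while the host and path parts become device ids ('annie', 'deviceA'
# and 'deviceB' are placeholders).
results = NotifyToasty.parse_url('toasty://annie@deviceA/deviceB')
obj = NotifyToasty(**results)
# obj.devices now holds both device ids; notify() posts to each in turn.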
libs/apprise/plugins/NotifyTwitter/NotifyTwitter.py
@ -0,0 +1,152 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Twitter Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
from . import tweepy
|
||||
from ..NotifyBase import NotifyBase
|
||||
|
||||
|
||||
class NotifyTwitter(NotifyBase):
|
||||
"""
|
||||
A wrapper to Twitter Notifications
|
||||
|
||||
"""
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'tweet'
|
||||
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
# This limit applies to private DM messages (not public Tweets, which
# are limited to 280 characters)
|
||||
body_maxlen = 4096
|
||||
|
||||
def __init__(self, ckey, csecret, akey, asecret, **kwargs):
|
||||
"""
|
||||
Initialize Twitter Object
|
||||
|
||||
"""
|
||||
super(NotifyTwitter, self).__init__(**kwargs)
|
||||
|
||||
if not ckey:
|
||||
raise TypeError(
|
||||
'An invalid Consumer API Key was specified.'
|
||||
)
|
||||
|
||||
if not csecret:
|
||||
raise TypeError(
|
||||
'An invalid Consumer Secret API Key was specified.'
|
||||
)
|
||||
|
||||
if not akey:
|
||||
raise TypeError(
|
||||
'An invalid Access Token API Key was specified.'
|
||||
)
|
||||
|
||||
if not asecret:
|
||||
raise TypeError(
|
||||
'An invalid Access Token Secret API Key was specified.'
|
||||
)
|
||||
|
||||
if not self.user:
|
||||
raise TypeError(
|
||||
'No user was specified.'
|
||||
)
|
||||
|
||||
# Store our data
|
||||
self.ckey = ckey
|
||||
self.csecret = csecret
|
||||
self.akey = akey
|
||||
self.asecret = asecret
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Twitter Notification
|
||||
"""
|
||||
|
||||
try:
|
||||
# Attempt to Establish a connection to Twitter
|
||||
self.auth = tweepy.OAuthHandler(self.ckey, self.csecret)
|
||||
|
||||
# Apply our Access Tokens
|
||||
self.auth.set_access_token(self.akey, self.asecret)
|
||||
|
||||
except Exception:
|
||||
self.logger.warning(
|
||||
'Twitter authentication failed; '
|
||||
'please verify your configuration.'
|
||||
)
|
||||
return False
|
||||
|
||||
text = '%s\r\n%s' % (title, body)
|
||||
try:
|
||||
# Get our API
|
||||
api = tweepy.API(self.auth)
|
||||
|
||||
# Send our Direct Message
|
||||
api.send_direct_message(self.user, text=text)
|
||||
self.logger.info('Sent Twitter DM notification.')
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Twitter '
|
||||
'direct message to %s.' % self.user)
|
||||
self.logger.debug('Twitter Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Apply our settings now
|
||||
|
||||
# The first token is stored in the hostname
|
||||
consumer_key = results['host']
|
||||
|
||||
# Now fetch the remaining tokens
|
||||
try:
|
||||
consumer_secret, access_token_key, access_token_secret = \
|
||||
[x for x in filter(bool, NotifyBase.split_path(
|
||||
results['fullpath']))][0:3]
|
||||
|
||||
except (ValueError, AttributeError, IndexError):
|
||||
# Force some bad values that will get caught
|
||||
# in parsing later
|
||||
consumer_secret = None
|
||||
access_token_key = None
|
||||
access_token_secret = None
|
||||
|
||||
results['ckey'] = consumer_key
|
||||
results['csecret'] = consumer_secret
|
||||
results['akey'] = access_token_key
|
||||
results['asecret'] = access_token_secret
|
||||
|
||||
return results
|
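# Sketch of how the parsed pieces above are consumed; the four credentials and
# the target user are placeholders, not working keys.
obj = NotifyTwitter(
    ckey='consumer-key', csecret='consumer-secret',
    akey='access-token', asecret='access-token-secret',
    user='target_screen_name',
)
# obj.notify(title='Bazarr', body='test', notify_type='info') would then DM
# the user through tweepy as shown in notify() above.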
libs/apprise/plugins/NotifyTwitter/__init__.py
@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
from . import NotifyTwitter
|
||||
|
||||
__all__ = [
|
||||
'NotifyTwitter',
|
||||
]
|
libs/apprise/plugins/NotifyTwitter/tweepy/__init__.py
@ -0,0 +1,25 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tweepy Twitter API library
|
||||
"""
|
||||
__version__ = '3.6.0'
|
||||
__author__ = 'Joshua Roesslein'
|
||||
__license__ = 'MIT'
|
||||
|
||||
from .models import Status, User, DirectMessage, Friendship, SavedSearch, SearchResults, ModelFactory, Category
|
||||
from .error import TweepError, RateLimitError
|
||||
from .api import API
|
||||
from .cache import Cache, MemoryCache, FileCache
|
||||
from .auth import OAuthHandler, AppAuthHandler
|
||||
from .streaming import Stream, StreamListener
|
||||
from .cursor import Cursor
|
||||
|
||||
# Global, unauthenticated instance of API
|
||||
api = API()
|
||||
|
||||
def debug(enable=True, level=1):
|
||||
from six.moves.http_client import HTTPConnection
|
||||
HTTPConnection.debuglevel = level
|
libs/apprise/plugins/NotifyTwitter/tweepy/api.py
libs/apprise/plugins/NotifyTwitter/tweepy/auth.py
@ -0,0 +1,178 @@
from __future__ import print_function
|
||||
|
||||
import six
|
||||
import logging
|
||||
|
||||
from .error import TweepError
|
||||
from .api import API
|
||||
import requests
|
||||
from requests_oauthlib import OAuth1Session, OAuth1
|
||||
from requests.auth import AuthBase
|
||||
from six.moves.urllib.parse import parse_qs
|
||||
|
||||
WARNING_MESSAGE = """Warning! Due to a Twitter API bug, signin_with_twitter
|
||||
and access_type don't always play nice together. Details
|
||||
https://dev.twitter.com/discussions/21281"""
|
||||
|
||||
|
||||
class AuthHandler(object):
|
||||
|
||||
def apply_auth(self, url, method, headers, parameters):
|
||||
"""Apply authentication headers to request"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_username(self):
|
||||
"""Return the username of the authenticated user"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class OAuthHandler(AuthHandler):
|
||||
"""OAuth authentication handler"""
|
||||
OAUTH_HOST = 'api.twitter.com'
|
||||
OAUTH_ROOT = '/oauth/'
|
||||
|
||||
def __init__(self, consumer_key, consumer_secret, callback=None):
|
||||
if type(consumer_key) == six.text_type:
|
||||
consumer_key = consumer_key.encode('ascii')
|
||||
|
||||
if type(consumer_secret) == six.text_type:
|
||||
consumer_secret = consumer_secret.encode('ascii')
|
||||
|
||||
self.consumer_key = consumer_key
|
||||
self.consumer_secret = consumer_secret
|
||||
self.access_token = None
|
||||
self.access_token_secret = None
|
||||
self.callback = callback
|
||||
self.username = None
|
||||
self.oauth = OAuth1Session(consumer_key,
|
||||
client_secret=consumer_secret,
|
||||
callback_uri=self.callback)
|
||||
|
||||
def _get_oauth_url(self, endpoint):
|
||||
return 'https://' + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint
|
||||
|
||||
def apply_auth(self):
|
||||
return OAuth1(self.consumer_key,
|
||||
client_secret=self.consumer_secret,
|
||||
resource_owner_key=self.access_token,
|
||||
resource_owner_secret=self.access_token_secret,
|
||||
decoding=None)
|
||||
|
||||
def _get_request_token(self, access_type=None):
|
||||
try:
|
||||
url = self._get_oauth_url('request_token')
|
||||
if access_type:
|
||||
url += '?x_auth_access_type=%s' % access_type
|
||||
return self.oauth.fetch_request_token(url)
|
||||
except Exception as e:
|
||||
raise TweepError(e)
|
||||
|
||||
def set_access_token(self, key, secret):
|
||||
self.access_token = key
|
||||
self.access_token_secret = secret
|
||||
|
||||
def get_authorization_url(self,
|
||||
signin_with_twitter=False,
|
||||
access_type=None):
|
||||
"""Get the authorization URL to redirect the user"""
|
||||
try:
|
||||
if signin_with_twitter:
|
||||
url = self._get_oauth_url('authenticate')
|
||||
if access_type:
|
||||
logging.warning(WARNING_MESSAGE)
|
||||
else:
|
||||
url = self._get_oauth_url('authorize')
|
||||
self.request_token = self._get_request_token(access_type=access_type)
|
||||
return self.oauth.authorization_url(url)
|
||||
except Exception as e:
|
||||
raise TweepError(e)
|
||||
|
||||
def get_access_token(self, verifier=None):
|
||||
"""
|
||||
After user has authorized the request token, get access token
|
||||
with user supplied verifier.
|
||||
"""
|
||||
try:
|
||||
url = self._get_oauth_url('access_token')
|
||||
self.oauth = OAuth1Session(self.consumer_key,
|
||||
client_secret=self.consumer_secret,
|
||||
resource_owner_key=self.request_token['oauth_token'],
|
||||
resource_owner_secret=self.request_token['oauth_token_secret'],
|
||||
verifier=verifier, callback_uri=self.callback)
|
||||
resp = self.oauth.fetch_access_token(url)
|
||||
self.access_token = resp['oauth_token']
|
||||
self.access_token_secret = resp['oauth_token_secret']
|
||||
return self.access_token, self.access_token_secret
|
||||
except Exception as e:
|
||||
raise TweepError(e)
|
||||
|
||||
def get_xauth_access_token(self, username, password):
|
||||
"""
|
||||
Get an access token from a username and password combination.
|
||||
In order to get this working you need to create an app at
|
||||
http://twitter.com/apps, after that send a mail to api@twitter.com
|
||||
and request activation of xAuth for it.
|
||||
"""
|
||||
try:
|
||||
url = self._get_oauth_url('access_token')
|
||||
oauth = OAuth1(self.consumer_key,
|
||||
client_secret=self.consumer_secret)
|
||||
r = requests.post(url=url,
|
||||
auth=oauth,
|
||||
headers={'x_auth_mode': 'client_auth',
|
||||
'x_auth_username': username,
|
||||
'x_auth_password': password})
|
||||
|
||||
credentials = parse_qs(r.content)
|
||||
return credentials.get('oauth_token')[0], credentials.get('oauth_token_secret')[0]
|
||||
except Exception as e:
|
||||
raise TweepError(e)
|
||||
|
||||
def get_username(self):
|
||||
if self.username is None:
|
||||
api = API(self)
|
||||
user = api.verify_credentials()
|
||||
if user:
|
||||
self.username = user.screen_name
|
||||
else:
|
||||
raise TweepError('Unable to get username,'
|
||||
' invalid oauth token!')
|
||||
return self.username
|
||||
|
||||
|
||||
class OAuth2Bearer(AuthBase):
|
||||
def __init__(self, bearer_token):
|
||||
self.bearer_token = bearer_token
|
||||
|
||||
def __call__(self, request):
|
||||
request.headers['Authorization'] = 'Bearer ' + self.bearer_token
|
||||
return request
|
||||
|
||||
|
||||
class AppAuthHandler(AuthHandler):
|
||||
"""Application-only authentication handler"""
|
||||
|
||||
OAUTH_HOST = 'api.twitter.com'
|
||||
OAUTH_ROOT = '/oauth2/'
|
||||
|
||||
def __init__(self, consumer_key, consumer_secret):
|
||||
self.consumer_key = consumer_key
|
||||
self.consumer_secret = consumer_secret
|
||||
self._bearer_token = ''
|
||||
|
||||
resp = requests.post(self._get_oauth_url('token'),
|
||||
auth=(self.consumer_key,
|
||||
self.consumer_secret),
|
||||
data={'grant_type': 'client_credentials'})
|
||||
data = resp.json()
|
||||
if data.get('token_type') != 'bearer':
|
||||
raise TweepError('Expected token_type to equal "bearer", '
|
||||
'but got %s instead' % data.get('token_type'))
|
||||
|
||||
self._bearer_token = data['access_token']
|
||||
|
||||
def _get_oauth_url(self, endpoint):
|
||||
return 'https://' + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint
|
||||
|
||||
def apply_auth(self):
|
||||
return OAuth2Bearer(self._bearer_token)
|
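# A short sketch of the handler flow NotifyTwitter relies on: build the OAuth1
# handler, attach an access token, then hand it to API. The four strings are
# placeholders.
auth = OAuthHandler('consumer-key', 'consumer-secret')
auth.set_access_token('access-token', 'access-token-secret')
api = API(auth)
# api.verify_credentials() would now issue an authenticated request.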
libs/apprise/plugins/NotifyTwitter/tweepy/binder.py
@ -0,0 +1,261 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import time
|
||||
import re
|
||||
|
||||
from six.moves.urllib.parse import quote, urlencode
|
||||
import requests
|
||||
|
||||
import logging
|
||||
|
||||
from .error import TweepError, RateLimitError, is_rate_limit_error_message
|
||||
from .utils import convert_to_utf8_str
|
||||
from .models import Model
|
||||
import six
|
||||
import sys
|
||||
|
||||
|
||||
re_path_template = re.compile(r'{\w+}')
|
||||
|
||||
log = logging.getLogger('tweepy.binder')
|
||||
|
||||
def bind_api(**config):
|
||||
|
||||
class APIMethod(object):
|
||||
|
||||
api = config['api']
|
||||
path = config['path']
|
||||
payload_type = config.get('payload_type', None)
|
||||
payload_list = config.get('payload_list', False)
|
||||
allowed_param = config.get('allowed_param', [])
|
||||
method = config.get('method', 'GET')
|
||||
require_auth = config.get('require_auth', False)
|
||||
search_api = config.get('search_api', False)
|
||||
upload_api = config.get('upload_api', False)
|
||||
use_cache = config.get('use_cache', True)
|
||||
session = requests.Session()
|
||||
|
||||
def __init__(self, args, kwargs):
|
||||
api = self.api
|
||||
# If authentication is required and no credentials
|
||||
# are provided, throw an error.
|
||||
if self.require_auth and not api.auth:
|
||||
raise TweepError('Authentication required!')
|
||||
|
||||
self.post_data = kwargs.pop('post_data', None)
|
||||
self.retry_count = kwargs.pop('retry_count',
|
||||
api.retry_count)
|
||||
self.retry_delay = kwargs.pop('retry_delay',
|
||||
api.retry_delay)
|
||||
self.retry_errors = kwargs.pop('retry_errors',
|
||||
api.retry_errors)
|
||||
self.wait_on_rate_limit = kwargs.pop('wait_on_rate_limit',
|
||||
api.wait_on_rate_limit)
|
||||
self.wait_on_rate_limit_notify = kwargs.pop('wait_on_rate_limit_notify',
|
||||
api.wait_on_rate_limit_notify)
|
||||
self.parser = kwargs.pop('parser', api.parser)
|
||||
self.session.headers = kwargs.pop('headers', {})
|
||||
self.build_parameters(args, kwargs)
|
||||
|
||||
# Pick correct URL root to use
|
||||
if self.search_api:
|
||||
self.api_root = api.search_root
|
||||
elif self.upload_api:
|
||||
self.api_root = api.upload_root
|
||||
else:
|
||||
self.api_root = api.api_root
|
||||
|
||||
# Perform any path variable substitution
|
||||
self.build_path()
|
||||
|
||||
if self.search_api:
|
||||
self.host = api.search_host
|
||||
elif self.upload_api:
|
||||
self.host = api.upload_host
|
||||
else:
|
||||
self.host = api.host
|
||||
|
||||
# Manually set Host header to fix an issue in python 2.5
|
||||
# or older where Host is set including the 443 port.
|
||||
# This causes Twitter to issue 301 redirect.
|
||||
# See Issue https://github.com/tweepy/tweepy/issues/12
|
||||
self.session.headers['Host'] = self.host
|
||||
# Monitoring rate limits
|
||||
self._remaining_calls = None
|
||||
self._reset_time = None
|
||||
|
||||
def build_parameters(self, args, kwargs):
|
||||
self.session.params = {}
|
||||
for idx, arg in enumerate(args):
|
||||
if arg is None:
|
||||
continue
|
||||
try:
|
||||
self.session.params[self.allowed_param[idx]] = convert_to_utf8_str(arg)
|
||||
except IndexError:
|
||||
raise TweepError('Too many parameters supplied!')
|
||||
|
||||
for k, arg in kwargs.items():
|
||||
if arg is None:
|
||||
continue
|
||||
if k in self.session.params:
|
||||
raise TweepError('Multiple values for parameter %s supplied!' % k)
|
||||
|
||||
self.session.params[k] = convert_to_utf8_str(arg)
|
||||
|
||||
log.debug("PARAMS: %r", self.session.params)
|
||||
|
||||
def build_path(self):
|
||||
for variable in re_path_template.findall(self.path):
|
||||
name = variable.strip('{}')
|
||||
|
||||
if name == 'user' and 'user' not in self.session.params and self.api.auth:
|
||||
# No 'user' parameter provided, fetch it from Auth instead.
|
||||
value = self.api.auth.get_username()
|
||||
else:
|
||||
try:
|
||||
value = quote(self.session.params[name])
|
||||
except KeyError:
|
||||
raise TweepError('No parameter value found for path variable: %s' % name)
|
||||
del self.session.params[name]
|
||||
|
||||
self.path = self.path.replace(variable, value)
|
||||
|
||||
def execute(self):
|
||||
self.api.cached_result = False
|
||||
|
||||
# Build the request URL
|
||||
url = self.api_root + self.path
|
||||
full_url = 'https://' + self.host + url
|
||||
|
||||
# Query the cache if one is available
|
||||
# and this request uses a GET method.
|
||||
if self.use_cache and self.api.cache and self.method == 'GET':
|
||||
cache_result = self.api.cache.get('%s?%s' % (url, urlencode(self.session.params)))
|
||||
# if cache result found and not expired, return it
|
||||
if cache_result:
|
||||
# must restore api reference
|
||||
if isinstance(cache_result, list):
|
||||
for result in cache_result:
|
||||
if isinstance(result, Model):
|
||||
result._api = self.api
|
||||
else:
|
||||
if isinstance(cache_result, Model):
|
||||
cache_result._api = self.api
|
||||
self.api.cached_result = True
|
||||
return cache_result
|
||||
|
||||
# Continue attempting request until successful
|
||||
# or maximum number of retries is reached.
|
||||
retries_performed = 0
|
||||
while retries_performed < self.retry_count + 1:
|
||||
# handle running out of api calls
|
||||
if self.wait_on_rate_limit:
|
||||
if self._reset_time is not None:
|
||||
if self._remaining_calls is not None:
|
||||
if self._remaining_calls < 1:
|
||||
sleep_time = self._reset_time - int(time.time())
|
||||
if sleep_time > 0:
|
||||
if self.wait_on_rate_limit_notify:
|
||||
log.warning("Rate limit reached. Sleeping for: %d" % sleep_time)
|
||||
time.sleep(sleep_time + 5) # sleep for few extra sec
|
||||
|
||||
# if self.wait_on_rate_limit and self._reset_time is not None and \
|
||||
# self._remaining_calls is not None and self._remaining_calls < 1:
|
||||
# sleep_time = self._reset_time - int(time.time())
|
||||
# if sleep_time > 0:
|
||||
# if self.wait_on_rate_limit_notify:
|
||||
# log.warning("Rate limit reached. Sleeping for: %d" % sleep_time)
|
||||
# time.sleep(sleep_time + 5) # sleep for few extra sec
|
||||
|
||||
# Apply authentication
|
||||
auth = None
|
||||
if self.api.auth:
|
||||
auth = self.api.auth.apply_auth()
|
||||
|
||||
# Request compression if configured
|
||||
if self.api.compression:
|
||||
self.session.headers['Accept-encoding'] = 'gzip'
|
||||
|
||||
# Execute request
|
||||
try:
|
||||
resp = self.session.request(self.method,
|
||||
full_url,
|
||||
data=self.post_data,
|
||||
timeout=self.api.timeout,
|
||||
auth=auth,
|
||||
proxies=self.api.proxy)
|
||||
except Exception as e:
|
||||
six.reraise(TweepError, TweepError('Failed to send request: %s' % e), sys.exc_info()[2])
|
||||
|
||||
rem_calls = resp.headers.get('x-rate-limit-remaining')
|
||||
|
||||
if rem_calls is not None:
|
||||
self._remaining_calls = int(rem_calls)
|
||||
elif isinstance(self._remaining_calls, int):
|
||||
self._remaining_calls -= 1
|
||||
reset_time = resp.headers.get('x-rate-limit-reset')
|
||||
if reset_time is not None:
|
||||
self._reset_time = int(reset_time)
|
||||
if self.wait_on_rate_limit and self._remaining_calls == 0 and (
|
||||
# if we ran out of calls before waiting, retry the last call
|
||||
resp.status_code == 429 or resp.status_code == 420):
|
||||
continue
|
||||
retry_delay = self.retry_delay
|
||||
# Exit request loop if non-retry error code
|
||||
if resp.status_code == 200:
|
||||
break
|
||||
elif (resp.status_code == 429 or resp.status_code == 420) and self.wait_on_rate_limit:
|
||||
if 'retry-after' in resp.headers:
|
||||
retry_delay = float(resp.headers['retry-after'])
|
||||
elif self.retry_errors and resp.status_code not in self.retry_errors:
|
||||
break
|
||||
|
||||
# Sleep before retrying request again
|
||||
time.sleep(retry_delay)
|
||||
retries_performed += 1
|
||||
|
||||
# If an error was returned, throw an exception
|
||||
self.api.last_response = resp
|
||||
if resp.status_code and not 200 <= resp.status_code < 300:
|
||||
try:
|
||||
error_msg, api_error_code = \
|
||||
self.parser.parse_error(resp.text)
|
||||
except Exception:
|
||||
error_msg = "Twitter error response: status code = %s" % resp.status_code
|
||||
api_error_code = None
|
||||
|
||||
if is_rate_limit_error_message(error_msg):
|
||||
raise RateLimitError(error_msg, resp)
|
||||
else:
|
||||
raise TweepError(error_msg, resp, api_code=api_error_code)
|
||||
|
||||
# Parse the response payload
|
||||
result = self.parser.parse(self, resp.text)
|
||||
|
||||
# Store result into cache if one is available.
|
||||
if self.use_cache and self.api.cache and self.method == 'GET' and result:
|
||||
self.api.cache.store('%s?%s' % (url, urlencode(self.session.params)), result)
|
||||
|
||||
return result
|
||||
|
||||
def _call(*args, **kwargs):
|
||||
method = APIMethod(args, kwargs)
|
||||
if kwargs.get('create'):
|
||||
return method
|
||||
else:
|
||||
return method.execute()
|
||||
|
||||
# Set pagination mode
|
||||
if 'cursor' in APIMethod.allowed_param:
|
||||
_call.pagination_mode = 'cursor'
|
||||
elif 'max_id' in APIMethod.allowed_param:
|
||||
if 'since_id' in APIMethod.allowed_param:
|
||||
_call.pagination_mode = 'id'
|
||||
elif 'page' in APIMethod.allowed_param:
|
||||
_call.pagination_mode = 'page'
|
||||
|
||||
return _call
|
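# bind_api() is the factory api.py (collapsed above) uses to declare each REST
# endpoint; a call of roughly this shape wires up one GET method. 'api_obj' is
# assumed to be an existing API instance; the parameters mirror the real
# home_timeline endpoint.
home_timeline = bind_api(
    api=api_obj,
    path='/statuses/home_timeline.json',
    payload_type='status', payload_list=True,
    allowed_param=['since_id', 'max_id', 'count'],
    require_auth=True)
# Calling home_timeline(count=10) builds, sends and parses the request using
# the retry and rate-limit logic above.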
libs/apprise/plugins/NotifyTwitter/tweepy/cache.py
@ -0,0 +1,432 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import time
|
||||
import datetime
|
||||
import hashlib
|
||||
import threading
|
||||
import os
|
||||
import logging
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
except ImportError:
|
||||
# Probably on a Windows system
|
||||
# TODO: use win32file
|
||||
pass
|
||||
|
||||
log = logging.getLogger('tweepy.cache')
|
||||
|
||||
class Cache(object):
|
||||
"""Cache interface"""
|
||||
|
||||
def __init__(self, timeout=60):
|
||||
"""Initialize the cache
|
||||
timeout: number of seconds to keep a cached entry
|
||||
"""
|
||||
self.timeout = timeout
|
||||
|
||||
def store(self, key, value):
|
||||
"""Add new record to cache
|
||||
key: entry key
|
||||
value: data of entry
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get(self, key, timeout=None):
|
||||
"""Get cached entry if exists and not expired
|
||||
key: which entry to get
|
||||
timeout: override timeout with this value [optional]
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def count(self):
|
||||
"""Get count of entries currently stored in cache"""
|
||||
raise NotImplementedError
|
||||
|
||||
def cleanup(self):
|
||||
"""Delete any expired entries in cache."""
|
||||
raise NotImplementedError
|
||||
|
||||
def flush(self):
|
||||
"""Delete all cached entries"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class MemoryCache(Cache):
|
||||
"""In-memory cache"""
|
||||
|
||||
def __init__(self, timeout=60):
|
||||
Cache.__init__(self, timeout)
|
||||
self._entries = {}
|
||||
self.lock = threading.Lock()
|
||||
|
||||
def __getstate__(self):
|
||||
# pickle
|
||||
return {'entries': self._entries, 'timeout': self.timeout}
|
||||
|
||||
def __setstate__(self, state):
|
||||
# unpickle
|
||||
self.lock = threading.Lock()
|
||||
self._entries = state['entries']
|
||||
self.timeout = state['timeout']
|
||||
|
||||
def _is_expired(self, entry, timeout):
|
||||
return timeout > 0 and (time.time() - entry[0]) >= timeout
|
||||
|
||||
def store(self, key, value):
|
||||
self.lock.acquire()
|
||||
self._entries[key] = (time.time(), value)
|
||||
self.lock.release()
|
||||
|
||||
def get(self, key, timeout=None):
|
||||
self.lock.acquire()
|
||||
try:
|
||||
# check to see if we have this key
|
||||
entry = self._entries.get(key)
|
||||
if not entry:
|
||||
# no hit, return nothing
|
||||
return None
|
||||
|
||||
# use provided timeout in arguments if provided
|
||||
# otherwise use the one provided during init.
|
||||
if timeout is None:
|
||||
timeout = self.timeout
|
||||
|
||||
# make sure entry is not expired
|
||||
if self._is_expired(entry, timeout):
|
||||
# entry expired, delete and return nothing
|
||||
del self._entries[key]
|
||||
return None
|
||||
|
||||
# entry found and not expired, return it
|
||||
return entry[1]
|
||||
finally:
|
||||
self.lock.release()
|
||||
|
||||
def count(self):
|
||||
return len(self._entries)
|
||||
|
||||
def cleanup(self):
|
||||
self.lock.acquire()
|
||||
try:
|
||||
for k, v in dict(self._entries).items():
|
||||
if self._is_expired(v, self.timeout):
|
||||
del self._entries[k]
|
||||
finally:
|
||||
self.lock.release()
|
||||
|
||||
def flush(self):
|
||||
self.lock.acquire()
|
||||
self._entries.clear()
|
||||
self.lock.release()
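# --- Illustrative usage sketch (not part of the original tweepy source) ---
# Shows the Cache interface in its simplest form via MemoryCache; the key and
# value below are hypothetical placeholders for a cached API call.
def _memory_cache_example():
    cache = MemoryCache(timeout=60)
    cache.store('users/show?id=example', {'screen_name': 'example'})
    hit = cache.get('users/show?id=example')   # returns the stored dict
    miss = cache.get('missing-key')            # None on a cache miss
    return hit, miss, cache.count()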
|
||||
|
||||
|
||||
class FileCache(Cache):
|
||||
"""File-based cache"""
|
||||
|
||||
# locks used to make cache thread-safe
|
||||
cache_locks = {}
|
||||
|
||||
def __init__(self, cache_dir, timeout=60):
|
||||
Cache.__init__(self, timeout)
|
||||
if os.path.exists(cache_dir) is False:
|
||||
os.mkdir(cache_dir)
|
||||
self.cache_dir = cache_dir
|
||||
if cache_dir in FileCache.cache_locks:
|
||||
self.lock = FileCache.cache_locks[cache_dir]
|
||||
else:
|
||||
self.lock = threading.Lock()
|
||||
FileCache.cache_locks[cache_dir] = self.lock
|
||||
|
||||
if os.name == 'posix':
|
||||
self._lock_file = self._lock_file_posix
|
||||
self._unlock_file = self._unlock_file_posix
|
||||
elif os.name == 'nt':
|
||||
self._lock_file = self._lock_file_win32
|
||||
self._unlock_file = self._unlock_file_win32
|
||||
else:
|
||||
log.warning('FileCache locking not supported on this system!')
|
||||
self._lock_file = self._lock_file_dummy
|
||||
self._unlock_file = self._unlock_file_dummy
|
||||
|
||||
def _get_path(self, key):
|
||||
md5 = hashlib.md5()
|
||||
md5.update(key.encode('utf-8'))
|
||||
return os.path.join(self.cache_dir, md5.hexdigest())
|
||||
|
||||
def _lock_file_dummy(self, path, exclusive=True):
|
||||
return None
|
||||
|
||||
def _unlock_file_dummy(self, lock):
|
||||
return
|
||||
|
||||
def _lock_file_posix(self, path, exclusive=True):
|
||||
lock_path = path + '.lock'
|
||||
if exclusive is True:
|
||||
f_lock = open(lock_path, 'w')
|
||||
fcntl.lockf(f_lock, fcntl.LOCK_EX)
|
||||
else:
|
||||
f_lock = open(lock_path, 'r')
|
||||
fcntl.lockf(f_lock, fcntl.LOCK_SH)
|
||||
if os.path.exists(lock_path) is False:
|
||||
f_lock.close()
|
||||
return None
|
||||
return f_lock
|
||||
|
||||
def _unlock_file_posix(self, lock):
|
||||
lock.close()
|
||||
|
||||
def _lock_file_win32(self, path, exclusive=True):
|
||||
# TODO: implement
|
||||
return None
|
||||
|
||||
def _unlock_file_win32(self, lock):
|
||||
# TODO: implement
|
||||
return
|
||||
|
||||
def _delete_file(self, path):
|
||||
os.remove(path)
|
||||
if os.path.exists(path + '.lock'):
|
||||
os.remove(path + '.lock')
|
||||
|
||||
def store(self, key, value):
|
||||
path = self._get_path(key)
|
||||
self.lock.acquire()
|
||||
try:
|
||||
# acquire lock and open file
|
||||
f_lock = self._lock_file(path)
|
||||
datafile = open(path, 'wb')
|
||||
|
||||
# write data
|
||||
pickle.dump((time.time(), value), datafile)
|
||||
|
||||
# close and unlock file
|
||||
datafile.close()
|
||||
self._unlock_file(f_lock)
|
||||
finally:
|
||||
self.lock.release()
|
||||
|
||||
def get(self, key, timeout=None):
|
||||
return self._get(self._get_path(key), timeout)
|
||||
|
||||
def _get(self, path, timeout):
|
||||
if os.path.exists(path) is False:
|
||||
# no record
|
||||
return None
|
||||
self.lock.acquire()
|
||||
try:
|
||||
# acquire lock and open
|
||||
f_lock = self._lock_file(path, False)
|
||||
datafile = open(path, 'rb')
|
||||
|
||||
# read pickled object
|
||||
created_time, value = pickle.load(datafile)
|
||||
datafile.close()
|
||||
|
||||
# check if value is expired
|
||||
if timeout is None:
|
||||
timeout = self.timeout
|
||||
if timeout > 0:
|
||||
if (time.time() - created_time) >= timeout:
|
||||
# expired! delete from cache
|
||||
value = None
|
||||
self._delete_file(path)
|
||||
|
||||
# unlock and return result
|
||||
self._unlock_file(f_lock)
|
||||
return value
|
||||
finally:
|
||||
self.lock.release()
|
||||
|
||||
def count(self):
|
||||
c = 0
|
||||
for entry in os.listdir(self.cache_dir):
|
||||
if entry.endswith('.lock'):
|
||||
continue
|
||||
c += 1
|
||||
return c
|
||||
|
||||
def cleanup(self):
|
||||
for entry in os.listdir(self.cache_dir):
|
||||
if entry.endswith('.lock'):
|
||||
continue
|
||||
self._get(os.path.join(self.cache_dir, entry), None)
|
||||
|
||||
def flush(self):
|
||||
for entry in os.listdir(self.cache_dir):
|
||||
if entry.endswith('.lock'):
|
||||
continue
|
||||
self._delete_file(os.path.join(self.cache_dir, entry))
|
||||
|
||||
|
||||
class MemCacheCache(Cache):
|
||||
"""Cache interface"""
|
||||
|
||||
def __init__(self, client, timeout=60):
|
||||
"""Initialize the cache
|
||||
client: The memcache client
|
||||
timeout: number of seconds to keep a cached entry
|
||||
"""
|
||||
self.client = client
|
||||
self.timeout = timeout
|
||||
|
||||
def store(self, key, value):
|
||||
"""Add new record to cache
|
||||
key: entry key
|
||||
value: data of entry
|
||||
"""
|
||||
self.client.set(key, value, time=self.timeout)
|
||||
|
||||
def get(self, key, timeout=None):
|
||||
"""Get cached entry if exists and not expired
|
||||
key: which entry to get
|
||||
timeout: override timeout with this value [optional].
         Not supported by this memcache-backed implementation.
|
||||
"""
|
||||
return self.client.get(key)
|
||||
|
||||
def count(self):
|
||||
"""Get count of entries currently stored in cache. RETURN 0"""
|
||||
raise NotImplementedError
|
||||
|
||||
def cleanup(self):
|
||||
"""Delete any expired entries in cache. NO-OP"""
|
||||
raise NotImplementedError
|
||||
|
||||
def flush(self):
|
||||
"""Delete all cached entries. NO-OP"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RedisCache(Cache):
|
||||
"""Cache running in a redis server"""
|
||||
|
||||
def __init__(self, client,
|
||||
timeout=60,
|
||||
keys_container='tweepy:keys',
|
||||
pre_identifier='tweepy:'):
|
||||
Cache.__init__(self, timeout)
|
||||
self.client = client
|
||||
self.keys_container = keys_container
|
||||
self.pre_identifier = pre_identifier
|
||||
|
||||
def _is_expired(self, entry, timeout):
|
||||
# Returns true if the entry has expired
|
||||
return timeout > 0 and (time.time() - entry[0]) >= timeout
|
||||
|
||||
def store(self, key, value):
|
||||
"""Store the key, value pair in our redis server"""
|
||||
# Prepend tweepy to our key,
|
||||
# this makes it easier to identify tweepy keys in our redis server
|
||||
key = self.pre_identifier + key
|
||||
# Get a pipe (to execute several redis commands in one step)
|
||||
pipe = self.client.pipeline()
|
||||
# Set our values in a redis hash (similar to python dict)
|
||||
pipe.set(key, pickle.dumps((time.time(), value)))
|
||||
# Set the expiration
|
||||
pipe.expire(key, self.timeout)
|
||||
# Add the key to a set containing all the keys
|
||||
pipe.sadd(self.keys_container, key)
|
||||
# Execute the instructions in the redis server
|
||||
pipe.execute()
|
||||
|
||||
def get(self, key, timeout=None):
|
||||
"""Given a key, returns an element from the redis table"""
|
||||
key = self.pre_identifier + key
|
||||
# Check to see if we have this key
|
||||
unpickled_entry = self.client.get(key)
|
||||
if not unpickled_entry:
|
||||
# No hit, return nothing
|
||||
return None
|
||||
|
||||
entry = pickle.loads(unpickled_entry)
|
||||
# Use provided timeout in arguments if provided
|
||||
# otherwise use the one provided during init.
|
||||
if timeout is None:
|
||||
timeout = self.timeout
|
||||
|
||||
# Make sure entry is not expired
|
||||
if self._is_expired(entry, timeout):
|
||||
# entry expired, delete and return nothing
|
||||
self.delete_entry(key)
|
||||
return None
|
||||
# entry found and not expired, return it
|
||||
return entry[1]
|
||||
|
||||
def count(self):
|
||||
"""Note: This is not very efficient,
|
||||
since it retrieves all the keys from the redis
|
||||
server to know how many keys we have"""
|
||||
return len(self.client.smembers(self.keys_container))
|
||||
|
||||
def delete_entry(self, key):
|
||||
"""Delete an object from the redis table"""
|
||||
pipe = self.client.pipeline()
|
||||
pipe.srem(self.keys_container, key)
|
||||
pipe.delete(key)
|
||||
pipe.execute()
|
||||
|
||||
def cleanup(self):
|
||||
"""Cleanup all the expired keys"""
|
||||
keys = self.client.smembers(self.keys_container)
|
||||
for key in keys:
|
||||
entry = self.client.get(key)
|
||||
if entry:
|
||||
entry = pickle.loads(entry)
|
||||
if self._is_expired(entry, self.timeout):
|
||||
self.delete_entry(key)
|
||||
|
||||
def flush(self):
|
||||
"""Delete all entries from the cache"""
|
||||
keys = self.client.smembers(self.keys_container)
|
||||
for key in keys:
|
||||
self.delete_entry(key)
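# --- Illustrative usage sketch (not part of the original tweepy source) ---
# Assumes the third-party `redis` package and a local server; connection
# parameters and the cached key/value are placeholders. Entries are pickled,
# timestamped and tracked in a key set, as implemented by store()/get() above.
def _redis_cache_example():
    import redis  # external dependency, not bundled with tweepy
    client = redis.StrictRedis(host='localhost', port=6379, db=0)
    cache = RedisCache(client, timeout=120)
    cache.store('statuses/home_timeline', ['tweet-1', 'tweet-2'])
    return cache.get('statuses/home_timeline')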
|
||||
|
||||
|
||||
class MongodbCache(Cache):
|
||||
"""A simple pickle-based MongoDB cache sytem."""
|
||||
|
||||
def __init__(self, db, timeout=3600, collection='tweepy_cache'):
|
||||
"""Should receive a "database" cursor from pymongo."""
|
||||
Cache.__init__(self, timeout)
|
||||
self.timeout = timeout
|
||||
self.col = db[collection]
|
||||
self.col.create_index('created', expireAfterSeconds=timeout)
|
||||
|
||||
def store(self, key, value):
|
||||
from bson.binary import Binary
|
||||
|
||||
now = datetime.datetime.utcnow()
|
||||
blob = Binary(pickle.dumps(value))
|
||||
|
||||
self.col.insert({'created': now, '_id': key, 'value': blob})
|
||||
|
||||
def get(self, key, timeout=None):
|
||||
if timeout:
|
||||
raise NotImplementedError
|
||||
obj = self.col.find_one({'_id': key})
|
||||
if obj:
|
||||
return pickle.loads(obj['value'])
|
||||
|
||||
def count(self):
|
||||
return self.col.find({}).count()
|
||||
|
||||
def delete_entry(self, key):
|
||||
return self.col.remove({'_id': key})
|
||||
|
||||
def cleanup(self):
|
||||
"""MongoDB will automatically clear expired keys."""
|
||||
pass
|
||||
|
||||
def flush(self):
|
||||
self.col.drop()
|
||||
self.col.create_index('created', expireAfterSeconds=self.timeout)
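# --- Illustrative usage sketch (not part of the original tweepy source) ---
# Assumes the third-party `pymongo` package and a local MongoDB instance;
# the database name and cached key/value are placeholders. Expiry is delegated
# to MongoDB's TTL index created in __init__ above.
def _mongodb_cache_example():
    from pymongo import MongoClient  # external dependency, not bundled
    db = MongoClient('localhost', 27017)['tweepy_example']
    cache = MongodbCache(db, timeout=3600)
    cache.store('search?q=python', {'statuses': []})
    return cache.get('search?q=python')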
|
libs/apprise/plugins/NotifyTwitter/tweepy/cursor.py (new file, 214 lines)
@@ -0,0 +1,214 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from .error import TweepError
|
||||
from .parsers import ModelParser, RawParser
|
||||
|
||||
|
||||
class Cursor(object):
|
||||
"""Pagination helper class"""
|
||||
|
||||
def __init__(self, method, *args, **kargs):
|
||||
if hasattr(method, 'pagination_mode'):
|
||||
if method.pagination_mode == 'cursor':
|
||||
self.iterator = CursorIterator(method, args, kargs)
|
||||
elif method.pagination_mode == 'id':
|
||||
self.iterator = IdIterator(method, args, kargs)
|
||||
elif method.pagination_mode == 'page':
|
||||
self.iterator = PageIterator(method, args, kargs)
|
||||
else:
|
||||
raise TweepError('Invalid pagination mode.')
|
||||
else:
|
||||
raise TweepError('This method does not perform pagination')
|
||||
|
||||
def pages(self, limit=0):
|
||||
"""Return iterator for pages"""
|
||||
if limit > 0:
|
||||
self.iterator.limit = limit
|
||||
return self.iterator
|
||||
|
||||
def items(self, limit=0):
|
||||
"""Return iterator for items in each page"""
|
||||
i = ItemIterator(self.iterator)
|
||||
i.limit = limit
|
||||
return i
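# --- Illustrative usage sketch (not part of the original tweepy source) ---
# `api` stands in for an already-authenticated tweepy.API instance; the
# screen name is a placeholder. The wrapped call must expose pagination_mode
# (set by the binder), otherwise Cursor raises TweepError. items(n) flattens
# pages into individual results, pages(n) yields whole pages.
def _cursor_example(api):
    for status in Cursor(api.user_timeline, screen_name='example').items(20):
        print(status.text)
    for page in Cursor(api.followers_ids, screen_name='example').pages(3):
        print(len(page))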
|
||||
|
||||
|
||||
class BaseIterator(object):
|
||||
|
||||
def __init__(self, method, args, kargs):
|
||||
self.method = method
|
||||
self.args = args
|
||||
self.kargs = kargs
|
||||
self.limit = 0
|
||||
|
||||
def __next__(self):
|
||||
return self.next()
|
||||
|
||||
def next(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def prev(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
|
||||
class CursorIterator(BaseIterator):
|
||||
|
||||
def __init__(self, method, args, kargs):
|
||||
BaseIterator.__init__(self, method, args, kargs)
|
||||
start_cursor = kargs.pop('cursor', None)
|
||||
self.next_cursor = start_cursor or -1
|
||||
self.prev_cursor = start_cursor or 0
|
||||
self.num_tweets = 0
|
||||
|
||||
def next(self):
|
||||
if self.next_cursor == 0 or (self.limit and self.num_tweets == self.limit):
|
||||
raise StopIteration
|
||||
data, cursors = self.method(cursor=self.next_cursor,
|
||||
*self.args,
|
||||
**self.kargs)
|
||||
self.prev_cursor, self.next_cursor = cursors
|
||||
if len(data) == 0:
|
||||
raise StopIteration
|
||||
self.num_tweets += 1
|
||||
return data
|
||||
|
||||
def prev(self):
|
||||
if self.prev_cursor == 0:
|
||||
raise TweepError('Can not page back more, at first page')
|
||||
data, self.next_cursor, self.prev_cursor = self.method(cursor=self.prev_cursor,
|
||||
*self.args,
|
||||
**self.kargs)
|
||||
self.num_tweets -= 1
|
||||
return data
|
||||
|
||||
|
||||
class IdIterator(BaseIterator):
|
||||
|
||||
def __init__(self, method, args, kargs):
|
||||
BaseIterator.__init__(self, method, args, kargs)
|
||||
self.max_id = kargs.pop('max_id', None)
|
||||
self.num_tweets = 0
|
||||
self.results = []
|
||||
self.model_results = []
|
||||
self.index = 0
|
||||
|
||||
def next(self):
|
||||
"""Fetch a set of items with IDs less than current set."""
|
||||
if self.limit and self.limit == self.num_tweets:
|
||||
raise StopIteration
|
||||
|
||||
if self.index >= len(self.results) - 1:
|
||||
data = self.method(max_id=self.max_id, parser=RawParser(), *self.args, **self.kargs)
|
||||
|
||||
if hasattr(self.method, '__self__'):
|
||||
old_parser = self.method.__self__.parser
|
||||
# Hack for models which expect ModelParser to be set
|
||||
self.method.__self__.parser = ModelParser()
|
||||
|
||||
# This is a special invocation that returns the underlying
|
||||
# APIMethod class
|
||||
model = ModelParser().parse(self.method(create=True), data)
|
||||
if hasattr(self.method, '__self__'):
|
||||
self.method.__self__.parser = old_parser
|
||||
result = self.method.__self__.parser.parse(self.method(create=True), data)
|
||||
else:
|
||||
result = model
|
||||
|
||||
if len(self.results) != 0:
|
||||
self.index += 1
|
||||
self.results.append(result)
|
||||
self.model_results.append(model)
|
||||
else:
|
||||
self.index += 1
|
||||
result = self.results[self.index]
|
||||
model = self.model_results[self.index]
|
||||
|
||||
if len(result) == 0:
|
||||
raise StopIteration
|
||||
# TODO: Make this not dependent on the parser making max_id and
|
||||
# since_id available
|
||||
self.max_id = model.max_id
|
||||
self.num_tweets += 1
|
||||
return result
|
||||
|
||||
def prev(self):
|
||||
"""Fetch a set of items with IDs greater than current set."""
|
||||
if self.limit and self.limit == self.num_tweets:
|
||||
raise StopIteration
|
||||
|
||||
self.index -= 1
|
||||
if self.index < 0:
|
||||
# There's no way to fetch a set of tweets directly 'above' the
|
||||
# current set
|
||||
raise StopIteration
|
||||
|
||||
data = self.results[self.index]
|
||||
self.max_id = self.model_results[self.index].max_id
|
||||
self.num_tweets += 1
|
||||
return data
|
||||
|
||||
|
||||
class PageIterator(BaseIterator):
|
||||
|
||||
def __init__(self, method, args, kargs):
|
||||
BaseIterator.__init__(self, method, args, kargs)
|
||||
self.current_page = 0
|
||||
|
||||
def next(self):
|
||||
if self.limit > 0:
|
||||
if self.current_page > self.limit:
|
||||
raise StopIteration
|
||||
|
||||
items = self.method(page=self.current_page, *self.args, **self.kargs)
|
||||
if len(items) == 0:
|
||||
raise StopIteration
|
||||
self.current_page += 1
|
||||
return items
|
||||
|
||||
def prev(self):
|
||||
if self.current_page == 1:
|
||||
raise TweepError('Can not page back more, at first page')
|
||||
self.current_page -= 1
|
||||
return self.method(page=self.current_page, *self.args, **self.kargs)
|
||||
|
||||
|
||||
class ItemIterator(BaseIterator):
|
||||
|
||||
def __init__(self, page_iterator):
|
||||
self.page_iterator = page_iterator
|
||||
self.limit = 0
|
||||
self.current_page = None
|
||||
self.page_index = -1
|
||||
self.num_tweets = 0
|
||||
|
||||
def next(self):
|
||||
if self.limit > 0:
|
||||
if self.num_tweets == self.limit:
|
||||
raise StopIteration
|
||||
if self.current_page is None or self.page_index == len(self.current_page) - 1:
|
||||
# Reached end of current page, get the next page...
|
||||
self.current_page = self.page_iterator.next()
|
||||
self.page_index = -1
|
||||
self.page_index += 1
|
||||
self.num_tweets += 1
|
||||
return self.current_page[self.page_index]
|
||||
|
||||
def prev(self):
|
||||
if self.current_page is None:
|
||||
raise TweepError('Can not go back more, at first page')
|
||||
if self.page_index == 0:
|
||||
# At the beginning of the current page, move to next...
|
||||
self.current_page = self.page_iterator.prev()
|
||||
self.page_index = len(self.current_page)
|
||||
if self.page_index == 0:
|
||||
raise TweepError('No more items')
|
||||
self.page_index -= 1
|
||||
self.num_tweets -= 1
|
||||
return self.current_page[self.page_index]
|
libs/apprise/plugins/NotifyTwitter/tweepy/error.py (new file, 34 lines)
@@ -0,0 +1,34 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import six
|
||||
|
||||
class TweepError(Exception):
|
||||
"""Tweepy exception"""
|
||||
|
||||
def __init__(self, reason, response=None, api_code=None):
|
||||
self.reason = six.text_type(reason)
|
||||
self.response = response
|
||||
self.api_code = api_code
|
||||
Exception.__init__(self, reason)
|
||||
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
|
||||
|
||||
def is_rate_limit_error_message(message):
|
||||
"""Check if the supplied error message belongs to a rate limit error."""
|
||||
return isinstance(message, list) \
|
||||
and len(message) > 0 \
|
||||
and 'code' in message[0] \
|
||||
and message[0]['code'] == 88
|
||||
|
||||
|
||||
class RateLimitError(TweepError):
|
||||
"""Exception for Tweepy hitting the rate limit."""
|
||||
# RateLimitError has the exact same properties and inner workings
|
||||
# as TweepError for backwards compatibility reasons.
|
||||
pass
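# --- Illustrative usage sketch (not part of the original tweepy source) ---
# Because RateLimitError subclasses TweepError it must be caught first;
# `api_call` stands in for any bound API method. The 15-minute sleep mirrors
# Twitter's documented rate-limit window and is only an assumption here.
def _call_with_backoff(api_call, *args, **kwargs):
    import time
    try:
        return api_call(*args, **kwargs)
    except RateLimitError:
        # code 88 responses are mapped to RateLimitError; wait out the window once
        time.sleep(15 * 60)
        return api_call(*args, **kwargs)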
|
libs/apprise/plugins/NotifyTwitter/tweepy/models.py (new file, 495 lines)
@@ -0,0 +1,495 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from .utils import parse_datetime, parse_html_value, parse_a_href
|
||||
|
||||
|
||||
class ResultSet(list):
|
||||
"""A list like object that holds results from a Twitter API query."""
|
||||
def __init__(self, max_id=None, since_id=None):
|
||||
super(ResultSet, self).__init__()
|
||||
self._max_id = max_id
|
||||
self._since_id = since_id
|
||||
|
||||
@property
|
||||
def max_id(self):
|
||||
if self._max_id:
|
||||
return self._max_id
|
||||
ids = self.ids()
|
||||
# Max_id is always set to the *smallest* id, minus one, in the set
|
||||
return (min(ids) - 1) if ids else None
|
||||
|
||||
@property
|
||||
def since_id(self):
|
||||
if self._since_id:
|
||||
return self._since_id
|
||||
ids = self.ids()
|
||||
# Since_id is always set to the *greatest* id in the set
|
||||
return max(ids) if ids else None
|
||||
|
||||
def ids(self):
|
||||
return [item.id for item in self if hasattr(item, 'id')]
|
||||
|
||||
|
||||
class Model(object):
|
||||
|
||||
def __init__(self, api=None):
|
||||
self._api = api
|
||||
|
||||
def __getstate__(self):
|
||||
# pickle
|
||||
pickle = dict(self.__dict__)
|
||||
try:
|
||||
del pickle['_api'] # do not pickle the API reference
|
||||
except KeyError:
|
||||
pass
|
||||
return pickle
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
"""Parse a JSON object into a model instance."""
|
||||
raise NotImplementedError
|
||||
|
||||
@classmethod
|
||||
def parse_list(cls, api, json_list):
|
||||
"""
|
||||
Parse a list of JSON objects into
|
||||
a result set of model instances.
|
||||
"""
|
||||
results = ResultSet()
|
||||
for obj in json_list:
|
||||
if obj:
|
||||
results.append(cls.parse(api, obj))
|
||||
return results
|
||||
|
||||
def __repr__(self):
|
||||
state = ['%s=%s' % (k, repr(v)) for (k, v) in vars(self).items()]
|
||||
return '%s(%s)' % (self.__class__.__name__, ', '.join(state))
|
||||
|
||||
|
||||
class Status(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
status = cls(api)
|
||||
setattr(status, '_json', json)
|
||||
for k, v in json.items():
|
||||
if k == 'user':
|
||||
user_model = getattr(api.parser.model_factory, 'user') if api else User
|
||||
user = user_model.parse(api, v)
|
||||
setattr(status, 'author', user)
|
||||
setattr(status, 'user', user)  # DEPRECATED
|
||||
elif k == 'created_at':
|
||||
setattr(status, k, parse_datetime(v))
|
||||
elif k == 'source':
|
||||
if '<' in v:
|
||||
setattr(status, k, parse_html_value(v))
|
||||
setattr(status, 'source_url', parse_a_href(v))
|
||||
else:
|
||||
setattr(status, k, v)
|
||||
setattr(status, 'source_url', None)
|
||||
elif k == 'retweeted_status':
|
||||
setattr(status, k, Status.parse(api, v))
|
||||
elif k == 'quoted_status':
|
||||
setattr(status, k, Status.parse(api, v))
|
||||
elif k == 'place':
|
||||
if v is not None:
|
||||
setattr(status, k, Place.parse(api, v))
|
||||
else:
|
||||
setattr(status, k, None)
|
||||
else:
|
||||
setattr(status, k, v)
|
||||
return status
|
||||
|
||||
def destroy(self):
|
||||
return self._api.destroy_status(self.id)
|
||||
|
||||
def retweet(self):
|
||||
return self._api.retweet(self.id)
|
||||
|
||||
def retweets(self):
|
||||
return self._api.retweets(self.id)
|
||||
|
||||
def favorite(self):
|
||||
return self._api.create_favorite(self.id)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, Status):
|
||||
return self.id == other.id
|
||||
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other):
|
||||
result = self == other
|
||||
|
||||
if result is NotImplemented:
|
||||
return result
|
||||
|
||||
return not result
|
||||
|
||||
|
||||
class User(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
user = cls(api)
|
||||
setattr(user, '_json', json)
|
||||
for k, v in json.items():
|
||||
if k == 'created_at':
|
||||
setattr(user, k, parse_datetime(v))
|
||||
elif k == 'status':
|
||||
setattr(user, k, Status.parse(api, v))
|
||||
elif k == 'following':
|
||||
# twitter sets this to null if it is false
|
||||
if v is True:
|
||||
setattr(user, k, True)
|
||||
else:
|
||||
setattr(user, k, False)
|
||||
else:
|
||||
setattr(user, k, v)
|
||||
return user
|
||||
|
||||
@classmethod
|
||||
def parse_list(cls, api, json_list):
|
||||
if isinstance(json_list, list):
|
||||
item_list = json_list
|
||||
else:
|
||||
item_list = json_list['users']
|
||||
|
||||
results = ResultSet()
|
||||
for obj in item_list:
|
||||
results.append(cls.parse(api, obj))
|
||||
return results
|
||||
|
||||
def timeline(self, **kargs):
|
||||
return self._api.user_timeline(user_id=self.id, **kargs)
|
||||
|
||||
def friends(self, **kargs):
|
||||
return self._api.friends(user_id=self.id, **kargs)
|
||||
|
||||
def followers(self, **kargs):
|
||||
return self._api.followers(user_id=self.id, **kargs)
|
||||
|
||||
def follow(self):
|
||||
self._api.create_friendship(user_id=self.id)
|
||||
self.following = True
|
||||
|
||||
def unfollow(self):
|
||||
self._api.destroy_friendship(user_id=self.id)
|
||||
self.following = False
|
||||
|
||||
def lists_memberships(self, *args, **kargs):
|
||||
return self._api.lists_memberships(user=self.screen_name,
|
||||
*args,
|
||||
**kargs)
|
||||
|
||||
def lists_subscriptions(self, *args, **kargs):
|
||||
return self._api.lists_subscriptions(user=self.screen_name,
|
||||
*args,
|
||||
**kargs)
|
||||
|
||||
def lists(self, *args, **kargs):
|
||||
return self._api.lists_all(user=self.screen_name,
|
||||
*args,
|
||||
**kargs)
|
||||
|
||||
def followers_ids(self, *args, **kargs):
|
||||
return self._api.followers_ids(user_id=self.id,
|
||||
*args,
|
||||
**kargs)
|
||||
|
||||
|
||||
class DirectMessage(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
dm = cls(api)
|
||||
for k, v in json.items():
|
||||
if k == 'sender' or k == 'recipient':
|
||||
setattr(dm, k, User.parse(api, v))
|
||||
elif k == 'created_at':
|
||||
setattr(dm, k, parse_datetime(v))
|
||||
else:
|
||||
setattr(dm, k, v)
|
||||
return dm
|
||||
|
||||
def destroy(self):
|
||||
return self._api.destroy_direct_message(self.id)
|
||||
|
||||
|
||||
class Friendship(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
relationship = json['relationship']
|
||||
|
||||
# parse source
|
||||
source = cls(api)
|
||||
for k, v in relationship['source'].items():
|
||||
setattr(source, k, v)
|
||||
|
||||
# parse target
|
||||
target = cls(api)
|
||||
for k, v in relationship['target'].items():
|
||||
setattr(target, k, v)
|
||||
|
||||
return source, target
|
||||
|
||||
|
||||
class Category(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
category = cls(api)
|
||||
for k, v in json.items():
|
||||
setattr(category, k, v)
|
||||
return category
|
||||
|
||||
|
||||
class SavedSearch(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
ss = cls(api)
|
||||
for k, v in json.items():
|
||||
if k == 'created_at':
|
||||
setattr(ss, k, parse_datetime(v))
|
||||
else:
|
||||
setattr(ss, k, v)
|
||||
return ss
|
||||
|
||||
def destroy(self):
|
||||
return self._api.destroy_saved_search(self.id)
|
||||
|
||||
|
||||
class SearchResults(ResultSet):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
metadata = json['search_metadata']
|
||||
results = SearchResults()
|
||||
results.refresh_url = metadata.get('refresh_url')
|
||||
results.completed_in = metadata.get('completed_in')
|
||||
results.query = metadata.get('query')
|
||||
results.count = metadata.get('count')
|
||||
results.next_results = metadata.get('next_results')
|
||||
|
||||
status_model = getattr(api.parser.model_factory, 'status') if api else Status
|
||||
|
||||
for status in json['statuses']:
|
||||
results.append(status_model.parse(api, status))
|
||||
return results
|
||||
|
||||
|
||||
class List(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
lst = List(api)
|
||||
for k, v in json.items():
|
||||
if k == 'user':
|
||||
setattr(lst, k, User.parse(api, v))
|
||||
elif k == 'created_at':
|
||||
setattr(lst, k, parse_datetime(v))
|
||||
else:
|
||||
setattr(lst, k, v)
|
||||
return lst
|
||||
|
||||
@classmethod
|
||||
def parse_list(cls, api, json_list, result_set=None):
|
||||
results = ResultSet()
|
||||
if isinstance(json_list, dict):
|
||||
json_list = json_list['lists']
|
||||
for obj in json_list:
|
||||
results.append(cls.parse(api, obj))
|
||||
return results
|
||||
|
||||
def update(self, **kargs):
|
||||
return self._api.update_list(self.slug, **kargs)
|
||||
|
||||
def destroy(self):
|
||||
return self._api.destroy_list(self.slug)
|
||||
|
||||
def timeline(self, **kargs):
|
||||
return self._api.list_timeline(self.user.screen_name,
|
||||
self.slug,
|
||||
**kargs)
|
||||
|
||||
def add_member(self, id):
|
||||
return self._api.add_list_member(self.slug, id)
|
||||
|
||||
def remove_member(self, id):
|
||||
return self._api.remove_list_member(self.slug, id)
|
||||
|
||||
def members(self, **kargs):
|
||||
return self._api.list_members(self.user.screen_name,
|
||||
self.slug,
|
||||
**kargs)
|
||||
|
||||
def is_member(self, id):
|
||||
return self._api.is_list_member(self.user.screen_name,
|
||||
self.slug,
|
||||
id)
|
||||
|
||||
def subscribe(self):
|
||||
return self._api.subscribe_list(self.user.screen_name, self.slug)
|
||||
|
||||
def unsubscribe(self):
|
||||
return self._api.unsubscribe_list(self.user.screen_name, self.slug)
|
||||
|
||||
def subscribers(self, **kargs):
|
||||
return self._api.list_subscribers(self.user.screen_name,
|
||||
self.slug,
|
||||
**kargs)
|
||||
|
||||
def is_subscribed(self, id):
|
||||
return self._api.is_subscribed_list(self.user.screen_name,
|
||||
self.slug,
|
||||
id)
|
||||
|
||||
|
||||
class Relation(Model):
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
result = cls(api)
|
||||
for k, v in json.items():
|
||||
if k == 'value' and json['kind'] in ['Tweet', 'LookedupStatus']:
|
||||
setattr(result, k, Status.parse(api, v))
|
||||
elif k == 'results':
|
||||
setattr(result, k, Relation.parse_list(api, v))
|
||||
else:
|
||||
setattr(result, k, v)
|
||||
return result
|
||||
|
||||
|
||||
class Relationship(Model):
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
result = cls(api)
|
||||
for k, v in json.items():
|
||||
if k == 'connections':
|
||||
setattr(result, 'is_following', 'following' in v)
|
||||
setattr(result, 'is_followed_by', 'followed_by' in v)
|
||||
else:
|
||||
setattr(result, k, v)
|
||||
return result
|
||||
|
||||
|
||||
class JSONModel(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
return json
|
||||
|
||||
|
||||
class IDModel(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
if isinstance(json, list):
|
||||
return json
|
||||
else:
|
||||
return json['ids']
|
||||
|
||||
|
||||
class BoundingBox(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
result = cls(api)
|
||||
if json is not None:
|
||||
for k, v in json.items():
|
||||
setattr(result, k, v)
|
||||
return result
|
||||
|
||||
def origin(self):
|
||||
"""
|
||||
Return longitude, latitude of southwest (bottom, left) corner of
|
||||
bounding box, as a tuple.
|
||||
|
||||
This assumes that bounding box is always a rectangle, which
|
||||
appears to be the case at present.
|
||||
"""
|
||||
return tuple(self.coordinates[0][0])
|
||||
|
||||
def corner(self):
|
||||
"""
|
||||
Return longitude, latitude of northeast (top, right) corner of
|
||||
bounding box, as a tuple.
|
||||
|
||||
This assumes that bounding box is always a rectangle, which
|
||||
appears to be the case at present.
|
||||
"""
|
||||
return tuple(self.coordinates[0][2])
|
||||
|
||||
|
||||
class Place(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
place = cls(api)
|
||||
for k, v in json.items():
|
||||
if k == 'bounding_box':
|
||||
# bounding_box value may be null (None.)
|
||||
# Example: "United States" (id=96683cc9126741d1)
|
||||
if v is not None:
|
||||
t = BoundingBox.parse(api, v)
|
||||
else:
|
||||
t = v
|
||||
setattr(place, k, t)
|
||||
elif k == 'contained_within':
|
||||
# contained_within is a list of Places.
|
||||
setattr(place, k, Place.parse_list(api, v))
|
||||
else:
|
||||
setattr(place, k, v)
|
||||
return place
|
||||
|
||||
@classmethod
|
||||
def parse_list(cls, api, json_list):
|
||||
if isinstance(json_list, list):
|
||||
item_list = json_list
|
||||
else:
|
||||
item_list = json_list['result']['places']
|
||||
|
||||
results = ResultSet()
|
||||
for obj in item_list:
|
||||
results.append(cls.parse(api, obj))
|
||||
return results
|
||||
|
||||
|
||||
class Media(Model):
|
||||
|
||||
@classmethod
|
||||
def parse(cls, api, json):
|
||||
media = cls(api)
|
||||
for k, v in json.items():
|
||||
setattr(media, k, v)
|
||||
return media
|
||||
|
||||
|
||||
class ModelFactory(object):
|
||||
"""
|
||||
Used by parsers for creating instances
|
||||
of models. You may subclass this factory
|
||||
to add your own extended models.
|
||||
"""
|
||||
|
||||
status = Status
|
||||
user = User
|
||||
direct_message = DirectMessage
|
||||
friendship = Friendship
|
||||
saved_search = SavedSearch
|
||||
search_results = SearchResults
|
||||
category = Category
|
||||
list = List
|
||||
relation = Relation
|
||||
relationship = Relationship
|
||||
media = Media
|
||||
|
||||
json = JSONModel
|
||||
ids = IDModel
|
||||
place = Place
|
||||
bounding_box = BoundingBox
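# --- Illustrative usage sketch (not part of the original tweepy source) ---
# Extending the factory as the docstring above suggests: a subclass swaps in
# a custom Status model. The `keyword_count` helper and the commented API
# wiring are purely illustrative.
class _ExampleStatus(Status):
    def keyword_count(self, word):
        # count occurrences of a keyword in the status text
        return self.text.lower().count(word.lower())

class _ExampleModelFactory(ModelFactory):
    status = _ExampleStatus
# e.g. API(auth, parser=ModelParser(model_factory=_ExampleModelFactory))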
|
libs/apprise/plugins/NotifyTwitter/tweepy/parsers.py (new file, 109 lines)
@@ -0,0 +1,109 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from .models import ModelFactory
|
||||
from .utils import import_simplejson
|
||||
from .error import TweepError
|
||||
|
||||
|
||||
class Parser(object):
|
||||
|
||||
def parse(self, method, payload):
|
||||
"""
|
||||
Parse the response payload and return the result.
|
||||
Returns a tuple that contains the result data and the cursors
|
||||
(or None if not present).
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def parse_error(self, payload):
|
||||
"""
|
||||
Parse the error message and api error code from payload.
|
||||
Return them as an (error_msg, error_code) tuple. If unable to parse the
|
||||
message, raise an exception and the default error message will be used.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RawParser(Parser):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def parse(self, method, payload):
|
||||
return payload
|
||||
|
||||
def parse_error(self, payload):
|
||||
return payload
|
||||
|
||||
|
||||
class JSONParser(Parser):
|
||||
|
||||
payload_format = 'json'
|
||||
|
||||
def __init__(self):
|
||||
self.json_lib = import_simplejson()
|
||||
|
||||
def parse(self, method, payload):
|
||||
try:
|
||||
json = self.json_lib.loads(payload)
|
||||
except Exception as e:
|
||||
raise TweepError('Failed to parse JSON payload: %s' % e)
|
||||
|
||||
needs_cursors = 'cursor' in method.session.params
|
||||
if needs_cursors and isinstance(json, dict) \
|
||||
and 'previous_cursor' in json \
|
||||
and 'next_cursor' in json:
|
||||
cursors = json['previous_cursor'], json['next_cursor']
|
||||
return json, cursors
|
||||
else:
|
||||
return json
|
||||
|
||||
def parse_error(self, payload):
|
||||
error_object = self.json_lib.loads(payload)
|
||||
|
||||
if 'error' in error_object:
|
||||
reason = error_object['error']
|
||||
api_code = error_object.get('code')
|
||||
else:
|
||||
reason = error_object['errors']
|
||||
api_code = [error.get('code') for error in
|
||||
reason if error.get('code')]
|
||||
api_code = api_code[0] if len(api_code) == 1 else api_code
|
||||
|
||||
return reason, api_code
|
||||
|
||||
|
||||
class ModelParser(JSONParser):
|
||||
|
||||
def __init__(self, model_factory=None):
|
||||
JSONParser.__init__(self)
|
||||
self.model_factory = model_factory or ModelFactory
|
||||
|
||||
def parse(self, method, payload):
|
||||
try:
|
||||
if method.payload_type is None:
|
||||
return
|
||||
model = getattr(self.model_factory, method.payload_type)
|
||||
except AttributeError:
|
||||
raise TweepError('No model for this payload type: '
|
||||
'%s' % method.payload_type)
|
||||
|
||||
json = JSONParser.parse(self, method, payload)
|
||||
if isinstance(json, tuple):
|
||||
json, cursors = json
|
||||
else:
|
||||
cursors = None
|
||||
|
||||
if method.payload_list:
|
||||
result = model.parse_list(method.api, json)
|
||||
else:
|
||||
result = model.parse(method.api, json)
|
||||
|
||||
if cursors:
|
||||
return result, cursors
|
||||
else:
|
||||
return result
|
libs/apprise/plugins/NotifyTwitter/tweepy/streaming.py (new file, 476 lines)
@@ -0,0 +1,476 @@
# Tweepy
|
||||
# Copyright 2009-2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
# Appengine users: https://developers.google.com/appengine/docs/python/sockets/#making_httplib_use_sockets
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import logging
|
||||
import re
|
||||
import requests
|
||||
import sys
|
||||
from requests.exceptions import Timeout
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
|
||||
import six
|
||||
|
||||
import ssl
|
||||
|
||||
from .models import Status
|
||||
from .api import API
|
||||
from .error import TweepError
|
||||
|
||||
from .utils import import_simplejson
|
||||
json = import_simplejson()
|
||||
|
||||
STREAM_VERSION = '1.1'
|
||||
|
||||
|
||||
class StreamListener(object):
|
||||
|
||||
def __init__(self, api=None):
|
||||
self.api = api or API()
|
||||
|
||||
def on_connect(self):
|
||||
"""Called once connected to streaming server.
|
||||
|
||||
This will be invoked once a successful response
|
||||
is received from the server. Allows the listener
|
||||
to perform some work prior to entering the read loop.
|
||||
"""
|
||||
pass
|
||||
|
||||
def on_data(self, raw_data):
|
||||
"""Called when raw data is received from connection.
|
||||
|
||||
Override this method if you wish to manually handle
|
||||
the stream data. Return False to stop stream and close connection.
|
||||
"""
|
||||
data = json.loads(raw_data)
|
||||
|
||||
if 'in_reply_to_status_id' in data:
|
||||
status = Status.parse(self.api, data)
|
||||
if self.on_status(status) is False:
|
||||
return False
|
||||
elif 'delete' in data:
|
||||
delete = data['delete']['status']
|
||||
if self.on_delete(delete['id'], delete['user_id']) is False:
|
||||
return False
|
||||
elif 'event' in data:
|
||||
status = Status.parse(self.api, data)
|
||||
if self.on_event(status) is False:
|
||||
return False
|
||||
elif 'direct_message' in data:
|
||||
status = Status.parse(self.api, data)
|
||||
if self.on_direct_message(status) is False:
|
||||
return False
|
||||
elif 'friends' in data:
|
||||
if self.on_friends(data['friends']) is False:
|
||||
return False
|
||||
elif 'limit' in data:
|
||||
if self.on_limit(data['limit']['track']) is False:
|
||||
return False
|
||||
elif 'disconnect' in data:
|
||||
if self.on_disconnect(data['disconnect']) is False:
|
||||
return False
|
||||
elif 'warning' in data:
|
||||
if self.on_warning(data['warning']) is False:
|
||||
return False
|
||||
else:
|
||||
logging.error("Unknown message type: " + str(raw_data))
|
||||
|
||||
def keep_alive(self):
|
||||
"""Called when a keep-alive arrived"""
|
||||
return
|
||||
|
||||
def on_status(self, status):
|
||||
"""Called when a new status arrives"""
|
||||
return
|
||||
|
||||
def on_exception(self, exception):
|
||||
"""Called when an unhandled exception occurs."""
|
||||
return
|
||||
|
||||
def on_delete(self, status_id, user_id):
|
||||
"""Called when a delete notice arrives for a status"""
|
||||
return
|
||||
|
||||
def on_event(self, status):
|
||||
"""Called when a new event arrives"""
|
||||
return
|
||||
|
||||
def on_direct_message(self, status):
|
||||
"""Called when a new direct message arrives"""
|
||||
return
|
||||
|
||||
def on_friends(self, friends):
|
||||
"""Called when a friends list arrives.
|
||||
|
||||
friends is a list that contains user_id
|
||||
"""
|
||||
return
|
||||
|
||||
def on_limit(self, track):
|
||||
"""Called when a limitation notice arrives"""
|
||||
return
|
||||
|
||||
def on_error(self, status_code):
|
||||
"""Called when a non-200 status code is returned"""
|
||||
return False
|
||||
|
||||
def on_timeout(self):
|
||||
"""Called when stream connection times out"""
|
||||
return
|
||||
|
||||
def on_disconnect(self, notice):
|
||||
"""Called when twitter sends a disconnect notice
|
||||
|
||||
Disconnect codes are listed here:
|
||||
https://dev.twitter.com/docs/streaming-apis/messages#Disconnect_messages_disconnect
|
||||
"""
|
||||
return
|
||||
|
||||
def on_warning(self, notice):
|
||||
"""Called when a disconnection warning message arrives"""
|
||||
return
|
||||
|
||||
|
||||
class ReadBuffer(object):
|
||||
"""Buffer data from the response in a smarter way than httplib/requests can.
|
||||
|
||||
Tweets are roughly in the 2-12kb range, averaging around 3kb.
|
||||
Requests/urllib3/httplib/socket all use socket.read, which blocks
|
||||
until enough data is returned. On some systems (eg google appengine), socket
|
||||
reads are quite slow. To combat this latency we can read big chunks,
|
||||
but the blocking part means we won't get results until enough tweets
|
||||
have arrived. That may not be a big deal for high throughput systems.
|
||||
For low throughput systems we don't want to sacrifice latency, so we
|
||||
use small chunks so it can read the length and the tweet in 2 read calls.
|
||||
"""
|
||||
|
||||
def __init__(self, stream, chunk_size, encoding='utf-8'):
|
||||
self._stream = stream
|
||||
self._buffer = six.b('')
|
||||
self._chunk_size = chunk_size
|
||||
self._encoding = encoding
|
||||
|
||||
def read_len(self, length):
|
||||
while not self._stream.closed:
|
||||
if len(self._buffer) >= length:
|
||||
return self._pop(length)
|
||||
read_len = max(self._chunk_size, length - len(self._buffer))
|
||||
self._buffer += self._stream.read(read_len)
|
||||
return six.b('')
|
||||
|
||||
def read_line(self, sep=six.b('\n')):
|
||||
"""Read the data stream until a given separator is found (default \n)
|
||||
|
||||
:param sep: Separator to read until. Must be of the bytes type (str in python 2,
|
||||
bytes in python 3)
|
||||
:return: The str of the data read until sep
|
||||
"""
|
||||
start = 0
|
||||
while not self._stream.closed:
|
||||
loc = self._buffer.find(sep, start)
|
||||
if loc >= 0:
|
||||
return self._pop(loc + len(sep))
|
||||
else:
|
||||
start = len(self._buffer)
|
||||
self._buffer += self._stream.read(self._chunk_size)
|
||||
return six.b('')
|
||||
|
||||
def _pop(self, length):
|
||||
r = self._buffer[:length]
|
||||
self._buffer = self._buffer[length:]
|
||||
return r.decode(self._encoding)
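# --- Illustrative usage sketch (not part of the original tweepy source) ---
# ReadBuffer only needs a file-like object with `read` and `closed`, so an
# in-memory BytesIO stands in for the HTTP response here. The payload uses
# the delimited=length framing described above: a length line followed by
# that many bytes of JSON.
def _read_buffer_example():
    import io
    payload = io.BytesIO(b'17\n{"id": 1, "x": 2}\n')
    buf = ReadBuffer(payload, chunk_size=64)
    length = int(buf.read_line().strip())   # reads '17'
    return buf.read_len(length)             # the 17-byte JSON string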
|
||||
|
||||
|
||||
class Stream(object):
|
||||
|
||||
host = 'stream.twitter.com'
|
||||
|
||||
def __init__(self, auth, listener, **options):
|
||||
self.auth = auth
|
||||
self.listener = listener
|
||||
self.running = False
|
||||
self.timeout = options.get("timeout", 300.0)
|
||||
self.retry_count = options.get("retry_count")
|
||||
# values according to
|
||||
# https://dev.twitter.com/docs/streaming-apis/connecting#Reconnecting
|
||||
self.retry_time_start = options.get("retry_time", 5.0)
|
||||
self.retry_420_start = options.get("retry_420", 60.0)
|
||||
self.retry_time_cap = options.get("retry_time_cap", 320.0)
|
||||
self.snooze_time_step = options.get("snooze_time", 0.25)
|
||||
self.snooze_time_cap = options.get("snooze_time_cap", 16)
|
||||
|
||||
# The default socket.read size. Default to less than half the size of
|
||||
# a tweet so that it reads tweets with the minimal latency of 2 reads
|
||||
# per tweet. Values higher than ~1kb will increase latency by waiting
|
||||
# for more data to arrive but may also increase throughput by doing
|
||||
# fewer socket read calls.
|
||||
self.chunk_size = options.get("chunk_size", 512)
|
||||
|
||||
self.verify = options.get("verify", True)
|
||||
|
||||
self.api = API()
|
||||
self.headers = options.get("headers") or {}
|
||||
self.new_session()
|
||||
self.body = None
|
||||
self.retry_time = self.retry_time_start
|
||||
self.snooze_time = self.snooze_time_step
|
||||
|
||||
# Example: proxies = {'http': 'http://localhost:1080', 'https': 'http://localhost:1080'}
|
||||
self.proxies = options.get("proxies")
|
||||
|
||||
def new_session(self):
|
||||
self.session = requests.Session()
|
||||
self.session.headers = self.headers
|
||||
self.session.params = None
|
||||
|
||||
def _run(self):
|
||||
# Authenticate
|
||||
url = "https://%s%s" % (self.host, self.url)
|
||||
|
||||
# Connect and process the stream
|
||||
error_counter = 0
|
||||
resp = None
|
||||
exc_info = None
|
||||
while self.running:
|
||||
if self.retry_count is not None:
|
||||
if error_counter > self.retry_count:
|
||||
# quit if error count greater than retry count
|
||||
break
|
||||
try:
|
||||
auth = self.auth.apply_auth()
|
||||
resp = self.session.request('POST',
|
||||
url,
|
||||
data=self.body,
|
||||
timeout=self.timeout,
|
||||
stream=True,
|
||||
auth=auth,
|
||||
verify=self.verify,
|
||||
proxies = self.proxies)
|
||||
if resp.status_code != 200:
|
||||
if self.listener.on_error(resp.status_code) is False:
|
||||
break
|
||||
error_counter += 1
|
||||
if resp.status_code == 420:
|
||||
self.retry_time = max(self.retry_420_start,
|
||||
self.retry_time)
|
||||
sleep(self.retry_time)
|
||||
self.retry_time = min(self.retry_time * 2,
|
||||
self.retry_time_cap)
|
||||
else:
|
||||
error_counter = 0
|
||||
self.retry_time = self.retry_time_start
|
||||
self.snooze_time = self.snooze_time_step
|
||||
self.listener.on_connect()
|
||||
self._read_loop(resp)
|
||||
except (Timeout, ssl.SSLError) as exc:
|
||||
# This is still necessary, as an SSLError can actually be
|
||||
# thrown when using Requests
|
||||
# If it's not time out treat it like any other exception
|
||||
if isinstance(exc, ssl.SSLError):
|
||||
if not (exc.args and 'timed out' in str(exc.args[0])):
|
||||
exc_info = sys.exc_info()
|
||||
break
|
||||
if self.listener.on_timeout() is False:
|
||||
break
|
||||
if self.running is False:
|
||||
break
|
||||
sleep(self.snooze_time)
|
||||
self.snooze_time = min(self.snooze_time + self.snooze_time_step,
|
||||
self.snooze_time_cap)
|
||||
except Exception as exc:
|
||||
exc_info = sys.exc_info()
|
||||
# any other exception is fatal, so kill loop
|
||||
break
|
||||
|
||||
# cleanup
|
||||
self.running = False
|
||||
if resp:
|
||||
resp.close()
|
||||
|
||||
self.new_session()
|
||||
|
||||
if exc_info:
|
||||
# call a handler first so that the exception can be logged.
|
||||
self.listener.on_exception(exc_info[1])
|
||||
six.reraise(*exc_info)
|
||||
|
||||
def _data(self, data):
|
||||
if self.listener.on_data(data) is False:
|
||||
self.running = False
|
||||
|
||||
def _read_loop(self, resp):
|
||||
charset = resp.headers.get('content-type', default='')
|
||||
enc_search = re.search(r'charset=(?P<enc>\S*)', charset)
|
||||
if enc_search is not None:
|
||||
encoding = enc_search.group('enc')
|
||||
else:
|
||||
encoding = 'utf-8'
|
||||
|
||||
buf = ReadBuffer(resp.raw, self.chunk_size, encoding=encoding)
|
||||
|
||||
while self.running and not resp.raw.closed:
|
||||
length = 0
|
||||
while not resp.raw.closed:
|
||||
line = buf.read_line()
|
||||
stripped_line = line.strip() if line else line # line is sometimes None so we need to check here
|
||||
if not stripped_line:
|
||||
self.listener.keep_alive() # keep-alive new lines are expected
|
||||
elif stripped_line.isdigit():
|
||||
length = int(stripped_line)
|
||||
break
|
||||
else:
|
||||
raise TweepError('Expecting length, unexpected value found')
|
||||
|
||||
next_status_obj = buf.read_len(length)
|
||||
if self.running and next_status_obj:
|
||||
self._data(next_status_obj)
|
||||
|
||||
# # Note: keep-alive newlines might be inserted before each length value.
|
||||
# # read until we get a digit...
|
||||
# c = b'\n'
|
||||
# for c in resp.iter_content(decode_unicode=True):
|
||||
# if c == b'\n':
|
||||
# continue
|
||||
# break
|
||||
#
|
||||
# delimited_string = c
|
||||
#
|
||||
# # read rest of delimiter length..
|
||||
# d = b''
|
||||
# for d in resp.iter_content(decode_unicode=True):
|
||||
# if d != b'\n':
|
||||
# delimited_string += d
|
||||
# continue
|
||||
# break
|
||||
#
|
||||
# # read the next twitter status object
|
||||
# if delimited_string.decode('utf-8').strip().isdigit():
|
||||
# status_id = int(delimited_string)
|
||||
# next_status_obj = resp.raw.read(status_id)
|
||||
# if self.running:
|
||||
# self._data(next_status_obj.decode('utf-8'))
|
||||
|
||||
|
||||
if resp.raw.closed:
|
||||
self.on_closed(resp)
|
||||
|
||||
def _start(self, is_async):
|
||||
self.running = True
|
||||
if is_async:
|
||||
self._thread = Thread(target=self._run)
|
||||
self._thread.start()
|
||||
else:
|
||||
self._run()
|
||||
|
||||
def on_closed(self, resp):
|
||||
""" Called when the response has been closed by Twitter """
|
||||
pass
|
||||
|
||||
def userstream(self,
|
||||
stall_warnings=False,
|
||||
_with=None,
|
||||
replies=None,
|
||||
track=None,
|
||||
locations=None,
|
||||
is_async=False,
|
||||
encoding='utf8'):
|
||||
self.session.params = {'delimited': 'length'}
|
||||
if self.running:
|
||||
raise TweepError('Stream object already connected!')
|
||||
self.url = '/%s/user.json' % STREAM_VERSION
|
||||
self.host = 'userstream.twitter.com'
|
||||
if stall_warnings:
|
||||
self.session.params['stall_warnings'] = stall_warnings
|
||||
if _with:
|
||||
self.session.params['with'] = _with
|
||||
if replies:
|
||||
self.session.params['replies'] = replies
|
||||
if locations and len(locations) > 0:
|
||||
if len(locations) % 4 != 0:
|
||||
raise TweepError("Wrong number of locations points, "
|
||||
"it has to be a multiple of 4")
|
||||
self.session.params['locations'] = ','.join(['%.2f' % l for l in locations])
|
||||
if track:
|
||||
self.session.params['track'] = u','.join(track).encode(encoding)
|
||||
|
||||
self._start(is_async)
|
||||
|
||||
def firehose(self, count=None, is_async=False):
|
||||
self.session.params = {'delimited': 'length'}
|
||||
if self.running:
|
||||
raise TweepError('Stream object already connected!')
|
||||
self.url = '/%s/statuses/firehose.json' % STREAM_VERSION
|
||||
if count:
|
||||
self.url += '&count=%s' % count
|
||||
self._start(is_async)
|
||||
|
||||
def retweet(self, is_async=False):
|
||||
self.session.params = {'delimited': 'length'}
|
||||
if self.running:
|
||||
raise TweepError('Stream object already connected!')
|
||||
self.url = '/%s/statuses/retweet.json' % STREAM_VERSION
|
||||
self._start(is_async)
|
||||
|
||||
def sample(self, is_async=False, languages=None, stall_warnings=False):
|
||||
self.session.params = {'delimited': 'length'}
|
||||
if self.running:
|
||||
raise TweepError('Stream object already connected!')
|
||||
self.url = '/%s/statuses/sample.json' % STREAM_VERSION
|
||||
if languages:
|
||||
self.session.params['language'] = ','.join(map(str, languages))
|
||||
if stall_warnings:
|
||||
self.session.params['stall_warnings'] = 'true'
|
||||
self._start(is_async)
|
||||
|
||||
def filter(self, follow=None, track=None, is_async=False, locations=None,
|
||||
stall_warnings=False, languages=None, encoding='utf8', filter_level=None):
|
||||
self.body = {}
|
||||
self.session.headers['Content-type'] = "application/x-www-form-urlencoded"
|
||||
if self.running:
|
||||
raise TweepError('Stream object already connected!')
|
||||
self.url = '/%s/statuses/filter.json' % STREAM_VERSION
|
||||
if follow:
|
||||
self.body['follow'] = u','.join(follow).encode(encoding)
|
||||
if track:
|
||||
self.body['track'] = u','.join(track).encode(encoding)
|
||||
if locations and len(locations) > 0:
|
||||
if len(locations) % 4 != 0:
|
||||
raise TweepError("Wrong number of locations points, "
|
||||
"it has to be a multiple of 4")
|
||||
self.body['locations'] = u','.join(['%.4f' % l for l in locations])
|
||||
if stall_warnings:
|
||||
self.body['stall_warnings'] = stall_warnings
|
||||
if languages:
|
||||
self.body['language'] = u','.join(map(str, languages))
|
||||
if filter_level:
|
||||
self.body['filter_level'] = filter_level.encode(encoding)
|
||||
self.session.params = {'delimited': 'length'}
|
||||
self.host = 'stream.twitter.com'
|
||||
self._start(is_async)
|
||||
|
||||
def sitestream(self, follow, stall_warnings=False,
|
||||
with_='user', replies=False, is_async=False):
|
||||
self.body = {}
|
||||
if self.running:
|
||||
raise TweepError('Stream object already connected!')
|
||||
self.url = '/%s/site.json' % STREAM_VERSION
|
||||
self.body['follow'] = u','.join(map(six.text_type, follow))
|
||||
self.body['delimited'] = 'length'
|
||||
if stall_warnings:
|
||||
self.body['stall_warnings'] = stall_warnings
|
||||
if with_:
|
||||
self.body['with'] = with_
|
||||
if replies:
|
||||
self.body['replies'] = replies
|
||||
self._start(is_async)
|
||||
|
||||
def disconnect(self):
|
||||
if self.running is False:
|
||||
return
|
||||
self.running = False
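# --- Illustrative usage sketch (not part of the original tweepy source) ---
# A minimal listener and stream setup; `auth` stands in for an already
# configured OAuth handler, so the connect call is left commented out.
# filter() blocks unless is_async=True is passed.
class _PrintListener(StreamListener):
    def on_status(self, status):
        print(status.text)

    def on_error(self, status_code):
        # returning False stops the stream on any non-200 response
        return False

# stream = Stream(auth, _PrintListener())
# stream.filter(track=['python'], languages=['en'])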
|
libs/apprise/plugins/NotifyTwitter/tweepy/utils.py (new file, 50 lines)
@@ -0,0 +1,50 @@
# Tweepy
|
||||
# Copyright 2010 Joshua Roesslein
|
||||
# See LICENSE for details.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import six
|
||||
|
||||
from email.utils import parsedate
|
||||
|
||||
|
||||
def parse_datetime(string):
|
||||
return datetime(*(parsedate(string)[:6]))
|
||||
|
||||
|
||||
def parse_html_value(html):
|
||||
|
||||
return html[html.find('>')+1:html.rfind('<')]
|
||||
|
||||
|
||||
def parse_a_href(atag):
|
||||
|
||||
start = atag.find('"') + 1
|
||||
end = atag.find('"', start)
|
||||
return atag[start:end]
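# --- Illustrative usage sketch (not part of the original tweepy source) ---
# The "source" field of a status is a small HTML anchor; the two helpers
# above split it into a display name and a URL. The sample string is
# illustrative only.
def _source_example():
    source = '<a href="https://example.com/client" rel="nofollow">Example Client</a>'
    return parse_html_value(source), parse_a_href(source)
    # -> ('Example Client', 'https://example.com/client')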
|
||||
|
||||
|
||||
def convert_to_utf8_str(arg):
|
||||
# written by Michael Norton (http://docondev.blogspot.com/)
|
||||
if isinstance(arg, six.text_type):
|
||||
arg = arg.encode('utf-8')
|
||||
elif not isinstance(arg, bytes):
|
||||
arg = six.text_type(arg).encode('utf-8')
|
||||
return arg
|
||||
|
||||
|
||||
def import_simplejson():
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
import json
|
||||
|
||||
return json
|
||||
|
||||
|
||||
def list_to_csv(item_list):
|
||||
if item_list:
|
||||
return ','.join([str(i) for i in item_list])
|
libs/apprise/plugins/NotifyWindows.py (new file, 184 lines)
@@ -0,0 +1,184 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Windows Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
|
||||
import re
|
||||
from time import sleep
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
# Default our global support flag
|
||||
NOTIFY_WINDOWS_SUPPORT_ENABLED = False
|
||||
|
||||
try:
|
||||
# 3rd party modules (Windows Only)
|
||||
import win32api
|
||||
import win32con
|
||||
import win32gui
|
||||
|
||||
# We're good to go!
|
||||
NOTIFY_WINDOWS_SUPPORT_ENABLED = True
|
||||
|
||||
except ImportError:
|
||||
# No problem; we just simply can't support this plugin because we're
|
||||
# either using Linux, or simply do not have pypiwin32 installed.
|
||||
pass
|
||||
|
||||
|
||||
class NotifyWindows(NotifyBase):
|
||||
"""
|
||||
A wrapper for local Windows Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'windows'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
# This entry is a bit hacky, but it allows us to unit-test this library
|
||||
# in an environment that simply doesn't have the windows packages
|
||||
# available to us. It also allows us to handle situations where the
|
||||
# packages actually are present but we need to test that they aren't.
|
||||
# If anyone is seeing this and knows a better way of testing this
|
||||
# outside of what is defined in test/test_windows_plugin.py, please
|
||||
# let me know! :)
|
||||
_enabled = NOTIFY_WINDOWS_SUPPORT_ENABLED
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize Windows Object
|
||||
"""
|
||||
|
||||
# Number of seconds to display notification for
|
||||
self.duration = 12
|
||||
|
||||
# Define our handler
|
||||
self.hwnd = None
|
||||
|
||||
super(NotifyWindows, self).__init__(**kwargs)
|
||||
|
||||
def _on_destroy(self, hwnd, msg, wparam, lparam):
|
||||
"""
|
||||
Destroy callback function
|
||||
"""
|
||||
|
||||
nid = (self.hwnd, 0)
|
||||
win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, nid)
|
||||
win32api.PostQuitMessage(0)
|
||||
|
||||
return None
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform Windows Notification
|
||||
"""
|
||||
|
||||
if not self._enabled:
|
||||
self.logger.warning(
|
||||
"Windows Notifications are not supported by this system.")
|
||||
return False
|
||||
|
||||
# Limit results to just the first 2 lines; otherwise
# there is just too much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
|
||||
try:
|
||||
# Register destruction callback
|
||||
message_map = {win32con.WM_DESTROY: self._on_destroy, }
|
||||
|
||||
# Register the window class.
|
||||
self.wc = win32gui.WNDCLASS()
|
||||
self.hinst = self.wc.hInstance = win32api.GetModuleHandle(None)
|
||||
self.wc.lpszClassName = str("PythonTaskbar")
|
||||
self.wc.lpfnWndProc = message_map
|
||||
self.classAtom = win32gui.RegisterClass(self.wc)
|
||||
|
||||
# Styling and window type
|
||||
style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU
|
||||
self.hwnd = win32gui.CreateWindow(
|
||||
self.classAtom, "Taskbar", style, 0, 0,
|
||||
win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT, 0, 0,
|
||||
self.hinst, None)
|
||||
win32gui.UpdateWindow(self.hwnd)
|
||||
|
||||
# image path
|
||||
icon_path = self.image_path(notify_type, extension='.ico')
|
||||
icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
|
||||
|
||||
try:
|
||||
hicon = win32gui.LoadImage(
|
||||
self.hinst, icon_path, win32con.IMAGE_ICON, 0, 0,
|
||||
icon_flags)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
"Could not load windows notification icon ({}): {}"
|
||||
.format(icon_path, e))
|
||||
|
||||
# disable icon
|
||||
hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION)
|
||||
|
||||
# Taskbar icon
|
||||
flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_TIP
|
||||
nid = (self.hwnd, 0, flags, win32con.WM_USER + 20, hicon,
|
||||
"Tooltip")
|
||||
win32gui.Shell_NotifyIcon(win32gui.NIM_ADD, nid)
|
||||
win32gui.Shell_NotifyIcon(win32gui.NIM_MODIFY, (
|
||||
self.hwnd, 0, win32gui.NIF_INFO, win32con.WM_USER + 20, hicon,
|
||||
"Balloon Tooltip", body, 200, title))
|
||||
|
||||
# take a rest then destroy
|
||||
sleep(self.duration)
|
||||
win32gui.DestroyWindow(self.hwnd)
|
||||
win32gui.UnregisterClass(self.wc.lpszClassName, None)
|
||||
|
||||
self.logger.info('Sent Windows notification.')
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning('Failed to send Windows notification.')
|
||||
self.logger.exception('Windows Exception')
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
There are no parameters necessary for this protocol; simply having
windows:// is all you need. This function just makes sure that
it is in place.
|
||||
|
||||
"""
|
||||
|
||||
# return a very basic set of requirements
|
||||
return {
|
||||
'schema': NotifyWindows.protocol,
|
||||
'user': None,
|
||||
'password': None,
|
||||
'port': None,
|
||||
'host': 'localhost',
|
||||
'fullpath': None,
|
||||
'path': None,
|
||||
'url': url,
|
||||
'qsd': {},
|
||||
}
|
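A small sketch of the windows:// URL handling defined above. parse_url() has no pywin32 dependency, so it can be exercised on any platform; the import path again assumes libs/ is on sys.path.

# Assumed import path; the URL is the minimal form the docstring above describes.
from apprise.plugins.NotifyWindows import NotifyWindows

config = NotifyWindows.parse_url('windows://')
print(config['schema'], config['host'])   # -> windows localhost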
libs/apprise/plugins/NotifyXBMC.py (new file)
@@ -0,0 +1,237 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# XBMC/KODI Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
|
||||
class NotifyXBMC(NotifyBase):
|
||||
"""
|
||||
A wrapper for XBMC/KODI Notifications
|
||||
"""
|
||||
|
||||
# The default protocols
|
||||
protocol = ('xbmc', 'kodi')
|
||||
|
||||
# The default secure protocols
|
||||
secure_protocol = ('xbmc', 'kodis')
|
||||
|
||||
# XBMC uses the http protocol with JSON requests
|
||||
xbmc_default_port = 8080
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
# XBMC default protocol version (v2)
|
||||
xbmc_remote_protocol = 2
|
||||
|
||||
# KODI default protocol version (v6)
|
||||
kodi_remote_protocol = 6
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize XBMC/KODI Object
|
||||
"""
|
||||
super(NotifyXBMC, self).__init__(**kwargs)
|
||||
|
||||
# Number of micro-seconds to display notification for
|
||||
self.duration = 12000
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
# Prepare the default header
|
||||
self.headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
# Default protocol
|
||||
self.protocol = kwargs.get('protocol', self.xbmc_remote_protocol)
|
||||
|
||||
def _payload_60(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Builds payload for KODI API v6.0
|
||||
|
||||
Returns (headers, payload)
|
||||
"""
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'jsonrpc': '2.0',
|
||||
'method': 'GUI.ShowNotification',
|
||||
'params': {
|
||||
'title': title,
|
||||
'message': body,
|
||||
# displaytime is defined in microseconds
|
||||
'displaytime': self.duration,
|
||||
},
|
||||
'id': 1,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
payload['params']['image'] = image_url
|
||||
if notify_type is NotifyType.FAILURE:
|
||||
payload['type'] = 'error'
|
||||
|
||||
elif notify_type is NotifyType.WARNING:
|
||||
payload['type'] = 'warning'
|
||||
|
||||
else:
|
||||
payload['type'] = 'info'
|
||||
|
||||
return (self.headers, dumps(payload))
|
||||
|
||||
def _payload_20(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Builds payload for XBMC API v2.0
|
||||
|
||||
Returns (headers, payload)
|
||||
"""
|
||||
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'jsonrpc': '2.0',
|
||||
'method': 'GUI.ShowNotification',
|
||||
'params': {
|
||||
'title': title,
|
||||
'message': body,
|
||||
# displaytime is defined in microseconds
|
||||
'displaytime': self.duration,
|
||||
},
|
||||
'id': 1,
|
||||
}
|
||||
|
||||
image_url = self.image_url(notify_type)
|
||||
if image_url:
|
||||
payload['params']['image'] = image_url
|
||||
|
||||
return (self.headers, dumps(payload))
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform XBMC/KODI Notification
|
||||
"""
|
||||
|
||||
# Limit results to just the first 2 lines; otherwise
# there is just too much content to display
|
||||
body = re.split('[\r\n]+', body)
|
||||
body[0] = body[0].strip('#').strip()
|
||||
body = '\r\n'.join(body[0:2])
|
||||
|
||||
if self.protocol == self.xbmc_remote_protocol:
|
||||
# XBMC v2.0
|
||||
(headers, payload) = self._payload_20(
|
||||
title, body, notify_type, **kwargs)
|
||||
|
||||
else:
|
||||
# KODI v6.0
|
||||
(headers, payload) = self._payload_60(
|
||||
title, body, notify_type, **kwargs)
|
||||
|
||||
auth = None
|
||||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if self.port:
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += '/jsonrpc'
|
||||
|
||||
self.logger.debug('XBMC/KODI POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('XBMC/KODI Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification:'
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent XBMC/KODI notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending XBMC/KODI '
|
||||
'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
if not results:
|
||||
# We're done early
|
||||
return results
|
||||
|
||||
# We want to set our protocol depending on whether we're using XBMC
|
||||
# or KODI
|
||||
if results.get('schema', '').startswith('xbmc'):
|
||||
# XBMC Support
|
||||
results['protocol'] = NotifyXBMC.xbmc_remote_protocol
|
||||
|
||||
# Assign Default XBMC Port
|
||||
if not results['port']:
|
||||
results['port'] = NotifyXBMC.xbmc_default_port
|
||||
|
||||
else:
|
||||
# KODI Support
|
||||
results['protocol'] = NotifyXBMC.kodi_remote_protocol
|
||||
|
||||
return results
|
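For reference, this is roughly the JSON-RPC body that _payload_60() above assembles for KODI. The title and message values here are made up; the displaytime mirrors the 12000 default defined in __init__().

# Illustrative payload only; real values come from the notify() call.
from json import dumps

payload = {
    'jsonrpc': '2.0',
    'method': 'GUI.ShowNotification',
    'params': {'title': 'Bazarr', 'message': 'Subtitle downloaded', 'displaytime': 12000},
    'id': 1,
}
print(dumps(payload))   # POSTed to http(s)://<host>:8080/jsonrpc, with basic auth if configured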
libs/apprise/plugins/NotifyXML.py (new file)
@@ -0,0 +1,149 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# XML Notify Wrapper
|
||||
#
|
||||
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
|
||||
class NotifyXML(NotifyBase):
|
||||
"""
|
||||
A wrapper for XML Notifications
|
||||
"""
|
||||
|
||||
# The default protocol
|
||||
protocol = 'xml'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'xmls'
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
image_size = NotifyImageSize.XY_128
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize XML Object
|
||||
"""
|
||||
super(NotifyXML, self).__init__(**kwargs)
|
||||
|
||||
self.payload = """<?xml version='1.0' encoding='utf-8'?>
|
||||
<soapenv:Envelope
|
||||
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"
|
||||
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<soapenv:Body>
|
||||
<Notification xmlns:xsi="http://nuxref.com/apprise/NotifyXML-1.0.xsd">
|
||||
<Version>1.0</Version>
|
||||
<Subject>{SUBJECT}</Subject>
|
||||
<MessageType>{MESSAGE_TYPE}</MessageType>
|
||||
<Message>{MESSAGE}</Message>
|
||||
</Notification>
|
||||
</soapenv:Body>
|
||||
</soapenv:Envelope>"""
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not compat_is_basestring(self.fullpath):
|
||||
self.fullpath = '/'
|
||||
|
||||
return
|
||||
|
||||
def notify(self, title, body, notify_type, **kwargs):
|
||||
"""
|
||||
Perform XML Notification
|
||||
"""
|
||||
|
||||
# prepare XML Object
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/xml'
|
||||
}
|
||||
|
||||
re_map = {
|
||||
'{MESSAGE_TYPE}': NotifyBase.quote(notify_type),
|
||||
'{SUBJECT}': NotifyBase.quote(title),
|
||||
'{MESSAGE}': NotifyBase.quote(body),
|
||||
}
|
||||
|
||||
# Iterate over above list and store content accordingly
|
||||
re_table = re.compile(
|
||||
r'(' + '|'.join(re_map.keys()) + r')',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
auth = None
|
||||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if isinstance(self.port, int):
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += self.fullpath
|
||||
payload = re_table.sub(lambda x: re_map[x.group()], self.payload)
|
||||
|
||||
self.logger.debug('XML POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('XML Payload: %s' % str(payload))
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=payload,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent XML notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending XML '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
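A self-contained illustration of the placeholder substitution notify() performs above, using the same single-pass re.sub technique on a shortened, made-up template.

# Shortened template and values are illustrative, not taken from the diff.
import re

template = '<Subject>{SUBJECT}</Subject><MessageType>{MESSAGE_TYPE}</MessageType>'
re_map = {'{SUBJECT}': 'Backup finished', '{MESSAGE_TYPE}': 'info'}
re_table = re.compile(r'(' + '|'.join(re_map.keys()) + r')', re.IGNORECASE)
print(re_table.sub(lambda x: re_map[x.group()], template))
# -> <Subject>Backup finished</Subject><MessageType>info</MessageType>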
libs/apprise/plugins/__init__.py (new file)
@@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-
#
# Our service wrappers
#
# Copyright (C) 2017-2018 Chris Caron <lead2gold@gmail.com>
#
# This file is part of apprise.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.

# Used for Testing; specifically test_email_plugin.py needs access
# to the modules WEBBASE_LOOKUP_TABLE and WebBaseLogin objects
from . import NotifyEmail as NotifyEmailBase

from .NotifyBoxcar import NotifyBoxcar
from .NotifyDiscord import NotifyDiscord
from .NotifyEmail import NotifyEmail
from .NotifyEmby import NotifyEmby
from .NotifyFaast import NotifyFaast
from .NotifyGrowl.NotifyGrowl import NotifyGrowl
from .NotifyIFTTT import NotifyIFTTT
from .NotifyJoin import NotifyJoin
from .NotifyJSON import NotifyJSON
from .NotifyMatterMost import NotifyMatterMost
from .NotifyProwl import NotifyProwl
from .NotifyPushalot import NotifyPushalot
from .NotifyPushBullet import NotifyPushBullet
from .NotifyPushjet.NotifyPushjet import NotifyPushjet
from .NotifyPushover import NotifyPushover
from .NotifyRocketChat import NotifyRocketChat
from .NotifySlack import NotifySlack
from .NotifyStride import NotifyStride
from .NotifyTelegram import NotifyTelegram
from .NotifyToasty import NotifyToasty
from .NotifyTwitter.NotifyTwitter import NotifyTwitter
from .NotifyXBMC import NotifyXBMC
from .NotifyXML import NotifyXML
from .NotifyWindows import NotifyWindows

from .NotifyPushjet import pushjet
from .NotifyGrowl import gntp
from .NotifyTwitter import tweepy

from ..common import NotifyImageSize
from ..common import NOTIFY_IMAGE_SIZES
from ..common import NotifyType
from ..common import NOTIFY_TYPES

__all__ = [
    # Notification Services
    'NotifyBoxcar', 'NotifyEmail', 'NotifyEmby', 'NotifyDiscord',
    'NotifyFaast', 'NotifyGrowl', 'NotifyIFTTT', 'NotifyJoin', 'NotifyJSON',
    'NotifyMatterMost', 'NotifyProwl', 'NotifyPushalot',
    'NotifyPushBullet', 'NotifyPushjet', 'NotifyPushover', 'NotifyRocketChat',
    'NotifySlack', 'NotifyStride', 'NotifyToasty', 'NotifyTwitter',
    'NotifyTelegram', 'NotifyXBMC', 'NotifyXML', 'NotifyWindows',

    # Reference
    'NotifyImageSize', 'NOTIFY_IMAGE_SIZES', 'NotifyType', 'NOTIFY_TYPES',

    # NotifyEmail Base References (used for Testing)
    'NotifyEmailBase',

    # gntp (used for NotifyGrowl Testing)
    'gntp',

    # pushjet (used for NotifyPushjet Testing)
    'pushjet',

    # tweepy (used for NotifyTwitter Testing)
    'tweepy',
]
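A hedged sketch of how the exports above are typically consumed: once libs/ (and the plugins' own dependencies such as requests and six) is importable, each service can be reached through the package namespace or looked up via __all__.

# Assumes libs/ is on sys.path and the plugin dependencies are installed.
import apprise.plugins as plugins

print(plugins.NotifyXBMC.protocol)                 # ('xbmc', 'kodi')
print('NotifyWindows' in plugins.__all__)          # True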
libs/apprise/utils.py (new file)
@@ -0,0 +1,400 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# A simple collection of general functions
|
||||
#
|
||||
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
|
||||
#
|
||||
# This file is part of apprise.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
|
||||
import re
|
||||
|
||||
from os.path import expanduser
|
||||
|
||||
try:
|
||||
# Python 2.7
|
||||
from urllib import unquote
|
||||
from urllib import quote
|
||||
from urlparse import urlparse
|
||||
from urlparse import parse_qsl
|
||||
|
||||
except ImportError:
|
||||
# Python 3.x
|
||||
from urllib.parse import unquote
|
||||
from urllib.parse import quote
|
||||
from urllib.parse import urlparse
|
||||
from urllib.parse import parse_qsl
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# URL Indexing Table for returns via parse_url()
|
||||
VALID_URL_RE = re.compile(
|
||||
r'^[\s]*(?P<schema>[^:\s]+):[/\\]*(?P<path>[^?]+)'
|
||||
r'(\?(?P<kwargs>.+))?[\s]*$',
|
||||
)
|
||||
VALID_HOST_RE = re.compile(r'^[\s]*(?P<path>[^?\s]+)(\?(?P<kwargs>.+))?')
|
||||
VALID_QUERY_RE = re.compile(r'^(?P<path>.*[/\\])(?P<query>[^/\\]*)$')
|
||||
|
||||
# delimiters used to separate values when content is passed in by string.
|
||||
# This is useful when turning a string into a list
|
||||
STRING_DELIMITERS = r'[\[\]\;,\s]+'
|
||||
|
||||
# Pre-Escape content since we reference it so much
|
||||
ESCAPED_PATH_SEPARATOR = re.escape('\\/')
|
||||
ESCAPED_WIN_PATH_SEPARATOR = re.escape('\\')
|
||||
ESCAPED_NUX_PATH_SEPARATOR = re.escape('/')
|
||||
|
||||
TIDY_WIN_PATH_RE = re.compile(
|
||||
r'(^[%s]{2}|[^%s\s][%s]|[\s][%s]{2}])([%s]+)' % (
|
||||
ESCAPED_WIN_PATH_SEPARATOR,
|
||||
ESCAPED_WIN_PATH_SEPARATOR,
|
||||
ESCAPED_WIN_PATH_SEPARATOR,
|
||||
ESCAPED_WIN_PATH_SEPARATOR,
|
||||
ESCAPED_WIN_PATH_SEPARATOR,
|
||||
),
|
||||
)
|
||||
TIDY_WIN_TRIM_RE = re.compile(
|
||||
r'^(.+[^:][^%s])[\s%s]*$' % (
|
||||
ESCAPED_WIN_PATH_SEPARATOR,
|
||||
ESCAPED_WIN_PATH_SEPARATOR,
|
||||
),
|
||||
)
|
||||
|
||||
TIDY_NUX_PATH_RE = re.compile(
|
||||
r'([%s])([%s]+)' % (
|
||||
ESCAPED_NUX_PATH_SEPARATOR,
|
||||
ESCAPED_NUX_PATH_SEPARATOR,
|
||||
),
|
||||
)
|
||||
|
||||
TIDY_NUX_TRIM_RE = re.compile(
|
||||
r'([^%s])[\s%s]+$' % (
|
||||
ESCAPED_NUX_PATH_SEPARATOR,
|
||||
ESCAPED_NUX_PATH_SEPARATOR,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def is_hostname(hostname):
|
||||
"""
|
||||
Validate hostname
|
||||
"""
|
||||
if len(hostname) > 255 or len(hostname) == 0:
|
||||
return False
|
||||
|
||||
if hostname[-1] == ".":
|
||||
hostname = hostname[:-1]
|
||||
|
||||
allowed = re.compile(r'(?!-)[A-Z\d_-]{1,63}(?<!-)$', re.IGNORECASE)
|
||||
return all(allowed.match(x) for x in hostname.split("."))
|
||||
|
||||
|
||||
def compat_is_basestring(content):
|
||||
"""
|
||||
Python 3 support for checking if content is unicode and/or
|
||||
of a string type
|
||||
"""
|
||||
try:
|
||||
# Python v2.x
|
||||
return isinstance(content, basestring)
|
||||
|
||||
except NameError:
|
||||
# Python v3.x
|
||||
return isinstance(content, str)
|
||||
|
||||
|
||||
def tidy_path(path):
|
||||
"""take a filename and or directory and attempts to tidy it up by removing
|
||||
trailing slashes and correcting any formatting issues.
|
||||
|
||||
For example: ////absolute//path// becomes:
|
||||
/absolute/path
|
||||
|
||||
"""
|
||||
# Windows
|
||||
path = TIDY_WIN_PATH_RE.sub('\\1', path.strip())
|
||||
# Linux
|
||||
path = TIDY_NUX_PATH_RE.sub('\\1', path.strip())
|
||||
|
||||
# Linux Based Trim
|
||||
path = TIDY_NUX_TRIM_RE.sub('\\1', path.strip())
|
||||
# Windows Based Trim
|
||||
path = expanduser(TIDY_WIN_TRIM_RE.sub('\\1', path.strip()))
|
||||
return path
|
||||
|
||||
|
||||
def parse_url(url, default_schema='http', verify_host=True):
|
||||
"""A function that greatly simplifies the parsing of a url
|
||||
specified by the end user.
|
||||
|
||||
Valid syntaxes are:
|
||||
<schema>://<user>@<host>:<port>/<path>
|
||||
<schema>://<user>:<passwd>@<host>:<port>/<path>
|
||||
<schema>://<host>:<port>/<path>
|
||||
<schema>://<host>/<path>
|
||||
<schema>://<host>
|
||||
|
||||
Argument parsing is also supported:
|
||||
<schema>://<user>@<host>:<port>/<path>?key1=val&key2=val2
|
||||
<schema>://<user>:<passwd>@<host>:<port>/<path>?key1=val&key2=val2
|
||||
<schema>://<host>:<port>/<path>?key1=val&key2=val2
|
||||
<schema>://<host>/<path>?key1=val&key2=val2
|
||||
<schema>://<host>?key1=val&key2=val2
|
||||
|
||||
The function returns a simple dictionary with all of
|
||||
the parsed content within it and returns 'None' if the
|
||||
content could not be extracted.
|
||||
"""
|
||||
|
||||
if not compat_is_basestring(url):
|
||||
# Simple error checking
|
||||
return None
|
||||
|
||||
# Default Results
|
||||
result = {
|
||||
# The username (if specified)
|
||||
'user': None,
|
||||
# The password (if specified)
|
||||
'password': None,
|
||||
# The port (if specified)
|
||||
'port': None,
|
||||
# The hostname
|
||||
'host': None,
|
||||
# The full path (query + path)
|
||||
'fullpath': None,
|
||||
# The path
|
||||
'path': None,
|
||||
# The query
|
||||
'query': None,
|
||||
# The schema
|
||||
'schema': None,
|
||||
# The schema
|
||||
'url': None,
|
||||
# The arguments passed in (the parsed query)
|
||||
# This is in a dictionary of {'key': 'val', etc }
|
||||
# qsd = Query String Dictionary
|
||||
'qsd': {}
|
||||
}
|
||||
|
||||
qsdata = ''
|
||||
match = VALID_URL_RE.search(url)
|
||||
if match:
|
||||
# Extract basic results
|
||||
result['schema'] = match.group('schema').lower().strip()
|
||||
host = match.group('path').strip()
|
||||
try:
|
||||
qsdata = match.group('kwargs').strip()
|
||||
except AttributeError:
|
||||
# No qsdata
|
||||
pass
|
||||
|
||||
else:
|
||||
match = VALID_HOST_RE.search(url)
|
||||
if not match:
|
||||
return None
|
||||
result['schema'] = default_schema
|
||||
host = match.group('path').strip()
|
||||
try:
|
||||
qsdata = match.group('kwargs').strip()
|
||||
except AttributeError:
|
||||
# No qsdata
|
||||
pass
|
||||
|
||||
# Now do a proper extraction of data
|
||||
parsed = urlparse('http://%s' % host)
|
||||
|
||||
# Parse results
|
||||
result['host'] = parsed[1].strip()
|
||||
|
||||
if not result['host']:
|
||||
# Nothing more we can do without a hostname
|
||||
return None
|
||||
|
||||
result['fullpath'] = quote(unquote(tidy_path(parsed[2].strip())))
|
||||
try:
|
||||
# Handle trailing slashes removed by tidy_path
|
||||
if result['fullpath'][-1] not in ('/', '\\') and \
|
||||
url[-1] in ('/', '\\'):
|
||||
result['fullpath'] += url.strip()[-1]
|
||||
|
||||
except IndexError:
|
||||
# No problem, there simply isn't any returned results
|
||||
# and therefore, no trailing slash
|
||||
pass
|
||||
|
||||
# Parse Query Arguments ?val=key&key=val
# while ensuring that all keys are lowercase
|
||||
if qsdata:
|
||||
result['qsd'] = dict([(k.lower().strip(), v.strip())
|
||||
for k, v in parse_qsl(
|
||||
qsdata,
|
||||
keep_blank_values=True,
|
||||
strict_parsing=False,
|
||||
)])
|
||||
|
||||
if not result['fullpath']:
|
||||
# Default
|
||||
result['fullpath'] = None
|
||||
|
||||
else:
|
||||
# Using full path, extract query from path
|
||||
match = VALID_QUERY_RE.search(result['fullpath'])
|
||||
if match:
|
||||
result['path'] = match.group('path')
|
||||
result['query'] = match.group('query')
|
||||
if not result['query']:
|
||||
result['query'] = None
|
||||
try:
|
||||
(result['user'], result['host']) = \
|
||||
re.split(r'[\s@]+', result['host'])[:2]
|
||||
|
||||
except ValueError:
|
||||
# no problem then, host only exists
|
||||
# and it's already assigned
|
||||
pass
|
||||
|
||||
if result['user'] is not None:
|
||||
try:
|
||||
(result['user'], result['password']) = \
|
||||
re.split(r'[:\s]+', result['user'])[:2]
|
||||
|
||||
except ValueError:
|
||||
# no problem then, user only exists
|
||||
# and it's already assigned
|
||||
pass
|
||||
|
||||
try:
|
||||
(result['host'], result['port']) = \
|
||||
re.split(r'[\s:]+', result['host'])[:2]
|
||||
|
||||
except ValueError:
|
||||
# no problem then, user only exists
|
||||
# and it's already assigned
|
||||
pass
|
||||
|
||||
if result['port']:
|
||||
try:
|
||||
result['port'] = int(result['port'])
|
||||
|
||||
except (ValueError, TypeError):
|
||||
# Invalid Port Specified
|
||||
return None
|
||||
|
||||
if result['port'] == 0:
|
||||
result['port'] = None
|
||||
|
||||
if verify_host and not is_hostname(result['host']):
|
||||
# Nothing more we can do without a hostname
|
||||
return None
|
||||
|
||||
# Re-assemble cleaned up version of the url
|
||||
result['url'] = '%s://' % result['schema']
|
||||
if compat_is_basestring(result['user']):
|
||||
result['url'] += result['user']
|
||||
|
||||
if compat_is_basestring(result['password']):
|
||||
result['url'] += ':%s@' % result['password']
|
||||
|
||||
else:
|
||||
result['url'] += '@'
|
||||
result['url'] += result['host']
|
||||
|
||||
if result['port']:
|
||||
result['url'] += ':%d' % result['port']
|
||||
|
||||
if result['fullpath']:
|
||||
result['url'] += result['fullpath']
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def parse_bool(arg, default=False):
|
||||
"""
|
||||
NZBGet uses 'yes' and 'no' as well as other strings such as 'on' or
|
||||
'off' etc. to handle boolean operations from its control interface.
|
||||
|
||||
This method can just simplify checks to these variables.
|
||||
|
||||
If the content could not be parsed, then the default is returned.
|
||||
"""
|
||||
|
||||
if compat_is_basestring(arg):
|
||||
# no = no - False
|
||||
# of = short for off - False
|
||||
# 0 = int for False
|
||||
# fa = short for False - False
|
||||
# f = short for False - False
|
||||
# n = short for No or Never - False
|
||||
# ne = short for Never - False
|
||||
# di = short for Disable(d) - False
|
||||
# de = short for Deny - False
|
||||
if arg.lower()[0:2] in (
|
||||
'de', 'di', 'ne', 'f', 'n', 'no', 'of', '0', 'fa'):
|
||||
return False
|
||||
# ye = yes - True
|
||||
# on = short for off - True
|
||||
# 1 = int for True
|
||||
# tr = short for True - True
|
||||
# t = short for True - True
|
||||
# al = short for Always (and Allow) - True
|
||||
# en = short for Enable(d) - True
|
||||
elif arg.lower()[0:2] in (
|
||||
'en', 'al', 't', 'y', 'ye', 'on', '1', 'tr'):
|
||||
return True
|
||||
# otherwise
|
||||
return default
|
||||
|
||||
# Handle other types
|
||||
return bool(arg)
|
||||
|
||||
|
||||
def parse_list(*args):
|
||||
"""
|
||||
Take a string list and break it into a delimited
|
||||
list of arguments. This function also supports
the processing of a list of delimited strings and will
|
||||
always return a unique set of arguments. Duplicates are
|
||||
always combined in the final results.
|
||||
|
||||
You can append as many items to the argument listing for
|
||||
parsing.
|
||||
|
||||
Hence: parse_list('.mkv, .iso, .avi') becomes:
|
||||
['.mkv', '.iso', '.avi']
|
||||
|
||||
Hence: parse_list('.mkv, .iso, .avi', ['.avi', '.mp4']) becomes:
|
||||
['.mkv', '.iso', '.avi', '.mp4']
|
||||
|
||||
The parsing is very forgiving and accepts spaces, slashes, commas
|
||||
semicolons, and pipes as delimiters
|
||||
"""
|
||||
|
||||
result = []
|
||||
for arg in args:
|
||||
if compat_is_basestring(arg):
|
||||
result += re.split(STRING_DELIMITERS, arg)
|
||||
|
||||
elif isinstance(arg, (set, list, tuple)):
|
||||
result += parse_list(*arg)
|
||||
|
||||
else:
|
||||
# Convert whatever it is to a string and work with it
|
||||
result += parse_list(str(arg))
|
||||
|
||||
#
|
||||
# filter() eliminates any empty entries
|
||||
#
|
||||
# Since Python v3 returns a filter (iterator) where-as Python v2 returned
|
||||
# a list, we need to change it into a list object to remain compatible with
|
||||
# both distribution types.
|
||||
return sorted([x for x in filter(bool, list(set(result)))])
|
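A hedged usage sketch for the helpers above (the import path assumes libs/ on sys.path; the URL is an arbitrary example).

# Arbitrary example URL; output comments show the expected parse.
from apprise.utils import parse_bool, parse_list, parse_url

result = parse_url('kodi://user:pass@localhost:8080/jsonrpc?image=no')
print(result['schema'], result['host'], result['port'])   # kodi localhost 8080
print(result['user'], result['password'], result['qsd'])  # user pass {'image': 'no'}

print(parse_bool('Yes'), parse_bool('off'))                # True False
print(parse_list('.mkv, .iso', ['.avi']))                  # ['.avi', '.iso', '.mkv']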
libs/apscheduler/__init__.py (new file)
@@ -0,0 +1,10 @@
from pkg_resources import get_distribution, DistributionNotFound

try:
    release = get_distribution('APScheduler').version.split('-')[0]
except DistributionNotFound:
    release = '3.5.0'

version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.'))
version = __version__ = '.'.join(str(x) for x in version_info[:3])
del get_distribution, DistributionNotFound
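The fallback branch above reduces to a simple string transformation; for example, when APScheduler is not installed as a distribution:

# Pure illustration of the version parsing with the hard-coded fallback value.
release = '3.5.0'
version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.'))
print(version_info)                                  # (3, 5, 0)
print('.'.join(str(x) for x in version_info[:3]))    # 3.5.0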
libs/apscheduler/events.py (new file)
@@ -0,0 +1,94 @@
__all__ = ('EVENT_SCHEDULER_STARTED', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_SCHEDULER_PAUSED',
|
||||
'EVENT_SCHEDULER_RESUMED', 'EVENT_EXECUTOR_ADDED', 'EVENT_EXECUTOR_REMOVED',
|
||||
'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', 'EVENT_ALL_JOBS_REMOVED',
|
||||
'EVENT_JOB_ADDED', 'EVENT_JOB_REMOVED', 'EVENT_JOB_MODIFIED', 'EVENT_JOB_EXECUTED',
|
||||
'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', 'EVENT_JOB_SUBMITTED', 'EVENT_JOB_MAX_INSTANCES',
|
||||
'SchedulerEvent', 'JobEvent', 'JobExecutionEvent')
|
||||
|
||||
|
||||
EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0
|
||||
EVENT_SCHEDULER_SHUTDOWN = 2 ** 1
|
||||
EVENT_SCHEDULER_PAUSED = 2 ** 2
|
||||
EVENT_SCHEDULER_RESUMED = 2 ** 3
|
||||
EVENT_EXECUTOR_ADDED = 2 ** 4
|
||||
EVENT_EXECUTOR_REMOVED = 2 ** 5
|
||||
EVENT_JOBSTORE_ADDED = 2 ** 6
|
||||
EVENT_JOBSTORE_REMOVED = 2 ** 7
|
||||
EVENT_ALL_JOBS_REMOVED = 2 ** 8
|
||||
EVENT_JOB_ADDED = 2 ** 9
|
||||
EVENT_JOB_REMOVED = 2 ** 10
|
||||
EVENT_JOB_MODIFIED = 2 ** 11
|
||||
EVENT_JOB_EXECUTED = 2 ** 12
|
||||
EVENT_JOB_ERROR = 2 ** 13
|
||||
EVENT_JOB_MISSED = 2 ** 14
|
||||
EVENT_JOB_SUBMITTED = 2 ** 15
|
||||
EVENT_JOB_MAX_INSTANCES = 2 ** 16
|
||||
EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED |
|
||||
EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED |
|
||||
EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED |
|
||||
EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED |
|
||||
EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES)
|
||||
|
||||
|
||||
class SchedulerEvent(object):
|
||||
"""
|
||||
An event that concerns the scheduler itself.
|
||||
|
||||
:ivar code: the type code of this event
|
||||
:ivar alias: alias of the job store or executor that was added or removed (if applicable)
|
||||
"""
|
||||
|
||||
def __init__(self, code, alias=None):
|
||||
super(SchedulerEvent, self).__init__()
|
||||
self.code = code
|
||||
self.alias = alias
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s (code=%d)>' % (self.__class__.__name__, self.code)
|
||||
|
||||
|
||||
class JobEvent(SchedulerEvent):
|
||||
"""
|
||||
An event that concerns a job.
|
||||
|
||||
:ivar code: the type code of this event
|
||||
:ivar job_id: identifier of the job in question
|
||||
:ivar jobstore: alias of the job store containing the job in question
|
||||
"""
|
||||
|
||||
def __init__(self, code, job_id, jobstore):
|
||||
super(JobEvent, self).__init__(code)
|
||||
self.code = code
|
||||
self.job_id = job_id
|
||||
self.jobstore = jobstore
|
||||
|
||||
|
||||
class JobSubmissionEvent(JobEvent):
|
||||
"""
|
||||
An event that concerns the submission of a job to its executor.
|
||||
|
||||
:ivar scheduled_run_times: a list of datetimes when the job was intended to run
|
||||
"""
|
||||
|
||||
def __init__(self, code, job_id, jobstore, scheduled_run_times):
|
||||
super(JobSubmissionEvent, self).__init__(code, job_id, jobstore)
|
||||
self.scheduled_run_times = scheduled_run_times
|
||||
|
||||
|
||||
class JobExecutionEvent(JobEvent):
|
||||
"""
|
||||
An event that concerns the running of a job within its executor.
|
||||
|
||||
:ivar scheduled_run_time: the time when the job was scheduled to be run
|
||||
:ivar retval: the return value of the successfully executed job
|
||||
:ivar exception: the exception raised by the job
|
||||
:ivar traceback: a formatted traceback for the exception
|
||||
"""
|
||||
|
||||
def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None,
|
||||
traceback=None):
|
||||
super(JobExecutionEvent, self).__init__(code, job_id, jobstore)
|
||||
self.scheduled_run_time = scheduled_run_time
|
||||
self.retval = retval
|
||||
self.exception = exception
|
||||
self.traceback = traceback
|
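A hedged sketch of how these bit flags are meant to be used: OR them into a mask when registering a listener on a scheduler (add_listener() is the standard APScheduler entry point; the scheduler instance itself is assumed and not part of this diff).

from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED

def on_job_done(event):
    # event is a JobExecutionEvent as defined above
    if event.exception:
        print('job %s failed: %s' % (event.job_id, event.exception))
    else:
        print('job %s returned %r' % (event.job_id, event.retval))

# scheduler.add_listener(on_job_done, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)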
libs/apscheduler/executors/__init__.py (new, empty file)
libs/apscheduler/executors/asyncio.py (new file)
@@ -0,0 +1,60 @@
from __future__ import absolute_import

import sys

from apscheduler.executors.base import BaseExecutor, run_job

try:
    from asyncio import iscoroutinefunction
    from apscheduler.executors.base_py3 import run_coroutine_job
except ImportError:
    from trollius import iscoroutinefunction
    run_coroutine_job = None


class AsyncIOExecutor(BaseExecutor):
    """
    Runs jobs in the default executor of the event loop.

    If the job function is a native coroutine function, it is scheduled to be run directly in the
    event loop as soon as possible. All other functions are run in the event loop's default
    executor which is usually a thread pool.

    Plugin alias: ``asyncio``
    """

    def start(self, scheduler, alias):
        super(AsyncIOExecutor, self).start(scheduler, alias)
        self._eventloop = scheduler._eventloop
        self._pending_futures = set()

    def shutdown(self, wait=True):
        # There is no way to honor wait=True without converting this method into a coroutine method
        for f in self._pending_futures:
            if not f.done():
                f.cancel()

        self._pending_futures.clear()

    def _do_submit_job(self, job, run_times):
        def callback(f):
            self._pending_futures.discard(f)
            try:
                events = f.result()
            except BaseException:
                self._run_job_error(job.id, *sys.exc_info()[1:])
            else:
                self._run_job_success(job.id, events)

        if iscoroutinefunction(job.func):
            if run_coroutine_job is not None:
                coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
                f = self._eventloop.create_task(coro)
            else:
                raise Exception('Executing coroutine based jobs is not supported with Trollius')
        else:
            f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times,
                                                self._logger.name)

        f.add_done_callback(callback)
        self._pending_futures.add(f)
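A hedged sketch of the coroutine path above: AsyncIOScheduler (the scheduler this executor pairs with) hands native coroutine jobs to run_coroutine_job and everything else to the loop's default thread pool. The interval and job body are made up.

import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler

async def tick():
    print('tick')

scheduler = AsyncIOScheduler()
scheduler.add_job(tick, 'interval', seconds=5)
# scheduler.start()
# asyncio.get_event_loop().run_forever()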
libs/apscheduler/executors/base.py (new file)
@@ -0,0 +1,146 @@
from abc import ABCMeta, abstractmethod
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta
|
||||
from traceback import format_tb
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from pytz import utc
|
||||
import six
|
||||
|
||||
from apscheduler.events import (
|
||||
JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)
|
||||
|
||||
|
||||
class MaxInstancesReachedError(Exception):
|
||||
def __init__(self, job):
|
||||
super(MaxInstancesReachedError, self).__init__(
|
||||
'Job "%s" has already reached its maximum number of instances (%d)' %
|
||||
(job.id, job.max_instances))
|
||||
|
||||
|
||||
class BaseExecutor(six.with_metaclass(ABCMeta, object)):
|
||||
"""Abstract base class that defines the interface that every executor must implement."""
|
||||
|
||||
_scheduler = None
|
||||
_lock = None
|
||||
_logger = logging.getLogger('apscheduler.executors')
|
||||
|
||||
def __init__(self):
|
||||
super(BaseExecutor, self).__init__()
|
||||
self._instances = defaultdict(lambda: 0)
|
||||
|
||||
def start(self, scheduler, alias):
|
||||
"""
|
||||
Called by the scheduler when the scheduler is being started or when the executor is being
|
||||
added to an already running scheduler.
|
||||
|
||||
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting
|
||||
this executor
|
||||
:param str|unicode alias: alias of this executor as it was assigned to the scheduler
|
||||
|
||||
"""
|
||||
self._scheduler = scheduler
|
||||
self._lock = scheduler._create_lock()
|
||||
self._logger = logging.getLogger('apscheduler.executors.%s' % alias)
|
||||
|
||||
def shutdown(self, wait=True):
|
||||
"""
|
||||
Shuts down this executor.
|
||||
|
||||
:param bool wait: ``True`` to wait until all submitted jobs
|
||||
have been executed
|
||||
"""
|
||||
|
||||
def submit_job(self, job, run_times):
|
||||
"""
|
||||
Submits job for execution.
|
||||
|
||||
:param Job job: job to execute
|
||||
:param list[datetime] run_times: list of datetimes specifying
|
||||
when the job should have been run
|
||||
:raises MaxInstancesReachedError: if the maximum number of
|
||||
allowed instances for this job has been reached
|
||||
|
||||
"""
|
||||
assert self._lock is not None, 'This executor has not been started yet'
|
||||
with self._lock:
|
||||
if self._instances[job.id] >= job.max_instances:
|
||||
raise MaxInstancesReachedError(job)
|
||||
|
||||
self._do_submit_job(job, run_times)
|
||||
self._instances[job.id] += 1
|
||||
|
||||
@abstractmethod
|
||||
def _do_submit_job(self, job, run_times):
|
||||
"""Performs the actual task of scheduling `run_job` to be called."""
|
||||
|
||||
def _run_job_success(self, job_id, events):
|
||||
"""
|
||||
Called by the executor with the list of generated events when :func:`run_job` has been
|
||||
successfully called.
|
||||
|
||||
"""
|
||||
with self._lock:
|
||||
self._instances[job_id] -= 1
|
||||
if self._instances[job_id] == 0:
|
||||
del self._instances[job_id]
|
||||
|
||||
for event in events:
|
||||
self._scheduler._dispatch_event(event)
|
||||
|
||||
def _run_job_error(self, job_id, exc, traceback=None):
|
||||
"""Called by the executor with the exception if there is an error calling `run_job`."""
|
||||
with self._lock:
|
||||
self._instances[job_id] -= 1
|
||||
if self._instances[job_id] == 0:
|
||||
del self._instances[job_id]
|
||||
|
||||
exc_info = (exc.__class__, exc, traceback)
|
||||
self._logger.error('Error running job %s', job_id, exc_info=exc_info)
|
||||
|
||||
|
||||
def run_job(job, jobstore_alias, run_times, logger_name):
|
||||
"""
|
||||
Called by executors to run the job. Returns a list of scheduler events to be dispatched by the
|
||||
scheduler.
|
||||
|
||||
"""
|
||||
events = []
|
||||
logger = logging.getLogger(logger_name)
|
||||
for run_time in run_times:
|
||||
# See if the job missed its run time window, and handle
|
||||
# possible misfires accordingly
|
||||
if job.misfire_grace_time is not None:
|
||||
difference = datetime.now(utc) - run_time
|
||||
grace_time = timedelta(seconds=job.misfire_grace_time)
|
||||
if difference > grace_time:
|
||||
events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
|
||||
run_time))
|
||||
logger.warning('Run time of job "%s" was missed by %s', job, difference)
|
||||
continue
|
||||
|
||||
logger.info('Running job "%s" (scheduled at %s)', job, run_time)
|
||||
try:
|
||||
retval = job.func(*job.args, **job.kwargs)
|
||||
except BaseException:
|
||||
exc, tb = sys.exc_info()[1:]
|
||||
formatted_tb = ''.join(format_tb(tb))
|
||||
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
|
||||
exception=exc, traceback=formatted_tb))
|
||||
logger.exception('Job "%s" raised an exception', job)
|
||||
|
||||
# This is to prevent cyclic references that would lead to memory leaks
|
||||
if six.PY2:
|
||||
sys.exc_clear()
|
||||
del tb
|
||||
else:
|
||||
import traceback
|
||||
traceback.clear_frames(tb)
|
||||
del tb
|
||||
else:
|
||||
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
|
||||
retval=retval))
|
||||
logger.info('Job "%s" executed successfully', job)
|
||||
|
||||
return events
|
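A hedged sketch of how submit_job() above surfaces in practice: when a job already has max_instances copies running, MaxInstancesReachedError is raised, and the scheduler (not part of this diff) handles it by skipping the run and dispatching an EVENT_JOB_MAX_INSTANCES event rather than stacking submissions.

import time
from apscheduler.schedulers.background import BackgroundScheduler

def slow_job():
    time.sleep(30)   # deliberately longer than the interval below

scheduler = BackgroundScheduler()
scheduler.add_job(slow_job, 'interval', seconds=1, max_instances=1, id='slow')
# scheduler.start()   # overlapping runs are skipped rather than run concurrently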
libs/apscheduler/executors/base_py3.py (new file)
@@ -0,0 +1,41 @@
import logging
import sys
from datetime import datetime, timedelta
from traceback import format_tb

from pytz import utc

from apscheduler.events import (
    JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)


async def run_coroutine_job(job, jobstore_alias, run_times, logger_name):
    """Coroutine version of run_job()."""
    events = []
    logger = logging.getLogger(logger_name)
    for run_time in run_times:
        # See if the job missed its run time window, and handle possible misfires accordingly
        if job.misfire_grace_time is not None:
            difference = datetime.now(utc) - run_time
            grace_time = timedelta(seconds=job.misfire_grace_time)
            if difference > grace_time:
                events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
                                                run_time))
                logger.warning('Run time of job "%s" was missed by %s', job, difference)
                continue

        logger.info('Running job "%s" (scheduled at %s)', job, run_time)
        try:
            retval = await job.func(*job.args, **job.kwargs)
        except BaseException:
            exc, tb = sys.exc_info()[1:]
            formatted_tb = ''.join(format_tb(tb))
            events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
                                            exception=exc, traceback=formatted_tb))
            logger.exception('Job "%s" raised an exception', job)
        else:
            events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
                                            retval=retval))
            logger.info('Job "%s" executed successfully', job)

    return events
libs/apscheduler/executors/debug.py (new file)
@@ -0,0 +1,20 @@
import sys

from apscheduler.executors.base import BaseExecutor, run_job


class DebugExecutor(BaseExecutor):
    """
    A special executor that executes the target callable directly instead of deferring it to a
    thread or process.

    Plugin alias: ``debug``
    """

    def _do_submit_job(self, job, run_times):
        try:
            events = run_job(job, job._jobstore_alias, run_times, self._logger.name)
        except BaseException:
            self._run_job_error(job.id, *sys.exc_info()[1:])
        else:
            self._run_job_success(job.id, events)
libs/apscheduler/executors/gevent.py (new file)
@@ -0,0 +1,30 @@
from __future__ import absolute_import
import sys

from apscheduler.executors.base import BaseExecutor, run_job


try:
    import gevent
except ImportError:  # pragma: nocover
    raise ImportError('GeventExecutor requires gevent installed')


class GeventExecutor(BaseExecutor):
    """
    Runs jobs as greenlets.

    Plugin alias: ``gevent``
    """

    def _do_submit_job(self, job, run_times):
        def callback(greenlet):
            try:
                events = greenlet.get()
            except BaseException:
                self._run_job_error(job.id, *sys.exc_info()[1:])
            else:
                self._run_job_success(job.id, events)

        gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\
            link(callback)
libs/apscheduler/executors/pool.py (new file)
@@ -0,0 +1,54 @@
from abc import abstractmethod
import concurrent.futures

from apscheduler.executors.base import BaseExecutor, run_job


class BasePoolExecutor(BaseExecutor):
    @abstractmethod
    def __init__(self, pool):
        super(BasePoolExecutor, self).__init__()
        self._pool = pool

    def _do_submit_job(self, job, run_times):
        def callback(f):
            exc, tb = (f.exception_info() if hasattr(f, 'exception_info') else
                       (f.exception(), getattr(f.exception(), '__traceback__', None)))
            if exc:
                self._run_job_error(job.id, exc, tb)
            else:
                self._run_job_success(job.id, f.result())

        f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name)
        f.add_done_callback(callback)

    def shutdown(self, wait=True):
        self._pool.shutdown(wait)


class ThreadPoolExecutor(BasePoolExecutor):
    """
    An executor that runs jobs in a concurrent.futures thread pool.

    Plugin alias: ``threadpool``

    :param max_workers: the maximum number of spawned threads.
    """

    def __init__(self, max_workers=10):
        pool = concurrent.futures.ThreadPoolExecutor(int(max_workers))
        super(ThreadPoolExecutor, self).__init__(pool)


class ProcessPoolExecutor(BasePoolExecutor):
    """
    An executor that runs jobs in a concurrent.futures process pool.

    Plugin alias: ``processpool``

    :param max_workers: the maximum number of spawned processes.
    """

    def __init__(self, max_workers=10):
        pool = concurrent.futures.ProcessPoolExecutor(int(max_workers))
        super(ProcessPoolExecutor, self).__init__(pool)
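A hedged configuration sketch: the two pool executors above are usually wired into a scheduler by alias, for example:

from apscheduler.executors.pool import ProcessPoolExecutor, ThreadPoolExecutor
from apscheduler.schedulers.background import BackgroundScheduler

executors = {
    'default': ThreadPoolExecutor(max_workers=10),
    'processpool': ProcessPoolExecutor(max_workers=2),
}
scheduler = BackgroundScheduler(executors=executors)
# scheduler.add_job(some_callable, 'interval', minutes=5, executor='processpool')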
libs/apscheduler/executors/tornado.py (new file)
@@ -0,0 +1,54 @@
from __future__ import absolute_import

import sys
from concurrent.futures import ThreadPoolExecutor

from tornado.gen import convert_yielded

from apscheduler.executors.base import BaseExecutor, run_job

try:
    from inspect import iscoroutinefunction
    from apscheduler.executors.base_py3 import run_coroutine_job
except ImportError:
    def iscoroutinefunction(func):
        return False


class TornadoExecutor(BaseExecutor):
    """
    Runs jobs either in a thread pool or directly on the I/O loop.

    If the job function is a native coroutine function, it is scheduled to be run directly in the
    I/O loop as soon as possible. All other functions are run in a thread pool.

    Plugin alias: ``tornado``

    :param int max_workers: maximum number of worker threads in the thread pool
    """

    def __init__(self, max_workers=10):
        super(TornadoExecutor, self).__init__()
        self.executor = ThreadPoolExecutor(max_workers)

    def start(self, scheduler, alias):
        super(TornadoExecutor, self).start(scheduler, alias)
        self._ioloop = scheduler._ioloop

    def _do_submit_job(self, job, run_times):
        def callback(f):
            try:
                events = f.result()
            except BaseException:
                self._run_job_error(job.id, *sys.exc_info()[1:])
            else:
                self._run_job_success(job.id, events)

        if iscoroutinefunction(job.func):
            f = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
        else:
            f = self.executor.submit(run_job, job, job._jobstore_alias, run_times,
                                     self._logger.name)

        f = convert_yielded(f)
        f.add_done_callback(callback)
libs/apscheduler/executors/twisted.py (new file)
@@ -0,0 +1,25 @@
from __future__ import absolute_import

from apscheduler.executors.base import BaseExecutor, run_job


class TwistedExecutor(BaseExecutor):
    """
    Runs jobs in the reactor's thread pool.

    Plugin alias: ``twisted``
    """

    def start(self, scheduler, alias):
        super(TwistedExecutor, self).start(scheduler, alias)
        self._reactor = scheduler._reactor

    def _do_submit_job(self, job, run_times):
        def callback(success, result):
            if success:
                self._run_job_success(job.id, result)
            else:
                self._run_job_error(job.id, result.value, result.tb)

        self._reactor.getThreadPool().callInThreadWithCallback(
            callback, run_job, job, job._jobstore_alias, run_times, self._logger.name)
libs/apscheduler/job.py (new file)
@@ -0,0 +1,297 @@
from collections import Iterable, Mapping
|
||||
from inspect import ismethod, isclass
|
||||
from uuid import uuid4
|
||||
|
||||
import six
|
||||
|
||||
from apscheduler.triggers.base import BaseTrigger
|
||||
from apscheduler.util import (
|
||||
ref_to_obj, obj_to_ref, datetime_repr, repr_escape, get_callable_name, check_callable_args,
|
||||
convert_to_datetime)
|
||||
|
||||
|
||||
class Job(object):
|
||||
"""
|
||||
Contains the options given when scheduling callables and its current schedule and other state.
|
||||
This class should never be instantiated by the user.
|
||||
|
||||
:var str id: the unique identifier of this job
|
||||
:var str name: the description of this job
|
||||
:var func: the callable to execute
|
||||
:var tuple|list args: positional arguments to the callable
|
||||
    :var dict kwargs: keyword arguments to the callable
    :var bool coalesce: whether to only run the job once when several run times are due
    :var trigger: the trigger object that controls the schedule of this job
    :var str executor: the name of the executor that will run this job
    :var int misfire_grace_time: the time (in seconds) how much this job's execution is allowed to
        be late
    :var int max_instances: the maximum number of concurrently executing instances allowed for this
        job
    :var datetime.datetime next_run_time: the next scheduled run time of this job

    .. note::
        The ``misfire_grace_time`` has some non-obvious effects on job execution. See the
        :ref:`missed-job-executions` section in the documentation for an in-depth explanation.
    """

    __slots__ = ('_scheduler', '_jobstore_alias', 'id', 'trigger', 'executor', 'func', 'func_ref',
                 'args', 'kwargs', 'name', 'misfire_grace_time', 'coalesce', 'max_instances',
                 'next_run_time')

    def __init__(self, scheduler, id=None, **kwargs):
        super(Job, self).__init__()
        self._scheduler = scheduler
        self._jobstore_alias = None
        self._modify(id=id or uuid4().hex, **kwargs)

    def modify(self, **changes):
        """
        Makes the given changes to this job and saves it in the associated job store.

        Accepted keyword arguments are the same as the variables on this class.

        .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.modify_job`

        :return Job: this job instance

        """
        self._scheduler.modify_job(self.id, self._jobstore_alias, **changes)
        return self

    def reschedule(self, trigger, **trigger_args):
        """
        Shortcut for switching the trigger on this job.

        .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.reschedule_job`

        :return Job: this job instance

        """
        self._scheduler.reschedule_job(self.id, self._jobstore_alias, trigger, **trigger_args)
        return self

    def pause(self):
        """
        Temporarily suspend the execution of this job.

        .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.pause_job`

        :return Job: this job instance

        """
        self._scheduler.pause_job(self.id, self._jobstore_alias)
        return self

    def resume(self):
        """
        Resume the schedule of this job if previously paused.

        .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.resume_job`

        :return Job: this job instance

        """
        self._scheduler.resume_job(self.id, self._jobstore_alias)
        return self

    def remove(self):
        """
        Unschedules this job and removes it from its associated job store.

        .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.remove_job`

        """
        self._scheduler.remove_job(self.id, self._jobstore_alias)

    @property
    def pending(self):
        """
        Returns ``True`` if the referenced job is still waiting to be added to its designated job
        store.

        """
        return self._jobstore_alias is None

    #
    # Private API
    #

    def _get_run_times(self, now):
        """
        Computes the scheduled run times between ``next_run_time`` and ``now`` (inclusive).

        :type now: datetime.datetime
        :rtype: list[datetime.datetime]

        """
        run_times = []
        next_run_time = self.next_run_time
        while next_run_time and next_run_time <= now:
            run_times.append(next_run_time)
            next_run_time = self.trigger.get_next_fire_time(next_run_time, now)

        return run_times

    def _modify(self, **changes):
        """
        Validates the changes to the Job and makes the modifications if and only if all of them
        validate.

        """
        approved = {}

        if 'id' in changes:
            value = changes.pop('id')
            if not isinstance(value, six.string_types):
                raise TypeError("id must be a nonempty string")
            if hasattr(self, 'id'):
                raise ValueError('The job ID may not be changed')
            approved['id'] = value

        if 'func' in changes or 'args' in changes or 'kwargs' in changes:
            func = changes.pop('func') if 'func' in changes else self.func
            args = changes.pop('args') if 'args' in changes else self.args
            kwargs = changes.pop('kwargs') if 'kwargs' in changes else self.kwargs

            if isinstance(func, six.string_types):
                func_ref = func
                func = ref_to_obj(func)
            elif callable(func):
                try:
                    func_ref = obj_to_ref(func)
                except ValueError:
                    # If this happens, this Job won't be serializable
                    func_ref = None
            else:
                raise TypeError('func must be a callable or a textual reference to one')

            if not hasattr(self, 'name') and changes.get('name', None) is None:
                changes['name'] = get_callable_name(func)

            if isinstance(args, six.string_types) or not isinstance(args, Iterable):
                raise TypeError('args must be a non-string iterable')
            if isinstance(kwargs, six.string_types) or not isinstance(kwargs, Mapping):
                raise TypeError('kwargs must be a dict-like object')

            check_callable_args(func, args, kwargs)

            approved['func'] = func
            approved['func_ref'] = func_ref
            approved['args'] = args
            approved['kwargs'] = kwargs

        if 'name' in changes:
            value = changes.pop('name')
            if not value or not isinstance(value, six.string_types):
                raise TypeError("name must be a nonempty string")
            approved['name'] = value

        if 'misfire_grace_time' in changes:
            value = changes.pop('misfire_grace_time')
            if value is not None and (not isinstance(value, six.integer_types) or value <= 0):
                raise TypeError('misfire_grace_time must be either None or a positive integer')
            approved['misfire_grace_time'] = value

        if 'coalesce' in changes:
            value = bool(changes.pop('coalesce'))
            approved['coalesce'] = value

        if 'max_instances' in changes:
            value = changes.pop('max_instances')
            if not isinstance(value, six.integer_types) or value <= 0:
                raise TypeError('max_instances must be a positive integer')
            approved['max_instances'] = value

        if 'trigger' in changes:
            trigger = changes.pop('trigger')
            if not isinstance(trigger, BaseTrigger):
                raise TypeError('Expected a trigger instance, got %s instead' %
                                trigger.__class__.__name__)

            approved['trigger'] = trigger

        if 'executor' in changes:
            value = changes.pop('executor')
            if not isinstance(value, six.string_types):
                raise TypeError('executor must be a string')
            approved['executor'] = value

        if 'next_run_time' in changes:
            value = changes.pop('next_run_time')
            approved['next_run_time'] = convert_to_datetime(value, self._scheduler.timezone,
                                                            'next_run_time')

        if changes:
            raise AttributeError('The following are not modifiable attributes of Job: %s' %
                                 ', '.join(changes))

        for key, value in six.iteritems(approved):
            setattr(self, key, value)

    def __getstate__(self):
        # Don't allow this Job to be serialized if the function reference could not be determined
        if not self.func_ref:
            raise ValueError(
                'This Job cannot be serialized since the reference to its callable (%r) could not '
                'be determined. Consider giving a textual reference (module:function name) '
                'instead.' % (self.func,))

        # Instance methods cannot survive serialization as-is, so store the "self" argument
        # explicitly
        if ismethod(self.func) and not isclass(self.func.__self__):
            args = (self.func.__self__,) + tuple(self.args)
        else:
            args = self.args

        return {
            'version': 1,
            'id': self.id,
            'func': self.func_ref,
            'trigger': self.trigger,
            'executor': self.executor,
            'args': args,
            'kwargs': self.kwargs,
            'name': self.name,
            'misfire_grace_time': self.misfire_grace_time,
            'coalesce': self.coalesce,
            'max_instances': self.max_instances,
            'next_run_time': self.next_run_time
        }

    def __setstate__(self, state):
        if state.get('version', 1) > 1:
            raise ValueError('Job has version %s, but only version 1 can be handled' %
                             state['version'])

        self.id = state['id']
        self.func_ref = state['func']
        self.func = ref_to_obj(self.func_ref)
        self.trigger = state['trigger']
        self.executor = state['executor']
        self.args = state['args']
        self.kwargs = state['kwargs']
        self.name = state['name']
        self.misfire_grace_time = state['misfire_grace_time']
        self.coalesce = state['coalesce']
        self.max_instances = state['max_instances']
        self.next_run_time = state['next_run_time']

    def __eq__(self, other):
        if isinstance(other, Job):
            return self.id == other.id
        return NotImplemented

    def __repr__(self):
        return '<Job (id=%s name=%s)>' % (repr_escape(self.id), repr_escape(self.name))

    def __str__(self):
        return repr_escape(self.__unicode__())

    def __unicode__(self):
        if hasattr(self, 'next_run_time'):
            status = ('next run at: ' + datetime_repr(self.next_run_time) if
                      self.next_run_time else 'paused')
        else:
            status = 'pending'

        return u'%s (trigger: %s, %s)' % (self.name, self.trigger, status)
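For reference, a minimal usage sketch of the Job methods above through APScheduler's public scheduler interface. This is not part of the diff; the `tick` function, the job id and the trigger arguments are illustrative placeholders.

# Illustrative sketch only -- assumes APScheduler 3.x is importable.
from apscheduler.schedulers.background import BackgroundScheduler


def tick():
    print('tick')


scheduler = BackgroundScheduler()
scheduler.start()

# add_job() returns a Job instance like the class defined above
job = scheduler.add_job(tick, 'interval', seconds=30, id='tick_job')

# Each Job method is a thin wrapper around the corresponding scheduler call
job.modify(max_instances=2, misfire_grace_time=60)  # BaseScheduler.modify_job()
job.reschedule('cron', minute='*/5')                # BaseScheduler.reschedule_job()
job.pause()                                         # BaseScheduler.pause_job()
job.resume()                                        # BaseScheduler.resume_job()
job.remove()                                        # BaseScheduler.remove_job()

scheduler.shutdown()

Because modify() and reschedule() delegate to the scheduler, the change is validated by _modify() and persisted to the job's job store rather than only mutating the in-memory object.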
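A second small sketch of the serialization constraint enforced in __getstate__ above: a callable with no importable textual reference (a lambda here, purely for illustration) leaves func_ref as None, so attempting to pickle the job fails.

# Illustrative sketch only -- assumes APScheduler 3.x; the lambda is deliberately
# unserializable to trigger the func_ref check in Job.__getstate__().
import pickle

from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler()
scheduler.start()
job = scheduler.add_job(lambda: None, 'interval', seconds=30)

try:
    pickle.dumps(job)   # __getstate__() sees func_ref is None and refuses
except ValueError as exc:
    print(exc)          # "This Job cannot be serialized since the reference ..."

scheduler.shutdown()

This is why persistent job stores require jobs to be defined with module-level functions or "module:function" textual references.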