mirror of
https://github.com/morpheus65535/bazarr.git
synced 2025-04-24 06:37:16 -04:00
Merge branch 'development' into autonomous
# Conflicts: # bazarr/api.py # bazarr/config.py # bazarr/get_movies.py # bazarr/get_providers.py # bazarr/get_subtitle.py # bazarr/init.py # bazarr/list_subtitles.py # bazarr/server.py # bazarr/signalr_client.py # bazarr/utils.py # frontend/src/@types/system.d.ts # libs/version.txt # tests/conftest.py # tests/test_assrt.py
This commit is contained in:
commit
dc86c0076d
494 changed files with 29244 additions and 110782 deletions
|
@ -20,8 +20,8 @@ def check_python_version():
|
|||
print("Python " + minimum_py3_str + " or greater required. "
|
||||
"Current version is " + platform.python_version() + ". Please upgrade Python.")
|
||||
sys.exit(1)
|
||||
elif int(python_version[0]) == 3 and int(python_version[1]) == 9:
|
||||
print("Python 3.9.x is unsupported. Current version is " + platform.python_version() +
|
||||
elif int(python_version[0]) == 3 and int(python_version[1]) > 8:
|
||||
print("Python version greater than 3.8.x is unsupported. Current version is " + platform.python_version() +
|
||||
". Keep in mind that even if it works, you're on your own.")
|
||||
elif (int(python_version[0]) == minimum_py3_tuple[0] and int(python_version[1]) < minimum_py3_tuple[1]) or \
|
||||
(int(python_version[0]) != minimum_py3_tuple[0]):
|
||||
|
|
2319
bazarr/api.py
2319
bazarr/api.py
File diff suppressed because it is too large
Load diff
25
bazarr/api/__init__.py
Normal file
25
bazarr/api/__init__.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
# coding=utf-8

from .badges import api_bp_badges
from .system import api_bp_system
from .series import api_bp_series
from .episodes import api_bp_episodes
from .providers import api_bp_providers
from .subtitles import api_bp_subtitles
from .webhooks import api_bp_webhooks
from .history import api_bp_history
from .files import api_bp_files
from .movies import api_bp_movies

# Every API blueprint the Flask application must register, in one place so
# the server setup code can iterate over them.
api_bp_list = [
    api_bp_badges,
    api_bp_system,
    api_bp_series,
    api_bp_episodes,
    api_bp_providers,
    api_bp_subtitles,
    api_bp_webhooks,
    api_bp_history,
    api_bp_files,
    api_bp_movies
]
|
12
bazarr/api/badges/__init__.py
Normal file
12
bazarr/api/badges/__init__.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
# coding=utf-8

from flask import Blueprint
from flask_restful import Api

from .badges import Badges

# Blueprint exposing the sidebar badge counters.
api_bp_badges = Blueprint('api_badges', __name__)
api = Api(api_bp_badges)

api.add_resource(Badges, '/badges')
|
47
bazarr/api/badges/badges.py
Normal file
47
bazarr/api/badges/badges.py
Normal file
|
@ -0,0 +1,47 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
import operator
|
||||
from functools import reduce
|
||||
|
||||
from database import get_exclusion_clause, TableEpisodes, TableShows, TableMovies
|
||||
from get_providers import get_throttled_providers
|
||||
from utils import get_health_issues
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class Badges(Resource):
    """Expose the counters shown as badges in the UI sidebar."""

    @authenticate
    def get(self):
        """Return counts of wanted episodes/movies, throttled providers and health issues."""
        # ``is_null(False)`` generates real SQL (``IS NOT NULL``).  The former
        # ``field is not None`` was a Python identity test on the peewee field
        # object and therefore always true, silently filtering nothing.
        episodes_conditions = [(TableEpisodes.missing_subtitles.is_null(False)),
                               (TableEpisodes.missing_subtitles != '[]')]
        episodes_conditions += get_exclusion_clause('series')
        missing_episodes = TableEpisodes.select(TableShows.tags,
                                                TableShows.seriesType,
                                                TableEpisodes.monitored)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(reduce(operator.and_, episodes_conditions))\
            .count()

        movies_conditions = [(TableMovies.missing_subtitles.is_null(False)),
                             (TableMovies.missing_subtitles != '[]')]
        movies_conditions += get_exclusion_clause('movie')
        missing_movies = TableMovies.select(TableMovies.tags,
                                            TableMovies.monitored)\
            .where(reduce(operator.and_, movies_conditions))\
            .count()

        # NOTE(review): ``eval`` here only parses data produced locally by
        # ``get_throttled_providers()``, not user input; confirm whether the
        # round-trip through ``str``/``eval`` is still needed at all.
        throttled_providers = len(eval(str(get_throttled_providers())))

        health_issues = len(get_health_issues())

        result = {
            "episodes": missing_episodes,
            "movies": missing_movies,
            "providers": throttled_providers,
            "status": health_issues
        }
        return jsonify(result)
|
20
bazarr/api/episodes/__init__.py
Normal file
20
bazarr/api/episodes/__init__.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
# coding=utf-8

from flask import Blueprint
from flask_restful import Api

from .episodes import Episodes
from .episodes_subtitles import EpisodesSubtitles
from .history import EpisodesHistory
from .wanted import EpisodesWanted
from .blacklist import EpisodesBlacklist

# Blueprint grouping every episode-related endpoint.
api_bp_episodes = Blueprint('api_episodes', __name__)
api = Api(api_bp_episodes)

api.add_resource(Episodes, '/episodes')
api.add_resource(EpisodesWanted, '/episodes/wanted')
api.add_resource(EpisodesSubtitles, '/episodes/subtitles')
api.add_resource(EpisodesHistory, '/episodes/history')
api.add_resource(EpisodesBlacklist, '/episodes/blacklist')
|
92
bazarr/api/episodes/blacklist.py
Normal file
92
bazarr/api/episodes/blacklist.py
Normal file
|
@ -0,0 +1,92 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import pretty
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from database import TableEpisodes, TableShows, TableBlacklist
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
from utils import blacklist_log, delete_subtitles, blacklist_delete_all, blacklist_delete
|
||||
from helper import path_mappings
|
||||
from get_subtitle import episode_download_subtitles
|
||||
from event_handler import event_stream
|
||||
|
||||
|
||||
# GET: get blacklist
|
||||
# POST: add blacklist
|
||||
# DELETE: remove blacklist
|
||||
class EpisodesBlacklist(Resource):
    """Endpoints for the episode-subtitles blacklist.

    GET    -> list blacklisted subtitles
    POST   -> blacklist a subtitle, delete it from disk and search again
    DELETE -> remove one entry, or every entry with ``?all=true``
    """

    @authenticate
    def get(self):
        """Return blacklist entries, newest first, with optional pagination."""
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1

        rows = TableBlacklist.select(TableShows.title.alias('seriesTitle'),
                                     TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                     TableEpisodes.title.alias('episodeTitle'),
                                     TableEpisodes.sonarrSeriesId,
                                     TableBlacklist.provider,
                                     TableBlacklist.subs_id,
                                     TableBlacklist.language,
                                     TableBlacklist.timestamp)\
            .join(TableEpisodes, on=(TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId))\
            .join(TableShows, on=(TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId))\
            .order_by(TableBlacklist.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        rows = list(rows)

        for entry in rows:
            # Expose both a parseable and a human-friendly timestamp.
            entry["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(entry['timestamp'])).strftime('%x %X')
            entry.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(entry['timestamp']))})

            postprocessEpisode(entry)

        return jsonify(data=rows)

    @authenticate
    def post(self):
        """Blacklist one subtitle, remove it from disk and trigger a new search."""
        series_id = int(request.args.get('seriesid'))
        episode_id = int(request.args.get('episodeid'))
        provider = request.form.get('provider')
        subs_id = request.form.get('subs_id')
        language = request.form.get('language')

        episode = TableEpisodes.select(TableEpisodes.path)\
            .where(TableEpisodes.sonarrEpisodeId == episode_id)\
            .dicts()\
            .get()

        media_path = episode['path']
        subtitles_path = request.form.get('subtitles_path')

        blacklist_log(sonarr_series_id=series_id,
                      sonarr_episode_id=episode_id,
                      provider=provider,
                      subs_id=subs_id,
                      language=language)
        delete_subtitles(media_type='series',
                         language=language,
                         forced=False,
                         hi=False,
                         media_path=path_mappings.path_replace(media_path),
                         subtitles_path=subtitles_path,
                         sonarr_series_id=series_id,
                         sonarr_episode_id=episode_id)
        # Immediately look for a replacement subtitle and refresh the UI.
        episode_download_subtitles(episode_id)
        event_stream(type='episode-history')
        return '', 200

    @authenticate
    def delete(self):
        """Delete one blacklist entry, or the whole blacklist when ``all=true``."""
        if request.args.get("all") == "true":
            blacklist_delete_all()
        else:
            blacklist_delete(provider=request.form.get('provider'),
                             subs_id=request.form.get('subs_id'))
        return '', 204
|
30
bazarr/api/episodes/episodes.py
Normal file
30
bazarr/api/episodes/episodes.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from database import TableEpisodes
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
|
||||
|
||||
class Episodes(Resource):
    """GET: fetch episodes either by episode id(s) or by series id(s)."""

    @authenticate
    def get(self):
        """Return episode rows; 400 when neither id list is supplied."""
        series_ids = request.args.getlist('seriesid[]')
        episode_ids = request.args.getlist('episodeid[]')

        if episode_ids:
            query = TableEpisodes.select().where(TableEpisodes.sonarrEpisodeId.in_(episode_ids)).dicts()
        elif series_ids:
            query = TableEpisodes.select()\
                .where(TableEpisodes.sonarrSeriesId.in_(series_ids))\
                .order_by(TableEpisodes.season.desc(), TableEpisodes.episode.desc())\
                .dicts()
        else:
            return "Series or Episode ID not provided", 400

        episodes = list(query)
        for episode in episodes:
            postprocessEpisode(episode)

        return jsonify(data=episodes)
|
177
bazarr/api/episodes/episodes_subtitles.py
Normal file
177
bazarr/api/episodes/episodes_subtitles.py
Normal file
|
@ -0,0 +1,177 @@
|
|||
# coding=utf-8
|
||||
|
||||
import os
|
||||
|
||||
from flask import request
|
||||
from flask_restful import Resource
|
||||
from subliminal_patch.core import SUBTITLE_EXTENSIONS
|
||||
|
||||
from database import TableEpisodes, get_audio_profile_languages
|
||||
from ..utils import authenticate
|
||||
from helper import path_mappings
|
||||
from get_providers import get_providers, get_providers_auth
|
||||
from get_subtitle import download_subtitle, manual_upload_subtitle
|
||||
from utils import history_log, delete_subtitles
|
||||
from notifier import send_notifications
|
||||
from list_subtitles import store_subtitles
|
||||
from event_handler import event_stream
|
||||
from config import settings
|
||||
|
||||
|
||||
# PATCH: Download Subtitles
|
||||
# POST: Upload Subtitles
|
||||
# DELETE: Delete Subtitles
|
||||
class EpisodesSubtitles(Resource):
    """Manage the subtitles of a single episode.

    PATCH  -> search providers and download a subtitle
    POST   -> manually upload a subtitle file
    DELETE -> delete a subtitle file from disk
    """

    @staticmethod
    def _episode_info(sonarr_episode_id):
        """Return title/path/scene_name/audio_language for one episode.

        Shared by all three verbs, which previously duplicated this query.
        """
        return TableEpisodes.select(TableEpisodes.title,
                                    TableEpisodes.path,
                                    TableEpisodes.scene_name,
                                    TableEpisodes.audio_language)\
            .where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id)\
            .dicts()\
            .get()

    @authenticate
    def patch(self):
        """Search enabled providers and download the best matching subtitle."""
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = self._episode_info(sonarrEpisodeId)

        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        # Downstream code expects the literal string "None" when no scene name
        # is known.
        sceneName = episodeInfo['scene_name'] if episodeInfo['scene_name'] is not None else "None"

        language = request.form.get('language')
        hi = request.form.get('hi').capitalize()
        forced = request.form.get('forced').capitalize()

        providers_list = get_providers()
        providers_auth = get_providers_auth()

        audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
        if audio_language_list:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = None

        try:
            result = download_subtitle(episodePath, language, audio_language, hi, forced, providers_list,
                                       providers_auth, sceneName, title, 'series')
            if result is not None:
                # result layout: (message, path, language, provider, score,
                #                 forced, subs_id, subs_path, hi)
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                history_log(1, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score,
                            subs_id, subs_path)
                send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)
            else:
                # Nothing found: still notify the UI so it can refresh state.
                event_stream(type='episode', payload=sonarrEpisodeId)

        except OSError:
            # Disk errors while writing the subtitle are deliberately ignored;
            # the client only cares that the request was processed.
            pass

        return '', 204

    @authenticate
    def post(self):
        """Manually upload a subtitle file for the episode."""
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = self._episode_info(sonarrEpisodeId)

        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        sceneName = episodeInfo['scene_name'] if episodeInfo['scene_name'] is not None else "None"
        audio_language = episodeInfo['audio_language']

        language = request.form.get('language')
        forced = request.form.get('forced') == 'true'
        hi = request.form.get('hi') == 'true'
        subFile = request.files.get('file')

        _, ext = os.path.splitext(subFile.filename)

        if ext not in SUBTITLE_EXTENSIONS:
            raise ValueError('A subtitle of an invalid format was uploaded.')

        try:
            result = manual_upload_subtitle(path=episodePath,
                                            language=language,
                                            forced=forced,
                                            hi=hi,
                                            title=title,
                                            scene_name=sceneName,
                                            media_type='series',
                                            subtitle=subFile,
                                            audio_language=audio_language)

            if result is not None:
                message = result[0]
                path = result[1]
                subs_path = result[2]
                if hi:
                    language_code = language + ":hi"
                elif forced:
                    language_code = language + ":forced"
                else:
                    language_code = language
                provider = "manual"
                # Manual uploads are recorded with a perfect score.
                score = 360
                history_log(4, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score,
                            subtitles_path=subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)

        except OSError:
            pass

        return '', 204

    @authenticate
    def delete(self):
        """Delete a subtitle file that belongs to the episode."""
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = self._episode_info(sonarrEpisodeId)

        episodePath = path_mappings.path_replace(episodeInfo['path'])

        language = request.form.get('language')
        forced = request.form.get('forced')
        hi = request.form.get('hi')
        subtitlesPath = request.form.get('path')

        # The client sends the mapped path; store the reverse-mapped one.
        subtitlesPath = path_mappings.path_replace_reverse(subtitlesPath)

        delete_subtitles(media_type='series',
                         language=language,
                         forced=forced,
                         hi=hi,
                         media_path=episodePath,
                         subtitles_path=subtitlesPath,
                         sonarr_series_id=sonarrSeriesId,
                         sonarr_episode_id=sonarrEpisodeId)

        return '', 204
|
133
bazarr/api/episodes/history.py
Normal file
133
bazarr/api/episodes/history.py
Normal file
|
@ -0,0 +1,133 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import operator
|
||||
import pretty
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
from functools import reduce
|
||||
from peewee import fn
|
||||
from datetime import timedelta
|
||||
|
||||
from database import get_exclusion_clause, TableEpisodes, TableShows, TableHistory, TableBlacklist
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
from config import settings
|
||||
from helper import path_mappings
|
||||
|
||||
|
||||
class EpisodesHistory(Resource):
    """GET: subtitles history for episodes, annotated as upgradable/blacklisted."""

    @authenticate
    def get(self):
        """Return paginated history rows plus a total count."""
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        episodeid = request.args.get('episodeid')

        upgradable_episodes_not_perfect = []
        if settings.general.getboolean('upgrade_subs'):
            days_to_upgrade_subs = settings.general.days_to_upgrade_subs
            minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
                                 datetime.datetime(1970, 1, 1)).total_seconds()

            if settings.general.getboolean('upgrade_manual'):
                query_actions = [1, 2, 3, 6]
            else:
                query_actions = [1, 3]

            # ``is_null(False)`` builds ``score IS NOT NULL`` in SQL.  The
            # former ``TableHistory.score is not None`` was evaluated by Python
            # on the field object, was always true and filtered nothing.
            upgradable_episodes_conditions = [(TableHistory.action.in_(query_actions)),
                                              (TableHistory.timestamp > minimum_timestamp),
                                              (TableHistory.score.is_null(False))]
            upgradable_episodes_conditions += get_exclusion_clause('series')
            upgradable_episodes = TableHistory.select(TableHistory.video_path,
                                                      fn.MAX(TableHistory.timestamp).alias('timestamp'),
                                                      TableHistory.score,
                                                      TableShows.tags,
                                                      TableEpisodes.monitored,
                                                      TableShows.seriesType)\
                .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
                .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
                .where(reduce(operator.and_, upgradable_episodes_conditions))\
                .group_by(TableHistory.video_path)\
                .dicts()
            upgradable_episodes = list(upgradable_episodes)
            for upgradable_episode in upgradable_episodes:
                if upgradable_episode['timestamp'] > minimum_timestamp:
                    try:
                        int(upgradable_episode['score'])
                    except ValueError:
                        pass
                    else:
                        # 360 is a perfect score; anything lower can be upgraded.
                        if int(upgradable_episode['score']) < 360:
                            upgradable_episodes_not_perfect.append(upgradable_episode)

        # Same ``is_null(False)`` fix as above for the title filter.
        query_conditions = [(TableEpisodes.title.is_null(False))]
        if episodeid:
            query_conditions.append((TableEpisodes.sonarrEpisodeId == episodeid))
        query_condition = reduce(operator.and_, query_conditions)
        episode_history = TableHistory.select(TableHistory.id,
                                              TableShows.title.alias('seriesTitle'),
                                              TableEpisodes.monitored,
                                              TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                              TableEpisodes.title.alias('episodeTitle'),
                                              TableHistory.timestamp,
                                              TableHistory.subs_id,
                                              TableHistory.description,
                                              TableHistory.sonarrSeriesId,
                                              TableEpisodes.path,
                                              TableHistory.language,
                                              TableHistory.score,
                                              TableShows.tags,
                                              TableHistory.action,
                                              TableHistory.subtitles_path,
                                              TableHistory.sonarrEpisodeId,
                                              TableHistory.provider,
                                              TableShows.seriesType)\
            .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
            .where(query_condition)\
            .order_by(TableHistory.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        episode_history = list(episode_history)

        blacklist_db = TableBlacklist.select(TableBlacklist.provider, TableBlacklist.subs_id).dicts()
        blacklist_db = list(blacklist_db)

        for item in episode_history:
            # Mark episode as upgradable or not
            item.update({"upgradable": False})
            if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
                    "tags": str(item['tags']), "monitored": str(item['monitored']),
                    "seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect:
                if os.path.isfile(path_mappings.path_replace(item['subtitles_path'])):
                    item.update({"upgradable": True})

            del item['path']

            postprocessEpisode(item)

            if item['score']:
                item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"

            # Make timestamp pretty
            if item['timestamp']:
                item["raw_timestamp"] = int(item['timestamp'])
                item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
                item['timestamp'] = pretty.date(item["raw_timestamp"])

            # Check if subtitles is blacklisted
            item.update({"blacklisted": False})
            if item['action'] not in [0, 4, 5]:
                for blacklisted_item in blacklist_db:
                    if blacklisted_item['provider'] == item['provider'] and \
                            blacklisted_item['subs_id'] == item['subs_id']:
                        item.update({"blacklisted": True})
                        break

        count = TableHistory.select()\
            .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
            .where(TableEpisodes.title.is_null(False)).count()

        return jsonify(data=episode_history, total=count)
|
74
bazarr/api/episodes/wanted.py
Normal file
74
bazarr/api/episodes/wanted.py
Normal file
|
@ -0,0 +1,74 @@
|
|||
# coding=utf-8
|
||||
|
||||
import operator
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
from functools import reduce
|
||||
|
||||
from database import get_exclusion_clause, TableEpisodes, TableShows
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
|
||||
|
||||
# GET: Get Wanted Episodes
|
||||
class EpisodesWanted(Resource):
    """GET: episodes that still miss at least one wanted subtitle."""

    @staticmethod
    def _wanted_select():
        """Base SELECT with every column the wanted view needs.

        Shared by both branches of ``get``, which previously duplicated it.
        """
        return TableEpisodes.select(TableShows.title.alias('seriesTitle'),
                                    TableEpisodes.monitored,
                                    TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                    TableEpisodes.title.alias('episodeTitle'),
                                    TableEpisodes.missing_subtitles,
                                    TableEpisodes.sonarrSeriesId,
                                    TableEpisodes.sonarrEpisodeId,
                                    TableEpisodes.scene_name.alias('sceneName'),
                                    TableShows.tags,
                                    TableEpisodes.failedAttempts,
                                    TableShows.seriesType)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))

    @authenticate
    def get(self):
        """Return wanted episodes, paginated unless specific ids are given."""
        episodeid = request.args.getlist('episodeid[]')

        wanted_conditions = [(TableEpisodes.missing_subtitles != '[]')]
        if len(episodeid) > 0:
            # ``in_`` builds a SQL IN clause.  The former Python ``in``
            # operator compared the field object against the list, always
            # yielded False and made the query return no rows.
            wanted_conditions.append((TableEpisodes.sonarrEpisodeId.in_(episodeid)))
        wanted_conditions += get_exclusion_clause('series')
        wanted_condition = reduce(operator.and_, wanted_conditions)

        if len(episodeid) > 0:
            data = self._wanted_select()\
                .where(wanted_condition)\
                .dicts()
        else:
            start = request.args.get('start') or 0
            length = request.args.get('length') or -1
            data = self._wanted_select()\
                .where(wanted_condition)\
                .order_by(TableEpisodes.rowid.desc())\
                .limit(length)\
                .offset(start)\
                .dicts()
        data = list(data)

        for item in data:
            postprocessEpisode(item)

        count_conditions = [(TableEpisodes.missing_subtitles != '[]')]
        count_conditions += get_exclusion_clause('series')
        count = TableEpisodes.select(TableShows.tags,
                                     TableShows.seriesType,
                                     TableEpisodes.monitored)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(reduce(operator.and_, count_conditions))\
            .count()

        return jsonify(data=data, total=count)
|
16
bazarr/api/files/__init__.py
Normal file
16
bazarr/api/files/__init__.py
Normal file
|
@ -0,0 +1,16 @@
|
|||
# coding=utf-8

from flask import Blueprint
from flask_restful import Api

from .files import BrowseBazarrFS
from .files_sonarr import BrowseSonarrFS
from .files_radarr import BrowseRadarrFS

# Blueprint for filesystem browsing (Bazarr, Sonarr and Radarr views).
api_bp_files = Blueprint('api_files', __name__)
api = Api(api_bp_files)

api.add_resource(BrowseBazarrFS, '/files')
api.add_resource(BrowseSonarrFS, '/files/sonarr')
api.add_resource(BrowseRadarrFS, '/files/radarr')
|
24
bazarr/api/files/files.py
Normal file
24
bazarr/api/files/files.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from filesystem import browse_bazarr_filesystem
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class BrowseBazarrFS(Resource):
    """Browse directories on the filesystem as seen by Bazarr itself."""

    @authenticate
    def get(self):
        """Return child directories of ``path``; empty list on any error."""
        path = request.args.get('path') or ''
        try:
            result = browse_bazarr_filesystem(path)
            if result is None:
                raise ValueError
        except Exception:
            # Best effort: an unreadable or invalid path yields an empty listing.
            return jsonify([])
        listing = [{'name': entry['name'], 'children': True, 'path': entry['path']}
                   for entry in result['directories']]
        return jsonify(listing)
|
24
bazarr/api/files/files_radarr.py
Normal file
24
bazarr/api/files/files_radarr.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from filesystem import browse_radarr_filesystem
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class BrowseRadarrFS(Resource):
    """Browse directories on the filesystem as seen by Radarr."""

    @authenticate
    def get(self):
        """Return child directories of ``path``; empty list on any error."""
        path = request.args.get('path') or ''
        try:
            result = browse_radarr_filesystem(path)
            if result is None:
                raise ValueError
        except Exception:
            # Best effort: an unreadable or invalid path yields an empty listing.
            return jsonify([])
        listing = [{'name': entry['name'], 'children': True, 'path': entry['path']}
                   for entry in result['directories']]
        return jsonify(listing)
|
24
bazarr/api/files/files_sonarr.py
Normal file
24
bazarr/api/files/files_sonarr.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from filesystem import browse_sonarr_filesystem
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class BrowseSonarrFS(Resource):
    """Browse directories on the filesystem as seen by Sonarr."""

    @authenticate
    def get(self):
        """Return child directories of ``path``; empty list on any error."""
        path = request.args.get('path') or ''
        try:
            result = browse_sonarr_filesystem(path)
            if result is None:
                raise ValueError
        except Exception:
            # Best effort: an unreadable or invalid path yields an empty listing.
            return jsonify([])
        listing = [{'name': entry['name'], 'children': True, 'path': entry['path']}
                   for entry in result['directories']]
        return jsonify(listing)
|
12
bazarr/api/history/__init__.py
Normal file
12
bazarr/api/history/__init__.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
# coding=utf-8

from flask import Blueprint
from flask_restful import Api

from .stats import HistoryStats

# Blueprint exposing history statistics for the charts view.
api_bp_history = Blueprint('api_history', __name__)
api = Api(api_bp_history)

api.add_resource(HistoryStats, '/history/stats')
|
85
bazarr/api/history/stats.py
Normal file
85
bazarr/api/history/stats.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
# coding=utf-8
|
||||
|
||||
import time
|
||||
import datetime
|
||||
import operator
|
||||
|
||||
from dateutil import rrule
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
from functools import reduce
|
||||
from peewee import fn
|
||||
|
||||
from database import TableHistory, TableHistoryMovie
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class HistoryStats(Resource):
    """GET: per-day counts of downloaded subtitles for series and movies."""

    @authenticate
    def get(self):
        """Return two date-sorted series of ``{'date', 'count'}`` points."""
        timeframe = request.args.get('timeframe') or 'month'
        action = request.args.get('action') or 'All'
        provider = request.args.get('provider') or 'All'
        language = request.args.get('language') or 'All'

        # timeframe must be in ['week', 'month', 'trimester', 'year']; any
        # other value now falls back to 'month' instead of raising
        # UnboundLocalError because ``delay`` was never assigned.
        if timeframe == 'year':
            delay = 364 * 24 * 60 * 60
        elif timeframe == 'trimester':
            delay = 90 * 24 * 60 * 60
        elif timeframe == 'week':
            delay = 6 * 24 * 60 * 60
        else:
            delay = 30 * 24 * 60 * 60

        now = time.time()
        past = now - delay

        history_where_clauses = [(TableHistory.timestamp.between(past, now))]
        history_where_clauses_movie = [(TableHistoryMovie.timestamp.between(past, now))]

        if action != 'All':
            history_where_clauses.append((TableHistory.action == action))
            history_where_clauses_movie.append((TableHistoryMovie.action == action))
        else:
            history_where_clauses.append((TableHistory.action.in_([1, 2, 3])))
            history_where_clauses_movie.append((TableHistoryMovie.action.in_([1, 2, 3])))

        if provider != 'All':
            history_where_clauses.append((TableHistory.provider == provider))
            history_where_clauses_movie.append((TableHistoryMovie.provider == provider))

        if language != 'All':
            history_where_clauses.append((TableHistory.language == language))
            history_where_clauses_movie.append((TableHistoryMovie.language == language))

        history_where_clause = reduce(operator.and_, history_where_clauses)
        history_where_clause_movie = reduce(operator.and_, history_where_clauses_movie)

        data_series = TableHistory.select(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch').alias('date'),
                                          fn.COUNT(TableHistory.id).alias('count'))\
            .where(history_where_clause) \
            .group_by(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch'))\
            .dicts()
        data_series = list(data_series)

        data_movies = TableHistoryMovie.select(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch').alias('date'),
                                               fn.COUNT(TableHistoryMovie.id).alias('count')) \
            .where(history_where_clause_movie) \
            .group_by(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch')) \
            .dicts()
        data_movies = list(data_movies)

        # Fill in missing days with a zero count so charts show one point per day.
        for dt in rrule.rrule(rrule.DAILY,
                              dtstart=datetime.datetime.now() - datetime.timedelta(seconds=delay),
                              until=datetime.datetime.now()):
            day = dt.strftime('%Y-%m-%d')  # hoisted: was formatted four times per day
            if not any(d['date'] == day for d in data_series):
                data_series.append({'date': day, 'count': 0})
            if not any(d['date'] == day for d in data_movies):
                data_movies.append({'date': day, 'count': 0})

        sorted_data_series = sorted(data_series, key=lambda i: i['date'])
        sorted_data_movies = sorted(data_movies, key=lambda i: i['date'])

        return jsonify(series=sorted_data_series, movies=sorted_data_movies)
|
20
bazarr/api/movies/__init__.py
Normal file
20
bazarr/api/movies/__init__.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
# coding=utf-8

from flask import Blueprint
from flask_restful import Api

from .movies import Movies
from .movies_subtitles import MoviesSubtitles
from .history import MoviesHistory
from .wanted import MoviesWanted
from .blacklist import MoviesBlacklist


# Blueprint grouping every movie-related REST endpoint of the API.
api_bp_movies = Blueprint('api_movies', __name__)
api = Api(api_bp_movies)

# Route table: (resource class, URL rule), registered in declaration order.
_routes = (
    (Movies, '/movies'),
    (MoviesWanted, '/movies/wanted'),
    (MoviesSubtitles, '/movies/subtitles'),
    (MoviesHistory, '/movies/history'),
    (MoviesBlacklist, '/movies/blacklist'),
)

for _resource, _rule in _routes:
    api.add_resource(_resource, _rule)
|
86
bazarr/api/movies/blacklist.py
Normal file
86
bazarr/api/movies/blacklist.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import pretty
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from database import TableMovies, TableBlacklistMovie
|
||||
from ..utils import authenticate, postprocessMovie
|
||||
from utils import blacklist_log_movie, delete_subtitles, blacklist_delete_all_movie, blacklist_delete_movie
|
||||
from helper import path_mappings
|
||||
from get_subtitle import movies_download_subtitles
|
||||
from event_handler import event_stream
|
||||
|
||||
|
||||
# GET: get blacklist
# POST: add blacklist
# DELETE: remove blacklist
class MoviesBlacklist(Resource):
    """REST resource managing the blacklist of movie subtitles."""

    @authenticate
    def get(self):
        """Return the paginated blacklist, newest entries first.

        Query args:
            start: offset of the first row (default 0).
            length: maximum number of rows (-1 means no limit).
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1

        # Join on the movies table so each blacklist row carries its movie
        # title and radarrId alongside the blacklist columns.
        data = TableBlacklistMovie.select(TableMovies.title,
                                          TableMovies.radarrId,
                                          TableBlacklistMovie.provider,
                                          TableBlacklistMovie.subs_id,
                                          TableBlacklistMovie.language,
                                          TableBlacklistMovie.timestamp)\
            .join(TableMovies, on=(TableBlacklistMovie.radarr_id == TableMovies.radarrId))\
            .order_by(TableBlacklistMovie.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        data = list(data)

        for item in data:
            postprocessMovie(item)

            # Make timestamp pretty: keep an exact parsed form and replace the
            # raw value with a human-friendly relative date.
            item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
            item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))})

        return jsonify(data=data)

    @authenticate
    def post(self):
        """Blacklist one subtitle: log it, delete it from disk, re-search."""
        radarr_id = int(request.args.get('radarrid'))
        provider = request.form.get('provider')
        subs_id = request.form.get('subs_id')
        language = request.form.get('language')
        # TODO: forced/hi are currently hard-coded; not yet taken from the form.
        forced = False
        hi = False

        data = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == radarr_id).dicts().get()

        media_path = data['path']
        subtitles_path = request.form.get('subtitles_path')

        # Record the blacklist entry first so the search below skips this release.
        blacklist_log_movie(radarr_id=radarr_id,
                            provider=provider,
                            subs_id=subs_id,
                            language=language)
        delete_subtitles(media_type='movie',
                         language=language,
                         forced=forced,
                         hi=hi,
                         media_path=path_mappings.path_replace_movie(media_path),
                         subtitles_path=subtitles_path,
                         radarr_id=radarr_id)
        # Immediately look for a replacement subtitle for this movie.
        movies_download_subtitles(radarr_id)
        event_stream(type='movie-history')
        return '', 200

    @authenticate
    def delete(self):
        """Remove one blacklist entry, or every entry when ?all=true."""
        if request.args.get("all") == "true":
            blacklist_delete_all_movie()
        else:
            provider = request.form.get('provider')
            subs_id = request.form.get('subs_id')
            blacklist_delete_movie(provider=provider, subs_id=subs_id)
        return '', 200
|
129
bazarr/api/movies/history.py
Normal file
129
bazarr/api/movies/history.py
Normal file
|
@ -0,0 +1,129 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import operator
|
||||
import pretty
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
from functools import reduce
|
||||
from peewee import fn
|
||||
from datetime import timedelta
|
||||
|
||||
from database import get_exclusion_clause, TableMovies, TableHistoryMovie, TableBlacklistMovie
|
||||
from ..utils import authenticate, postprocessMovie
|
||||
from config import settings
|
||||
from helper import path_mappings
|
||||
|
||||
|
||||
class MoviesHistory(Resource):
    """REST resource returning the subtitles history of movies."""

    @authenticate
    def get(self):
        """Return paginated movie history rows flagged upgradable/blacklisted.

        Query args:
            start: offset of the first row (default 0).
            length: maximum number of rows (-1 means no limit).
            radarrid: optional movie id to filter on.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        radarrid = request.args.get('radarrid')

        upgradable_movies = []
        upgradable_movies_not_perfect = []
        if settings.general.getboolean('upgrade_subs'):
            days_to_upgrade_subs = settings.general.days_to_upgrade_subs
            # Oldest timestamp (epoch seconds) still eligible for an upgrade.
            minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
                                 datetime.datetime(1970, 1, 1)).total_seconds()

            if settings.general.getboolean('upgrade_manual'):
                query_actions = [1, 2, 3, 6]
            else:
                query_actions = [1, 3]

            # NOTE: use the peewee is_null() predicate here. A Python
            # `Field is not None` test always evaluates to True (it compares
            # the Field object's identity) and would never filter the query.
            upgradable_movies_conditions = [(TableHistoryMovie.action.in_(query_actions)),
                                            (TableHistoryMovie.timestamp > minimum_timestamp),
                                            (TableHistoryMovie.score.is_null(False))]
            upgradable_movies_conditions += get_exclusion_clause('movie')
            # One row per video path, keeping the most recent history event.
            upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
                                                         fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
                                                         TableHistoryMovie.score,
                                                         TableMovies.tags,
                                                         TableMovies.monitored)\
                .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
                .where(reduce(operator.and_, upgradable_movies_conditions))\
                .group_by(TableHistoryMovie.video_path)\
                .dicts()
            upgradable_movies = list(upgradable_movies)

            for upgradable_movie in upgradable_movies:
                if upgradable_movie['timestamp'] > minimum_timestamp:
                    try:
                        int(upgradable_movie['score'])
                    except ValueError:
                        pass
                    else:
                        # 120 is the maximum (perfect) movie score; only
                        # imperfect downloads are upgrade candidates.
                        if int(upgradable_movie['score']) < 120:
                            upgradable_movies_not_perfect.append(upgradable_movie)

        # Exclude movies without a title (peewee NULL check, see NOTE above).
        query_conditions = [(TableMovies.title.is_null(False))]
        if radarrid:
            query_conditions.append((TableMovies.radarrId == radarrid))
        query_condition = reduce(operator.and_, query_conditions)

        movie_history = TableHistoryMovie.select(TableHistoryMovie.id,
                                                 TableHistoryMovie.action,
                                                 TableMovies.title,
                                                 TableHistoryMovie.timestamp,
                                                 TableHistoryMovie.description,
                                                 TableHistoryMovie.radarrId,
                                                 TableMovies.monitored,
                                                 TableHistoryMovie.video_path.alias('path'),
                                                 TableHistoryMovie.language,
                                                 TableMovies.tags,
                                                 TableHistoryMovie.score,
                                                 TableHistoryMovie.subs_id,
                                                 TableHistoryMovie.provider,
                                                 TableHistoryMovie.subtitles_path)\
            .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
            .where(query_condition)\
            .order_by(TableHistoryMovie.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        movie_history = list(movie_history)

        blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts()
        blacklist_db = list(blacklist_db)

        for item in movie_history:
            # Mark movies as upgradable or not.
            # NOTE(review): membership relies on exact dict equality against the
            # upgradable rows, including the str()/float() coercions below —
            # verify the coerced types match what peewee returns.
            item.update({"upgradable": False})
            if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
                    "tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect:
                if os.path.isfile(path_mappings.path_replace_movie(item['subtitles_path'])):
                    item.update({"upgradable": True})

            del item['path']

            postprocessMovie(item)

            # Convert the raw score to a percentage of the 120-point maximum.
            if item['score']:
                item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%"

            # Make timestamp pretty
            if item['timestamp']:
                item["raw_timestamp"] = int(item['timestamp'])
                item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
                item['timestamp'] = pretty.date(item["raw_timestamp"])

            # Check if subtitles is blacklisted (actions 0/4/5 are not
            # download events, so they can never be blacklisted).
            item.update({"blacklisted": False})
            if item['action'] not in [0, 4, 5]:
                for blacklisted_item in blacklist_db:
                    if blacklisted_item['provider'] == item['provider'] and blacklisted_item['subs_id'] == item[
                            'subs_id']:
                        item.update({"blacklisted": True})
                        break

        count = TableHistoryMovie.select()\
            .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
            .where(TableMovies.title.is_null(False))\
            .count()

        return jsonify(data=movie_history, total=count)
|
80
bazarr/api/movies/movies.py
Normal file
80
bazarr/api/movies/movies.py
Normal file
|
@ -0,0 +1,80 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from database import TableMovies
|
||||
from ..utils import authenticate, postprocessMovie, None_Keys
|
||||
from list_subtitles import list_missing_subtitles_movies, movies_scan_subtitles
|
||||
from event_handler import event_stream
|
||||
from get_subtitle import movies_download_subtitles, wanted_search_missing_subtitles_movies
|
||||
|
||||
|
||||
class Movies(Resource):
    """REST resource listing and editing the Radarr movies known to Bazarr."""

    @authenticate
    def get(self):
        """Return movies, optionally restricted to explicit ``radarrid[]`` ids."""
        offset = request.args.get('start') or 0
        limit = request.args.get('length') or -1
        wanted_ids = request.args.getlist('radarrid[]')

        total = TableMovies.select().count()

        if wanted_ids:
            query = TableMovies.select()\
                .where(TableMovies.radarrId.in_(wanted_ids))\
                .order_by(TableMovies.sortTitle)\
                .dicts()
        else:
            query = TableMovies.select().order_by(TableMovies.sortTitle).limit(limit).offset(offset).dicts()

        rows = list(query)
        for row in rows:
            postprocessMovie(row)

        return jsonify(data=rows, total=total)

    @authenticate
    def post(self):
        """Assign a language profile to each submitted movie id."""
        movie_ids = request.form.getlist('radarrid')
        profile_ids = request.form.getlist('profileid')

        for position, movie_id in enumerate(movie_ids):
            raw_profile = profile_ids[position]

            # Map the sentinel "no profile" form values to NULL; anything
            # else must parse as an integer profile id.
            if raw_profile in None_Keys:
                profile_value = None
            else:
                try:
                    profile_value = int(raw_profile)
                except Exception:
                    return '', 400

            TableMovies.update({
                TableMovies.profileId: profile_value
            })\
                .where(TableMovies.radarrId == movie_id)\
                .execute()

            # Recompute missing subtitles under the new profile.
            list_missing_subtitles_movies(no=movie_id, send_event=False)

            event_stream(type='movie', payload=movie_id)
            event_stream(type='movie-wanted', payload=movie_id)
            event_stream(type='badges')

        return '', 204

    @authenticate
    def patch(self):
        """Run a maintenance action: scan-disk, search-missing or search-wanted."""
        movie_id = request.form.get('radarrid')
        action = request.form.get('action')

        if action == "scan-disk":
            movies_scan_subtitles(movie_id)
        elif action == "search-missing":
            movies_download_subtitles(movie_id)
        elif action == "search-wanted":
            wanted_search_missing_subtitles_movies()
        else:
            return '', 400

        return '', 204
|
175
bazarr/api/movies/movies_subtitles.py
Normal file
175
bazarr/api/movies/movies_subtitles.py
Normal file
|
@ -0,0 +1,175 @@
|
|||
# coding=utf-8
|
||||
|
||||
import os
|
||||
|
||||
from flask import request
|
||||
from flask_restful import Resource
|
||||
from subliminal_patch.core import SUBTITLE_EXTENSIONS
|
||||
|
||||
from database import TableMovies, get_audio_profile_languages
|
||||
from ..utils import authenticate
|
||||
from helper import path_mappings
|
||||
from get_providers import get_providers, get_providers_auth
|
||||
from get_subtitle import download_subtitle, manual_upload_subtitle
|
||||
from utils import history_log_movie, delete_subtitles
|
||||
from notifier import send_notifications_movie
|
||||
from list_subtitles import store_subtitles_movie
|
||||
from event_handler import event_stream
|
||||
from config import settings
|
||||
|
||||
|
||||
# PATCH: Download Subtitles
# POST: Upload Subtitles
# DELETE: Delete Subtitles
class MoviesSubtitles(Resource):
    """REST resource to download, upload and delete movie subtitles."""

    @authenticate
    def patch(self):
        """Download a subtitle for a movie through the enabled providers."""
        # Download
        radarrId = request.args.get('radarrid')

        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.audio_language)\
            .where(TableMovies.radarrId == radarrId)\
            .dicts()\
            .get()

        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        # Downstream search code expects the literal string 'None', not NULL.
        if sceneName is None: sceneName = 'None'

        title = movieInfo['title']
        audio_language = movieInfo['audio_language']

        language = request.form.get('language')
        hi = request.form.get('hi').capitalize()
        forced = request.form.get('forced').capitalize()

        providers_list = get_providers()
        providers_auth = get_providers_auth()

        # Prefer the first audio language of the movie's profile when set.
        audio_language_list = get_audio_profile_languages(movie_id=radarrId)
        if len(audio_language_list) > 0:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = None

        try:
            result = download_subtitle(moviePath, language, audio_language, hi, forced, providers_list,
                                       providers_auth, sceneName, title, 'movie')
            if result is not None:
                # result tuple indices, per the unpacking below:
                # 0=message, 1=path, 2=language, 3=provider, 4=score,
                # 5=forced, 6=subs_id, 7=subtitles path, 8=hi flag.
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                # Action 1 = downloaded.
                history_log_movie(1, radarrId, message, path, language_code, provider, score, subs_id, subs_path)
                send_notifications_movie(radarrId, message)
                store_subtitles_movie(path, moviePath)
            else:
                event_stream(type='movie', payload=radarrId)
        except OSError:
            # Best effort: disk errors during download are silently ignored.
            pass

        return '', 204

    @authenticate
    def post(self):
        """Upload a subtitle file for a movie."""
        # Upload
        # TODO: Support Multiply Upload
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.audio_language) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()

        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        if sceneName is None: sceneName = 'None'

        title = movieInfo['title']
        audioLanguage = movieInfo['audio_language']

        language = request.form.get('language')
        forced = True if request.form.get('forced') == 'true' else False
        hi = True if request.form.get('hi') == 'true' else False
        subFile = request.files.get('file')

        # Reject files whose extension is not a known subtitle format.
        _, ext = os.path.splitext(subFile.filename)

        if ext not in SUBTITLE_EXTENSIONS:
            raise ValueError('A subtitle of an invalid format was uploaded.')

        try:
            result = manual_upload_subtitle(path=moviePath,
                                            language=language,
                                            forced=forced,
                                            hi=hi,
                                            title=title,
                                            scene_name=sceneName,
                                            media_type='movie',
                                            subtitle=subFile,
                                            audio_language=audioLanguage)

            if result is not None:
                message = result[0]
                path = result[1]
                subs_path = result[2]
                if hi:
                    language_code = language + ":hi"
                elif forced:
                    language_code = language + ":forced"
                else:
                    language_code = language
                provider = "manual"
                # Manual uploads are recorded with the maximum score.
                score = 120
                # Action 4 = manually uploaded.
                history_log_movie(4, radarrId, message, path, language_code, provider, score, subtitles_path=subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications_movie(radarrId, message)
                store_subtitles_movie(path, moviePath)
        except OSError:
            pass

        return '', 204

    @authenticate
    def delete(self):
        """Delete one subtitle file belonging to a movie."""
        # Delete
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.path) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()

        moviePath = path_mappings.path_replace_movie(movieInfo['path'])

        language = request.form.get('language')
        forced = request.form.get('forced')
        hi = request.form.get('hi')
        subtitlesPath = request.form.get('path')

        # Translate the client-visible path back to Bazarr's internal path.
        subtitlesPath = path_mappings.path_replace_reverse_movie(subtitlesPath)

        result = delete_subtitles(media_type='movie',
                                  language=language,
                                  forced=forced,
                                  hi=hi,
                                  media_path=moviePath,
                                  subtitles_path=subtitlesPath,
                                  radarr_id=radarrId)
        if result:
            return '', 202
        else:
            return '', 204
|
62
bazarr/api/movies/wanted.py
Normal file
62
bazarr/api/movies/wanted.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
# coding=utf-8
|
||||
|
||||
import operator
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
from functools import reduce
|
||||
|
||||
from database import get_exclusion_clause, TableMovies
|
||||
from ..utils import authenticate, postprocessMovie
|
||||
|
||||
|
||||
# GET: Get Wanted Movies
class MoviesWanted(Resource):
    """REST resource listing movies that still miss subtitles."""

    @authenticate
    def get(self):
        """Return wanted movies; paginated unless explicit ids are supplied."""
        requested_ids = request.args.getlist("radarrid[]")

        wanted_conditions = [(TableMovies.missing_subtitles != '[]')]
        if requested_ids:
            wanted_conditions.append((TableMovies.radarrId.in_(requested_ids)))
        wanted_conditions += get_exclusion_clause('movie')
        wanted_condition = reduce(operator.and_, wanted_conditions)

        # Columns shared by both query shapes below.
        wanted_columns = (TableMovies.title,
                          TableMovies.missing_subtitles,
                          TableMovies.radarrId,
                          TableMovies.sceneName,
                          TableMovies.failedAttempts,
                          TableMovies.tags,
                          TableMovies.monitored)

        if requested_ids:
            # Explicit id list: no pagination, no ordering.
            result = TableMovies.select(*wanted_columns)\
                .where(wanted_condition)\
                .dicts()
        else:
            offset = request.args.get('start') or 0
            limit = request.args.get('length') or -1
            result = TableMovies.select(*wanted_columns)\
                .where(wanted_condition)\
                .order_by(TableMovies.rowid.desc())\
                .limit(limit)\
                .offset(offset)\
                .dicts()
        result = list(result)

        for item in result:
            postprocessMovie(item)

        # Total ignores the id filter: count every non-excluded wanted movie.
        count_conditions = [(TableMovies.missing_subtitles != '[]')]
        count_conditions += get_exclusion_clause('movie')
        count = TableMovies.select(TableMovies.monitored,
                                   TableMovies.tags)\
            .where(reduce(operator.and_, count_conditions))\
            .count()

        return jsonify(data=result, total=count)
|
16
bazarr/api/providers/__init__.py
Normal file
16
bazarr/api/providers/__init__.py
Normal file
|
@ -0,0 +1,16 @@
|
|||
# coding=utf-8

from flask import Blueprint
from flask_restful import Api

from .providers import Providers
from .providers_episodes import ProviderEpisodes
from .providers_movies import ProviderMovies


# Blueprint exposing provider status and the manual search/download endpoints.
api_bp_providers = Blueprint('api_providers', __name__)
api = Api(api_bp_providers)

api.add_resource(Providers, '/providers')
api.add_resource(ProviderMovies, '/providers/movies')
api.add_resource(ProviderEpisodes, '/providers/episodes')
|
52
bazarr/api/providers/providers.py
Normal file
52
bazarr/api/providers/providers.py
Normal file
|
@ -0,0 +1,52 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
from operator import itemgetter
|
||||
|
||||
from database import TableHistory, TableHistoryMovie
|
||||
from get_providers import list_throttled_providers, reset_throttled_providers
|
||||
from ..utils import authenticate, False_Keys
|
||||
|
||||
|
||||
class Providers(Resource):
    """REST resource exposing subtitles provider status."""

    @authenticate
    def get(self):
        """Return provider status.

        With ?history=<truthy>, return the distinct providers found in the
        series/movies history tables; otherwise return the throttling status
        of the currently enabled providers.
        """
        history = request.args.get('history')
        if history and history not in False_Keys:
            # Combine the predicates with the peewee `&` operator: chaining
            # two Expression objects with a Python `and` discards the first
            # one (an Expression is always truthy), which let NULL providers
            # slip through in the original query.
            providers = list(TableHistory.select(TableHistory.provider)
                             .where(TableHistory.provider.is_null(False) &
                                    (TableHistory.provider != "manual"))
                             .dicts())
            providers += list(TableHistoryMovie.select(TableHistoryMovie.provider)
                              .where(TableHistoryMovie.provider.is_null(False) &
                                     (TableHistoryMovie.provider != "manual"))
                              .dicts())
            # Deduplicate across both tables.
            providers_list = list(set([x['provider'] for x in providers]))
            providers_dicts = []
            for provider in providers_list:
                providers_dicts.append({
                    'name': provider,
                    'status': 'History',
                    'retry': '-'
                })
            return jsonify(data=sorted(providers_dicts, key=itemgetter('name')))

        throttled_providers = list_throttled_providers()

        providers = list()
        for provider in throttled_providers:
            # provider tuple layout: (name, status or None, retry time or "now").
            providers.append({
                "name": provider[0],
                "status": provider[1] if provider[1] is not None else "Good",
                "retry": provider[2] if provider[2] != "now" else "-"
            })
        return jsonify(data=providers)

    @authenticate
    def post(self):
        """Reset all throttled providers when action == 'reset'."""
        action = request.form.get('action')

        if action == 'reset':
            reset_throttled_providers()
            return '', 204

        return '', 400
|
103
bazarr/api/providers/providers_episodes.py
Normal file
103
bazarr/api/providers/providers_episodes.py
Normal file
|
@ -0,0 +1,103 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from database import TableEpisodes, TableShows, get_audio_profile_languages
|
||||
from helper import path_mappings
|
||||
from get_providers import get_providers, get_providers_auth
|
||||
from get_subtitle import manual_search, manual_download_subtitle
|
||||
from utils import history_log
|
||||
from config import settings
|
||||
from notifier import send_notifications
|
||||
from list_subtitles import store_subtitles
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class ProviderEpisodes(Resource):
    """REST resource for manual provider search/download of episode subtitles."""

    @authenticate
    def get(self):
        """Manually search every provider for subtitles of one episode."""
        # Manual Search
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = TableEpisodes.select(TableEpisodes.title,
                                           TableEpisodes.path,
                                           TableEpisodes.scene_name,
                                           TableShows.profileId) \
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
            .dicts() \
            .get()

        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        sceneName = episodeInfo['scene_name']
        profileId = episodeInfo['profileId']
        # Downstream search code expects the literal string "None", not NULL.
        if sceneName is None: sceneName = "None"

        providers_list = get_providers()
        providers_auth = get_providers_auth()

        data = manual_search(episodePath, profileId, providers_list, providers_auth, sceneName, title,
                             'series')
        if not data:
            data = []
        return jsonify(data=data)

    @authenticate
    def post(self):
        """Download one manually-selected subtitle for an episode."""
        # Manual Download
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = TableEpisodes.select(TableEpisodes.title,
                                           TableEpisodes.path,
                                           TableEpisodes.scene_name) \
            .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
            .dicts() \
            .get()

        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        sceneName = episodeInfo['scene_name']
        if sceneName is None: sceneName = "None"

        language = request.form.get('language')
        hi = request.form.get('hi').capitalize()
        forced = request.form.get('forced').capitalize()
        selected_provider = request.form.get('provider')
        subtitle = request.form.get('subtitle')
        providers_auth = get_providers_auth()

        # Prefer the first audio language of the episode's profile when set.
        audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
        if len(audio_language_list) > 0:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = 'None'

        try:
            result = manual_download_subtitle(episodePath, language, audio_language, hi, forced, subtitle,
                                              selected_provider, providers_auth, sceneName, title, 'series')
            if result is not None:
                # result tuple indices, per the unpacking below:
                # 0=message, 1=path, 2=language, 3=provider, 4=score,
                # 5=forced, 6=subs_id, 7=subtitles path, 8=hi flag.
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                # Action 2 = manually downloaded.
                history_log(2, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id,
                            subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)
                # NOTE(review): placement of this return relative to the
                # `if result is not None` block could not be confirmed from
                # the mangled source indentation — verify against upstream.
                return result, 201
        except OSError:
            # Best effort: disk errors during download are silently ignored.
            pass

        return '', 204
|
102
bazarr/api/providers/providers_movies.py
Normal file
102
bazarr/api/providers/providers_movies.py
Normal file
|
@ -0,0 +1,102 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from database import TableMovies, get_audio_profile_languages
|
||||
from helper import path_mappings
|
||||
from get_providers import get_providers, get_providers_auth
|
||||
from get_subtitle import manual_search, manual_download_subtitle
|
||||
from utils import history_log_movie
|
||||
from config import settings
|
||||
from notifier import send_notifications_movie
|
||||
from list_subtitles import store_subtitles_movie
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class ProviderMovies(Resource):
    """REST resource for manual provider search/download of movie subtitles."""

    @authenticate
    def get(self):
        """Manually search every provider for subtitles of one movie."""
        # Manual Search
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.profileId) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()

        title = movieInfo['title']
        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        profileId = movieInfo['profileId']
        # Downstream search code expects the literal string "None", not NULL.
        if sceneName is None: sceneName = "None"

        providers_list = get_providers()
        providers_auth = get_providers_auth()

        data = manual_search(moviePath, profileId, providers_list, providers_auth, sceneName, title,
                             'movie')
        if not data:
            data = []
        return jsonify(data=data)

    @authenticate
    def post(self):
        """Download one manually-selected subtitle for a movie."""
        # Manual Download
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.audio_language) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()

        title = movieInfo['title']
        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        if sceneName is None: sceneName = "None"
        audio_language = movieInfo['audio_language']

        language = request.form.get('language')
        hi = request.form.get('hi').capitalize()
        forced = request.form.get('forced').capitalize()
        selected_provider = request.form.get('provider')
        subtitle = request.form.get('subtitle')

        providers_auth = get_providers_auth()

        # Prefer the first audio language of the movie's profile when set.
        audio_language_list = get_audio_profile_languages(movie_id=radarrId)
        if len(audio_language_list) > 0:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = 'None'

        try:
            result = manual_download_subtitle(moviePath, language, audio_language, hi, forced, subtitle,
                                              selected_provider, providers_auth, sceneName, title, 'movie')
            if result is not None:
                # result tuple indices, per the unpacking below:
                # 0=message, 1=path, 2=language, 3=provider, 4=score,
                # 5=forced, 6=subs_id, 7=subtitles path, 8=hi flag.
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                # Action 2 = manually downloaded.
                history_log_movie(2, radarrId, message, path, language_code, provider, score, subs_id, subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications_movie(radarrId, message)
                store_subtitles_movie(path, moviePath)
        except OSError:
            # Best effort: disk errors during download are silently ignored.
            pass

        return '', 204
|
12
bazarr/api/series/__init__.py
Normal file
12
bazarr/api/series/__init__.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
# coding=utf-8

from flask import Blueprint
from flask_restful import Api

from .series import Series


# Blueprint exposing the single series listing/editing endpoint.
api_bp_series = Blueprint('api_series', __name__)
api = Api(api_bp_series)

api.add_resource(Series, '/series')
|
114
bazarr/api/series/series.py
Normal file
114
bazarr/api/series/series.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
import operator
|
||||
from functools import reduce
|
||||
|
||||
from database import get_exclusion_clause, TableEpisodes, TableShows
|
||||
from list_subtitles import list_missing_subtitles, series_scan_subtitles
|
||||
from get_subtitle import series_download_subtitles, wanted_search_missing_subtitles_series
|
||||
from ..utils import authenticate, postprocessSeries, None_Keys
|
||||
from event_handler import event_stream
|
||||
|
||||
|
||||
class Series(Resource):
    """REST resource listing and editing the Sonarr series known to Bazarr."""

    @authenticate
    def get(self):
        """Return series, optionally filtered by ``seriesid[]``, with episode counts.

        Query args:
            start: offset of the first row (default 0).
            length: maximum number of rows (-1 means no limit).
            seriesid[]: optional explicit list of series ids (disables pagination).
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        seriesId = request.args.getlist('seriesid[]')

        count = TableShows.select().count()

        if len(seriesId) != 0:
            result = TableShows.select() \
                .where(TableShows.sonarrSeriesId.in_(seriesId)) \
                .order_by(TableShows.sortTitle).dicts()
        else:
            result = TableShows.select().order_by(TableShows.sortTitle).limit(length).offset(start).dicts()

        result = list(result)

        # NOTE: this loop issues two extra COUNT queries per series (N+1).
        for item in result:
            postprocessSeries(item)

            # Add missing subtitles episode count
            episodes_missing_conditions = [(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']),
                                           (TableEpisodes.missing_subtitles != '[]')]
            episodes_missing_conditions += get_exclusion_clause('series')

            episodeMissingCount = TableEpisodes.select(TableShows.tags,
                                                       TableEpisodes.monitored,
                                                       TableShows.seriesType) \
                .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
                .where(reduce(operator.and_, episodes_missing_conditions)) \
                .count()
            item.update({"episodeMissingCount": episodeMissingCount})

            # Add episode count
            episodeFileCount = TableEpisodes.select(TableShows.tags,
                                                    TableEpisodes.monitored,
                                                    TableShows.seriesType) \
                .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
                .where(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']) \
                .count()
            item.update({"episodeFileCount": episodeFileCount})

        return jsonify(data=result, total=count)

    @authenticate
    def post(self):
        """Assign a language profile to each submitted series id."""
        seriesIdList = request.form.getlist('seriesid')
        profileIdList = request.form.getlist('profileid')

        for idx in range(len(seriesIdList)):
            seriesId = seriesIdList[idx]
            profileId = profileIdList[idx]

            # Map the sentinel "no profile" form values to NULL; anything
            # else must parse as an integer profile id.
            if profileId in None_Keys:
                profileId = None
            else:
                try:
                    profileId = int(profileId)
                except Exception:
                    return '', 400

            TableShows.update({
                TableShows.profileId: profileId
            }) \
                .where(TableShows.sonarrSeriesId == seriesId) \
                .execute()

            # Recompute missing subtitles under the new profile.
            list_missing_subtitles(no=seriesId, send_event=False)

            event_stream(type='series', payload=seriesId)

            # Notify the frontend for every episode of the series.
            episode_id_list = TableEpisodes \
                .select(TableEpisodes.sonarrEpisodeId) \
                .where(TableEpisodes.sonarrSeriesId == seriesId) \
                .dicts()

            for item in episode_id_list:
                event_stream(type='episode-wanted', payload=item['sonarrEpisodeId'])

        event_stream(type='badges')

        return '', 204

    @authenticate
    def patch(self):
        """Run a maintenance action: scan-disk, search-missing or search-wanted."""
        seriesid = request.form.get('seriesid')
        action = request.form.get('action')
        if action == "scan-disk":
            series_scan_subtitles(seriesid)
            return '', 204
        elif action == "search-missing":
            series_download_subtitles(seriesid)
            return '', 204
        elif action == "search-wanted":
            wanted_search_missing_subtitles_series()
            return '', 204

        return '', 400
|
14
bazarr/api/subtitles/__init__.py
Normal file
14
bazarr/api/subtitles/__init__.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import Blueprint
|
||||
from flask_restful import Api
|
||||
|
||||
from .subtitles import Subtitles
|
||||
from .subtitles_info import SubtitleNameInfo
|
||||
|
||||
|
||||
# Blueprint wiring for the /subtitles API endpoints.
api_bp_subtitles = Blueprint('api_subtitles', __name__)
api = Api(api_bp_subtitles)

# PATCH /subtitles: sync/translate/apply-mods on an existing subtitles file.
api.add_resource(Subtitles, '/subtitles')
# GET /subtitles/info: guess episode/season/language info from filenames.
api.add_resource(SubtitleNameInfo, '/subtitles/info')
|
72
bazarr/api/subtitles/subtitles.py
Normal file
72
bazarr/api/subtitles/subtitles.py
Normal file
|
@ -0,0 +1,72 @@
|
|||
# coding=utf-8
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from flask import request
|
||||
from flask_restful import Resource
|
||||
|
||||
from database import TableEpisodes, TableMovies
|
||||
from helper import path_mappings
|
||||
from ..utils import authenticate
|
||||
from subsyncer import subsync
|
||||
from utils import translate_subtitles_file, subtitles_apply_mods
|
||||
from get_subtitle import store_subtitles, store_subtitles_movie
|
||||
from config import settings
|
||||
|
||||
|
||||
class Subtitles(Resource):
    # PATCH /subtitles?action=<sync|translate|mod-name>
    # Applies a post-processing action to an existing subtitles file on disk.
    @authenticate
    def patch(self):
        action = request.args.get('action')

        language = request.form.get('language')
        subtitles_path = request.form.get('path')
        media_type = request.form.get('type')  # 'episode' or anything else for movies
        id = request.form.get('id')  # sonarrEpisodeId or radarrId depending on type

        # Map client paths to local paths and fetch the parent media file
        # for the subtitles from the database.
        if media_type == 'episode':
            subtitles_path = path_mappings.path_replace(subtitles_path)
            metadata = TableEpisodes.select(TableEpisodes.path, TableEpisodes.sonarrSeriesId)\
                .where(TableEpisodes.sonarrEpisodeId == id)\
                .dicts()\
                .get()
            video_path = path_mappings.path_replace(metadata['path'])
        else:
            subtitles_path = path_mappings.path_replace_movie(subtitles_path)
            metadata = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == id).dicts().get()
            video_path = path_mappings.path_replace_movie(metadata['path'])

        if action == 'sync':
            # Re-time the subtitles against the media file.
            if media_type == 'episode':
                subsync.sync(video_path=video_path, srt_path=subtitles_path,
                             srt_lang=language, media_type='series', sonarr_series_id=metadata['sonarrSeriesId'],
                             sonarr_episode_id=int(id))
            else:
                subsync.sync(video_path=video_path, srt_path=subtitles_path,
                             srt_lang=language, media_type='movies', radarr_id=id)
        elif action == 'translate':
            dest_language = language
            forced = True if request.form.get('forced') == 'true' else False
            hi = True if request.form.get('hi') == 'true' else False
            result = translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path,
                                              to_lang=dest_language,
                                              forced=forced, hi=hi)
            if result:
                # Refresh the stored subtitles list so the new file shows up.
                if media_type == 'episode':
                    store_subtitles(path_mappings.path_replace_reverse(video_path), video_path)
                else:
                    store_subtitles_movie(path_mappings.path_replace_reverse_movie(video_path), video_path)
                return '', 200
            else:
                return '', 404
        else:
            # Any other action value is treated as a subtitles mod name.
            subtitles_apply_mods(language, subtitles_path, [action])

        # apply chmod if required
        chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
            'win') and settings.general.getboolean('chmod_enabled') else None
        if chmod:
            os.chmod(subtitles_path, chmod)

        return '', 204
|
41
bazarr/api/subtitles/subtitles_info.py
Normal file
41
bazarr/api/subtitles/subtitles_info.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
from subliminal_patch.core import guessit
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class SubtitleNameInfo(Resource):
    """Guess episode/season/subtitle-language info from subtitle filenames."""

    @authenticate
    def get(self):
        filenames = request.args.getlist('filenames[]')
        results = []
        for filename in filenames:
            # Force episode-style parsing so season/episode fields are produced.
            guessed = guessit(filename, options={'type': 'episode'})

            entry = {}
            entry['filename'] = filename
            if 'subtitle_language' in guessed:
                entry['subtitle_language'] = str(guessed['subtitle_language'])

            # Default to 0 when no episode number could be guessed.
            entry['episode'] = 0
            if 'episode' in guessed:
                episode_guess = guessed['episode']
                if isinstance(episode_guess, list):
                    # for multiple episodes file, choose the first episode number
                    if len(episode_guess):
                        # make sure that guessit returned a list of more than 0 items
                        entry['episode'] = int(episode_guess[0])
                elif isinstance(episode_guess, (str, int)):
                    # if single episode (should be int but just in case we cast it to int)
                    entry['episode'] = int(episode_guess)

            # Season defaults to 0 when absent.
            entry['season'] = int(guessed['season']) if 'season' in guessed else 0

            results.append(entry)

        return jsonify(data=results)
|
33
bazarr/api/system/__init__.py
Normal file
33
bazarr/api/system/__init__.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import Blueprint
|
||||
from flask_restful import Api
|
||||
|
||||
from .system import System
|
||||
from .searches import Searches
|
||||
from .account import SystemAccount
|
||||
from .tasks import SystemTasks
|
||||
from .logs import SystemLogs
|
||||
from .status import SystemStatus
|
||||
from .health import SystemHealth
|
||||
from .releases import SystemReleases
|
||||
from .settings import SystemSettings
|
||||
from .languages import Languages
|
||||
from .languages_profiles import LanguagesProfiles
|
||||
from .notifications import Notifications
|
||||
|
||||
# Blueprint wiring for the /system API endpoints.
api_bp_system = Blueprint('api_system', __name__)
api = Api(api_bp_system)

api.add_resource(System, '/system')
api.add_resource(Searches, '/system/searches')
api.add_resource(SystemAccount, '/system/account')
api.add_resource(SystemTasks, '/system/tasks')
api.add_resource(SystemLogs, '/system/logs')
api.add_resource(SystemStatus, '/system/status')
api.add_resource(SystemHealth, '/system/health')
api.add_resource(SystemReleases, '/system/releases')
api.add_resource(SystemSettings, '/system/settings')
api.add_resource(Languages, '/system/languages')
api.add_resource(LanguagesProfiles, '/system/languages/profiles')
api.add_resource(Notifications, '/system/notifications')
|
29
bazarr/api/system/account.py
Normal file
29
bazarr/api/system/account.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
# coding=utf-8
|
||||
|
||||
import gc
|
||||
|
||||
from flask import request, session
|
||||
from flask_restful import Resource
|
||||
|
||||
from config import settings
|
||||
from utils import check_credentials
|
||||
|
||||
|
||||
class SystemAccount(Resource):
    # POST /system/account?action=<login|logout>
    # Deliberately NOT decorated with @authenticate: this is the endpoint
    # that establishes the session when form authentication is enabled.
    def post(self):
        if settings.auth.type != 'form':
            # Account actions only make sense with form-based auth.
            return '', 405

        action = request.args.get('action')
        if action == 'login':
            username = request.form.get('username')
            password = request.form.get('password')
            if check_credentials(username, password):
                session['logged_in'] = True
                return '', 204
        elif action == 'logout':
            session.clear()
            gc.collect()
            return '', 204

        # Unknown action or failed login.
        return '', 401
|
13
bazarr/api/system/health.py
Normal file
13
bazarr/api/system/health.py
Normal file
|
@ -0,0 +1,13 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from utils import get_health_issues
|
||||
|
||||
|
||||
class SystemHealth(Resource):
    # GET /system/health: current health issues (empty list when healthy).
    @authenticate
    def get(self):
        return jsonify(data=get_health_issues())
|
54
bazarr/api/system/languages.py
Normal file
54
bazarr/api/system/languages.py
Normal file
|
@ -0,0 +1,54 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from operator import itemgetter
|
||||
|
||||
from ..utils import authenticate, False_Keys
|
||||
from database import TableHistory, TableHistoryMovie, TableSettingsLanguages
|
||||
from get_languages import alpha2_from_alpha3, language_from_alpha2
|
||||
|
||||
|
||||
class Languages(Resource):
    # GET /system/languages[?history=true]
    # With history truthy: distinct languages seen in the series/movies
    # download history. Otherwise: the configured languages table.
    @authenticate
    def get(self):
        history = request.args.get('history')
        if history and history not in False_Keys:
            # NOTE: '!= None' is intentional — peewee translates it into SQL
            # 'IS NOT NULL'; 'is not None' would not build the right query.
            languages = list(TableHistory.select(TableHistory.language)
                             .where(TableHistory.language != None)
                             .dicts())
            languages += list(TableHistoryMovie.select(TableHistoryMovie.language)
                              .where(TableHistoryMovie.language != None)
                              .dicts())
            # History stores values like 'en:forced'; keep only the bare code.
            languages_list = list(set([l['language'].split(':')[0] for l in languages]))
            languages_dicts = []
            for language in languages_list:
                code2 = None
                if len(language) == 2:
                    code2 = language
                elif len(language) == 3:
                    code2 = alpha2_from_alpha3(language)
                else:
                    # Not a 2- or 3-letter code: ignore it.
                    continue

                if not any(x['code2'] == code2 for x in languages_dicts):
                    try:
                        languages_dicts.append({
                            'code2': code2,
                            'name': language_from_alpha2(code2),
                            # Compatibility: Use false temporarily
                            'enabled': False
                        })
                    except Exception:
                        # Fixed: was a bare 'except:' (which would also swallow
                        # SystemExit/KeyboardInterrupt). Unknown/unmappable
                        # codes are skipped rather than failing the listing.
                        continue
            return jsonify(sorted(languages_dicts, key=itemgetter('name')))

        result = TableSettingsLanguages.select(TableSettingsLanguages.name,
                                               TableSettingsLanguages.code2,
                                               TableSettingsLanguages.enabled)\
            .order_by(TableSettingsLanguages.name).dicts()
        result = list(result)
        for item in result:
            # SQLite stores the flag as 0/1; expose a real boolean.
            item['enabled'] = item['enabled'] == 1
        return jsonify(result)
|
13
bazarr/api/system/languages_profiles.py
Normal file
13
bazarr/api/system/languages_profiles.py
Normal file
|
@ -0,0 +1,13 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from database import get_profiles_list
|
||||
|
||||
|
||||
class LanguagesProfiles(Resource):
    # GET /system/languages/profiles: configured language profiles.
    @authenticate
    def get(self):
        return jsonify(get_profiles_list())
|
41
bazarr/api/system/logs.py
Normal file
41
bazarr/api/system/logs.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
# coding=utf-8
|
||||
|
||||
import io
|
||||
import os
|
||||
|
||||
from flask import jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from logger import empty_log
|
||||
from get_args import args
|
||||
|
||||
|
||||
class SystemLogs(Resource):
    # GET /system/logs: parsed log entries, newest first.
    @authenticate
    def get(self):
        logs = []
        # Log records are pipe-delimited (timestamp|level|?|message|exception)
        # and terminated with '|\n'.
        with io.open(os.path.join(args.config_dir, 'log', 'bazarr.log'), encoding='UTF-8') as file:
            raw_lines = file.read()
            lines = raw_lines.split('|\n')
            for line in lines:
                if line == '':
                    continue
                raw_message = line.split('|')
                raw_message_len = len(raw_message)
                if raw_message_len > 3:
                    log = dict()
                    log["timestamp"] = raw_message[0]
                    log["type"] = raw_message[1].rstrip()
                    log["message"] = raw_message[3]
                    if raw_message_len > 4 and raw_message[4] != '\n':
                        # Strip the surrounding quotes and replace indentation
                        # spaces with em-spaces for readable display in the UI.
                        log['exception'] = raw_message[4].strip('\'').replace(' ', '\u2003\u2003')
                    logs.append(log)

        # Newest entries first.
        logs.reverse()
        return jsonify(data=logs)

    # DELETE /system/logs: truncate the log file.
    @authenticate
    def delete(self):
        empty_log()
        return '', 204
|
27
bazarr/api/system/notifications.py
Normal file
27
bazarr/api/system/notifications.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
# coding=utf-8
|
||||
|
||||
import apprise
|
||||
|
||||
from flask import request
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class Notifications(Resource):
    # PATCH /system/notifications: send a test notification to the given
    # Apprise provider URL.
    @authenticate
    def patch(self):
        url = request.form.get("url")

        # async_mode=False: send synchronously so any failure happens before
        # we answer the request.
        asset = apprise.AppriseAsset(async_mode=False)

        apobj = apprise.Apprise(asset=asset)

        apobj.add(url)

        apobj.notify(
            title='Bazarr test notification',
            body='Test notification'
        )

        return '', 204
|
47
bazarr/api/system/releases.py
Normal file
47
bazarr/api/system/releases.py
Normal file
|
@ -0,0 +1,47 @@
|
|||
# coding=utf-8
|
||||
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import logging
|
||||
|
||||
from flask import jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from config import settings
|
||||
from get_args import args
|
||||
|
||||
|
||||
class SystemReleases(Resource):
    # GET /system/releases: release notes from the cached releases.txt file.
    @authenticate
    def get(self):
        filtered_releases = []
        try:
            with io.open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'r', encoding='UTF-8') as f:
                releases = json.loads(f.read())

            for release in releases:
                if settings.general.branch == 'master' and not release['prerelease']:
                    # On master, only stable (non-prerelease) entries are shown.
                    filtered_releases.append(release)
                elif settings.general.branch != 'master' and any(not x['prerelease'] for x in filtered_releases):
                    # On a dev branch, stop collecting once a stable release has
                    # been included: show prereleases down to the last stable.
                    continue
                elif settings.general.branch != 'master':
                    filtered_releases.append(release)
            if settings.general.branch == 'master':
                filtered_releases = filtered_releases[:5]

            current_version = os.environ["BAZARR_VERSION"]

            for i, release in enumerate(filtered_releases):
                # Drop the body's first line and its '- ' bullet markers.
                body = release['body'].replace('- ', '').split('\n')[1:]
                filtered_releases[i] = {"body": body,
                                        "name": release['name'],
                                        "date": release['date'][:10],
                                        "prerelease": release['prerelease'],
                                        "current": release['name'].lstrip('v') == current_version}

        except Exception:
            # Best effort: a missing/corrupt cache returns an empty list.
            logging.exception(
                'BAZARR cannot parse releases caching file: ' + os.path.join(args.config_dir, 'config', 'releases.txt'))
        return jsonify(data=filtered_releases)
|
40
bazarr/api/system/searches.py
Normal file
40
bazarr/api/system/searches.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from config import settings
|
||||
from database import TableShows, TableMovies
|
||||
|
||||
|
||||
class Searches(Resource):
    # GET /system/searches?query=...: title substring search across series
    # and movies (each only if the corresponding integration is enabled).
    @authenticate
    def get(self):
        query = request.args.get('query')
        search_list = []

        if query:
            if settings.general.getboolean('use_sonarr'):
                # Get matching series
                series = TableShows.select(TableShows.title,
                                           TableShows.sonarrSeriesId,
                                           TableShows.year)\
                    .where(TableShows.title.contains(query))\
                    .order_by(TableShows.title)\
                    .dicts()
                series = list(series)
                search_list += series

            if settings.general.getboolean('use_radarr'):
                # Get matching movies
                movies = TableMovies.select(TableMovies.title,
                                            TableMovies.radarrId,
                                            TableMovies.year) \
                    .where(TableMovies.title.contains(query)) \
                    .order_by(TableMovies.title) \
                    .dicts()
                movies = list(movies)
                search_list += movies

        return jsonify(search_list)
|
102
bazarr/api/system/settings.py
Normal file
102
bazarr/api/system/settings.py
Normal file
|
@ -0,0 +1,102 @@
|
|||
# coding=utf-8
|
||||
|
||||
import json
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from database import TableLanguagesProfiles, TableSettingsLanguages, TableShows, TableMovies, TableSettingsNotifier, \
|
||||
update_profile_id_list
|
||||
from event_handler import event_stream
|
||||
from config import settings, save_settings, get_settings
|
||||
from scheduler import scheduler
|
||||
from list_subtitles import list_missing_subtitles, list_missing_subtitles_movies
|
||||
|
||||
|
||||
class SystemSettings(Resource):
    # GET /system/settings: full settings payload plus the notification
    # providers stored in the database.
    @authenticate
    def get(self):
        data = get_settings()

        notifications = TableSettingsNotifier.select().order_by(TableSettingsNotifier.name).dicts()
        notifications = list(notifications)
        for i, item in enumerate(notifications):
            # SQLite stores the flag as 0/1; expose a real boolean.
            item["enabled"] = item["enabled"] == 1
            notifications[i] = item

        data['notifications'] = dict()
        data['notifications']['providers'] = notifications

        return jsonify(data)

    # POST /system/settings: persist settings, enabled languages, language
    # profiles and notification providers submitted by the UI.
    @authenticate
    def post(self):
        # Enabled languages: reset all to disabled, then re-enable the selection.
        enabled_languages = request.form.getlist('languages-enabled')
        if len(enabled_languages) != 0:
            TableSettingsLanguages.update({
                TableSettingsLanguages.enabled: 0
            }).execute()
            for code in enabled_languages:
                TableSettingsLanguages.update({
                    TableSettingsLanguages.enabled: 1
                })\
                    .where(TableSettingsLanguages.code2 == code)\
                    .execute()
            event_stream("languages")

        # Language profiles: upsert the submitted list, then delete (and
        # unassign) any profile that was not resubmitted.
        languages_profiles = request.form.get('languages-profiles')
        if languages_profiles:
            existing_ids = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId).dicts()
            existing_ids = list(existing_ids)
            existing = [x['profileId'] for x in existing_ids]
            for item in json.loads(languages_profiles):
                if item['profileId'] in existing:
                    # Update existing profiles
                    TableLanguagesProfiles.update({
                        TableLanguagesProfiles.name: item['name'],
                        TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None,
                        TableLanguagesProfiles.items: json.dumps(item['items'])
                    })\
                        .where(TableLanguagesProfiles.profileId == item['profileId'])\
                        .execute()
                    # Anything left in 'existing' afterwards was deleted client-side.
                    existing.remove(item['profileId'])
                else:
                    # Add new profiles
                    TableLanguagesProfiles.insert({
                        TableLanguagesProfiles.profileId: item['profileId'],
                        TableLanguagesProfiles.name: item['name'],
                        TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None,
                        TableLanguagesProfiles.items: json.dumps(item['items'])
                    }).execute()
            for profileId in existing:
                # Unassign this profileId from series and movies
                TableShows.update({
                    TableShows.profileId: None
                }).where(TableShows.profileId == profileId).execute()
                TableMovies.update({
                    TableMovies.profileId: None
                }).where(TableMovies.profileId == profileId).execute()
                # Remove deleted profiles
                TableLanguagesProfiles.delete().where(TableLanguagesProfiles.profileId == profileId).execute()

            update_profile_id_list()
            event_stream("languages")

            # Profile changes affect what counts as "missing": recompute in
            # the background via the scheduler.
            if settings.general.getboolean('use_sonarr'):
                scheduler.add_job(list_missing_subtitles, kwargs={'send_event': False})
            if settings.general.getboolean('use_radarr'):
                scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': False})

        # Update Notification
        notifications = request.form.getlist('notifications-providers')
        for item in notifications:
            item = json.loads(item)
            TableSettingsNotifier.update({
                TableSettingsNotifier.enabled: item['enabled'],
                TableSettingsNotifier.url: item['url']
            }).where(TableSettingsNotifier.name == item['name']).execute()

        # Persist every remaining form value into the config file.
        save_settings(zip(request.form.keys(), request.form.listvalues()))
        event_stream("settings")
        return '', 204
|
27
bazarr/api/system/status.py
Normal file
27
bazarr/api/system/status.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
# coding=utf-8
|
||||
|
||||
import os
|
||||
import platform
|
||||
|
||||
from flask import jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from utils import get_sonarr_info, get_radarr_info
|
||||
from get_args import args
|
||||
from init import startTime
|
||||
|
||||
|
||||
class SystemStatus(Resource):
    """Expose version, platform and runtime information for the status page."""

    @authenticate
    def get(self):
        # Build the payload in one literal; key order matches the previous
        # incremental construction.
        system_status = {
            'bazarr_version': os.environ["BAZARR_VERSION"],
            'sonarr_version': get_sonarr_info.version(),
            'radarr_version': get_radarr_info.version(),
            'operating_system': platform.platform(),
            'python_version': platform.python_version(),
            'bazarr_directory': os.path.dirname(os.path.dirname(__file__)),
            'bazarr_config_directory': args.config_dir,
            'start_time': startTime,
        }
        return jsonify(data=system_status)
|
18
bazarr/api/system/system.py
Normal file
18
bazarr/api/system/system.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class System(Resource):
    # POST /system?action=<shutdown|restart>
    @authenticate
    def post(self):
        # Imported lazily to avoid a circular import at module load time.
        from server import webserver
        action = request.args.get('action')
        if action == "shutdown":
            webserver.shutdown()
        elif action == "restart":
            webserver.restart()
        # Unknown actions are silently ignored.
        return '', 204
|
31
bazarr/api/system/tasks.py
Normal file
31
bazarr/api/system/tasks.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import request, jsonify
|
||||
from flask_restful import Resource
|
||||
|
||||
from ..utils import authenticate
|
||||
from scheduler import scheduler
|
||||
|
||||
|
||||
class SystemTasks(Resource):
    # GET /system/tasks[?taskid=...]: scheduled task list, optionally
    # filtered to a single task id.
    @authenticate
    def get(self):
        taskid = request.args.get('taskid')

        task_list = scheduler.get_task_list()

        if taskid:
            # Fixed: the original loop rebound the name it was iterating and
            # used 'continue' where it meant to stop. Filter instead; an
            # unknown id now yields an empty list rather than silently
            # returning every task.
            task_list = [item for item in task_list if item['job_id'] == taskid]

        return jsonify(data=task_list)

    # POST /system/tasks: run the given scheduled task immediately.
    @authenticate
    def post(self):
        taskid = request.form.get('taskid')

        scheduler.execute_job_now(taskid)

        return '', 204
|
239
bazarr/api/utils.py
Normal file
239
bazarr/api/utils.py
Normal file
|
@ -0,0 +1,239 @@
|
|||
# coding=utf-8
|
||||
|
||||
import ast
|
||||
|
||||
from functools import wraps
|
||||
from flask import request, abort
|
||||
from operator import itemgetter
|
||||
|
||||
from config import settings, base_url
|
||||
from get_languages import language_from_alpha2, alpha3_from_alpha2
|
||||
from database import get_audio_profile_languages, get_desired_languages
|
||||
from helper import path_mappings
|
||||
|
||||
# Values that clients may send to mean "no value selected".
None_Keys = ['null', 'undefined', '', None]

# String values that count as boolean false in query parameters.
False_Keys = ['False', 'false', '0']


def authenticate(actual_method):
    """Decorator: require a valid API key on the wrapped endpoint.

    The key may be supplied as an 'apikey' query argument, an 'apikey'
    form field, or an 'X-API-KEY' header; otherwise the request is
    aborted with HTTP 401.
    """
    @wraps(actual_method)
    def wrapper(*args, **kwargs):
        apikey_settings = settings.auth.apikey
        apikey_get = request.args.get('apikey')
        apikey_post = request.form.get('apikey')
        apikey_header = None
        if 'X-API-KEY' in request.headers:
            apikey_header = request.headers['X-API-KEY']

        # Accept the key from any of the three locations.
        if apikey_settings in [apikey_get, apikey_post, apikey_header]:
            return actual_method(*args, **kwargs)

        return abort(401)

    return wrapper
|
||||
|
||||
|
||||
def postprocess(item):
    """Normalize a raw database row (dict) in place for API output.

    - drops the internal ffprobe cache blob
    - parses the stringified 'tags' list
    - converts 'True'/'False' text flags to real booleans
    - expands 'language' ('code2[:forced|:hi]') into a language dict
    """
    # Remove ffprobe_cache: internal binary cache, never exposed to the API.
    if 'ffprobe_cache' in item:
        del (item['ffprobe_cache'])

    # Parse tags (stored as a stringified Python list, e.g. "['a', 'b']").
    if 'tags' in item:
        if item['tags'] is None:
            item['tags'] = []
        else:
            item['tags'] = ast.literal_eval(item['tags'])

    # Monitored flag is stored as the text 'True'/'False'.
    if 'monitored' in item:
        if item['monitored'] is None:
            item['monitored'] = False
        else:
            item['monitored'] = item['monitored'] == 'True'

    # Hearing-impaired flag: same text encoding. A None value is left
    # untouched (the guard skips it); the previous inner 'is None' branch
    # was unreachable under this guard and has been removed.
    if 'hearing_impaired' in item and item['hearing_impaired'] is not None:
        item['hearing_impaired'] = item['hearing_impaired'] == 'True'

    # Expand 'language' ("code2", optionally suffixed ':forced' or ':hi').
    if 'language' in item:
        if item['language'] == 'None':
            item['language'] = None
        elif item['language'] is not None:
            splitted_language = item['language'].split(':')
            item['language'] = {"name": language_from_alpha2(splitted_language[0]),
                                "code2": splitted_language[0],
                                "code3": alpha3_from_alpha2(splitted_language[0]),
                                "forced": True if item['language'].endswith(':forced') else False,
                                "hi": True if item['language'].endswith(':hi') else False}
|
||||
|
||||
|
||||
def postprocessSeries(item):
    """Normalize a raw series row (dict) in place for API output."""
    postprocess(item)

    # Parse audio language: resolve it from the series' audio profile.
    if item.get('audio_language') is not None:
        item['audio_language'] = get_audio_profile_languages(series_id=item['sonarrSeriesId'])

    # Rename alternateTitles -> alternativeTitles, parsing the stored list.
    if 'alternateTitles' in item:
        raw_titles = item['alternateTitles']
        item['alternativeTitles'] = [] if raw_titles is None else ast.literal_eval(raw_titles)
        del item["alternateTitles"]

    # Parse seriesType
    if item.get('seriesType') is not None:
        item['seriesType'] = item['seriesType'].capitalize()

    if 'path' in item:
        item['path'] = path_mappings.path_replace(item['path'])

    # map poster and fanart to server proxy
    for artwork_key in ('poster', 'fanart'):
        if artwork_key in item:
            artwork = item[artwork_key]
            item[artwork_key] = f"{base_url}/images/series{artwork}" if artwork else None
|
||||
|
||||
|
||||
def postprocessEpisode(item):
    """Normalize a raw episode row (dict) in place for API output."""
    postprocess(item)
    # Resolve audio language from the episode's audio profile.
    if 'audio_language' in item and item['audio_language'] is not None:
        item['audio_language'] = get_audio_profile_languages(episode_id=item['sonarrEpisodeId'])

    # Parse subtitles: stored as a stringified list of [language, path] pairs,
    # where language is 'code2' optionally suffixed with ':forced' or ':hi'.
    if 'subtitles' in item:
        if item['subtitles'] is None:
            raw_subtitles = []
        else:
            raw_subtitles = ast.literal_eval(item['subtitles'])
        subtitles = []

        for subs in raw_subtitles:
            subtitle = subs[0].split(':')
            sub = {"name": language_from_alpha2(subtitle[0]),
                   "code2": subtitle[0],
                   "code3": alpha3_from_alpha2(subtitle[0]),
                   "path": path_mappings.path_replace(subs[1]),
                   "forced": False,
                   "hi": False}
            if len(subtitle) > 1:
                # Suffix after ':' is either 'forced' or 'hi'.
                sub["forced"] = True if subtitle[1] == 'forced' else False
                sub["hi"] = True if subtitle[1] == 'hi' else False

            subtitles.append(sub)

        item.update({"subtitles": subtitles})

    # Parse missing subtitles
    if 'missing_subtitles' in item:
        if item['missing_subtitles'] is None:
            item['missing_subtitles'] = []
        else:
            item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
        for i, subs in enumerate(item['missing_subtitles']):
            subtitle = subs.split(':')
            item['missing_subtitles'][i] = {"name": language_from_alpha2(subtitle[0]),
                                            "code2": subtitle[0],
                                            "code3": alpha3_from_alpha2(subtitle[0]),
                                            "forced": False,
                                            "hi": False}
            if len(subtitle) > 1:
                item['missing_subtitles'][i].update({
                    "forced": True if subtitle[1] == 'forced' else False,
                    "hi": True if subtitle[1] == 'hi' else False
                })

    # Rename scene_name to the API's camelCase sceneName.
    if 'scene_name' in item:
        item["sceneName"] = item["scene_name"]
        del item["scene_name"]

    if 'path' in item and item['path']:
        # Provide mapped path
        item['path'] = path_mappings.path_replace(item['path'])
|
||||
|
||||
|
||||
# TODO: Move
def postprocessMovie(item):
    """Normalize a raw movie row (dict) in place for API output."""
    postprocess(item)
    # Parse audio language
    if 'audio_language' in item and item['audio_language'] is not None:
        item['audio_language'] = get_audio_profile_languages(movie_id=item['radarrId'])

    # Parse alternate titles
    if 'alternativeTitles' in item:
        if item['alternativeTitles'] is None:
            item['alternativeTitles'] = []
        else:
            item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles'])

    # Parse failed attempts
    if 'failedAttempts' in item:
        if item['failedAttempts']:
            item['failedAttempts'] = ast.literal_eval(item['failedAttempts'])

    # Parse subtitles: stored as a stringified list of [language, path] pairs,
    # where language is 'code2' optionally suffixed with ':forced' or ':hi'.
    if 'subtitles' in item:
        if item['subtitles'] is None:
            item['subtitles'] = []
        else:
            item['subtitles'] = ast.literal_eval(item['subtitles'])
        for i, subs in enumerate(item['subtitles']):
            language = subs[0].split(':')
            item['subtitles'][i] = {"path": path_mappings.path_replace_movie(subs[1]),
                                    "name": language_from_alpha2(language[0]),
                                    "code2": language[0],
                                    "code3": alpha3_from_alpha2(language[0]),
                                    "forced": False,
                                    "hi": False}
            if len(language) > 1:
                item['subtitles'][i].update({
                    "forced": True if language[1] == 'forced' else False,
                    "hi": True if language[1] == 'hi' else False
                })

        if settings.general.getboolean('embedded_subs_show_desired'):
            # Only keep subtitles in the profile's desired languages, plus any
            # external subtitle file (which always has a path).
            desired_lang_list = get_desired_languages(item['profileId'])
            item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']]

        item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))

    # Parse missing subtitles
    if 'missing_subtitles' in item:
        if item['missing_subtitles'] is None:
            item['missing_subtitles'] = []
        else:
            item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
        for i, subs in enumerate(item['missing_subtitles']):
            language = subs.split(':')
            item['missing_subtitles'][i] = {"name": language_from_alpha2(language[0]),
                                            "code2": language[0],
                                            "code3": alpha3_from_alpha2(language[0]),
                                            "forced": False,
                                            "hi": False}
            if len(language) > 1:
                item['missing_subtitles'][i].update({
                    "forced": True if language[1] == 'forced' else False,
                    "hi": True if language[1] == 'hi' else False
                })

    # Provide mapped path
    if 'path' in item:
        if item['path']:
            item['path'] = path_mappings.path_replace_movie(item['path'])

    if 'subtitles_path' in item:
        # Provide mapped subtitles path
        item['subtitles_path'] = path_mappings.path_replace_movie(item['subtitles_path'])

    # map poster and fanart to server proxy
    if 'poster' in item:
        poster = item['poster']
        item['poster'] = f"{base_url}/images/movies{poster}" if poster else None

    if 'fanart' in item:
        fanart = item['fanart']
        item['fanart'] = f"{base_url}/images/movies{fanart}" if fanart else None
|
12
bazarr/api/webhooks/__init__.py
Normal file
12
bazarr/api/webhooks/__init__.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask import Blueprint
|
||||
from flask_restful import Api
|
||||
|
||||
from .plex import WebHooksPlex
|
||||
|
||||
|
||||
api_bp_webhooks = Blueprint('api_webhooks', __name__)
|
||||
api = Api(api_bp_webhooks)
|
||||
|
||||
api.add_resource(WebHooksPlex, '/webhooks/plex')
|
76
bazarr/api/webhooks/plex.py
Normal file
76
bazarr/api/webhooks/plex.py
Normal file
|
@ -0,0 +1,76 @@
|
|||
# coding=utf-8
|
||||
|
||||
import json
|
||||
import requests
|
||||
import os
|
||||
import re
|
||||
|
||||
from flask import request
|
||||
from flask_restful import Resource
|
||||
from bs4 import BeautifulSoup as bso
|
||||
|
||||
from database import TableEpisodes, TableShows, TableMovies
|
||||
from get_subtitle import episode_download_subtitles, movies_download_subtitles
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
class WebHooksPlex(Resource):
|
||||
@authenticate
|
||||
def post(self):
|
||||
json_webhook = request.form.get('payload')
|
||||
parsed_json_webhook = json.loads(json_webhook)
|
||||
|
||||
event = parsed_json_webhook['event']
|
||||
if event not in ['media.play']:
|
||||
return '', 204
|
||||
|
||||
media_type = parsed_json_webhook['Metadata']['type']
|
||||
|
||||
if media_type == 'episode':
|
||||
season = parsed_json_webhook['Metadata']['parentIndex']
|
||||
episode = parsed_json_webhook['Metadata']['index']
|
||||
else:
|
||||
season = episode = None
|
||||
|
||||
ids = []
|
||||
for item in parsed_json_webhook['Metadata']['Guid']:
|
||||
splitted_id = item['id'].split('://')
|
||||
if len(splitted_id) == 2:
|
||||
ids.append({splitted_id[0]: splitted_id[1]})
|
||||
if not ids:
|
||||
return '', 404
|
||||
|
||||
if media_type == 'episode':
|
||||
try:
|
||||
episode_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
|
||||
r = requests.get('https://imdb.com/title/{}'.format(episode_imdb_id),
|
||||
headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
|
||||
soup = bso(r.content, "html.parser")
|
||||
series_imdb_id = soup.find('a', {'class': re.compile(r'SeriesParentLink__ParentTextLink')})['href'].split('/')[2]
|
||||
except:
|
||||
return '', 404
|
||||
else:
|
||||
sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId) \
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.where(TableShows.imdbId == series_imdb_id,
|
||||
TableEpisodes.season == season,
|
||||
TableEpisodes.episode == episode) \
|
||||
.dicts() \
|
||||
.get()
|
||||
|
||||
if sonarrEpisodeId:
|
||||
episode_download_subtitles(no=sonarrEpisodeId['sonarrEpisodeId'], send_progress=True)
|
||||
else:
|
||||
try:
|
||||
movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
|
||||
except:
|
||||
return '', 404
|
||||
else:
|
||||
radarrId = TableMovies.select(TableMovies.radarrId)\
|
||||
.where(TableMovies.imdbId == movie_imdb_id)\
|
||||
.dicts()\
|
||||
.get()
|
||||
if radarrId:
|
||||
movies_download_subtitles(no=radarrId['radarrId'])
|
||||
|
||||
return '', 200
|
|
@ -28,7 +28,8 @@ def create_app():
|
|||
else:
|
||||
app.config["DEBUG"] = False
|
||||
|
||||
socketio.init_app(app, path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*', async_mode='threading')
|
||||
socketio.init_app(app, path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*',
|
||||
async_mode='threading', allow_upgrades=False, transports='polling')
|
||||
return app
|
||||
|
||||
|
||||
|
|
|
@ -192,7 +192,7 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
|
|||
'^venv' + separator,
|
||||
'^WinPython' + separator,
|
||||
separator + '__pycache__' + separator + '$']
|
||||
if os.path.abspath(bazarr_dir) in os.path.abspath(config_dir):
|
||||
if os.path.abspath(bazarr_dir).lower() in os.path.abspath(config_dir).lower():
|
||||
dir_to_ignore.append('^' + os.path.relpath(config_dir, bazarr_dir) + os.path.sep)
|
||||
dir_to_ignore_regex = re.compile('(?:% s)' % '|'.join(dir_to_ignore))
|
||||
logging.debug('BAZARR upgrade leftover cleaner will ignore directories matching this regex: '
|
||||
|
|
|
@ -207,7 +207,10 @@ defaults = {
|
|||
},
|
||||
'titulky': {
|
||||
'username': '',
|
||||
'password': ''
|
||||
'password': '',
|
||||
'skip_wrong_fps': 'False',
|
||||
'approved_only': 'False',
|
||||
'multithreading': 'True'
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -412,8 +415,10 @@ def save_settings(settings_items):
|
|||
configure_captcha_func()
|
||||
|
||||
if update_schedule:
|
||||
from api import scheduler
|
||||
from scheduler import scheduler
|
||||
from event_handler import event_stream
|
||||
scheduler.update_configurable_tasks()
|
||||
event_stream(type='task')
|
||||
|
||||
if configure_proxy:
|
||||
configure_proxy_func()
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
import os
|
||||
import datetime
|
||||
import pytz
|
||||
import logging
|
||||
import subliminal_patch
|
||||
import pretty
|
||||
|
@ -29,6 +30,9 @@ def time_until_end_of_day(dt=None):
|
|||
tomorrow = dt + datetime.timedelta(days=1)
|
||||
return datetime.datetime.combine(tomorrow, datetime.time.min) - dt
|
||||
|
||||
# Titulky resets its download limits at the start of a new day from its perspective - the Europe/Prague timezone
|
||||
titulky_server_local_time = datetime.datetime.now(tz=pytz.timezone('Europe/Prague')).replace(tzinfo=None) # Needs to convert to offset-naive dt
|
||||
titulky_limit_reset_datetime = time_until_end_of_day(dt=titulky_server_local_time)
|
||||
|
||||
hours_until_end_of_day = time_until_end_of_day().seconds // 3600 + 1
|
||||
|
||||
|
@ -64,9 +68,8 @@ PROVIDER_THROTTLE_MAP = {
|
|||
TooManyRequests: (datetime.timedelta(minutes=5), "5 minutes"),
|
||||
IPAddressBlocked: (datetime.timedelta(hours=1), "1 hours"),
|
||||
},
|
||||
"titulky": {
|
||||
DownloadLimitExceeded: (
|
||||
datetime.timedelta(hours=hours_until_end_of_day), "{} hours".format(str(hours_until_end_of_day)))
|
||||
"titulky" : {
|
||||
DownloadLimitExceeded: (titulky_limit_reset_datetime, f"{titulky_limit_reset_datetime.seconds // 3600 + 1} hours")
|
||||
},
|
||||
"legendasdivx": {
|
||||
TooManyRequests: (datetime.timedelta(hours=3), "3 hours"),
|
||||
|
@ -183,6 +186,9 @@ def get_providers_auth():
|
|||
'titulky': {
|
||||
'username': settings.titulky.username,
|
||||
'password': settings.titulky.password,
|
||||
'skip_wrong_fps': settings.titulky.getboolean('skip_wrong_fps'),
|
||||
'approved_only': settings.titulky.getboolean('approved_only'),
|
||||
'multithreading': settings.titulky.getboolean('multithreading'),
|
||||
},
|
||||
'titlovi': {
|
||||
'username': settings.titlovi.username,
|
||||
|
|
|
@ -210,6 +210,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
|
|||
series_id = episode_metadata['seriesId']
|
||||
episode_id = episode_metadata['episodeId']
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
forced=subtitle.language.forced,
|
||||
srt_lang=downloaded_language_code2, media_type=media_type,
|
||||
percent_score=percent_score,
|
||||
series_id=episode_metadata['seriesId'],
|
||||
|
@ -222,6 +223,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
|
|||
series_id = ""
|
||||
episode_id = movie_metadata['movieId']
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
forced=subtitle.language.forced,
|
||||
srt_lang=downloaded_language_code2, media_type=media_type,
|
||||
percent_score=percent_score,
|
||||
movie_id=movie_metadata['movieId'])
|
||||
|
@ -522,6 +524,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl
|
|||
series_id = episode_metadata['seriesId']
|
||||
episode_id = episode_metadata['episodeId']
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
forced=subtitle.language.forced,
|
||||
srt_lang=downloaded_language_code2, media_type=media_type,
|
||||
percent_score=score,
|
||||
series_id=episode_metadata['seriesId'],
|
||||
|
@ -534,6 +537,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl
|
|||
series_id = ""
|
||||
episode_id = movie_metadata['movieId']
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
forced=subtitle.language.forced,
|
||||
srt_lang=downloaded_language_code2, media_type=media_type,
|
||||
percent_score=score, movie_id=movie_metadata['movieId'])
|
||||
|
||||
|
@ -657,7 +661,7 @@ def manual_upload_subtitle(path, language, forced, hi, title, media_type, subtit
|
|||
series_id = episode_metadata['seriesId']
|
||||
episode_id = episode_metadata['episodeId']
|
||||
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
|
||||
percent_score=100, series_id=episode_metadata['seriesId'],
|
||||
percent_score=100, series_id=episode_metadata['seriesId'], forced=forced,
|
||||
episode_id=episode_metadata['episodeId'])
|
||||
else:
|
||||
movie_metadata = TableMovies.select(TableMovies.movieId)\
|
||||
|
@ -667,7 +671,7 @@ def manual_upload_subtitle(path, language, forced, hi, title, media_type, subtit
|
|||
series_id = ""
|
||||
episode_id = movie_metadata['movieId']
|
||||
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
|
||||
percent_score=100, movie_id=movie_metadata['movieId'])
|
||||
percent_score=100, movie_id=movie_metadata['movieId'], forced=forced)
|
||||
|
||||
if use_postprocessing:
|
||||
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language,
|
||||
|
@ -1602,9 +1606,15 @@ def postprocessing(command, path):
|
|||
logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
|
||||
|
||||
|
||||
def sync_subtitles(video_path, srt_path, srt_lang, media_type, percent_score, series_id=None,
|
||||
def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_score, series_id=None,
|
||||
episode_id=None, movie_id=None):
|
||||
if settings.subsync.getboolean('use_subsync'):
|
||||
if forced:
|
||||
logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.')
|
||||
elif not settings.subsync.getboolean('use_subsync'):
|
||||
logging.debug('BAZARR automatic syncing is disabled in settings. Skipping sync routine.')
|
||||
else:
|
||||
logging.debug(f'BAZARR automatic syncing is enabled in settings. We\'ll try to sync this '
|
||||
f'subtitles: {srt_path}.')
|
||||
if media_type == 'series':
|
||||
use_subsync_threshold = settings.subsync.getboolean('use_subsync_threshold')
|
||||
subsync_threshold = settings.subsync.subsync_threshold
|
||||
|
|
|
@ -13,7 +13,11 @@ from dogpile.cache.region import register_backend as register_cache_backend
|
|||
from config import settings, configure_captcha_func
|
||||
from get_args import args
|
||||
from logger import configure_logging
|
||||
import time
|
||||
|
||||
# set start time global variable as epoch
|
||||
global startTime
|
||||
startTime = time.time()
|
||||
|
||||
def init():
|
||||
# set subliminal_patch user agent
|
||||
|
|
|
@ -18,6 +18,7 @@ from helper import get_subtitle_destination_folder
|
|||
from embedded_subs_reader import embedded_subs_reader
|
||||
from event_handler import event_stream, show_progress, hide_progress
|
||||
from charamel import Detector
|
||||
from peewee import DoesNotExist
|
||||
|
||||
gc.enable()
|
||||
|
||||
|
@ -426,7 +427,7 @@ def list_missing_subtitles_movies(no=None, send_event=True):
|
|||
|
||||
# remove missing that have forced or hi subtitles for this language in existing
|
||||
for item in actual_subtitles_list:
|
||||
if item[1] == 'True' or item[2] == 'True':
|
||||
if item[2] == 'True':
|
||||
try:
|
||||
missing_subtitles_list.remove([item[0], 'False', 'False'])
|
||||
except ValueError:
|
||||
|
|
|
@ -118,8 +118,7 @@ def configure_logging(debug=False):
|
|||
logging.getLogger("SignalRCoreClient").setLevel(logging.CRITICAL)
|
||||
logging.getLogger("websocket").setLevel(logging.CRITICAL)
|
||||
|
||||
logging.getLogger("werkzeug").setLevel(logging.WARNING)
|
||||
logging.getLogger("engineio.server").setLevel(logging.WARNING)
|
||||
logging.getLogger("waitress").setLevel(logging.ERROR)
|
||||
logging.getLogger("knowit").setLevel(logging.CRITICAL)
|
||||
logging.getLogger("enzyme").setLevel(logging.CRITICAL)
|
||||
logging.getLogger("guessit").setLevel(logging.WARNING)
|
||||
|
|
|
@ -23,11 +23,11 @@ def update_notifier():
|
|||
notifiers_current.append([notifier['name']])
|
||||
|
||||
for x in results['schemas']:
|
||||
if [x['service_name']] not in notifiers_current:
|
||||
notifiers_new.append({'name': x['service_name'], 'enabled': 0})
|
||||
logging.debug('Adding new notifier agent: ' + x['service_name'])
|
||||
if [str(x['service_name'])] not in notifiers_current:
|
||||
notifiers_new.append({'name': str(x['service_name']), 'enabled': 0})
|
||||
logging.debug('Adding new notifier agent: ' + str(x['service_name']))
|
||||
else:
|
||||
notifiers_old.append([x['service_name']])
|
||||
notifiers_old.append([str(x['service_name'])])
|
||||
|
||||
notifiers_to_delete = [item for item in notifiers_current if item not in notifiers_old]
|
||||
|
||||
|
|
|
@ -13,8 +13,9 @@ from database import database
|
|||
from app import create_app
|
||||
app = create_app()
|
||||
|
||||
from api import api_bp # noqa
|
||||
app.register_blueprint(api_bp)
|
||||
from api import api_bp_list
|
||||
for item in api_bp_list:
|
||||
app.register_blueprint(item, url_prefix=base_url.rstrip('/') + '/api')
|
||||
|
||||
|
||||
class Server:
|
||||
|
|
|
@ -311,10 +311,14 @@ def subtitles_apply_mods(language, subtitle_path, mods):
|
|||
def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi):
|
||||
language_code_convert_dict = {
|
||||
'he': 'iw',
|
||||
'zt': 'zh-cn',
|
||||
'zh': 'zh-tw',
|
||||
}
|
||||
|
||||
to_lang = alpha3_from_alpha2(to_lang)
|
||||
lang_obj = Language(to_lang)
|
||||
lang_obj = CustomLanguage.from_value(to_lang, "alpha3")
|
||||
if not lang_obj:
|
||||
lang_obj = Language(to_lang)
|
||||
if forced:
|
||||
lang_obj = Language.rebuild(lang_obj, forced=True)
|
||||
if hi:
|
||||
|
@ -324,7 +328,8 @@ def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi):
|
|||
|
||||
max_characters = 5000
|
||||
|
||||
dest_srt_file = get_subtitle_path(video_path, language=lang_obj, extension='.srt', forced_tag=forced, hi_tag=hi)
|
||||
dest_srt_file = get_subtitle_path(video_path, language=lang_obj if isinstance(lang_obj, Language) else lang_obj.subzero_language(),
|
||||
extension='.srt', forced_tag=forced, hi_tag=hi)
|
||||
|
||||
subs = pysubs2.load(source_srt_file, encoding='utf-8')
|
||||
lines_list = [x.plaintext for x in subs]
|
||||
|
@ -348,8 +353,8 @@ def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi):
|
|||
for block_str in lines_block_list:
|
||||
try:
|
||||
translated_partial_srt_text = GoogleTranslator(source='auto',
|
||||
target=language_code_convert_dict.get(lang_obj.basename,
|
||||
lang_obj.basename)
|
||||
target=language_code_convert_dict.get(lang_obj.alpha2,
|
||||
lang_obj.alpha2)
|
||||
).translate(text=block_str)
|
||||
except Exception:
|
||||
return False
|
||||
|
|
|
@ -4,3 +4,4 @@ pytest
|
|||
pytest-pep8
|
||||
pytest-flakes
|
||||
pytest-cov
|
||||
pytest-vcr
|
||||
|
|
1118
frontend/package-lock.json
generated
1118
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
@ -14,7 +14,7 @@
|
|||
"private": true,
|
||||
"homepage": "./",
|
||||
"dependencies": {
|
||||
"@fontsource/roboto": "^4.2.2",
|
||||
"@fontsource/roboto": "^4.5.1",
|
||||
"@fortawesome/fontawesome-svg-core": "^1.2",
|
||||
"@fortawesome/free-brands-svg-icons": "^5.15",
|
||||
"@fortawesome/free-regular-svg-icons": "^5.15",
|
||||
|
@ -24,6 +24,8 @@
|
|||
"axios": "^0.23",
|
||||
"bootstrap": "^4",
|
||||
"lodash": "^4",
|
||||
"moment": "^2.29.1",
|
||||
"package.json": "^2.0.1",
|
||||
"rc-slider": "^9.7",
|
||||
"react": "^17",
|
||||
"react-bootstrap": "^1",
|
||||
|
@ -32,7 +34,7 @@
|
|||
"react-redux": "^7.2",
|
||||
"react-router-dom": "^5.3",
|
||||
"react-scripts": "^4",
|
||||
"react-select": "^4",
|
||||
"react-select": "^5.0.1",
|
||||
"react-table": "^7",
|
||||
"recharts": "^2.0.8",
|
||||
"rooks": "^5.7.1",
|
||||
|
@ -49,7 +51,7 @@
|
|||
"@types/react-dom": "^17",
|
||||
"@types/react-helmet": "^6.1",
|
||||
"@types/react-router-dom": "^5",
|
||||
"@types/react-select": "^4.0.3",
|
||||
"@types/react-select": "^5.0.1",
|
||||
"@types/react-table": "^7",
|
||||
"http-proxy-middleware": "^2",
|
||||
"husky": "^7",
|
||||
|
|
1
frontend/src/@types/system.d.ts
vendored
1
frontend/src/@types/system.d.ts
vendored
|
@ -14,6 +14,7 @@ declare namespace System {
|
|||
bazarr_version: string;
|
||||
operating_system: string;
|
||||
python_version: string;
|
||||
start_time: number;
|
||||
}
|
||||
|
||||
interface Health {
|
||||
|
|
|
@ -92,8 +92,7 @@ const NotificationModal: FunctionComponent<ModalProps & BaseModalProps> = ({
|
|||
variant="danger"
|
||||
onClick={() => {
|
||||
if (current) {
|
||||
current.enabled = false;
|
||||
update(current);
|
||||
update({ ...current, enabled: false });
|
||||
}
|
||||
closeModal();
|
||||
}}
|
||||
|
|
|
@ -2,12 +2,13 @@ import { capitalize, isArray, isBoolean } from "lodash";
|
|||
import React, {
|
||||
FunctionComponent,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useState,
|
||||
} from "react";
|
||||
import { Button, Col, Container, Row } from "react-bootstrap";
|
||||
import { components } from "react-select";
|
||||
import { SelectComponents } from "react-select/src/components";
|
||||
import { SelectComponents } from "react-select/dist/declarations/src/components";
|
||||
import {
|
||||
BaseModal,
|
||||
Selector,
|
||||
|
@ -81,6 +82,10 @@ export const ProviderModal: FunctionComponent = () => {
|
|||
|
||||
const [staged, setChange] = useState<LooseObject>({});
|
||||
|
||||
useEffect(() => {
|
||||
setInfo(payload);
|
||||
}, [payload]);
|
||||
|
||||
const [info, setInfo] = useState<Nullable<ProviderInfo>>(payload);
|
||||
|
||||
useOnModalShow<ProviderInfo>((p) => setInfo(p), ModalKey);
|
||||
|
@ -213,11 +218,12 @@ export const ProviderModal: FunctionComponent = () => {
|
|||
}, [info]);
|
||||
|
||||
const selectorComponents = useMemo<
|
||||
Partial<SelectComponents<ProviderInfo, false>>
|
||||
Partial<SelectComponents<ProviderInfo, false, any>>
|
||||
>(
|
||||
() => ({
|
||||
Option: ({ data, ...other }) => {
|
||||
const { label, value } = data as SelectorOption<ProviderInfo>;
|
||||
const { label, value } =
|
||||
data as unknown as SelectorOption<ProviderInfo>;
|
||||
return (
|
||||
<components.Option data={data} {...other}>
|
||||
{label}
|
||||
|
|
|
@ -220,7 +220,8 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
|
|||
key: "tusubtitulo",
|
||||
name: "Tusubtitulo.com",
|
||||
description:
|
||||
"LATAM Spanish / Spanish / English Subtitles Provider for TV Shows",
|
||||
"Provider requested to be removed from Bazarr so it will always return no subtitles. Could potentially come back in the future with an upcoming premium account.",
|
||||
// "LATAM Spanish / Spanish / English Subtitles Provider for TV Shows",
|
||||
},
|
||||
{
|
||||
key: "titulky",
|
||||
|
@ -229,6 +230,14 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
|
|||
defaultKey: {
|
||||
username: "",
|
||||
password: "",
|
||||
skip_wrong_fps: false,
|
||||
approved_only: false,
|
||||
multithreading: true,
|
||||
},
|
||||
keyNameOverride: {
|
||||
skip_wrong_fps: "Skip mismatching FPS",
|
||||
approved_only: "Skip unapproved subtitles",
|
||||
multithreading: "Enable multithreading",
|
||||
},
|
||||
},
|
||||
{ key: "tvsubtitles", name: "TVSubtitles" },
|
||||
|
|
|
@ -6,9 +6,11 @@ import {
|
|||
} from "@fortawesome/free-brands-svg-icons";
|
||||
import { faPaperPlane } from "@fortawesome/free-solid-svg-icons";
|
||||
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
|
||||
import React, { FunctionComponent } from "react";
|
||||
import moment from "moment";
|
||||
import React, { FunctionComponent, useState } from "react";
|
||||
import { Col, Container, Row } from "react-bootstrap";
|
||||
import { Helmet } from "react-helmet";
|
||||
import { useIntervalWhen } from "rooks";
|
||||
import { useSystemHealth, useSystemStatus } from "../../@redux/hooks";
|
||||
import { AsyncOverlay } from "../../components";
|
||||
import { GithubRepoRoot } from "../../constants";
|
||||
|
@ -69,6 +71,28 @@ const SystemStatusView: FunctionComponent<Props> = () => {
|
|||
const health = useSystemHealth();
|
||||
const status = useSystemStatus();
|
||||
|
||||
const [uptime, setState] = useState<string>();
|
||||
const [intervalWhenState] = useState(true);
|
||||
|
||||
useIntervalWhen(
|
||||
() => {
|
||||
if (status) {
|
||||
let duration = moment.duration(
|
||||
moment().utc().unix() - status.start_time,
|
||||
"seconds"
|
||||
),
|
||||
days = duration.days(),
|
||||
hours = duration.hours().toString().padStart(2, "0"),
|
||||
minutes = duration.minutes().toString().padStart(2, "0"),
|
||||
seconds = duration.seconds().toString().padStart(2, "0");
|
||||
setState(days + "d " + hours + ":" + minutes + ":" + seconds);
|
||||
}
|
||||
},
|
||||
1000,
|
||||
intervalWhenState,
|
||||
true
|
||||
);
|
||||
|
||||
return (
|
||||
<Container className="p-5">
|
||||
<Helmet>
|
||||
|
@ -100,6 +124,9 @@ const SystemStatusView: FunctionComponent<Props> = () => {
|
|||
<CRow title="Bazarr Config Directory">
|
||||
<span>{status?.bazarr_config_directory}</span>
|
||||
</CRow>
|
||||
<CRow title="Uptime">
|
||||
<span>{uptime}</span>
|
||||
</CRow>
|
||||
</InfoContainer>
|
||||
</Row>
|
||||
<Row>
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { isArray } from "lodash";
|
||||
import React, { useCallback, useMemo } from "react";
|
||||
import ReactSelect from "react-select";
|
||||
import { SelectComponents } from "react-select/src/components";
|
||||
import Select from "react-select";
|
||||
import { SelectComponents } from "react-select/dist/declarations/src/components";
|
||||
import "./selector.scss";
|
||||
|
||||
export interface SelectorProps<T, M extends boolean> {
|
||||
|
@ -17,7 +17,7 @@ export interface SelectorProps<T, M extends boolean> {
|
|||
label?: (item: T) => string;
|
||||
defaultValue?: SelectorValueType<T, M>;
|
||||
value?: SelectorValueType<T, M>;
|
||||
components?: Partial<SelectComponents<T, M>>;
|
||||
components?: Partial<SelectComponents<T, M, any>>;
|
||||
}
|
||||
|
||||
export function Selector<T = string, M extends boolean = false>(
|
||||
|
@ -69,15 +69,15 @@ export function Selector<T = string, M extends boolean = false>(
|
|||
[label, multiple, nameFromItems]
|
||||
);
|
||||
|
||||
const defaultWrapper = useMemo(() => wrapper(defaultValue), [
|
||||
defaultValue,
|
||||
wrapper,
|
||||
]);
|
||||
const defaultWrapper = useMemo(
|
||||
() => wrapper(defaultValue),
|
||||
[defaultValue, wrapper]
|
||||
);
|
||||
|
||||
const valueWrapper = useMemo(() => wrapper(value), [wrapper, value]);
|
||||
|
||||
return (
|
||||
<ReactSelect
|
||||
<Select
|
||||
isLoading={loading}
|
||||
placeholder={placeholder}
|
||||
isSearchable={options.length >= 10}
|
||||
|
@ -92,7 +92,7 @@ export function Selector<T = string, M extends boolean = false>(
|
|||
className={`custom-selector w-100 ${className ?? ""}`}
|
||||
classNamePrefix="selector"
|
||||
onFocus={onFocus}
|
||||
onChange={(v) => {
|
||||
onChange={(v: SelectorOption<T>[]) => {
|
||||
if (onChange) {
|
||||
let res: T | T[] | null = null;
|
||||
if (isArray(v)) {
|
||||
|
@ -106,6 +106,6 @@ export function Selector<T = string, M extends boolean = false>(
|
|||
onChange(res as any);
|
||||
}
|
||||
}}
|
||||
></ReactSelect>
|
||||
></Select>
|
||||
);
|
||||
}
|
||||
|
|
|
@ -13,8 +13,8 @@ export const availableTranslation = {
|
|||
ca: "catalan",
|
||||
ceb: "cebuano",
|
||||
ny: "chichewa",
|
||||
"zh-cn": "chinese (simplified)",
|
||||
"zh-tw": "chinese (traditional)",
|
||||
zh: "chinese (simplified)",
|
||||
zt: "chinese (traditional)",
|
||||
co: "corsican",
|
||||
hr: "croatian",
|
||||
cs: "czech",
|
||||
|
|
|
@ -34,6 +34,7 @@ from .common import MATCH_ALL_TAG
|
|||
from .utils import is_exclusive_match
|
||||
from .utils import parse_list
|
||||
from .utils import parse_urls
|
||||
from .utils import cwe312_url
|
||||
from .logger import logger
|
||||
|
||||
from .AppriseAsset import AppriseAsset
|
||||
|
@ -58,13 +59,15 @@ class Apprise(object):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, servers=None, asset=None, debug=False):
|
||||
def __init__(self, servers=None, asset=None, location=None, debug=False):
|
||||
"""
|
||||
Loads a set of server urls while applying the Asset() module to each
|
||||
if specified.
|
||||
|
||||
If no asset is provided, then the default asset is used.
|
||||
|
||||
Optionally specify a global ContentLocation for a more strict means
|
||||
of handling Attachments.
|
||||
"""
|
||||
|
||||
# Initialize a server list of URLs
|
||||
|
@ -87,6 +90,11 @@ class Apprise(object):
|
|||
# Set our debug flag
|
||||
self.debug = debug
|
||||
|
||||
# Store our hosting location for optional strict rule handling
|
||||
# of Attachments. Setting this to None removes any attachment
|
||||
# restrictions.
|
||||
self.location = location
|
||||
|
||||
@staticmethod
|
||||
def instantiate(url, asset=None, tag=None, suppress_exceptions=True):
|
||||
"""
|
||||
|
@ -116,9 +124,14 @@ class Apprise(object):
|
|||
# Initialize our result set
|
||||
results = None
|
||||
|
||||
# Prepare our Asset Object
|
||||
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if isinstance(url, six.string_types):
|
||||
# Acquire our url tokens
|
||||
results = plugins.url_to_dict(url)
|
||||
results = plugins.url_to_dict(
|
||||
url, secure_logging=asset.secure_logging)
|
||||
|
||||
if results is None:
|
||||
# Failed to parse the server URL; detailed logging handled
|
||||
# inside url_to_dict - nothing to report here.
|
||||
|
@ -132,25 +145,40 @@ class Apprise(object):
|
|||
# schema is a mandatory dictionary item as it is the only way
|
||||
# we can index into our loaded plugins
|
||||
logger.error('Dictionary does not include a "schema" entry.')
|
||||
logger.trace('Invalid dictionary unpacked as:{}{}'.format(
|
||||
os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v) for k, v in results.items()])))
|
||||
logger.trace(
|
||||
'Invalid dictionary unpacked as:{}{}'.format(
|
||||
os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v)
|
||||
for k, v in results.items()])))
|
||||
return None
|
||||
|
||||
logger.trace('Dictionary unpacked as:{}{}'.format(
|
||||
os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v) for k, v in results.items()])))
|
||||
logger.trace(
|
||||
'Dictionary unpacked as:{}{}'.format(
|
||||
os.linesep, os.linesep.join(
|
||||
['{}="{}"'.format(k, v) for k, v in results.items()])))
|
||||
|
||||
# Otherwise we handle the invalid input specified
|
||||
else:
|
||||
logger.error('Invalid URL specified: {}'.format(url))
|
||||
logger.error(
|
||||
'An invalid URL type (%s) was specified for instantiation',
|
||||
type(url))
|
||||
return None
|
||||
|
||||
if not plugins.SCHEMA_MAP[results['schema']].enabled:
|
||||
#
|
||||
# First Plugin Enable Check (Pre Initialization)
|
||||
#
|
||||
|
||||
# Plugin has been disabled at a global level
|
||||
logger.error(
|
||||
'%s:// is disabled on this system.', results['schema'])
|
||||
return None
|
||||
|
||||
# Build a list of tags to associate with the newly added notifications
|
||||
results['tag'] = set(parse_list(tag))
|
||||
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
# Set our Asset Object
|
||||
results['asset'] = asset
|
||||
|
||||
if suppress_exceptions:
|
||||
try:
|
||||
|
@ -159,14 +187,21 @@ class Apprise(object):
|
|||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
# Create log entry of loaded URL
|
||||
logger.debug('Loaded {} URL: {}'.format(
|
||||
plugins.SCHEMA_MAP[results['schema']].service_name,
|
||||
plugin.url()))
|
||||
logger.debug(
|
||||
'Loaded {} URL: {}'.format(
|
||||
plugins.SCHEMA_MAP[results['schema']].service_name,
|
||||
plugin.url(privacy=asset.secure_logging)))
|
||||
|
||||
except Exception:
|
||||
# CWE-312 (Secure Logging) Handling
|
||||
loggable_url = url if not asset.secure_logging \
|
||||
else cwe312_url(url)
|
||||
|
||||
# the arguments are invalid or can not be used.
|
||||
logger.error('Could not load {} URL: {}'.format(
|
||||
plugins.SCHEMA_MAP[results['schema']].service_name, url))
|
||||
logger.error(
|
||||
'Could not load {} URL: {}'.format(
|
||||
plugins.SCHEMA_MAP[results['schema']].service_name,
|
||||
loggable_url))
|
||||
return None
|
||||
|
||||
else:
|
||||
|
@ -174,6 +209,24 @@ class Apprise(object):
|
|||
# URL information but don't wrap it in a try catch
|
||||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
if not plugin.enabled:
|
||||
#
|
||||
# Second Plugin Enable Check (Post Initialization)
|
||||
#
|
||||
|
||||
# Service/Plugin is disabled (on a more local level). This is a
|
||||
# case where the plugin was initially enabled but then after the
|
||||
# __init__() was called under the hood something pre-determined
|
||||
# that it could no longer be used.
|
||||
|
||||
# The only downside to doing it this way is services are
|
||||
# initialized prior to returning the details() if 3rd party tools
|
||||
# are polling what is available. These services that become
|
||||
# disabled thereafter are shown initially that they can be used.
|
||||
logger.error(
|
||||
'%s:// has become disabled on this system.', results['schema'])
|
||||
return None
|
||||
|
||||
return plugin
|
||||
|
||||
def add(self, servers, asset=None, tag=None):
|
||||
|
@ -286,7 +339,8 @@ class Apprise(object):
|
|||
return
|
||||
|
||||
def notify(self, body, title='', notify_type=NotifyType.INFO,
|
||||
body_format=None, tag=MATCH_ALL_TAG, attach=None):
|
||||
body_format=None, tag=MATCH_ALL_TAG, attach=None,
|
||||
interpret_escapes=None):
|
||||
"""
|
||||
Send a notification to all of the plugins previously loaded.
|
||||
|
||||
|
@ -306,47 +360,158 @@ class Apprise(object):
|
|||
Attach can contain a list of attachment URLs. attach can also be
|
||||
represented by a an AttachBase() (or list of) object(s). This
|
||||
identifies the products you wish to notify
|
||||
|
||||
Set interpret_escapes to True if you want to pre-escape a string
|
||||
such as turning a \n into an actual new line, etc.
|
||||
"""
|
||||
|
||||
if ASYNCIO_SUPPORT:
|
||||
return py3compat.asyncio.tosync(
|
||||
self.async_notify(
|
||||
body, title,
|
||||
notify_type=notify_type, body_format=body_format,
|
||||
tag=tag, attach=attach,
|
||||
interpret_escapes=interpret_escapes,
|
||||
),
|
||||
debug=self.debug
|
||||
)
|
||||
|
||||
else:
|
||||
try:
|
||||
results = list(
|
||||
self._notifyall(
|
||||
Apprise._notifyhandler,
|
||||
body, title,
|
||||
notify_type=notify_type, body_format=body_format,
|
||||
tag=tag, attach=attach,
|
||||
interpret_escapes=interpret_escapes,
|
||||
)
|
||||
)
|
||||
|
||||
except TypeError:
|
||||
# No notifications sent, and there was an internal error.
|
||||
return False
|
||||
|
||||
else:
|
||||
if len(results) > 0:
|
||||
# All notifications sent, return False if any failed.
|
||||
return all(results)
|
||||
|
||||
else:
|
||||
# No notifications sent.
|
||||
return None
|
||||
|
||||
def async_notify(self, *args, **kwargs):
|
||||
"""
|
||||
Send a notification to all of the plugins previously loaded, for
|
||||
asynchronous callers. This method is an async method that should be
|
||||
awaited on, even if it is missing the async keyword in its signature.
|
||||
(This is omitted to preserve syntax compatibility with Python 2.)
|
||||
|
||||
The arguments are identical to those of Apprise.notify(). This method
|
||||
is not available in Python 2.
|
||||
"""
|
||||
|
||||
try:
|
||||
coroutines = list(
|
||||
self._notifyall(
|
||||
Apprise._notifyhandlerasync, *args, **kwargs))
|
||||
|
||||
except TypeError:
|
||||
# No notifications sent, and there was an internal error.
|
||||
return py3compat.asyncio.toasyncwrap(False)
|
||||
|
||||
else:
|
||||
if len(coroutines) > 0:
|
||||
# All notifications sent, return False if any failed.
|
||||
return py3compat.asyncio.notify(coroutines)
|
||||
|
||||
else:
|
||||
# No notifications sent.
|
||||
return py3compat.asyncio.toasyncwrap(None)
|
||||
|
||||
@staticmethod
|
||||
def _notifyhandler(server, **kwargs):
|
||||
"""
|
||||
The synchronous notification sender. Returns True if the notification
|
||||
sent successfully.
|
||||
"""
|
||||
|
||||
try:
|
||||
# Send notification
|
||||
return server.notify(**kwargs)
|
||||
|
||||
except TypeError:
|
||||
# These our our internally thrown notifications
|
||||
return False
|
||||
|
||||
except Exception:
|
||||
# A catch all so we don't have to abort early
|
||||
# just because one of our plugins has a bug in it.
|
||||
logger.exception("Unhandled Notification Exception")
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def _notifyhandlerasync(server, **kwargs):
|
||||
"""
|
||||
The asynchronous notification sender. Returns a coroutine that yields
|
||||
True if the notification sent successfully.
|
||||
"""
|
||||
|
||||
if server.asset.async_mode:
|
||||
return server.async_notify(**kwargs)
|
||||
|
||||
else:
|
||||
# Send the notification immediately, and wrap the result in a
|
||||
# coroutine.
|
||||
status = Apprise._notifyhandler(server, **kwargs)
|
||||
return py3compat.asyncio.toasyncwrap(status)
|
||||
|
||||
def _notifyall(self, handler, body, title='', notify_type=NotifyType.INFO,
|
||||
body_format=None, tag=MATCH_ALL_TAG, attach=None,
|
||||
interpret_escapes=None):
|
||||
"""
|
||||
Creates notifications for all of the plugins loaded.
|
||||
|
||||
Returns a generator that calls handler for each notification. The first
|
||||
and only argument supplied to handler is the server, and the keyword
|
||||
arguments are exactly as they would be passed to server.notify().
|
||||
"""
|
||||
|
||||
if len(self) == 0:
|
||||
# Nothing to notify
|
||||
return False
|
||||
|
||||
# Initialize our return result which only turns to True if we send
|
||||
# at least one valid notification
|
||||
status = None
|
||||
raise TypeError("No service(s) to notify")
|
||||
|
||||
if not (title or body):
|
||||
return False
|
||||
raise TypeError("No message content specified to deliver")
|
||||
|
||||
if six.PY2:
|
||||
# Python 2.7.x Unicode Character Handling
|
||||
# Ensure we're working with utf-8
|
||||
if isinstance(title, unicode): # noqa: F821
|
||||
title = title.encode('utf-8')
|
||||
|
||||
if isinstance(body, unicode): # noqa: F821
|
||||
body = body.encode('utf-8')
|
||||
|
||||
# Tracks conversions
|
||||
conversion_map = dict()
|
||||
|
||||
# Prepare attachments if required
|
||||
if attach is not None and not isinstance(attach, AppriseAttachment):
|
||||
try:
|
||||
attach = AppriseAttachment(attach, asset=self.asset)
|
||||
|
||||
except TypeError:
|
||||
# bad attachments
|
||||
return False
|
||||
attach = AppriseAttachment(
|
||||
attach, asset=self.asset, location=self.location)
|
||||
|
||||
# Allow Asset default value
|
||||
body_format = self.asset.body_format \
|
||||
if body_format is None else body_format
|
||||
|
||||
# for asyncio support; we track a list of our servers to notify
|
||||
# sequentially
|
||||
coroutines = []
|
||||
# Allow Asset default value
|
||||
interpret_escapes = self.asset.interpret_escapes \
|
||||
if interpret_escapes is None else interpret_escapes
|
||||
|
||||
# Iterate over our loaded plugins
|
||||
for server in self.find(tag):
|
||||
if status is None:
|
||||
# We have at least one server to notify; change status
|
||||
# to be a default value of True from now (purely an
|
||||
# initialiation at this point)
|
||||
status = True
|
||||
|
||||
# If our code reaches here, we either did not define a tag (it
|
||||
# was set to None), or we did define a tag and the logic above
|
||||
# determined we need to notify the service it's associated with
|
||||
|
@ -396,48 +561,59 @@ class Apprise(object):
|
|||
# Store entry directly
|
||||
conversion_map[server.notify_format] = body
|
||||
|
||||
if ASYNCIO_SUPPORT and server.asset.async_mode:
|
||||
# Build a list of servers requiring notification
|
||||
# that will be triggered asynchronously afterwards
|
||||
coroutines.append(server.async_notify(
|
||||
body=conversion_map[server.notify_format],
|
||||
title=title,
|
||||
notify_type=notify_type,
|
||||
attach=attach))
|
||||
if interpret_escapes:
|
||||
#
|
||||
# Escape our content
|
||||
#
|
||||
|
||||
# We gather at this point and notify at the end
|
||||
continue
|
||||
try:
|
||||
# Added overhead required due to Python 3 Encoding Bug
|
||||
# identified here: https://bugs.python.org/issue21331
|
||||
conversion_map[server.notify_format] = \
|
||||
conversion_map[server.notify_format]\
|
||||
.encode('ascii', 'backslashreplace')\
|
||||
.decode('unicode-escape')
|
||||
|
||||
try:
|
||||
# Send notification
|
||||
if not server.notify(
|
||||
body=conversion_map[server.notify_format],
|
||||
title=title,
|
||||
notify_type=notify_type,
|
||||
attach=attach):
|
||||
except UnicodeDecodeError: # pragma: no cover
|
||||
# This occurs using a very old verion of Python 2.7 such
|
||||
# as the one that ships with CentOS/RedHat 7.x (v2.7.5).
|
||||
conversion_map[server.notify_format] = \
|
||||
conversion_map[server.notify_format] \
|
||||
.decode('string_escape')
|
||||
|
||||
# Toggle our return status flag
|
||||
status = False
|
||||
except AttributeError:
|
||||
# Must be of string type
|
||||
logger.error('Failed to escape message body')
|
||||
raise TypeError
|
||||
|
||||
except TypeError:
|
||||
# These our our internally thrown notifications
|
||||
status = False
|
||||
if title:
|
||||
try:
|
||||
# Added overhead required due to Python 3 Encoding Bug
|
||||
# identified here: https://bugs.python.org/issue21331
|
||||
title = title\
|
||||
.encode('ascii', 'backslashreplace')\
|
||||
.decode('unicode-escape')
|
||||
|
||||
except Exception:
|
||||
# A catch all so we don't have to abort early
|
||||
# just because one of our plugins has a bug in it.
|
||||
logger.exception("Notification Exception")
|
||||
status = False
|
||||
except UnicodeDecodeError: # pragma: no cover
|
||||
# This occurs using a very old verion of Python 2.7
|
||||
# such as the one that ships with CentOS/RedHat 7.x
|
||||
# (v2.7.5).
|
||||
title = title.decode('string_escape')
|
||||
|
||||
if coroutines:
|
||||
# perform our async notification(s)
|
||||
if not py3compat.asyncio.notify(coroutines, debug=self.debug):
|
||||
# Toggle our status only if we had a failure
|
||||
status = False
|
||||
except AttributeError:
|
||||
# Must be of string type
|
||||
logger.error('Failed to escape message title')
|
||||
raise TypeError
|
||||
|
||||
return status
|
||||
yield handler(
|
||||
server,
|
||||
body=conversion_map[server.notify_format],
|
||||
title=title,
|
||||
notify_type=notify_type,
|
||||
attach=attach
|
||||
)
|
||||
|
||||
def details(self, lang=None):
|
||||
def details(self, lang=None, show_requirements=False, show_disabled=False):
|
||||
"""
|
||||
Returns the details associated with the Apprise object
|
||||
|
||||
|
@ -453,8 +629,27 @@ class Apprise(object):
|
|||
'asset': self.asset.details(),
|
||||
}
|
||||
|
||||
# to add it's mapping to our hash table
|
||||
for plugin in set(plugins.SCHEMA_MAP.values()):
|
||||
# Iterate over our hashed plugins and dynamically build details on
|
||||
# their status:
|
||||
|
||||
content = {
|
||||
'service_name': getattr(plugin, 'service_name', None),
|
||||
'service_url': getattr(plugin, 'service_url', None),
|
||||
'setup_url': getattr(plugin, 'setup_url', None),
|
||||
# Placeholder - populated below
|
||||
'details': None
|
||||
}
|
||||
|
||||
# Standard protocol(s) should be None or a tuple
|
||||
enabled = getattr(plugin, 'enabled', True)
|
||||
if not show_disabled and not enabled:
|
||||
# Do not show inactive plugins
|
||||
continue
|
||||
|
||||
elif show_disabled:
|
||||
# Add current state to response
|
||||
content['enabled'] = enabled
|
||||
|
||||
# Standard protocol(s) should be None or a tuple
|
||||
protocols = getattr(plugin, 'protocol', None)
|
||||
|
@ -466,31 +661,35 @@ class Apprise(object):
|
|||
if isinstance(secure_protocols, six.string_types):
|
||||
secure_protocols = (secure_protocols, )
|
||||
|
||||
# Add our protocol details to our content
|
||||
content.update({
|
||||
'protocols': protocols,
|
||||
'secure_protocols': secure_protocols,
|
||||
})
|
||||
|
||||
if not lang:
|
||||
# Simply return our results
|
||||
details = plugins.details(plugin)
|
||||
content['details'] = plugins.details(plugin)
|
||||
if show_requirements:
|
||||
content['requirements'] = plugins.requirements(plugin)
|
||||
|
||||
else:
|
||||
# Emulate the specified language when returning our results
|
||||
with self.locale.lang_at(lang):
|
||||
details = plugins.details(plugin)
|
||||
content['details'] = plugins.details(plugin)
|
||||
if show_requirements:
|
||||
content['requirements'] = plugins.requirements(plugin)
|
||||
|
||||
# Build our response object
|
||||
response['schemas'].append({
|
||||
'service_name': getattr(plugin, 'service_name', None),
|
||||
'service_url': getattr(plugin, 'service_url', None),
|
||||
'setup_url': getattr(plugin, 'setup_url', None),
|
||||
'protocols': protocols,
|
||||
'secure_protocols': secure_protocols,
|
||||
'details': details,
|
||||
})
|
||||
response['schemas'].append(content)
|
||||
|
||||
return response
|
||||
|
||||
def urls(self):
|
||||
def urls(self, privacy=False):
|
||||
"""
|
||||
Returns all of the loaded URLs defined in this apprise object.
|
||||
"""
|
||||
return [x.url() for x in self.servers]
|
||||
return [x.url(privacy=privacy) for x in self.servers]
|
||||
|
||||
def pop(self, index):
|
||||
"""
|
||||
|
@ -592,3 +791,7 @@ class Apprise(object):
|
|||
"""
|
||||
return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig))
|
||||
else len(s.servers()) for s in self.servers])
|
||||
|
||||
|
||||
if six.PY2:
|
||||
del Apprise.async_notify
|
||||
|
|
63
libs/apprise/Apprise.pyi
Normal file
63
libs/apprise/Apprise.pyi
Normal file
|
@ -0,0 +1,63 @@
|
|||
from typing import Any, Dict, List, Iterable, Iterator, Optional
|
||||
|
||||
from . import (AppriseAsset, AppriseAttachment, AppriseConfig, ConfigBase,
|
||||
NotifyBase, NotifyFormat, NotifyType)
|
||||
from .common import ContentLocation
|
||||
|
||||
_Server = Union[str, ConfigBase, NotifyBase, AppriseConfig]
|
||||
_Servers = Union[_Server, Dict[Any, _Server], Iterable[_Server]]
|
||||
# Can't define this recursively as mypy doesn't support recursive types:
|
||||
# https://github.com/python/mypy/issues/731
|
||||
_Tag = Union[str, Iterable[Union[str, Iterable[str]]]]
|
||||
|
||||
class Apprise:
|
||||
def __init__(
|
||||
self,
|
||||
servers: _Servers = ...,
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
location: Optional[ContentLocation] = ...,
|
||||
debug: bool = ...
|
||||
) -> None: ...
|
||||
@staticmethod
|
||||
def instantiate(
|
||||
url: Union[str, Dict[str, NotifyBase]],
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
tag: Optional[_Tag] = ...,
|
||||
suppress_exceptions: bool = ...
|
||||
) -> NotifyBase: ...
|
||||
def add(
|
||||
self,
|
||||
servers: _Servers = ...,
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
tag: Optional[_Tag] = ...
|
||||
) -> bool: ...
|
||||
def clear(self) -> None: ...
|
||||
def find(self, tag: str = ...) -> Iterator[Apprise]: ...
|
||||
def notify(
|
||||
self,
|
||||
body: str,
|
||||
title: str = ...,
|
||||
notify_type: NotifyType = ...,
|
||||
body_format: NotifyFormat = ...,
|
||||
tag: _Tag = ...,
|
||||
attach: Optional[AppriseAttachment] = ...,
|
||||
interpret_escapes: Optional[bool] = ...
|
||||
) -> bool: ...
|
||||
async def async_notify(
|
||||
self,
|
||||
body: str,
|
||||
title: str = ...,
|
||||
notify_type: NotifyType = ...,
|
||||
body_format: NotifyFormat = ...,
|
||||
tag: _Tag = ...,
|
||||
attach: Optional[AppriseAttachment] = ...,
|
||||
interpret_escapes: Optional[bool] = ...
|
||||
) -> bool: ...
|
||||
def details(self, lang: Optional[str] = ...) -> Dict[str, Any]: ...
|
||||
def urls(self, privacy: bool = ...) -> Iterable[str]: ...
|
||||
def pop(self, index: int) -> ConfigBase: ...
|
||||
def __getitem__(self, index: int) -> ConfigBase: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
def __nonzero__(self) -> bool: ...
|
||||
def __iter__(self) -> Iterator[ConfigBase]: ...
|
||||
def __len__(self) -> int: ...
|
|
@ -24,7 +24,7 @@
|
|||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
|
||||
from uuid import uuid4
|
||||
from os.path import join
|
||||
from os.path import dirname
|
||||
from os.path import isfile
|
||||
|
@ -105,6 +105,36 @@ class AppriseAsset(object):
|
|||
# notifications are sent sequentially (one after another)
|
||||
async_mode = True
|
||||
|
||||
# Whether or not to interpret escapes found within the input text prior
|
||||
# to passing it upstream. Such as converting \t to an actual tab and \n
|
||||
# to a new line.
|
||||
interpret_escapes = False
|
||||
|
||||
# For more detail see CWE-312 @
|
||||
# https://cwe.mitre.org/data/definitions/312.html
|
||||
#
|
||||
# By enabling this, the logging output has additional overhead applied to
|
||||
# it preventing secure password and secret information from being
|
||||
# displayed in the logging. Since there is overhead involved in performing
|
||||
# this cleanup; system owners who run in a very isolated environment may
|
||||
# choose to disable this for a slight performance bump. It is recommended
|
||||
# that you leave this option as is otherwise.
|
||||
secure_logging = True
|
||||
|
||||
# All internal/system flags are prefixed with an underscore (_)
|
||||
# These can only be initialized using Python libraries and are not picked
|
||||
# up from (yaml) configuration files (if set)
|
||||
|
||||
# An internal counter that is used by AppriseAPI
|
||||
# (https://github.com/caronc/apprise-api). The idea is to allow one
|
||||
# instance of AppriseAPI to call another, but to track how many times
|
||||
# this occurs. It's intent is to prevent a loop where an AppriseAPI
|
||||
# Server calls itself (or loops indefinitely)
|
||||
_recursion = 0
|
||||
|
||||
# A unique identifer we can use to associate our calling source
|
||||
_uid = str(uuid4())
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Asset Initialization
|
||||
|
|
34
libs/apprise/AppriseAsset.pyi
Normal file
34
libs/apprise/AppriseAsset.pyi
Normal file
|
@ -0,0 +1,34 @@
|
|||
from typing import Dict, Optional
|
||||
|
||||
from . import NotifyFormat, NotifyType
|
||||
|
||||
class AppriseAsset:
|
||||
app_id: str
|
||||
app_desc: str
|
||||
app_url: str
|
||||
html_notify_map: Dict[NotifyType, str]
|
||||
default_html_color: str
|
||||
default_extension: str
|
||||
theme: Optional[str]
|
||||
image_url_mask: str
|
||||
image_url_logo: str
|
||||
image_path_mask: Optional[str]
|
||||
body_format: Optional[NotifyFormat]
|
||||
async_mode: bool
|
||||
interpret_escapes: bool
|
||||
def __init__(
|
||||
self,
|
||||
app_id: str = ...,
|
||||
app_desc: str = ...,
|
||||
app_url: str = ...,
|
||||
html_notify_map: Dict[NotifyType, str] = ...,
|
||||
default_html_color: str = ...,
|
||||
default_extension: str = ...,
|
||||
theme: Optional[str] = ...,
|
||||
image_url_mask: str = ...,
|
||||
image_url_logo: str = ...,
|
||||
image_path_mask: Optional[str] = ...,
|
||||
body_format: Optional[NotifyFormat] = ...,
|
||||
async_mode: bool = ...,
|
||||
interpret_escapes: bool = ...
|
||||
) -> None: ...
|
|
@ -29,6 +29,8 @@ from . import attachment
|
|||
from . import URLBase
|
||||
from .AppriseAsset import AppriseAsset
|
||||
from .logger import logger
|
||||
from .common import ContentLocation
|
||||
from .common import CONTENT_LOCATIONS
|
||||
from .utils import GET_SCHEMA_RE
|
||||
|
||||
|
||||
|
@ -38,7 +40,8 @@ class AppriseAttachment(object):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, paths=None, asset=None, cache=True, **kwargs):
|
||||
def __init__(self, paths=None, asset=None, cache=True, location=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Loads all of the paths/urls specified (if any).
|
||||
|
||||
|
@ -59,6 +62,25 @@ class AppriseAttachment(object):
|
|||
|
||||
It's also worth nothing that the cache value is only set to elements
|
||||
that are not already of subclass AttachBase()
|
||||
|
||||
Optionally set your current ContentLocation in the location argument.
|
||||
This is used to further handle attachments. The rules are as follows:
|
||||
- INACCESSIBLE: You simply have disabled use of the object; no
|
||||
attachments will be retrieved/handled.
|
||||
- HOSTED: You are hosting an attachment service for others.
|
||||
In these circumstances all attachments that are LOCAL
|
||||
based (such as file://) will not be allowed.
|
||||
- LOCAL: The least restrictive mode as local files can be
|
||||
referenced in addition to hosted.
|
||||
|
||||
In all both HOSTED and LOCAL modes, INACCESSIBLE attachment types will
|
||||
continue to be inaccessible. However if you set this field (location)
|
||||
to None (it's default value) the attachment location category will not
|
||||
be tested in any way (all attachment types will be allowed).
|
||||
|
||||
The location field is also a global option that can be set when
|
||||
initializing the Apprise object.
|
||||
|
||||
"""
|
||||
|
||||
# Initialize our attachment listings
|
||||
|
@ -71,6 +93,15 @@ class AppriseAttachment(object):
|
|||
self.asset = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if location is not None and location not in CONTENT_LOCATIONS:
|
||||
msg = "An invalid Attachment location ({}) was specified." \
|
||||
.format(location)
|
||||
logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Store our location
|
||||
self.location = location
|
||||
|
||||
# Now parse any paths specified
|
||||
if paths is not None:
|
||||
# Store our path(s)
|
||||
|
@ -123,26 +154,45 @@ class AppriseAttachment(object):
|
|||
|
||||
# Iterate over our attachments
|
||||
for _attachment in attachments:
|
||||
|
||||
if isinstance(_attachment, attachment.AttachBase):
|
||||
# Go ahead and just add our attachment into our list
|
||||
self.attachments.append(_attachment)
|
||||
if self.location == ContentLocation.INACCESSIBLE:
|
||||
logger.warning(
|
||||
"Attachments are disabled; ignoring {}"
|
||||
.format(_attachment))
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
elif not isinstance(_attachment, six.string_types):
|
||||
if isinstance(_attachment, six.string_types):
|
||||
logger.debug("Loading attachment: {}".format(_attachment))
|
||||
# Instantiate ourselves an object, this function throws or
|
||||
# returns None if it fails
|
||||
instance = AppriseAttachment.instantiate(
|
||||
_attachment, asset=asset, cache=cache)
|
||||
if not isinstance(instance, attachment.AttachBase):
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
elif not isinstance(_attachment, attachment.AttachBase):
|
||||
logger.warning(
|
||||
"An invalid attachment (type={}) was specified.".format(
|
||||
type(_attachment)))
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
logger.debug("Loading attachment: {}".format(_attachment))
|
||||
else:
|
||||
# our entry is of type AttachBase, so just go ahead and point
|
||||
# our instance to it for some post processing below
|
||||
instance = _attachment
|
||||
|
||||
# Instantiate ourselves an object, this function throws or
|
||||
# returns None if it fails
|
||||
instance = AppriseAttachment.instantiate(
|
||||
_attachment, asset=asset, cache=cache)
|
||||
if not isinstance(instance, attachment.AttachBase):
|
||||
# Apply some simple logic if our location flag is set
|
||||
if self.location and ((
|
||||
self.location == ContentLocation.HOSTED
|
||||
and instance.location != ContentLocation.HOSTED)
|
||||
or instance.location == ContentLocation.INACCESSIBLE):
|
||||
logger.warning(
|
||||
"Attachment was disallowed due to accessibility "
|
||||
"restrictions ({}->{}): {}".format(
|
||||
self.location, instance.location,
|
||||
instance.url(privacy=True)))
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
|
|
38
libs/apprise/AppriseAttachment.pyi
Normal file
38
libs/apprise/AppriseAttachment.pyi
Normal file
|
@ -0,0 +1,38 @@
|
|||
from typing import Any, Iterable, Optional, Union
|
||||
|
||||
from . import AppriseAsset, ContentLocation
|
||||
from .attachment import AttachBase
|
||||
|
||||
_Attachment = Union[str, AttachBase]
|
||||
_Attachments = Iterable[_Attachment]
|
||||
|
||||
class AppriseAttachment:
|
||||
def __init__(
|
||||
self,
|
||||
paths: Optional[_Attachments] = ...,
|
||||
asset: Optional[AppriseAttachment] = ...,
|
||||
cache: bool = ...,
|
||||
location: Optional[ContentLocation] = ...,
|
||||
**kwargs: Any
|
||||
) -> None: ...
|
||||
def add(
|
||||
self,
|
||||
attachments: _Attachments,
|
||||
asset: Optional[AppriseAttachment] = ...,
|
||||
cache: Optional[bool] = ...
|
||||
) -> bool: ...
|
||||
@staticmethod
|
||||
def instantiate(
|
||||
url: str,
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
cache: Optional[bool] = ...,
|
||||
suppress_exceptions: bool = ...
|
||||
) -> NotifyBase: ...
|
||||
def clear(self) -> None: ...
|
||||
def size(self) -> int: ...
|
||||
def pop(self, index: int = ...) -> AttachBase: ...
|
||||
def __getitem__(self, index: int) -> AttachBase: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
def __nonzero__(self) -> bool: ...
|
||||
def __iter__(self) -> Iterator[AttachBase]: ...
|
||||
def __len__(self) -> int: ...
|
49
libs/apprise/AppriseConfig.pyi
Normal file
49
libs/apprise/AppriseConfig.pyi
Normal file
|
@ -0,0 +1,49 @@
|
|||
from typing import Any, Iterable, Iterator, List, Optional, Union
|
||||
|
||||
from . import AppriseAsset, NotifyBase
|
||||
from .config import ConfigBase
|
||||
|
||||
_Configs = Union[ConfigBase, str, Iterable[str]]
|
||||
|
||||
class AppriseConfig:
|
||||
def __init__(
|
||||
self,
|
||||
paths: Optional[_Configs] = ...,
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
cache: bool = ...,
|
||||
recursion: int = ...,
|
||||
insecure_includes: bool = ...,
|
||||
**kwargs: Any
|
||||
) -> None: ...
|
||||
def add(
|
||||
self,
|
||||
configs: _Configs,
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
cache: bool = ...,
|
||||
recursion: Optional[bool] = ...,
|
||||
insecure_includes: Optional[bool] = ...
|
||||
) -> bool: ...
|
||||
def add_config(
|
||||
self,
|
||||
content: str,
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
tag: Optional[str] = ...,
|
||||
format: Optional[str] = ...,
|
||||
recursion: Optional[int] = ...,
|
||||
insecure_includes: Optional[bool] = ...
|
||||
) -> bool: ...
|
||||
def servers(self, tag: str = ..., *args: Any, **kwargs: Any) -> List[ConfigBase]: ...
|
||||
def instantiate(
|
||||
url: str,
|
||||
asset: Optional[AppriseAsset] = ...,
|
||||
tag: Optional[str] = ...,
|
||||
cache: Optional[bool] = ...
|
||||
) -> NotifyBase: ...
|
||||
def clear(self) -> None: ...
|
||||
def server_pop(self, index: int) -> ConfigBase: ...
|
||||
def pop(self, index: int = ...) -> ConfigBase: ...
|
||||
def __getitem__(self, index: int) -> ConfigBase: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
def __nonzero__(self) -> bool: ...
|
||||
def __iter__(self) -> Iterator[ConfigBase]: ...
|
||||
def __len__(self) -> int: ...
|
|
@ -25,7 +25,7 @@
|
|||
|
||||
import re
|
||||
import six
|
||||
import logging
|
||||
from .logger import logger
|
||||
from time import sleep
|
||||
from datetime import datetime
|
||||
from xml.sax.saxutils import escape as sax_escape
|
||||
|
@ -47,6 +47,7 @@ from .AppriseAsset import AppriseAsset
|
|||
from .utils import parse_url
|
||||
from .utils import parse_bool
|
||||
from .utils import parse_list
|
||||
from .utils import parse_phone_no
|
||||
|
||||
# Used to break a path list into parts
|
||||
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
@ -115,8 +116,8 @@ class URLBase(object):
|
|||
# Secure sites should be verified against a Certificate Authority
|
||||
verify_certificate = True
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger(__name__)
|
||||
# Logging to our global logger
|
||||
logger = logger
|
||||
|
||||
# Define a default set of template arguments used for dynamically building
|
||||
# details about our individual plugins for developers.
|
||||
|
@ -280,7 +281,7 @@ class URLBase(object):
|
|||
self._last_io_datetime = reference
|
||||
return
|
||||
|
||||
if self.request_rate_per_sec <= 0.0:
|
||||
if self.request_rate_per_sec <= 0.0 and not wait:
|
||||
# We're done if there is no throttle limit set
|
||||
return
|
||||
|
||||
|
@ -560,6 +561,39 @@ class URLBase(object):
|
|||
|
||||
return content
|
||||
|
||||
@staticmethod
|
||||
def parse_phone_no(content, unquote=True):
|
||||
"""A wrapper to utils.parse_phone_no() with unquoting support
|
||||
|
||||
Parses a specified set of data and breaks it into a list.
|
||||
|
||||
Args:
|
||||
content (str): The path to split up into a list. If a list is
|
||||
provided, then it's individual entries are processed.
|
||||
|
||||
unquote (:obj:`bool`, optional): call unquote on each element
|
||||
added to the returned list.
|
||||
|
||||
Returns:
|
||||
list: A unique list containing all of the elements in the path
|
||||
"""
|
||||
|
||||
if unquote:
|
||||
try:
|
||||
content = URLBase.unquote(content)
|
||||
except TypeError:
|
||||
# Nothing further to do
|
||||
return []
|
||||
|
||||
except AttributeError:
|
||||
# This exception ONLY gets thrown under Python v2.7 if an
|
||||
# object() is passed in place of the content
|
||||
return []
|
||||
|
||||
content = parse_phone_no(content)
|
||||
|
||||
return content
|
||||
|
||||
@property
|
||||
def app_id(self):
|
||||
return self.asset.app_id if self.asset.app_id else ''
|
||||
|
@ -636,6 +670,8 @@ class URLBase(object):
|
|||
results['qsd'].get('verify', True))
|
||||
|
||||
# Password overrides
|
||||
if 'password' in results['qsd']:
|
||||
results['password'] = results['qsd']['password']
|
||||
if 'pass' in results['qsd']:
|
||||
results['password'] = results['qsd']['pass']
|
||||
|
||||
|
|
16
libs/apprise/URLBase.pyi
Normal file
16
libs/apprise/URLBase.pyi
Normal file
|
@ -0,0 +1,16 @@
|
|||
from logging import logger
|
||||
from typing import Any, Iterable, Set, Optional
|
||||
|
||||
class URLBase:
|
||||
service_name: Optional[str]
|
||||
protocol: Optional[str]
|
||||
secure_protocol: Optional[str]
|
||||
request_rate_per_sec: int
|
||||
socket_connect_timeout: float
|
||||
socket_read_timeout: float
|
||||
tags: Set[str]
|
||||
verify_certificate: bool
|
||||
logger: logger
|
||||
def url(self, privacy: bool = ..., *args: Any, **kwargs: Any) -> str: ...
|
||||
def __contains__(self, tags: Iterable[str]) -> bool: ...
|
||||
def __str__(self) -> str: ...
|
|
@ -1,6 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
|
||||
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
|
@ -23,11 +23,11 @@
|
|||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
__title__ = 'apprise'
|
||||
__version__ = '0.8.8'
|
||||
__title__ = 'Apprise'
|
||||
__version__ = '0.9.6'
|
||||
__author__ = 'Chris Caron'
|
||||
__license__ = 'MIT'
|
||||
__copywrite__ = 'Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>'
|
||||
__copywrite__ = 'Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>'
|
||||
__email__ = 'lead2gold@gmail.com'
|
||||
__status__ = 'Production'
|
||||
|
||||
|
@ -41,8 +41,10 @@ from .common import OverflowMode
|
|||
from .common import OVERFLOW_MODES
|
||||
from .common import ConfigFormat
|
||||
from .common import CONFIG_FORMATS
|
||||
from .common import ConfigIncludeMode
|
||||
from .common import CONFIG_INCLUDE_MODES
|
||||
from .common import ContentIncludeMode
|
||||
from .common import CONTENT_INCLUDE_MODES
|
||||
from .common import ContentLocation
|
||||
from .common import CONTENT_LOCATIONS
|
||||
|
||||
from .URLBase import URLBase
|
||||
from .URLBase import PrivacyMode
|
||||
|
@ -55,10 +57,13 @@ from .AppriseAsset import AppriseAsset
|
|||
from .AppriseConfig import AppriseConfig
|
||||
from .AppriseAttachment import AppriseAttachment
|
||||
|
||||
# Inherit our logging with our additional entries added to it
|
||||
from .logger import logging
|
||||
from .logger import logger
|
||||
from .logger import LogCapture
|
||||
|
||||
# Set default logging handler to avoid "No handler found" warnings.
|
||||
import logging
|
||||
from logging import NullHandler
|
||||
logging.getLogger(__name__).addHandler(NullHandler())
|
||||
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
||||
|
||||
__all__ = [
|
||||
# Core
|
||||
|
@ -69,6 +74,10 @@ __all__ = [
|
|||
'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode',
|
||||
'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES',
|
||||
'ConfigFormat', 'CONFIG_FORMATS',
|
||||
'ConfigIncludeMode', 'CONFIG_INCLUDE_MODES',
|
||||
'ContentIncludeMode', 'CONTENT_INCLUDE_MODES',
|
||||
'ContentLocation', 'CONTENT_LOCATIONS',
|
||||
'PrivacyMode',
|
||||
|
||||
# Logging
|
||||
'logging', 'logger', 'LogCapture',
|
||||
]
|
||||
|
|
|
@ -1,22 +1,23 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<xs:schema elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
<xs:import namespace="http://schemas.xmlsoap.org/soap/envelope/" schemaLocation="http://schemas.xmlsoap.org/soap/envelope/"/>
|
||||
<xs:element name="Notification">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="Version" type="xs:string" />
|
||||
<xs:element name="MessageType" type="xs:string" />
|
||||
<xs:simpleType>
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="success" />
|
||||
<xs:enumeration value="failure" />
|
||||
<xs:enumeration value="info" />
|
||||
<xs:enumeration value="warning" />
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
</xs:element>
|
||||
<xs:element name="Subject" type="xs:string" />
|
||||
<xs:element name="Message" type="xs:string" />
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="Version" type="xs:string" />
|
||||
<xs:element name="Subject" type="xs:string" />
|
||||
<xs:element name="MessageType">
|
||||
<xs:simpleType>
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="success" />
|
||||
<xs:enumeration value="failure" />
|
||||
<xs:enumeration value="info" />
|
||||
<xs:enumeration value="warning" />
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
</xs:element>
|
||||
<xs:element name="Message" type="xs:string" />
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:schema>
|
||||
|
|
40
libs/apprise/assets/NotifyXML-1.1.xsd
Normal file
40
libs/apprise/assets/NotifyXML-1.1.xsd
Normal file
|
@ -0,0 +1,40 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
<xs:import namespace="http://schemas.xmlsoap.org/soap/envelope/" schemaLocation="http://schemas.xmlsoap.org/soap/envelope/"/>
|
||||
<xs:element name="Notification">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="Version" type="xs:string" />
|
||||
<xs:element name="Subject" type="xs:string" />
|
||||
<xs:element name="MessageType">
|
||||
<xs:simpleType>
|
||||
<xs:restriction base="xs:string">
|
||||
<xs:enumeration value="success" />
|
||||
<xs:enumeration value="failure" />
|
||||
<xs:enumeration value="info" />
|
||||
<xs:enumeration value="warning" />
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
</xs:element>
|
||||
<xs:element name="Message" type="xs:string" />
|
||||
<xs:element name="Attachments" minOccurs="0">
|
||||
<xs:complexType>
|
||||
<xs:sequence>
|
||||
<xs:element name="Attachment" minOccurs="0" maxOccurs="unbounded">
|
||||
<xs:complexType>
|
||||
<xs:simpleContent>
|
||||
<xs:extension base="xs:string">
|
||||
<xs:attribute name="mimetype" type="xs:string" use="required"/>
|
||||
<xs:attribute name="filename" type="xs:string" use="required"/>
|
||||
</xs:extension>
|
||||
</xs:simpleContent>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="encoding" type="xs:string" use="required"/>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
</xs:element>
|
||||
</xs:schema>
|
|
@ -28,6 +28,7 @@ import time
|
|||
import mimetypes
|
||||
from ..URLBase import URLBase
|
||||
from ..utils import parse_bool
|
||||
from ..common import ContentLocation
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
|
@ -62,6 +63,11 @@ class AttachBase(URLBase):
|
|||
# 5 MB = 5242880 bytes
|
||||
max_file_size = 5242880
|
||||
|
||||
# By default all attachments types are inaccessible.
|
||||
# Developers of items identified in the attachment plugin directory
|
||||
# are requried to set a location
|
||||
location = ContentLocation.INACCESSIBLE
|
||||
|
||||
# Here is where we define all of the arguments we accept on the url
|
||||
# such as: schema://whatever/?overflow=upstream&format=text
|
||||
# These act the same way as tokens except they are optional and/or
|
||||
|
|
37
libs/apprise/attachment/AttachBase.pyi
Normal file
37
libs/apprise/attachment/AttachBase.pyi
Normal file
|
@ -0,0 +1,37 @@
|
|||
from typing import Any, Dict, Optional
|
||||
|
||||
from .. import ContentLocation
|
||||
|
||||
class AttachBase:
|
||||
max_detect_buffer_size: int
|
||||
unknown_mimetype: str
|
||||
unknown_filename: str
|
||||
unknown_filename_extension: str
|
||||
strict: bool
|
||||
max_file_size: int
|
||||
location: ContentLocation
|
||||
template_args: Dict[str, Any]
|
||||
def __init__(
|
||||
self,
|
||||
name: Optional[str] = ...,
|
||||
mimetype: Optional[str] = ...,
|
||||
cache: Optional[bool] = ...,
|
||||
**kwargs: Any
|
||||
) -> None: ...
|
||||
@property
|
||||
def path(self) -> Optional[str]: ...
|
||||
@property
|
||||
def name(self) -> Optional[str]: ...
|
||||
@property
|
||||
def mimetype(self) -> Optional[str]: ...
|
||||
def exists(self) -> bool: ...
|
||||
def invalidate(self) -> None: ...
|
||||
def download(self) -> bool: ...
|
||||
@staticmethod
|
||||
def parse_url(
|
||||
url: str,
|
||||
verify_host: bool = ...
|
||||
) -> Dict[str, Any]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
def __nonzero__(self) -> bool: ...
|
|
@ -26,6 +26,7 @@
|
|||
import re
|
||||
import os
|
||||
from .AttachBase import AttachBase
|
||||
from ..common import ContentLocation
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
|
@ -40,6 +41,10 @@ class AttachFile(AttachBase):
|
|||
# The default protocol
|
||||
protocol = 'file'
|
||||
|
||||
# Content is local to the same location as the apprise instance
|
||||
# being called (server-side)
|
||||
location = ContentLocation.LOCAL
|
||||
|
||||
def __init__(self, path, **kwargs):
|
||||
"""
|
||||
Initialize Local File Attachment Object
|
||||
|
@ -81,6 +86,10 @@ class AttachFile(AttachBase):
|
|||
validate it.
|
||||
"""
|
||||
|
||||
if self.location == ContentLocation.INACCESSIBLE:
|
||||
# our content is inaccessible
|
||||
return False
|
||||
|
||||
# Ensure any existing content set has been invalidated
|
||||
self.invalidate()
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ import six
|
|||
import requests
|
||||
from tempfile import NamedTemporaryFile
|
||||
from .AttachBase import AttachBase
|
||||
from ..common import ContentLocation
|
||||
from ..URLBase import PrivacyMode
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
@ -50,6 +51,9 @@ class AttachHTTP(AttachBase):
|
|||
# The number of bytes in memory to read from the remote source at a time
|
||||
chunk_size = 8192
|
||||
|
||||
# Web based requests are remote/external to our current location
|
||||
location = ContentLocation.HOSTED
|
||||
|
||||
def __init__(self, headers=None, **kwargs):
|
||||
"""
|
||||
Initialize HTTP Object
|
||||
|
@ -86,6 +90,10 @@ class AttachHTTP(AttachBase):
|
|||
Perform retrieval of the configuration based on the specified request
|
||||
"""
|
||||
|
||||
if self.location == ContentLocation.INACCESSIBLE:
|
||||
# our content is inaccessible
|
||||
return False
|
||||
|
||||
# Ensure any existing content set has been invalidated
|
||||
self.invalidate()
|
||||
|
||||
|
|
|
@ -26,7 +26,11 @@
|
|||
import click
|
||||
import logging
|
||||
import platform
|
||||
import six
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
from os.path import isfile
|
||||
from os.path import expanduser
|
||||
from os.path import expandvars
|
||||
|
@ -39,6 +43,7 @@ from . import AppriseConfig
|
|||
from .utils import parse_list
|
||||
from .common import NOTIFY_TYPES
|
||||
from .common import NOTIFY_FORMATS
|
||||
from .common import ContentLocation
|
||||
from .logger import logger
|
||||
|
||||
from . import __title__
|
||||
|
@ -133,6 +138,9 @@ def print_version_msg():
|
|||
help='Perform a trial run but only prints the notification '
|
||||
'services to-be triggered to stdout. Notifications are never '
|
||||
'sent using this mode.')
|
||||
@click.option('--details', '-l', is_flag=True,
|
||||
help='Prints details about the current services supported by '
|
||||
'Apprise.')
|
||||
@click.option('--recursion-depth', '-R', default=DEFAULT_RECURSION_DEPTH,
|
||||
type=int,
|
||||
help='The number of recursive import entries that can be '
|
||||
|
@ -141,6 +149,8 @@ def print_version_msg():
|
|||
@click.option('--verbose', '-v', count=True,
|
||||
help='Makes the operation more talkative. Use multiple v to '
|
||||
'increase the verbosity. I.e.: -vvvv')
|
||||
@click.option('--interpret-escapes', '-e', is_flag=True,
|
||||
help='Enable interpretation of backslash escapes')
|
||||
@click.option('--debug', '-D', is_flag=True, help='Debug mode')
|
||||
@click.option('--version', '-V', is_flag=True,
|
||||
help='Display the apprise version and exit.')
|
||||
|
@ -148,7 +158,7 @@ def print_version_msg():
|
|||
metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',)
|
||||
def main(body, title, config, attach, urls, notification_type, theme, tag,
|
||||
input_format, dry_run, recursion_depth, verbose, disable_async,
|
||||
debug, version):
|
||||
details, interpret_escapes, debug, version):
|
||||
"""
|
||||
Send a notification to all of the specified servers identified by their
|
||||
URLs the content provided within the title, body and notification-type.
|
||||
|
@ -224,8 +234,15 @@ def main(body, title, config, attach, urls, notification_type, theme, tag,
|
|||
|
||||
# Prepare our asset
|
||||
asset = AppriseAsset(
|
||||
# Our body format
|
||||
body_format=input_format,
|
||||
|
||||
# Interpret Escapes
|
||||
interpret_escapes=interpret_escapes,
|
||||
|
||||
# Set the theme
|
||||
theme=theme,
|
||||
|
||||
# Async mode is only used for Python v3+ and allows a user to send
|
||||
# all of their notifications asyncronously. This was made an option
|
||||
# incase there are problems in the future where it's better that
|
||||
|
@ -234,18 +251,132 @@ def main(body, title, config, attach, urls, notification_type, theme, tag,
|
|||
)
|
||||
|
||||
# Create our Apprise object
|
||||
a = Apprise(asset=asset, debug=debug)
|
||||
a = Apprise(asset=asset, debug=debug, location=ContentLocation.LOCAL)
|
||||
|
||||
# Load our configuration if no URLs or specified configuration was
|
||||
# identified on the command line
|
||||
a.add(AppriseConfig(
|
||||
paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))]
|
||||
if not (config or urls) else config,
|
||||
asset=asset, recursion=recursion_depth))
|
||||
if details:
|
||||
# Print details and exit
|
||||
results = a.details(show_requirements=True, show_disabled=True)
|
||||
|
||||
# Load our inventory up
|
||||
for url in urls:
|
||||
a.add(url)
|
||||
# Sort our results:
|
||||
plugins = sorted(
|
||||
results['schemas'], key=lambda i: str(i['service_name']))
|
||||
for entry in plugins:
|
||||
protocols = [] if not entry['protocols'] else \
|
||||
[p for p in entry['protocols']
|
||||
if isinstance(p, six.string_types)]
|
||||
protocols.extend(
|
||||
[] if not entry['secure_protocols'] else
|
||||
[p for p in entry['secure_protocols']
|
||||
if isinstance(p, six.string_types)])
|
||||
|
||||
if len(protocols) == 1:
|
||||
# Simplify view by swapping {schema} with the single
|
||||
# protocol value
|
||||
|
||||
# Convert tuple to list
|
||||
entry['details']['templates'] = \
|
||||
list(entry['details']['templates'])
|
||||
|
||||
for x in range(len(entry['details']['templates'])):
|
||||
entry['details']['templates'][x] = \
|
||||
re.sub(
|
||||
r'^[^}]+}://',
|
||||
'{}://'.format(protocols[0]),
|
||||
entry['details']['templates'][x])
|
||||
|
||||
click.echo(click.style(
|
||||
'{} {:<30} '.format(
|
||||
'+' if entry['enabled'] else '-',
|
||||
str(entry['service_name'])),
|
||||
fg="green" if entry['enabled'] else "red", bold=True),
|
||||
nl=(not entry['enabled'] or len(protocols) == 1))
|
||||
|
||||
if not entry['enabled']:
|
||||
if entry['requirements']['details']:
|
||||
click.echo(
|
||||
' ' + str(entry['requirements']['details']))
|
||||
|
||||
if entry['requirements']['packages_required']:
|
||||
click.echo(' Python Packages Required:')
|
||||
for req in entry['requirements']['packages_required']:
|
||||
click.echo(' - ' + req)
|
||||
|
||||
if entry['requirements']['packages_recommended']:
|
||||
click.echo(' Python Packages Recommended:')
|
||||
for req in entry['requirements']['packages_recommended']:
|
||||
click.echo(' - ' + req)
|
||||
|
||||
# new line padding between entries
|
||||
click.echo()
|
||||
continue
|
||||
|
||||
if len(protocols) > 1:
|
||||
click.echo('| Schema(s): {}'.format(
|
||||
', '.join(protocols),
|
||||
))
|
||||
|
||||
prefix = ' - '
|
||||
click.echo('{}{}'.format(
|
||||
prefix,
|
||||
'\n{}'.format(prefix).join(entry['details']['templates'])))
|
||||
|
||||
# new line padding between entries
|
||||
click.echo()
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
# The priorities of what is accepted are parsed in order below:
|
||||
# 1. URLs by command line
|
||||
# 2. Configuration by command line
|
||||
# 3. URLs by environment variable: APPRISE_URLS
|
||||
# 4. Configuration by environment variable: APPRISE_CONFIG
|
||||
# 5. Default Configuration File(s) (if found)
|
||||
#
|
||||
if urls:
|
||||
if tag:
|
||||
# Ignore any tags specified
|
||||
logger.warning(
|
||||
'--tag (-g) entries are ignored when using specified URLs')
|
||||
tag = None
|
||||
|
||||
# Load our URLs (if any defined)
|
||||
for url in urls:
|
||||
a.add(url)
|
||||
|
||||
if config:
|
||||
# Provide a warning to the end user if they specified both
|
||||
logger.warning(
|
||||
'You defined both URLs and a --config (-c) entry; '
|
||||
'Only the URLs will be referenced.')
|
||||
|
||||
elif config:
|
||||
# We load our configuration file(s) now only if no URLs were specified
|
||||
# Specified config entries trump all
|
||||
a.add(AppriseConfig(
|
||||
paths=config, asset=asset, recursion=recursion_depth))
|
||||
|
||||
elif os.environ.get('APPRISE_URLS', '').strip():
|
||||
logger.debug('Loading provided APPRISE_URLS environment variable')
|
||||
if tag:
|
||||
# Ignore any tags specified
|
||||
logger.warning(
|
||||
'--tag (-g) entries are ignored when using specified URLs')
|
||||
tag = None
|
||||
|
||||
# Attempt to use our APPRISE_URLS environment variable (if populated)
|
||||
a.add(os.environ['APPRISE_URLS'].strip())
|
||||
|
||||
elif os.environ.get('APPRISE_CONFIG', '').strip():
|
||||
logger.debug('Loading provided APPRISE_CONFIG environment variable')
|
||||
# Fall back to config environment variable (if populated)
|
||||
a.add(AppriseConfig(
|
||||
paths=os.environ['APPRISE_CONFIG'].strip(),
|
||||
asset=asset, recursion=recursion_depth))
|
||||
else:
|
||||
# Load default configuration
|
||||
a.add(AppriseConfig(
|
||||
paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))],
|
||||
asset=asset, recursion=recursion_depth))
|
||||
|
||||
if len(a) == 0:
|
||||
logger.error(
|
||||
|
|
|
@ -130,28 +130,58 @@ CONFIG_FORMATS = (
|
|||
)
|
||||
|
||||
|
||||
class ConfigIncludeMode(object):
|
||||
class ContentIncludeMode(object):
|
||||
"""
|
||||
The different Cofiguration inclusion modes. All Configuration
|
||||
plugins will have one of these associated with it.
|
||||
The different Content inclusion modes. All content based plugins will
|
||||
have one of these associated with it.
|
||||
"""
|
||||
# - Configuration inclusion of same type only; hence a file:// can include
|
||||
# - Content inclusion of same type only; hence a file:// can include
|
||||
# a file://
|
||||
# - Cross file inclusion is not allowed unless insecure_includes (a flag)
|
||||
# is set to True. In these cases STRICT acts as type ALWAYS
|
||||
STRICT = 'strict'
|
||||
|
||||
# This configuration type can never be included
|
||||
# This content type can never be included
|
||||
NEVER = 'never'
|
||||
|
||||
# File configuration can always be included
|
||||
# This content can always be included
|
||||
ALWAYS = 'always'
|
||||
|
||||
|
||||
CONFIG_INCLUDE_MODES = (
|
||||
ConfigIncludeMode.STRICT,
|
||||
ConfigIncludeMode.NEVER,
|
||||
ConfigIncludeMode.ALWAYS,
|
||||
CONTENT_INCLUDE_MODES = (
|
||||
ContentIncludeMode.STRICT,
|
||||
ContentIncludeMode.NEVER,
|
||||
ContentIncludeMode.ALWAYS,
|
||||
)
|
||||
|
||||
|
||||
class ContentLocation(object):
|
||||
"""
|
||||
This is primarily used for handling file attachments. The idea is
|
||||
to track the source of the attachment itself. We don't want
|
||||
remote calls to a server to access local attachments for example.
|
||||
|
||||
By knowing the attachment type and cross-associating it with how
|
||||
we plan on accessing the content, we can make a judgement call
|
||||
(for security reasons) if we will allow it.
|
||||
|
||||
Obviously local uses of apprise can access both local and remote
|
||||
type files.
|
||||
"""
|
||||
# Content is located locally (on the same server as apprise)
|
||||
LOCAL = 'local'
|
||||
|
||||
# Content is located in a remote location
|
||||
HOSTED = 'hosted'
|
||||
|
||||
# Content is inaccessible
|
||||
INACCESSIBLE = 'n/a'
|
||||
|
||||
|
||||
CONTENT_LOCATIONS = (
|
||||
ContentLocation.LOCAL,
|
||||
ContentLocation.HOSTED,
|
||||
ContentLocation.INACCESSIBLE,
|
||||
)
|
||||
|
||||
# This is a reserved tag that is automatically assigned to every
|
||||
|
|
15
libs/apprise/common.pyi
Normal file
15
libs/apprise/common.pyi
Normal file
|
@ -0,0 +1,15 @@
|
|||
class NotifyType:
|
||||
INFO: NotifyType
|
||||
SUCCESS: NotifyType
|
||||
WARNING: NotifyType
|
||||
FAILURE: NotifyType
|
||||
|
||||
class NotifyFormat:
|
||||
TEXT: NotifyFormat
|
||||
HTML: NotifyFormat
|
||||
MARKDOWN: NotifyFormat
|
||||
|
||||
class ContentLocation:
|
||||
LOCAL: ContentLocation
|
||||
HOSTED: ContentLocation
|
||||
INACCESSIBLE: ContentLocation
|
|
@ -34,13 +34,18 @@ from ..AppriseAsset import AppriseAsset
|
|||
from ..URLBase import URLBase
|
||||
from ..common import ConfigFormat
|
||||
from ..common import CONFIG_FORMATS
|
||||
from ..common import ConfigIncludeMode
|
||||
from ..common import ContentIncludeMode
|
||||
from ..utils import GET_SCHEMA_RE
|
||||
from ..utils import parse_list
|
||||
from ..utils import parse_bool
|
||||
from ..utils import parse_urls
|
||||
from ..utils import cwe312_url
|
||||
from . import SCHEMA_MAP
|
||||
|
||||
# Test whether token is valid or not
|
||||
VALID_TOKEN = re.compile(
|
||||
r'(?P<token>[a-z0-9][a-z0-9_]+)', re.I)
|
||||
|
||||
|
||||
class ConfigBase(URLBase):
|
||||
"""
|
||||
|
@ -65,7 +70,7 @@ class ConfigBase(URLBase):
|
|||
|
||||
# By default all configuration is not includable using the 'include'
|
||||
# line found in configuration files.
|
||||
allow_cross_includes = ConfigIncludeMode.NEVER
|
||||
allow_cross_includes = ContentIncludeMode.NEVER
|
||||
|
||||
# the config path manages the handling of relative include
|
||||
config_path = os.getcwd()
|
||||
|
@ -205,8 +210,8 @@ class ConfigBase(URLBase):
|
|||
# Configuration files were detected; recursively populate them
|
||||
# If we have been configured to do so
|
||||
for url in configs:
|
||||
if self.recursion > 0:
|
||||
|
||||
if self.recursion > 0:
|
||||
# Attempt to acquire the schema at the very least to allow
|
||||
# our configuration based urls.
|
||||
schema = GET_SCHEMA_RE.match(url)
|
||||
|
@ -219,6 +224,7 @@ class ConfigBase(URLBase):
|
|||
url = os.path.join(self.config_path, url)
|
||||
|
||||
url = '{}://{}'.format(schema, URLBase.quote(url))
|
||||
|
||||
else:
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
@ -229,27 +235,31 @@ class ConfigBase(URLBase):
|
|||
'Unsupported include schema {}.'.format(schema))
|
||||
continue
|
||||
|
||||
# CWE-312 (Secure Logging) Handling
|
||||
loggable_url = url if not asset.secure_logging \
|
||||
else cwe312_url(url)
|
||||
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
results = SCHEMA_MAP[schema].parse_url(url)
|
||||
if not results:
|
||||
# Failed to parse the server URL
|
||||
self.logger.warning(
|
||||
'Unparseable include URL {}'.format(url))
|
||||
'Unparseable include URL {}'.format(loggable_url))
|
||||
continue
|
||||
|
||||
# Handle cross inclusion based on allow_cross_includes rules
|
||||
if (SCHEMA_MAP[schema].allow_cross_includes ==
|
||||
ConfigIncludeMode.STRICT
|
||||
ContentIncludeMode.STRICT
|
||||
and schema not in self.schemas()
|
||||
and not self.insecure_includes) or \
|
||||
SCHEMA_MAP[schema].allow_cross_includes == \
|
||||
ConfigIncludeMode.NEVER:
|
||||
ContentIncludeMode.NEVER:
|
||||
|
||||
# Prevent the loading if insecure base protocols
|
||||
ConfigBase.logger.warning(
|
||||
'Including {}:// based configuration is prohibited. '
|
||||
'Ignoring URL {}'.format(schema, url))
|
||||
'Ignoring URL {}'.format(schema, loggable_url))
|
||||
continue
|
||||
|
||||
# Prepare our Asset Object
|
||||
|
@ -275,7 +285,7 @@ class ConfigBase(URLBase):
|
|||
except Exception as e:
|
||||
# the arguments are invalid or can not be used.
|
||||
self.logger.warning(
|
||||
'Could not load include URL: {}'.format(url))
|
||||
'Could not load include URL: {}'.format(loggable_url))
|
||||
self.logger.debug('Loading Exception: {}'.format(str(e)))
|
||||
continue
|
||||
|
||||
|
@ -288,16 +298,23 @@ class ConfigBase(URLBase):
|
|||
del cfg_plugin
|
||||
|
||||
else:
|
||||
# CWE-312 (Secure Logging) Handling
|
||||
loggable_url = url if not asset.secure_logging \
|
||||
else cwe312_url(url)
|
||||
|
||||
self.logger.debug(
|
||||
'Recursion limit reached; ignoring Include URL: %s' % url)
|
||||
'Recursion limit reached; ignoring Include URL: %s',
|
||||
loggable_url)
|
||||
|
||||
if self._cached_servers:
|
||||
self.logger.info('Loaded {} entries from {}'.format(
|
||||
len(self._cached_servers), self.url()))
|
||||
self.logger.info(
|
||||
'Loaded {} entries from {}'.format(
|
||||
len(self._cached_servers),
|
||||
self.url(privacy=asset.secure_logging)))
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to load Apprise configuration from {}'.format(
|
||||
self.url()))
|
||||
self.url(privacy=asset.secure_logging)))
|
||||
|
||||
# Set the time our content was cached at
|
||||
self._cached_time = time.time()
|
||||
|
@ -527,6 +544,9 @@ class ConfigBase(URLBase):
|
|||
# the include keyword
|
||||
configs = list()
|
||||
|
||||
# Prepare our Asset Object
|
||||
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
# Define what a valid line should look like
|
||||
valid_line_re = re.compile(
|
||||
r'^\s*(?P<line>([;#]+(?P<comment>.*))|'
|
||||
|
@ -563,27 +583,37 @@ class ConfigBase(URLBase):
|
|||
continue
|
||||
|
||||
if config:
|
||||
ConfigBase.logger.debug('Include URL: {}'.format(config))
|
||||
# CWE-312 (Secure Logging) Handling
|
||||
loggable_url = config if not asset.secure_logging \
|
||||
else cwe312_url(config)
|
||||
|
||||
ConfigBase.logger.debug(
|
||||
'Include URL: {}'.format(loggable_url))
|
||||
|
||||
# Store our include line
|
||||
configs.append(config.strip())
|
||||
continue
|
||||
|
||||
# CWE-312 (Secure Logging) Handling
|
||||
loggable_url = url if not asset.secure_logging \
|
||||
else cwe312_url(url)
|
||||
|
||||
# Acquire our url tokens
|
||||
results = plugins.url_to_dict(url)
|
||||
results = plugins.url_to_dict(
|
||||
url, secure_logging=asset.secure_logging)
|
||||
if results is None:
|
||||
# Failed to parse the server URL
|
||||
ConfigBase.logger.warning(
|
||||
'Unparseable URL {} on line {}.'.format(url, line))
|
||||
'Unparseable URL {} on line {}.'.format(
|
||||
loggable_url, line))
|
||||
continue
|
||||
|
||||
# Build a list of tags to associate with the newly added
|
||||
# notifications if any were set
|
||||
results['tag'] = set(parse_list(result.group('tags')))
|
||||
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
# Set our Asset Object
|
||||
results['asset'] = asset
|
||||
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the
|
||||
|
@ -591,13 +621,14 @@ class ConfigBase(URLBase):
|
|||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
# Create log entry of loaded URL
|
||||
ConfigBase.logger.debug('Loaded URL: {}'.format(plugin.url()))
|
||||
ConfigBase.logger.debug(
|
||||
'Loaded URL: %s', plugin.url(privacy=asset.secure_logging))
|
||||
|
||||
except Exception as e:
|
||||
# the arguments are invalid or can not be used.
|
||||
ConfigBase.logger.warning(
|
||||
'Could not load URL {} on line {}.'.format(
|
||||
url, line))
|
||||
loggable_url, line))
|
||||
ConfigBase.logger.debug('Loading Exception: %s' % str(e))
|
||||
continue
|
||||
|
||||
|
@ -633,7 +664,9 @@ class ConfigBase(URLBase):
|
|||
# Load our data (safely)
|
||||
result = yaml.load(content, Loader=yaml.SafeLoader)
|
||||
|
||||
except (AttributeError, yaml.error.MarkedYAMLError) as e:
|
||||
except (AttributeError,
|
||||
yaml.parser.ParserError,
|
||||
yaml.error.MarkedYAMLError) as e:
|
||||
# Invalid content
|
||||
ConfigBase.logger.error(
|
||||
'Invalid Apprise YAML data specified.')
|
||||
|
@ -671,7 +704,9 @@ class ConfigBase(URLBase):
|
|||
continue
|
||||
|
||||
if not (hasattr(asset, k) and
|
||||
isinstance(getattr(asset, k), six.string_types)):
|
||||
isinstance(getattr(asset, k),
|
||||
(bool, six.string_types))):
|
||||
|
||||
# We can't set a function or non-string set value
|
||||
ConfigBase.logger.warning(
|
||||
'Invalid asset key "{}".'.format(k))
|
||||
|
@ -681,15 +716,23 @@ class ConfigBase(URLBase):
|
|||
# Convert to an empty string
|
||||
v = ''
|
||||
|
||||
if not isinstance(v, six.string_types):
|
||||
if (isinstance(v, (bool, six.string_types))
|
||||
and isinstance(getattr(asset, k), bool)):
|
||||
|
||||
# If the object in the Asset is a boolean, then
|
||||
# we want to convert the specified string to
|
||||
# match that.
|
||||
setattr(asset, k, parse_bool(v))
|
||||
|
||||
elif isinstance(v, six.string_types):
|
||||
# Set our asset object with the new value
|
||||
setattr(asset, k, v.strip())
|
||||
|
||||
else:
|
||||
# we must set strings with a string
|
||||
ConfigBase.logger.warning(
|
||||
'Invalid asset value to "{}".'.format(k))
|
||||
continue
|
||||
|
||||
# Set our asset object with the new value
|
||||
setattr(asset, k, v.strip())
|
||||
|
||||
#
|
||||
# global tag root directive
|
||||
#
|
||||
|
@ -740,6 +783,10 @@ class ConfigBase(URLBase):
|
|||
# we can. Reset it to None on each iteration
|
||||
results = list()
|
||||
|
||||
# CWE-312 (Secure Logging) Handling
|
||||
loggable_url = url if not asset.secure_logging \
|
||||
else cwe312_url(url)
|
||||
|
||||
if isinstance(url, six.string_types):
|
||||
# We're just a simple URL string...
|
||||
schema = GET_SCHEMA_RE.match(url)
|
||||
|
@ -748,16 +795,18 @@ class ConfigBase(URLBase):
|
|||
# config file at least has something to take action
|
||||
# with.
|
||||
ConfigBase.logger.warning(
|
||||
'Invalid URL {}, entry #{}'.format(url, no + 1))
|
||||
'Invalid URL {}, entry #{}'.format(
|
||||
loggable_url, no + 1))
|
||||
continue
|
||||
|
||||
# We found a valid schema worthy of tracking; store it's
|
||||
# details:
|
||||
_results = plugins.url_to_dict(url)
|
||||
_results = plugins.url_to_dict(
|
||||
url, secure_logging=asset.secure_logging)
|
||||
if _results is None:
|
||||
ConfigBase.logger.warning(
|
||||
'Unparseable URL {}, entry #{}'.format(
|
||||
url, no + 1))
|
||||
loggable_url, no + 1))
|
||||
continue
|
||||
|
||||
# add our results to our global set
|
||||
|
@ -791,19 +840,20 @@ class ConfigBase(URLBase):
|
|||
.format(key, no + 1))
|
||||
continue
|
||||
|
||||
# Store our URL and Schema Regex
|
||||
_url = key
|
||||
|
||||
# Store our schema
|
||||
schema = _schema.group('schema').lower()
|
||||
|
||||
# Store our URL and Schema Regex
|
||||
_url = key
|
||||
|
||||
if _url is None:
|
||||
# the loop above failed to match anything
|
||||
ConfigBase.logger.warning(
|
||||
'Unsupported schema in urls, entry #{}'.format(no + 1))
|
||||
'Unsupported URL, entry #{}'.format(no + 1))
|
||||
continue
|
||||
|
||||
_results = plugins.url_to_dict(_url)
|
||||
_results = plugins.url_to_dict(
|
||||
_url, secure_logging=asset.secure_logging)
|
||||
if _results is None:
|
||||
# Setup dictionary
|
||||
_results = {
|
||||
|
@ -830,12 +880,33 @@ class ConfigBase(URLBase):
|
|||
if 'schema' in entries:
|
||||
del entries['schema']
|
||||
|
||||
# support our special tokens (if they're present)
|
||||
if schema in plugins.SCHEMA_MAP:
|
||||
entries = ConfigBase._special_token_handler(
|
||||
schema, entries)
|
||||
|
||||
# Extend our dictionary with our new entries
|
||||
r.update(entries)
|
||||
|
||||
# add our results to our global set
|
||||
results.append(r)
|
||||
|
||||
elif isinstance(tokens, dict):
|
||||
# support our special tokens (if they're present)
|
||||
if schema in plugins.SCHEMA_MAP:
|
||||
tokens = ConfigBase._special_token_handler(
|
||||
schema, tokens)
|
||||
|
||||
# Copy ourselves a template of our parsed URL as a base to
|
||||
# work with
|
||||
r = _results.copy()
|
||||
|
||||
# add our result set
|
||||
r.update(tokens)
|
||||
|
||||
# add our results to our global set
|
||||
results.append(r)
|
||||
|
||||
else:
|
||||
# add our results to our global set
|
||||
results.append(_results)
|
||||
|
@ -867,6 +938,17 @@ class ConfigBase(URLBase):
|
|||
# Just use the global settings
|
||||
_results['tag'] = global_tags
|
||||
|
||||
for key in list(_results.keys()):
|
||||
# Strip out any tokens we know that we can't accept and
|
||||
# warn the user
|
||||
match = VALID_TOKEN.match(key)
|
||||
if not match:
|
||||
ConfigBase.logger.warning(
|
||||
'Ignoring invalid token ({}) found in YAML '
|
||||
'configuration entry #{}, item #{}'
|
||||
.format(key, no + 1, entry))
|
||||
del _results[key]
|
||||
|
||||
ConfigBase.logger.trace(
|
||||
'URL #{}: {} unpacked as:{}{}'
|
||||
.format(no + 1, url, os.linesep, os.linesep.join(
|
||||
|
@ -883,7 +965,8 @@ class ConfigBase(URLBase):
|
|||
|
||||
# Create log entry of loaded URL
|
||||
ConfigBase.logger.debug(
|
||||
'Loaded URL: {}'.format(plugin.url()))
|
||||
'Loaded URL: {}'.format(
|
||||
plugin.url(privacy=asset.secure_logging)))
|
||||
|
||||
except Exception as e:
|
||||
# the arguments are invalid or can not be used.
|
||||
|
@ -913,6 +996,135 @@ class ConfigBase(URLBase):
|
|||
# Pop the element off of the stack
|
||||
return self._cached_servers.pop(index)
|
||||
|
||||
@staticmethod
|
||||
def _special_token_handler(schema, tokens):
|
||||
"""
|
||||
This function takes a list of tokens and updates them to no longer
|
||||
include any special tokens such as +,-, and :
|
||||
|
||||
- schema must be a valid schema of a supported plugin type
|
||||
- tokens must be a dictionary containing the yaml entries parsed.
|
||||
|
||||
The idea here is we can post process a set of tokens provided in
|
||||
a YAML file where the user provided some of the special keywords.
|
||||
|
||||
We effectivley look up what these keywords map to their appropriate
|
||||
value they're expected
|
||||
"""
|
||||
# Create a copy of our dictionary
|
||||
tokens = tokens.copy()
|
||||
|
||||
for kw, meta in plugins.SCHEMA_MAP[schema]\
|
||||
.template_kwargs.items():
|
||||
|
||||
# Determine our prefix:
|
||||
prefix = meta.get('prefix', '+')
|
||||
|
||||
# Detect any matches
|
||||
matches = \
|
||||
{k[1:]: str(v) for k, v in tokens.items()
|
||||
if k.startswith(prefix)}
|
||||
|
||||
if not matches:
|
||||
# we're done with this entry
|
||||
continue
|
||||
|
||||
if not isinstance(tokens.get(kw), dict):
|
||||
# Invalid; correct it
|
||||
tokens[kw] = dict()
|
||||
|
||||
# strip out processed tokens
|
||||
tokens = {k: v for k, v in tokens.items()
|
||||
if not k.startswith(prefix)}
|
||||
|
||||
# Update our entries
|
||||
tokens[kw].update(matches)
|
||||
|
||||
# Now map our tokens accordingly to the class templates defined by
|
||||
# each service.
|
||||
#
|
||||
# This is specifically used for YAML file parsing. It allows a user to
|
||||
# define an entry such as:
|
||||
#
|
||||
# urls:
|
||||
# - mailto://user:pass@domain:
|
||||
# - to: user1@hotmail.com
|
||||
# - to: user2@hotmail.com
|
||||
#
|
||||
# Under the hood, the NotifyEmail() class does not parse the `to`
|
||||
# argument. It's contents needs to be mapped to `targets`. This is
|
||||
# defined in the class via the `template_args` and template_tokens`
|
||||
# section.
|
||||
#
|
||||
# This function here allows these mappings to take place within the
|
||||
# YAML file as independant arguments.
|
||||
class_templates = \
|
||||
plugins.details(plugins.SCHEMA_MAP[schema])
|
||||
|
||||
for key in list(tokens.keys()):
|
||||
|
||||
if key not in class_templates['args']:
|
||||
# No need to handle non-arg entries
|
||||
continue
|
||||
|
||||
# get our `map_to` and/or 'alias_of' value (if it exists)
|
||||
map_to = class_templates['args'][key].get(
|
||||
'alias_of', class_templates['args'][key].get('map_to', ''))
|
||||
|
||||
if map_to == key:
|
||||
# We're already good as we are now
|
||||
continue
|
||||
|
||||
if map_to in class_templates['tokens']:
|
||||
meta = class_templates['tokens'][map_to]
|
||||
|
||||
else:
|
||||
meta = class_templates['args'].get(
|
||||
map_to, class_templates['args'][key])
|
||||
|
||||
# Perform a translation/mapping if our code reaches here
|
||||
value = tokens[key]
|
||||
del tokens[key]
|
||||
|
||||
# Detect if we're dealign with a list or not
|
||||
is_list = re.search(
|
||||
r'^(list|choice):.*',
|
||||
meta.get('type'),
|
||||
re.IGNORECASE)
|
||||
|
||||
if map_to not in tokens:
|
||||
tokens[map_to] = [] if is_list \
|
||||
else meta.get('default')
|
||||
|
||||
elif is_list and not isinstance(tokens.get(map_to), list):
|
||||
# Convert ourselves to a list if we aren't already
|
||||
tokens[map_to] = [tokens[map_to]]
|
||||
|
||||
# Type Conversion
|
||||
if re.search(
|
||||
r'^(choice:)?string',
|
||||
meta.get('type'),
|
||||
re.IGNORECASE) \
|
||||
and not isinstance(value, six.string_types):
|
||||
|
||||
# Ensure our format is as expected
|
||||
value = str(value)
|
||||
|
||||
# Apply any further translations if required (absolute map)
|
||||
# This is the case when an arg maps to a token which further
|
||||
# maps to a different function arg on the class constructor
|
||||
abs_map = meta.get('map_to', map_to)
|
||||
|
||||
# Set our token as how it was provided by the configuration
|
||||
if isinstance(tokens.get(map_to), list):
|
||||
tokens[abs_map].append(value)
|
||||
|
||||
else:
|
||||
tokens[abs_map] = value
|
||||
|
||||
# Return our tokens
|
||||
return tokens
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the indexed server entry associated with the loaded
|
||||
|
|
3
libs/apprise/config/ConfigBase.pyi
Normal file
3
libs/apprise/config/ConfigBase.pyi
Normal file
|
@ -0,0 +1,3 @@
|
|||
from .. import URLBase
|
||||
|
||||
class ConfigBase(URLBase): ...
|
|
@ -28,7 +28,7 @@ import io
|
|||
import os
|
||||
from .ConfigBase import ConfigBase
|
||||
from ..common import ConfigFormat
|
||||
from ..common import ConfigIncludeMode
|
||||
from ..common import ContentIncludeMode
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
|
@ -44,7 +44,7 @@ class ConfigFile(ConfigBase):
|
|||
protocol = 'file'
|
||||
|
||||
# Configuration file inclusion can only be of the same type
|
||||
allow_cross_includes = ConfigIncludeMode.STRICT
|
||||
allow_cross_includes = ContentIncludeMode.STRICT
|
||||
|
||||
def __init__(self, path, **kwargs):
|
||||
"""
|
||||
|
|
|
@ -28,7 +28,7 @@ import six
|
|||
import requests
|
||||
from .ConfigBase import ConfigBase
|
||||
from ..common import ConfigFormat
|
||||
from ..common import ConfigIncludeMode
|
||||
from ..common import ContentIncludeMode
|
||||
from ..URLBase import PrivacyMode
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
@ -66,7 +66,7 @@ class ConfigHTTP(ConfigBase):
|
|||
max_error_buffer_size = 2048
|
||||
|
||||
# Configuration file inclusion can always include this type
|
||||
allow_cross_includes = ConfigIncludeMode.ALWAYS
|
||||
allow_cross_includes = ContentIncludeMode.ALWAYS
|
||||
|
||||
def __init__(self, headers=None, **kwargs):
|
||||
"""
|
||||
|
|
|
@ -1,21 +1,27 @@
|
|||
# Translations template for apprise.
|
||||
# Copyright (C) 2020 Chris Caron
|
||||
# Copyright (C) 2021 Chris Caron
|
||||
# This file is distributed under the same license as the apprise project.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, 2021.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: apprise 0.8.8\n"
|
||||
"Project-Id-Version: apprise 0.9.6\n"
|
||||
"Report-Msgid-Bugs-To: lead2gold@gmail.com\n"
|
||||
"POT-Creation-Date: 2020-09-02 07:46-0400\n"
|
||||
"POT-Creation-Date: 2021-12-01 18:56-0500\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=utf-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Generated-By: Babel 2.7.0\n"
|
||||
"Generated-By: Babel 2.9.1\n"
|
||||
|
||||
msgid "A local Gnome environment is required."
|
||||
msgstr ""
|
||||
|
||||
msgid "A local Microsoft Windows environment is required."
|
||||
msgstr ""
|
||||
|
||||
msgid "API Key"
|
||||
msgstr ""
|
||||
|
@ -44,6 +50,27 @@ msgstr ""
|
|||
msgid "Add Tokens"
|
||||
msgstr ""
|
||||
|
||||
msgid "Alert Type"
|
||||
msgstr ""
|
||||
|
||||
msgid "Alias"
|
||||
msgstr ""
|
||||
|
||||
msgid "Amount"
|
||||
msgstr ""
|
||||
|
||||
msgid "App Access Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "App ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "App Version"
|
||||
msgstr ""
|
||||
|
||||
msgid "Application ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Application Key"
|
||||
msgstr ""
|
||||
|
||||
|
@ -83,6 +110,9 @@ msgstr ""
|
|||
msgid "Cache Results"
|
||||
msgstr ""
|
||||
|
||||
msgid "Call"
|
||||
msgstr ""
|
||||
|
||||
msgid "Carbon Copy"
|
||||
msgstr ""
|
||||
|
||||
|
@ -104,15 +134,27 @@ msgstr ""
|
|||
msgid "Country"
|
||||
msgstr ""
|
||||
|
||||
msgid "Currency"
|
||||
msgstr ""
|
||||
|
||||
msgid "Custom Icon"
|
||||
msgstr ""
|
||||
|
||||
msgid "Cycles"
|
||||
msgstr ""
|
||||
|
||||
msgid "DBus Notification"
|
||||
msgstr ""
|
||||
|
||||
msgid "Details"
|
||||
msgstr ""
|
||||
|
||||
msgid "Detect Bot Owner"
|
||||
msgstr ""
|
||||
|
||||
msgid "Device"
|
||||
msgstr ""
|
||||
|
||||
msgid "Device API Key"
|
||||
msgstr ""
|
||||
|
||||
|
@ -134,12 +176,18 @@ msgstr ""
|
|||
msgid "Email"
|
||||
msgstr ""
|
||||
|
||||
msgid "Email Header"
|
||||
msgstr ""
|
||||
|
||||
msgid "Encrypted Password"
|
||||
msgstr ""
|
||||
|
||||
msgid "Encrypted Salt"
|
||||
msgstr ""
|
||||
|
||||
msgid "Entity"
|
||||
msgstr ""
|
||||
|
||||
msgid "Event"
|
||||
msgstr ""
|
||||
|
||||
|
@ -152,6 +200,12 @@ msgstr ""
|
|||
msgid "Facility"
|
||||
msgstr ""
|
||||
|
||||
msgid "Flair ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Flair Text"
|
||||
msgstr ""
|
||||
|
||||
msgid "Footer Logo"
|
||||
msgstr ""
|
||||
|
||||
|
@ -170,6 +224,9 @@ msgstr ""
|
|||
msgid "From Phone No"
|
||||
msgstr ""
|
||||
|
||||
msgid "Gnome Notification"
|
||||
msgstr ""
|
||||
|
||||
msgid "Group"
|
||||
msgstr ""
|
||||
|
||||
|
@ -185,12 +242,33 @@ msgstr ""
|
|||
msgid "Icon Type"
|
||||
msgstr ""
|
||||
|
||||
msgid "Identifier"
|
||||
msgstr ""
|
||||
|
||||
msgid "Image Link"
|
||||
msgstr ""
|
||||
|
||||
msgid "Include Footer"
|
||||
msgstr ""
|
||||
|
||||
msgid "Include Image"
|
||||
msgstr ""
|
||||
|
||||
msgid "Include Segment"
|
||||
msgstr ""
|
||||
|
||||
msgid "Is Ad?"
|
||||
msgstr ""
|
||||
|
||||
msgid "Is Spoiler"
|
||||
msgstr ""
|
||||
|
||||
msgid "Kind"
|
||||
msgstr ""
|
||||
|
||||
msgid "Language"
|
||||
msgstr ""
|
||||
|
||||
msgid "Local File"
|
||||
msgstr ""
|
||||
|
||||
|
@ -200,6 +278,15 @@ msgstr ""
|
|||
msgid "Log to STDERR"
|
||||
msgstr ""
|
||||
|
||||
msgid "Long-Lived Access Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "MacOSX Notification"
|
||||
msgstr ""
|
||||
|
||||
msgid "Master Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Memory"
|
||||
msgstr ""
|
||||
|
||||
|
@ -209,18 +296,41 @@ msgstr ""
|
|||
msgid "Message Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Message Type"
|
||||
msgstr ""
|
||||
|
||||
msgid "Modal"
|
||||
msgstr ""
|
||||
|
||||
msgid "Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "NSFW"
|
||||
msgstr ""
|
||||
|
||||
msgid "Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "No dependencies."
|
||||
msgstr ""
|
||||
|
||||
msgid "Notification ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Notify Format"
|
||||
msgstr ""
|
||||
|
||||
msgid "OAuth Access Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "OAuth2 KeyFile"
|
||||
msgstr ""
|
||||
|
||||
msgid ""
|
||||
"Only works with Mac OS X 10.8 and higher. Additionally requires that "
|
||||
"/usr/local/bin/terminal-notifier is locally accessible."
|
||||
msgstr ""
|
||||
|
||||
msgid "Organization"
|
||||
msgstr ""
|
||||
|
||||
|
@ -230,6 +340,12 @@ msgstr ""
|
|||
msgid "Overflow Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Packages are recommended to improve functionality."
|
||||
msgstr ""
|
||||
|
||||
msgid "Packages are required to function."
|
||||
msgstr ""
|
||||
|
||||
msgid "Password"
|
||||
msgstr ""
|
||||
|
||||
|
@ -254,6 +370,9 @@ msgstr ""
|
|||
msgid "Provider Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "QOS"
|
||||
msgstr ""
|
||||
|
||||
msgid "Region"
|
||||
msgstr ""
|
||||
|
||||
|
@ -263,6 +382,9 @@ msgstr ""
|
|||
msgid "Remove Tokens"
|
||||
msgstr ""
|
||||
|
||||
msgid "Resubmit Flag"
|
||||
msgstr ""
|
||||
|
||||
msgid "Retry"
|
||||
msgstr ""
|
||||
|
||||
|
@ -287,6 +409,9 @@ msgstr ""
|
|||
msgid "Secure Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Send Replies"
|
||||
msgstr ""
|
||||
|
||||
msgid "Sender ID"
|
||||
msgstr ""
|
||||
|
||||
|
@ -296,6 +421,9 @@ msgstr ""
|
|||
msgid "Server Timeout"
|
||||
msgstr ""
|
||||
|
||||
msgid "Silent Notification"
|
||||
msgstr ""
|
||||
|
||||
msgid "Socket Connect Timeout"
|
||||
msgstr ""
|
||||
|
||||
|
@ -305,6 +433,9 @@ msgstr ""
|
|||
msgid "Sound"
|
||||
msgstr ""
|
||||
|
||||
msgid "Sound Link"
|
||||
msgstr ""
|
||||
|
||||
msgid "Source Email"
|
||||
msgstr ""
|
||||
|
||||
|
@ -314,12 +445,21 @@ msgstr ""
|
|||
msgid "Source Phone No"
|
||||
msgstr ""
|
||||
|
||||
msgid "Special Text Color"
|
||||
msgstr ""
|
||||
|
||||
msgid "Sticky"
|
||||
msgstr ""
|
||||
|
||||
msgid "Subtitle"
|
||||
msgstr ""
|
||||
|
||||
msgid "Syslog Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Tags"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Channel"
|
||||
msgstr ""
|
||||
|
||||
|
@ -344,24 +484,45 @@ msgstr ""
|
|||
msgid "Target Encoded ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Escalation"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target JID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Phone No"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Player ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Queue"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Room Alias"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Room ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Schedule"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Short Code"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Stream"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Subreddit"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Tag ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Team"
|
||||
msgstr ""
|
||||
|
||||
msgid "Target Topic"
|
||||
msgstr ""
|
||||
|
||||
|
@ -371,12 +532,24 @@ msgstr ""
|
|||
msgid "Targets"
|
||||
msgstr ""
|
||||
|
||||
msgid "Targets "
|
||||
msgstr ""
|
||||
|
||||
msgid "Team Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Template"
|
||||
msgstr ""
|
||||
|
||||
msgid "Template Data"
|
||||
msgstr ""
|
||||
|
||||
msgid "Template Path"
|
||||
msgstr ""
|
||||
|
||||
msgid "Template Tokens"
|
||||
msgstr ""
|
||||
|
||||
msgid "Tenant Domain"
|
||||
msgstr ""
|
||||
|
||||
|
@ -404,12 +577,27 @@ msgstr ""
|
|||
msgid "Token C"
|
||||
msgstr ""
|
||||
|
||||
msgid "URL"
|
||||
msgstr ""
|
||||
|
||||
msgid "URL Title"
|
||||
msgstr ""
|
||||
|
||||
msgid "Urgency"
|
||||
msgstr ""
|
||||
|
||||
msgid "Use Avatar"
|
||||
msgstr ""
|
||||
|
||||
msgid "Use Blocks"
|
||||
msgstr ""
|
||||
|
||||
msgid "Use Fields"
|
||||
msgstr ""
|
||||
|
||||
msgid "Use Session"
|
||||
msgstr ""
|
||||
|
||||
msgid "User ID"
|
||||
msgstr ""
|
||||
|
||||
|
@ -434,18 +622,27 @@ msgstr ""
|
|||
msgid "Web Based"
|
||||
msgstr ""
|
||||
|
||||
msgid "Web Page Preview"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook ID"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook Key"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook Mode"
|
||||
msgstr ""
|
||||
|
||||
msgid "Webhook Token"
|
||||
msgstr ""
|
||||
|
||||
msgid "Workspace"
|
||||
msgstr ""
|
||||
|
||||
msgid "X-Axis"
|
||||
msgstr ""
|
||||
|
||||
|
@ -455,6 +652,9 @@ msgstr ""
|
|||
msgid "Y-Axis"
|
||||
msgstr ""
|
||||
|
||||
msgid "libdbus-1.so.x must be installed."
|
||||
msgstr ""
|
||||
|
||||
msgid "ttl"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@ -23,7 +23,12 @@
|
|||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import os
|
||||
import logging
|
||||
from io import StringIO
|
||||
|
||||
# The root identifier needed to monitor 'apprise' logging
|
||||
LOGGER_NAME = 'apprise'
|
||||
|
||||
# Define a verbosity level that is a noisier then debug mode
|
||||
logging.TRACE = logging.DEBUG - 1
|
||||
|
@ -57,5 +62,136 @@ def deprecate(self, message, *args, **kwargs):
|
|||
logging.Logger.trace = trace
|
||||
logging.Logger.deprecate = deprecate
|
||||
|
||||
# Create ourselve a generic logging reference
|
||||
logger = logging.getLogger('apprise')
|
||||
# Create ourselve a generic (singleton) logging reference
|
||||
logger = logging.getLogger(LOGGER_NAME)
|
||||
|
||||
|
||||
class LogCapture(object):
|
||||
"""
|
||||
A class used to allow one to instantiate loggers that write to
|
||||
memory for temporary purposes. e.g.:
|
||||
|
||||
1. with LogCapture() as captured:
|
||||
2.
|
||||
3. # Send our notification(s)
|
||||
4. aobj.notify("hello world")
|
||||
5.
|
||||
6. # retrieve our logs produced by the above call via our
|
||||
7. # `captured` StringIO object we have access to within the `with`
|
||||
8. # block here:
|
||||
9. print(captured.getvalue())
|
||||
|
||||
"""
|
||||
def __init__(self, path=None, level=None, name=LOGGER_NAME, delete=True,
|
||||
fmt='%(asctime)s - %(levelname)s - %(message)s'):
|
||||
"""
|
||||
Instantiate a temporary log capture object
|
||||
|
||||
If a path is specified, then log content is sent to that file instead
|
||||
of a StringIO object.
|
||||
|
||||
You can optionally specify a logging level such as logging.INFO if you
|
||||
wish, otherwise by default the script uses whatever logging has been
|
||||
set globally. If you set delete to `False` then when using log files,
|
||||
they are not automatically cleaned up afterwards.
|
||||
|
||||
Optionally over-ride the fmt as well if you wish.
|
||||
|
||||
"""
|
||||
# Our memory buffer placeholder
|
||||
self.__buffer_ptr = StringIO()
|
||||
|
||||
# Store our file path as it will determine whether or not we write to
|
||||
# memory and a file
|
||||
self.__path = path
|
||||
self.__delete = delete
|
||||
|
||||
# Our logging level tracking
|
||||
self.__level = level
|
||||
self.__restore_level = None
|
||||
|
||||
# Acquire a pointer to our logger
|
||||
self.__logger = logging.getLogger(name)
|
||||
|
||||
# Prepare our handler
|
||||
self.__handler = logging.StreamHandler(self.__buffer_ptr) \
|
||||
if not self.__path else logging.FileHandler(
|
||||
self.__path, mode='a', encoding='utf-8')
|
||||
|
||||
# Use the specified level, otherwise take on the already
|
||||
# effective level of our logger
|
||||
self.__handler.setLevel(
|
||||
self.__level if self.__level is not None
|
||||
else self.__logger.getEffectiveLevel())
|
||||
|
||||
# Prepare our formatter
|
||||
self.__handler.setFormatter(logging.Formatter(fmt))
|
||||
|
||||
def __enter__(self):
|
||||
"""
|
||||
Allows logger manipulation within a 'with' block
|
||||
"""
|
||||
|
||||
if self.__level is not None:
|
||||
# Temporary adjust our log level if required
|
||||
self.__restore_level = self.__logger.getEffectiveLevel()
|
||||
if self.__restore_level > self.__level:
|
||||
# Bump our log level up for the duration of our `with`
|
||||
self.__logger.setLevel(self.__level)
|
||||
|
||||
else:
|
||||
# No restoration required
|
||||
self.__restore_level = None
|
||||
|
||||
else:
|
||||
# Do nothing but enforce that we have nothing to restore to
|
||||
self.__restore_level = None
|
||||
|
||||
if self.__path:
|
||||
# If a path has been identified, ensure we can write to the path
|
||||
# and that the file exists
|
||||
with open(self.__path, 'a'):
|
||||
os.utime(self.__path, None)
|
||||
|
||||
# Update our buffer pointer
|
||||
self.__buffer_ptr = open(self.__path, 'r')
|
||||
|
||||
# Add our handler
|
||||
self.__logger.addHandler(self.__handler)
|
||||
|
||||
# return our memory pointer
|
||||
return self.__buffer_ptr
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
"""
|
||||
removes the handler gracefully when the with block has completed
|
||||
"""
|
||||
|
||||
# Flush our content
|
||||
self.__handler.flush()
|
||||
self.__buffer_ptr.flush()
|
||||
|
||||
# Drop our handler
|
||||
self.__logger.removeHandler(self.__handler)
|
||||
|
||||
if self.__restore_level is not None:
|
||||
# Restore level
|
||||
self.__logger.setLevel(self.__restore_level)
|
||||
|
||||
if self.__path:
|
||||
# Close our file pointer
|
||||
self.__buffer_ptr.close()
|
||||
if self.__delete:
|
||||
try:
|
||||
# Always remove file afterwards
|
||||
os.unlink(self.__path)
|
||||
|
||||
except OSError:
|
||||
# It's okay if the file does not exist
|
||||
pass
|
||||
|
||||
if exc_type is not None:
|
||||
# pass exception on if one was generated
|
||||
return False
|
||||
|
||||
return True
|
||||
|
|
382
libs/apprise/plugins/NotifyAppriseAPI.py
Normal file
382
libs/apprise/plugins/NotifyAppriseAPI.py
Normal file
|
@ -0,0 +1,382 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..URLBase import PrivacyMode
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..utils import validate_regex
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
||||
class NotifyAppriseAPI(NotifyBase):
|
||||
"""
|
||||
A wrapper for Apprise (Persistent) API Notifications
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Apprise API'
|
||||
|
||||
# The services URL
|
||||
service_url = 'https://github.com/caronc/apprise-api'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'apprise'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'apprises'
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_apprise_api'
|
||||
|
||||
# Depending on the number of transactions/notifications taking place, this
|
||||
# could take a while. 30 seconds should be enough to perform the task
|
||||
socket_connect_timeout = 30.0
|
||||
|
||||
# Disable throttle rate for Apprise API requests since they are normally
|
||||
# local anyway
|
||||
request_rate_per_sec = 0.0
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
'{schema}://{host}/{token}',
|
||||
'{schema}://{host}:{port}/{token}',
|
||||
'{schema}://{user}@{host}/{token}',
|
||||
'{schema}://{user}@{host}:{port}/{token}',
|
||||
'{schema}://{user}:{password}@{host}/{token}',
|
||||
'{schema}://{user}:{password}@{host}:{port}/{token}',
|
||||
)
|
||||
|
||||
# Define our tokens; these are the minimum tokens required required to
|
||||
# be passed into this function (as arguments). The syntax appends any
|
||||
# previously defined in the base package and builds onto them
|
||||
template_tokens = dict(NotifyBase.template_tokens, **{
|
||||
'host': {
|
||||
'name': _('Hostname'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
},
|
||||
'port': {
|
||||
'name': _('Port'),
|
||||
'type': 'int',
|
||||
'min': 1,
|
||||
'max': 65535,
|
||||
},
|
||||
'user': {
|
||||
'name': _('Username'),
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'name': _('Password'),
|
||||
'type': 'string',
|
||||
'private': True,
|
||||
},
|
||||
'token': {
|
||||
'name': _('Token'),
|
||||
'type': 'string',
|
||||
'required': True,
|
||||
'private': True,
|
||||
'regex': (r'^[A-Z0-9_-]{1,32}$', 'i'),
|
||||
},
|
||||
})
|
||||
|
||||
# Define our template arguments
|
||||
template_args = dict(NotifyBase.template_args, **{
|
||||
'tags': {
|
||||
'name': _('Tags'),
|
||||
'type': 'string',
|
||||
},
|
||||
'to': {
|
||||
'alias_of': 'token',
|
||||
},
|
||||
})
|
||||
|
||||
# Define any kwargs we're using
|
||||
template_kwargs = {
|
||||
'headers': {
|
||||
'name': _('HTTP Header'),
|
||||
'prefix': '+',
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, token=None, tags=None, headers=None, **kwargs):
|
||||
"""
|
||||
Initialize Apprise API Object
|
||||
|
||||
headers can be a dictionary of key/value pairs that you want to
|
||||
additionally include as part of the server headers to post with
|
||||
|
||||
"""
|
||||
super(NotifyAppriseAPI, self).__init__(**kwargs)
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not isinstance(self.fullpath, six.string_types):
|
||||
self.fullpath = '/'
|
||||
|
||||
self.token = validate_regex(
|
||||
token, *self.template_tokens['token']['regex'])
|
||||
if not self.token:
|
||||
msg = 'The Apprise API token specified ({}) is invalid.'\
|
||||
.format(token)
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Build list of tags
|
||||
self.__tags = parse_list(tags)
|
||||
|
||||
self.headers = {}
|
||||
if headers:
|
||||
# Store our extra headers
|
||||
self.headers.update(headers)
|
||||
|
||||
return
|
||||
|
||||
def url(self, privacy=False, *args, **kwargs):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Our URL parameters
|
||||
params = self.url_parameters(privacy=privacy, *args, **kwargs)
|
||||
|
||||
# Append our headers into our parameters
|
||||
params.update({'+{}'.format(k): v for k, v in self.headers.items()})
|
||||
|
||||
if self.__tags:
|
||||
params['tags'] = ','.join([x for x in self.__tags])
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=NotifyAppriseAPI.quote(self.user, safe=''),
|
||||
password=self.pprint(
|
||||
self.password, privacy, mode=PrivacyMode.Secret, safe=''),
|
||||
)
|
||||
elif self.user:
|
||||
auth = '{user}@'.format(
|
||||
user=NotifyAppriseAPI.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
fullpath = self.fullpath.strip('/')
|
||||
return '{schema}://{auth}{hostname}{port}{fullpath}{token}' \
|
||||
'/?{params}'.format(
|
||||
schema=self.secure_protocol
|
||||
if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
# never encode hostname since we're expecting it to be a
|
||||
# valid one
|
||||
hostname=self.host,
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
fullpath='/{}/'.format(NotifyAppriseAPI.quote(
|
||||
fullpath, safe='/')) if fullpath else '/',
|
||||
token=self.pprint(self.token, privacy, safe=''),
|
||||
params=NotifyAppriseAPI.urlencode(params))
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
Perform Apprise API Notification
|
||||
"""
|
||||
|
||||
headers = {}
|
||||
# Apply any/all header over-rides defined
|
||||
headers.update(self.headers)
|
||||
|
||||
# prepare Apprise API Object
|
||||
payload = {
|
||||
# Apprise API Payload
|
||||
'title': title,
|
||||
'body': body,
|
||||
'type': notify_type,
|
||||
'format': self.notify_format,
|
||||
}
|
||||
|
||||
if self.__tags:
|
||||
payload['tag'] = self.__tags
|
||||
|
||||
auth = None
|
||||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
# Set our schema
|
||||
schema = 'https' if self.secure else 'http'
|
||||
|
||||
url = '%s://%s' % (schema, self.host)
|
||||
if isinstance(self.port, int):
|
||||
url += ':%d' % self.port
|
||||
|
||||
fullpath = self.fullpath.strip('/')
|
||||
url += '/{}/'.format(fullpath) if fullpath else '/'
|
||||
url += 'notify/{}'.format(self.token)
|
||||
|
||||
# Some entries can not be over-ridden
|
||||
headers.update({
|
||||
'User-Agent': self.app_id,
|
||||
'Content-Type': 'application/json',
|
||||
# Pass our Source UUID4 Identifier
|
||||
'X-Apprise-ID': self.asset._uid,
|
||||
# Pass our current recursion count to our upstream server
|
||||
'X-Apprise-Recursion-Count': str(self.asset._recursion + 1),
|
||||
})
|
||||
|
||||
self.logger.debug('Apprise API POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
self.logger.debug('Apprise API Payload: %s' % str(payload))
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
url,
|
||||
data=dumps(payload),
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
timeout=self.request_timeout,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyAppriseAPI.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.warning(
|
||||
'Failed to send Apprise API notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Apprise API notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred sending Apprise API '
|
||||
'notification to %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def parse_native_url(url):
|
||||
"""
|
||||
Support http://hostname/notify/token and
|
||||
http://hostname/path/notify/token
|
||||
"""
|
||||
|
||||
result = re.match(
|
||||
r'^http(?P<secure>s?)://(?P<hostname>[A-Z0-9._-]+)'
|
||||
r'(:(?P<port>[0-9]+))?'
|
||||
r'(?P<path>/[^?]+?)?/notify/(?P<token>[A-Z0-9_-]{1,32})/?'
|
||||
r'(?P<params>\?.+)?$', url, re.I)
|
||||
|
||||
if result:
|
||||
return NotifyAppriseAPI.parse_url(
|
||||
'{schema}://{hostname}{port}{path}/{token}/{params}'.format(
|
||||
schema=NotifyAppriseAPI.secure_protocol
|
||||
if result.group('secure') else NotifyAppriseAPI.protocol,
|
||||
hostname=result.group('hostname'),
|
||||
port='' if not result.group('port')
|
||||
else ':{}'.format(result.group('port')),
|
||||
path='' if not result.group('path')
|
||||
else result.group('path'),
|
||||
token=result.group('token'),
|
||||
params='' if not result.group('params')
|
||||
else '?{}'.format(result.group('params'))))
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to re-instantiate this object.
|
||||
|
||||
"""
|
||||
results = NotifyBase.parse_url(url)
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Add our headers that the user can potentially over-ride if they wish
|
||||
# to to our returned result set
|
||||
results['headers'] = results['qsd+']
|
||||
if results['qsd-']:
|
||||
results['headers'].update(results['qsd-'])
|
||||
NotifyBase.logger.deprecate(
|
||||
"minus (-) based Apprise API header tokens are being "
|
||||
" removed; use the plus (+) symbol instead.")
|
||||
|
||||
# Tidy our header entries by unquoting them
|
||||
results['headers'] = \
|
||||
{NotifyAppriseAPI.unquote(x): NotifyAppriseAPI.unquote(y)
|
||||
for x, y in results['headers'].items()}
|
||||
|
||||
# Support the passing of tags in the URL
|
||||
if 'tags' in results['qsd'] and len(results['qsd']['tags']):
|
||||
results['tags'] = \
|
||||
NotifyAppriseAPI.parse_list(results['qsd']['tags'])
|
||||
|
||||
# Support the 'to' & 'token' variable so that we can support rooms
|
||||
# this way too.
|
||||
if 'token' in results['qsd'] and len(results['qsd']['token']):
|
||||
results['token'] = \
|
||||
NotifyAppriseAPI.unquote(results['qsd']['token'])
|
||||
|
||||
elif 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['token'] = NotifyAppriseAPI.unquote(results['qsd']['to'])
|
||||
|
||||
else:
|
||||
# Start with a list of path entries to work with
|
||||
entries = NotifyAppriseAPI.split_path(results['fullpath'])
|
||||
if entries:
|
||||
# use our last entry found
|
||||
results['token'] = entries[-1]
|
||||
|
||||
# pop our last entry off
|
||||
entries = entries[:-1]
|
||||
|
||||
# re-assemble our full path
|
||||
results['fullpath'] = '/'.join(entries)
|
||||
|
||||
return results
|
|
@ -52,6 +52,54 @@ class NotifyBase(BASE_OBJECT):
|
|||
This is the base class for all notification services
|
||||
"""
|
||||
|
||||
# An internal flag used to test the state of the plugin. If set to
|
||||
# False, then the plugin is not used. Plugins can disable themselves
|
||||
# due to enviroment issues (such as missing libraries, or platform
|
||||
# dependencies that are not present). By default all plugins are
|
||||
# enabled.
|
||||
enabled = True
|
||||
|
||||
# Some plugins may require additional packages above what is provided
|
||||
# already by Apprise.
|
||||
#
|
||||
# Use this section to relay this information to the users of the script to
|
||||
# help guide them with what they need to know if they plan on using your
|
||||
# plugin. The below configuration should otherwise accomodate all normal
|
||||
# situations and will not requrie any updating:
|
||||
requirements = {
|
||||
# Use the description to provide a human interpretable description of
|
||||
# what is required to make the plugin work. This is only nessisary
|
||||
# if there are package dependencies. Setting this to default will
|
||||
# cause a general response to be returned. Only set this if you plan
|
||||
# on over-riding the default. Always consider language support here.
|
||||
# So before providing a value do the following in your code base:
|
||||
#
|
||||
# from apprise.AppriseLocale import gettext_lazy as _
|
||||
#
|
||||
# 'details': _('My detailed requirements')
|
||||
'details': None,
|
||||
|
||||
# Define any required packages needed for the plugin to run. This is
|
||||
# an array of strings that simply look like lines residing in a
|
||||
# `requirements.txt` file...
|
||||
#
|
||||
# As an example, an entry may look like:
|
||||
# 'packages_required': [
|
||||
# 'cryptography < 3.4`,
|
||||
# ]
|
||||
'packages_required': [],
|
||||
|
||||
# Recommended packages identify packages that are not required to make
|
||||
# your plugin work, but would improve it's use or grant it access to
|
||||
# full functionality (that might otherwise be limited).
|
||||
|
||||
# Similar to `packages_required`, you would identify each entry in
|
||||
# the array as you would in a `requirements.txt` file.
|
||||
#
|
||||
# - Do not re-provide entries already in the `packages_required`
|
||||
'packages_recommended': [],
|
||||
}
|
||||
|
||||
# The services URL
|
||||
service_url = None
|
||||
|
||||
|
@ -153,7 +201,8 @@ class NotifyBase(BASE_OBJECT):
|
|||
# Provide override
|
||||
self.overflow_mode = overflow
|
||||
|
||||
def image_url(self, notify_type, logo=False, extension=None):
|
||||
def image_url(self, notify_type, logo=False, extension=None,
|
||||
image_size=None):
|
||||
"""
|
||||
Returns Image URL if possible
|
||||
"""
|
||||
|
@ -166,7 +215,7 @@ class NotifyBase(BASE_OBJECT):
|
|||
|
||||
return self.asset.image_url(
|
||||
notify_type=notify_type,
|
||||
image_size=self.image_size,
|
||||
image_size=self.image_size if image_size is None else image_size,
|
||||
logo=logo,
|
||||
extension=extension,
|
||||
)
|
||||
|
@ -222,6 +271,13 @@ class NotifyBase(BASE_OBJECT):
|
|||
|
||||
"""
|
||||
|
||||
if not self.enabled:
|
||||
# Deny notifications issued to services that are disabled
|
||||
self.logger.warning(
|
||||
"{} is currently disabled on this system.".format(
|
||||
self.service_name))
|
||||
return False
|
||||
|
||||
# Prepare attachments if required
|
||||
if attach is not None and not isinstance(attach, AppriseAttachment):
|
||||
try:
|
||||
|
|
1
libs/apprise/plugins/NotifyBase.pyi
Normal file
1
libs/apprise/plugins/NotifyBase.pyi
Normal file
|
@ -0,0 +1 @@
|
|||
class NotifyBase: ...
|
|
@ -36,7 +36,6 @@
|
|||
# The API reference used to build this plugin was documented here:
|
||||
# https://developers.clicksend.com/docs/rest/v3/
|
||||
#
|
||||
import re
|
||||
import requests
|
||||
from json import dumps
|
||||
from base64 import b64encode
|
||||
|
@ -44,7 +43,8 @@ from base64 import b64encode
|
|||
from .NotifyBase import NotifyBase
|
||||
from ..URLBase import PrivacyMode
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..utils import is_phone_no
|
||||
from ..utils import parse_phone_no
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
@ -53,12 +53,6 @@ CLICKSEND_HTTP_ERROR_MAP = {
|
|||
401: 'Unauthorized - Invalid Token.',
|
||||
}
|
||||
|
||||
# Some Phone Number Detection
|
||||
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
|
||||
|
||||
# Used to break path apart into list of channels
|
||||
TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
|
||||
|
||||
|
||||
class NotifyClickSend(NotifyBase):
|
||||
"""
|
||||
|
@ -151,26 +145,18 @@ class NotifyClickSend(NotifyBase):
|
|||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
for target in parse_list(targets):
|
||||
for target in parse_phone_no(targets):
|
||||
# Validate targets and drop bad ones:
|
||||
result = IS_PHONE_NO.match(target)
|
||||
if result:
|
||||
# Further check our phone # for it's digit count
|
||||
result = ''.join(re.findall(r'\d+', result.group('phone')))
|
||||
if len(result) < 11 or len(result) > 14:
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'({}) specified.'.format(target),
|
||||
)
|
||||
continue
|
||||
|
||||
# store valid phone number
|
||||
self.targets.append(result)
|
||||
result = is_phone_no(target)
|
||||
if not result:
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'({}) specified.'.format(target),
|
||||
)
|
||||
continue
|
||||
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'({}) specified.'.format(target))
|
||||
# store valid phone number
|
||||
self.targets.append(result['full'])
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
|
@ -321,8 +307,7 @@ class NotifyClickSend(NotifyBase):
|
|||
# Support the 'to' variable so that we can support rooms this way too
|
||||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += [x for x in filter(
|
||||
bool, TARGET_LIST_DELIM.split(
|
||||
NotifyClickSend.unquote(results['qsd']['to'])))]
|
||||
results['targets'] += \
|
||||
NotifyClickSend.parse_phone_no(results['qsd']['to'])
|
||||
|
||||
return results
|
||||
|
|
|
@ -30,7 +30,6 @@
|
|||
# (both user and password) from the API Details section from within your
|
||||
# account profile area: https://d7networks.com/accounts/profile/
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
import base64
|
||||
|
@ -40,7 +39,8 @@ from json import loads
|
|||
from .NotifyBase import NotifyBase
|
||||
from ..URLBase import PrivacyMode
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
from ..utils import is_phone_no
|
||||
from ..utils import parse_phone_no
|
||||
from ..utils import parse_bool
|
||||
from ..AppriseLocale import gettext_lazy as _
|
||||
|
||||
|
@ -52,9 +52,6 @@ D7NETWORKS_HTTP_ERROR_MAP = {
|
|||
500: 'A Serverside Error Occured Handling the Request.',
|
||||
}
|
||||
|
||||
# Some Phone Number Detection
|
||||
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
|
||||
|
||||
|
||||
# Priorities
|
||||
class D7SMSPriority(object):
|
||||
|
@ -197,36 +194,26 @@ class NotifyD7Networks(NotifyBase):
|
|||
self.source = None \
|
||||
if not isinstance(source, six.string_types) else source.strip()
|
||||
|
||||
# Parse our targets
|
||||
self.targets = list()
|
||||
|
||||
for target in parse_list(targets):
|
||||
# Validate targets and drop bad ones:
|
||||
result = IS_PHONE_NO.match(target)
|
||||
if result:
|
||||
# Further check our phone # for it's digit count
|
||||
# if it's less than 10, then we can assume it's
|
||||
# a poorly specified phone no and spit a warning
|
||||
result = ''.join(re.findall(r'\d+', result.group('phone')))
|
||||
if len(result) < 11 or len(result) > 14:
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'({}) specified.'.format(target),
|
||||
)
|
||||
continue
|
||||
|
||||
# store valid phone number
|
||||
self.targets.append(result)
|
||||
continue
|
||||
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # ({}) specified.'.format(target))
|
||||
|
||||
if len(self.targets) == 0:
|
||||
msg = 'There are no valid targets identified to notify.'
|
||||
if not (self.user and self.password):
|
||||
msg = 'A D7 Networks user/pass was not provided.'
|
||||
self.logger.warning(msg)
|
||||
raise TypeError(msg)
|
||||
|
||||
# Parse our targets
|
||||
self.targets = list()
|
||||
for target in parse_phone_no(targets):
|
||||
# Validate targets and drop bad ones:
|
||||
result = result = is_phone_no(target)
|
||||
if not result:
|
||||
self.logger.warning(
|
||||
'Dropped invalid phone # '
|
||||
'({}) specified.'.format(target),
|
||||
)
|
||||
continue
|
||||
|
||||
# store valid phone number
|
||||
self.targets.append(result['full'])
|
||||
|
||||
return
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
|
@ -235,6 +222,11 @@ class NotifyD7Networks(NotifyBase):
|
|||
redirects to the appropriate handling
|
||||
"""
|
||||
|
||||
if len(self.targets) == 0:
|
||||
# There were no services to notify
|
||||
self.logger.warning('There were no D7 Networks targets to notify.')
|
||||
return False
|
||||
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
|
@ -479,6 +471,6 @@ class NotifyD7Networks(NotifyBase):
|
|||
# The 'to' makes it easier to use yaml configuration
|
||||
if 'to' in results['qsd'] and len(results['qsd']['to']):
|
||||
results['targets'] += \
|
||||
NotifyD7Networks.parse_list(results['qsd']['to'])
|
||||
NotifyD7Networks.parse_phone_no(results['qsd']['to'])
|
||||
|
||||
return results
|
||||
|
|
|
@ -38,10 +38,6 @@ NOTIFY_DBUS_SUPPORT_ENABLED = False
|
|||
# Image support is dependant on the GdkPixbuf library being available
|
||||
NOTIFY_DBUS_IMAGE_SUPPORT = False
|
||||
|
||||
# The following are required to hook into the notifications:
|
||||
NOTIFY_DBUS_INTERFACE = 'org.freedesktop.Notifications'
|
||||
NOTIFY_DBUS_SETTING_LOCATION = '/org/freedesktop/Notifications'
|
||||
|
||||
# Initialize our mainloops
|
||||
LOOP_GLIB = None
|
||||
LOOP_QT = None
|
||||
|
@ -132,8 +128,19 @@ class NotifyDBus(NotifyBase):
|
|||
A wrapper for local DBus/Qt Notifications
|
||||
"""
|
||||
|
||||
# Set our global enabled flag
|
||||
enabled = NOTIFY_DBUS_SUPPORT_ENABLED
|
||||
|
||||
requirements = {
|
||||
# Define our required packaging in order to work
|
||||
'details': _('libdbus-1.so.x must be installed.')
|
||||
}
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'DBus Notification'
|
||||
service_name = _('DBus Notification')
|
||||
|
||||
# The services URL
|
||||
service_url = 'http://www.freedesktop.org/Software/dbus/'
|
||||
|
||||
# The default protocols
|
||||
# Python 3 keys() does not return a list object, it's it's own dict_keys()
|
||||
|
@ -158,14 +165,9 @@ class NotifyDBus(NotifyBase):
|
|||
# content to display
|
||||
body_max_line_count = 10
|
||||
|
||||
# This entry is a bit hacky, but it allows us to unit-test this library
|
||||
# in an environment that simply doesn't have the gnome packages
|
||||
# available to us. It also allows us to handle situations where the
|
||||
# packages actually are present but we need to test that they aren't.
|
||||
# If anyone is seeing this had knows a better way of testing this
|
||||
# outside of what is defined in test/test_glib_plugin.py, please
|
||||
# let me know! :)
|
||||
_enabled = NOTIFY_DBUS_SUPPORT_ENABLED
|
||||
# The following are required to hook into the notifications:
|
||||
dbus_interface = 'org.freedesktop.Notifications'
|
||||
dbus_setting_location = '/org/freedesktop/Notifications'
|
||||
|
||||
# Define object templates
|
||||
templates = (
|
||||
|
@ -241,12 +243,6 @@ class NotifyDBus(NotifyBase):
|
|||
"""
|
||||
Perform DBus Notification
|
||||
"""
|
||||
|
||||
if not self._enabled or MAINLOOP_MAP[self.schema] is None:
|
||||
self.logger.warning(
|
||||
"{} notifications could not be loaded.".format(self.schema))
|
||||
return False
|
||||
|
||||
# Acquire our session
|
||||
try:
|
||||
session = SessionBus(mainloop=MAINLOOP_MAP[self.schema])
|
||||
|
@ -265,14 +261,14 @@ class NotifyDBus(NotifyBase):
|
|||
|
||||
# acquire our dbus object
|
||||
dbus_obj = session.get_object(
|
||||
NOTIFY_DBUS_INTERFACE,
|
||||
NOTIFY_DBUS_SETTING_LOCATION,
|
||||
self.dbus_interface,
|
||||
self.dbus_setting_location,
|
||||
)
|
||||
|
||||
# Acquire our dbus interface
|
||||
dbus_iface = Interface(
|
||||
dbus_obj,
|
||||
dbus_interface=NOTIFY_DBUS_INTERFACE,
|
||||
dbus_interface=self.dbus_interface,
|
||||
)
|
||||
|
||||
# image path
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue