mirror of https://github.com/morpheus65535/bazarr.git (synced 2025-04-23 22:27:17 -04:00)
no log: fixed multiple PEP8 lint issues
This commit is contained in:
parent b24ee309ed
commit 6ca3689f2e
12 changed files with 88 additions and 73 deletions
@@ -43,12 +43,12 @@ class SystemLogs(Resource):
             if len(include) > 0:
                 try:
                     include_compiled = re.compile(include, flags)
-                except:
+                except Exception:
                     include_compiled = None
             if len(exclude) > 0:
                 try:
                     exclude_compiled = re.compile(exclude, flags)
-                except:
+                except Exception:
                     exclude_compiled = None
         elif ignore_case:
             include = include.casefold()

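Note: the E722 fix in this hunk is behavioral, not just cosmetic. A bare "except:" also swallows KeyboardInterrupt and SystemExit, while "except Exception:" lets them propagate. A minimal, self-contained sketch of the fallback pattern used above (the helper name is illustrative, not from the codebase):

    import re

    def compile_or_none(pattern, flags=0):
        # Fall back to None on an invalid pattern, as the hunk above does.
        try:
            return re.compile(pattern, flags)
        except Exception:  # catches re.error; Ctrl-C still propagates
            return None

    print(compile_or_none(r"\d+"))    # a compiled re.Pattern object
    print(compile_or_none(r"[oops"))  # None - unterminated character set
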
@@ -38,6 +38,7 @@ def validate_ip_address(ip_string):
 ONE_HUNDRED_YEARS_IN_MINUTES = 52560000
+ONE_HUNDRED_YEARS_IN_HOURS = 876000


 class Validator(OriginalValidator):
     # Give the ability to personalize messages sent by the original dynaconf Validator class.
     default_messages = MappingProxyType(

@@ -103,7 +104,7 @@ validators = [
     Validator('general.subfolder', must_exist=True, default='current', is_type_of=str),
     Validator('general.subfolder_custom', must_exist=True, default='', is_type_of=str),
     Validator('general.upgrade_subs', must_exist=True, default=True, is_type_of=bool),
     Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int,
               is_in=[6, 12, 24, ONE_HUNDRED_YEARS_IN_HOURS]),
     Validator('general.days_to_upgrade_subs', must_exist=True, default=7, is_type_of=int, gte=0, lte=30),
     Validator('general.upgrade_manual', must_exist=True, default=True, is_type_of=bool),

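For context: is_in constrains a setting to a whitelist of values, and the new ONE_HUNDRED_YEARS_IN_HOURS constant (100 * 365 * 24 = 876000) names the "effectively never" upgrade interval instead of leaving a magic number. A hedged, standalone sketch of the same pattern, assuming dynaconf is installed:

    from dynaconf import Dynaconf, Validator

    ONE_HUNDRED_YEARS_IN_HOURS = 876000  # 100 * 365 * 24

    settings = Dynaconf(
        validators=[
            # Reject any upgrade_frequency outside the allowed choices;
            # the default is applied when the key is missing.
            Validator('general.upgrade_frequency', must_exist=True, default=12,
                      is_type_of=int, is_in=[6, 12, 24, ONE_HUNDRED_YEARS_IN_HOURS]),
        ]
    )
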
@@ -484,25 +485,27 @@ def get_settings():
             settings_to_return[k].update({subk: subv})
     return settings_to_return


 def validate_log_regex():
     # handle bug in dynaconf that changes strings to numbers, so change them back to str
     if not isinstance(settings.log.include_filter, str):
         settings.log.include_filter = str(settings.log.include_filter)
     if not isinstance(settings.log.exclude_filter, str):
         settings.log.exclude_filter = str(settings.log.exclude_filter)

-    if (settings.log.use_regex):
+    if settings.log.use_regex:
         # compile any regular expressions specified to see if they are valid
         # if invalid, tell the user which one
         try:
             re.compile(settings.log.include_filter)
-        except:
+        except Exception:
             raise ValidationError(f"Include filter: invalid regular expression: {settings.log.include_filter}")
         try:
             re.compile(settings.log.exclude_filter)
-        except:
+        except Exception:
             raise ValidationError(f"Exclude filter: invalid regular expression: {settings.log.exclude_filter}")


 def save_settings(settings_items):
     configure_debug = False
     configure_captcha = False

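The idea above - compile user-supplied filters once at save time so a bad expression is reported immediately, not on every log line - reduces to a few lines. A sketch with illustrative names (validate_filter is not part of the codebase):

    import re

    def validate_filter(name, pattern):
        # Fail fast with a readable message instead of erroring per log line.
        try:
            re.compile(pattern)
        except re.error as e:
            raise ValueError(f"{name}: invalid regular expression: {pattern} ({e})")

    validate_filter("Include filter", r"bazarr|sonarr")  # ok, returns None
    # validate_filter("Include filter", r"(unclosed")    # would raise ValueError
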
@@ -519,8 +522,7 @@ def save_settings(settings_items):
     undefined_subtitles_track_default_changed = False
     audio_tracks_parsing_changed = False
     reset_providers = False
     check_log_regex = False

     # Subzero Mods
     update_subzero = False
     subzero_mods = get_array_from(settings.general.subzero_mods)

@@ -323,7 +323,7 @@ def get_providers_auth():
             'response': settings.whisperai.response,
             'timeout': settings.whisperai.timeout,
             'ffmpeg_path': _FFMPEG_BINARY,
             'loglevel': settings.whisperai.loglevel,
         }
     }

@@ -55,34 +55,34 @@ class NoExceptionFormatter(logging.Formatter):
     def formatException(self, record):
         return ''


 class UnwantedWaitressMessageFilter(logging.Filter):
     def filter(self, record):
-        if settings.general.debug == True:
+        if settings.general.debug is True:
             # no filtering in debug mode
             return True

-        unwantedMessages = [
-            "Exception while serving /api/socket.io/",
-            ['Session is disconnected', 'Session not found' ],
-
-            "Exception while serving /api/socket.io/",
-            ["'Session is disconnected'", "'Session not found'" ],
-
-            "Exception while serving /api/socket.io/",
-            ['"Session is disconnected"', '"Session not found"' ]
+        unwantedMessages = [
+            "Exception while serving /api/socket.io/",
+            ['Session is disconnected', 'Session not found'],
+
+            "Exception while serving /api/socket.io/",
+            ["'Session is disconnected'", "'Session not found'"],
+
+            "Exception while serving /api/socket.io/",
+            ['"Session is disconnected"', '"Session not found"']
         ]

         wanted = True
         listLength = len(unwantedMessages)
         for i in range(0, listLength, 2):
             if record.msg == unwantedMessages[i]:
                 exceptionTuple = record.exc_info
-                if exceptionTuple != None:
+                if exceptionTuple is not None:
                     if str(exceptionTuple[1]) in unwantedMessages[i+1]:
                         wanted = False
                         break

         return wanted

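Two patterns are at work in this hunk: PEP8's identity comparisons ("is True", "is not None" instead of "=="/"!=", checks E712/E711) and the stdlib logging.Filter protocol, where returning False from filter() drops the record. A simplified, self-contained sketch (the class name and message list are illustrative):

    import logging

    class DropNoisyErrors(logging.Filter):
        NOISY = ("Session is disconnected", "Session not found")

        def filter(self, record):
            # Returning False suppresses the record; True lets it through.
            exc = record.exc_info
            if exc is not None and str(exc[1]) in self.NOISY:
                return False
            return True

    log = logging.getLogger("waitress")
    log.addFilter(DropNoisyErrors())
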
@@ -159,7 +159,7 @@ def configure_logging(debug=False):
     logging.getLogger("ga4mp.ga4mp").setLevel(logging.ERROR)

     logging.getLogger("waitress").setLevel(logging.ERROR)
     logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
     logging.getLogger("knowit").setLevel(logging.CRITICAL)
     logging.getLogger("enzyme").setLevel(logging.CRITICAL)
     logging.getLogger("guessit").setLevel(logging.WARNING)

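The block above shows the standard way to quiet chatty third-party libraries: raise the threshold on their named loggers and attach filters only where needed. The same pattern in isolation (logger names taken from the hunk above):

    import logging

    for name, level in [("waitress", logging.ERROR),
                        ("knowit", logging.CRITICAL),
                        ("enzyme", logging.CRITICAL),
                        ("guessit", logging.WARNING)]:
        # Messages below the threshold are discarded for that library.
        logging.getLogger(name).setLevel(level)
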
@@ -8,7 +8,6 @@ from apscheduler.triggers.interval import IntervalTrigger
 from apscheduler.triggers.cron import CronTrigger
 from apscheduler.triggers.date import DateTrigger
 from apscheduler.events import EVENT_JOB_SUBMITTED, EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
 from apscheduler.jobstores.base import JobLookupError
 from datetime import datetime, timedelta
 from calendar import day_name
 from random import randrange

@@ -40,17 +39,20 @@ from dateutil.relativedelta import relativedelta

 NO_INTERVAL = "None"
 NEVER_DATE = "Never"
 ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365


 def a_long_time_from_now(job):
     # currently defined as more than a year from now
     delta = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
     return delta.total_seconds() > ONE_YEAR_IN_SECONDS


 def in_a_century():
     century = datetime.now() + relativedelta(years=100)
     return century.year


 class Scheduler:

     def __init__(self):

@@ -133,7 +135,6 @@ class Scheduler:
             return ", ".join(strings)

         def get_time_from_cron(cron):
             year = str(cron[0])
             day = str(cron[4])
             hour = str(cron[5])

@@ -183,8 +184,8 @@ class Scheduler:
             else:
                 interval = get_time_from_cron(job.trigger.fields)
             task_list.append({'name': job.name, 'interval': interval,
                               'next_run_in': next_run, 'next_run_time': next_run, 'job_id': job.id,
                               'job_running': running})

         return task_list

@@ -218,9 +219,8 @@ class Scheduler:
             trigger = CronTrigger(day_of_week=settings.backup.day, hour=settings.backup.hour)
         elif backup == "Manually":
             trigger = CronTrigger(year=in_a_century())
-        self.aps_scheduler.add_job(backup_to_zip, trigger,
-                                   max_instances=1, coalesce=True, misfire_grace_time=15, id='backup',
-                                   name='Backup Database and Configuration File', replace_existing=True)
+        self.aps_scheduler.add_job(backup_to_zip, trigger, max_instances=1, coalesce=True, misfire_grace_time=15,
+                                   id='backup', name='Backup Database and Configuration File', replace_existing=True)

     def __sonarr_full_update_task(self):
         if settings.general.use_sonarr:

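The CronTrigger(year=in_a_century()) trick above keeps a "Manually" job registered in APScheduler without it ever firing: the trigger is valid, but its next run is a century away (which is also what a_long_time_from_now() detects). A runnable sketch under the assumption that apscheduler and python-dateutil are installed:

    from datetime import datetime
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.cron import CronTrigger
    from dateutil.relativedelta import relativedelta

    def in_a_century():
        return (datetime.now() + relativedelta(years=100)).year

    scheduler = BackgroundScheduler()
    scheduler.start()
    # Registered and listable, but effectively never runs.
    job = scheduler.add_job(lambda: print("backup"), CronTrigger(year=in_a_century()),
                            id='backup', replace_existing=True)
    print(job.next_run_time)  # a date roughly 100 years from now
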
@@ -1,6 +1,7 @@
 # coding=utf-8

 import os
 import io

 from threading import Thread

@@ -20,6 +21,7 @@ from app.get_args import args  # noqa E402
 from app.check_update import apply_update, check_releases, check_if_new_update  # noqa E402
 from app.config import settings, configure_proxy_func, base_url  # noqa E402
 from init import *  # noqa E402
 import logging  # noqa E402

 # Install downloaded update
 if bazarr_version != '':

@@ -21,10 +21,13 @@ from .parser import movieParser
 bool_map = {"True": True, "False": False}

 FEATURE_PREFIX = "SYNC_MOVIES "


 def trace(message):
     if settings.general.debug:
         logging.debug(FEATURE_PREFIX + message)


 def update_all_movies():
     movies_full_scan_subtitles()
     logging.info('BAZARR All existing movie subtitles indexed from disk.')

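The trace() helper introduced here is a small debug-gating pattern: per-item sync decisions are logged with a feature prefix, but only when debug mode is on, so verbose runs stay easy to grep without flooding normal logs. Reduced to a standalone sketch (the DEBUG flag stands in for settings.general.debug):

    import logging

    FEATURE_PREFIX = "SYNC_MOVIES "
    DEBUG = True  # stand-in for settings.general.debug

    def trace(message):
        # Prefixed, debug-only logging for per-item sync decisions.
        if DEBUG:
            logging.debug(FEATURE_PREFIX + message)

    trace("42: (Monitor Status Mismatch) Some Movie")
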
@@ -63,6 +66,7 @@ def get_movie_monitored_status(movie_id):
     else:
         return bool_map[existing_movie_monitored[0]]


 # Insert new movies in DB
 def add_movie(added_movie, send_event):
     try:

@@ -158,7 +162,7 @@ def update_movies(send_event=True):
         # Only movies that Radarr says have files downloaded will be kept up to date in the DB
         if movie['hasFile'] is True:
             if 'movieFile' in movie:
                 if sync_monitored:
                     if get_movie_monitored_status(movie['tmdbId']) != movie['monitored']:
                         # monitored status is not the same as our DB
                         trace(f"{i}: (Monitor Status Mismatch) {movie['title']}")

@@ -187,8 +191,8 @@ def update_movies(send_event=True):
                 add_movie(parsed_movie, send_event)
                 movies_added.append(parsed_movie['title'])
             else:
                 trace(f"{i}: (Skipped File Missing) {movie['title']}")
                 files_missing += 1

     if send_event:
         hide_progress(id='movies_progress')

@@ -196,10 +200,12 @@ def update_movies(send_event=True):
         trace(f"Skipped {files_missing} file missing movies out of {i}")
         if sync_monitored:
             trace(f"Skipped {skipped_count} unmonitored movies out of {i}")
-            trace(f"Processed {i - files_missing - skipped_count} movies out of {i} " +
-                  f"with {len(movies_added)} added, {len(movies_updated)} updated and {len(movies_deleted)} deleted")
+            trace(f"Processed {i - files_missing - skipped_count} movies out of {i} "
+                  f"with {len(movies_added)} added, {len(movies_updated)} updated and "
+                  f"{len(movies_deleted)} deleted")
         else:
-            trace(f"Processed {i - files_missing} movies out of {i} with {len(movies_added)} added and {len(movies_updated)} updated")
+            trace(f"Processed {i - files_missing} movies out of {i} with {len(movies_added)} added and "
+                  f"{len(movies_updated)} updated")

     logging.debug('BAZARR All movies synced from Radarr into database.')

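The rewrapped trace() calls rely on implicit concatenation of adjacent string literals, which the parser fuses at compile time - the trailing "+" in the old code was redundant and forced over-long lines. For example:

    movies_added, movies_updated = ["A"], ["B", "C"]
    i, files_missing = 10, 2

    # Adjacent f-strings inside parentheses become one string literal.
    msg = (f"Processed {i - files_missing} movies out of {i} with "
           f"{len(movies_added)} added and {len(movies_updated)} updated")
    print(msg)  # Processed 8 movies out of 10 with 1 added and 2 updated
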
@@ -115,27 +115,27 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
     tags = [d['label'] for d in tags_dict if d['id'] in movie['tags']]

     parsed_movie = {'radarrId': int(movie["id"]),
                     'title': movie["title"],
                     'path': os.path.join(movie["path"], movie['movieFile']['relativePath']),
                     'tmdbId': str(movie["tmdbId"]),
                     'poster': poster,
                     'fanart': fanart,
                     'audio_language': str(audio_language),
                     'sceneName': sceneName,
                     'monitored': str(bool(movie['monitored'])),
                     'year': str(movie['year']),
                     'sortTitle': movie['sortTitle'],
                     'alternativeTitles': alternativeTitles,
                     'format': format,
                     'resolution': resolution,
                     'video_codec': videoCodec,
                     'audio_codec': audioCodec,
                     'overview': overview,
                     'imdbId': imdbId,
                     'movie_file_id': int(movie['movieFile']['id']),
                     'tags': str(tags),
                     'file_size': movie['movieFile']['size']}

     if action == 'insert':
         parsed_movie['subtitles'] = '[]'
         parsed_movie['profileId'] = movie_default_profile

@@ -12,7 +12,7 @@ from utilities.path_mappings import path_mappings
 from subtitles.indexer.series import store_subtitles, series_full_scan_subtitles
 from subtitles.mass_download import episode_download_subtitles
 from app.event_handler import event_stream
-from sonarr.info import get_sonarr_info, url_sonarr
+from sonarr.info import get_sonarr_info

 from .parser import episodeParser
 from .utils import get_episodes_from_sonarr_api, get_episodesFiles_from_sonarr_api

@@ -21,10 +21,13 @@ from .utils import get_episodes_from_sonarr_api, get_episodesFiles_from_sonarr_api
 bool_map = {"True": True, "False": False}

 FEATURE_PREFIX = "SYNC_EPISODES "


 def trace(message):
     if settings.general.debug:
         logging.debug(FEATURE_PREFIX + message)


 def get_episodes_monitored_table(series_id):
     episodes_monitored = database.execute(
         select(TableEpisodes.episode_file_id, TableEpisodes.monitored)

@@ -32,7 +35,8 @@ def get_episodes_monitored_table(series_id):
         .all()
     episode_dict = dict((x, y) for x, y in episodes_monitored)
     return episode_dict


 def update_all_episodes():
     series_full_scan_subtitles()
     logging.info('BAZARR All existing episode subtitles indexed from disk.')

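get_episodes_monitored_table() builds an in-memory lookup table so the sync loop can answer monitored-status questions without one database query per episode; bool_map then turns the stringified flags back into booleans. A minimal sketch with stand-in rows:

    bool_map = {"True": True, "False": False}

    # Stand-in for database.execute(select(...)).all()
    rows = [(101, "True"), (102, "False")]

    episode_dict = dict((x, y) for x, y in rows)
    print(bool_map[episode_dict[101]])  # True, without another DB round trip
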
@@ -74,7 +78,6 @@ def sync_episodes(series_id, send_event=True):
         if item:
             episode['episodeFile'] = item[0]

     sync_monitored = settings.sonarr.sync_only_monitored_series and settings.sonarr.sync_only_monitored_episodes
     if sync_monitored:
         episodes_monitored = get_episodes_monitored_table(series_id)

@@ -122,7 +125,7 @@ def sync_episodes(series_id, send_event=True):
                 episodes_to_add.append(episodeParser(episode))
             else:
                 return

     if sync_monitored:
         # try to avoid unnecessary database calls
         if settings.general.debug:

@@ -175,7 +178,6 @@ def sync_episodes(series_id, send_event=True):

 def sync_one_episode(episode_id, defer_search=False):
     logging.debug(f'BAZARR syncing this specific episode from Sonarr: {episode_id}')
-    url = url_sonarr()
     apikey_sonarr = settings.sonarr.apikey

     # Check if there's a row in database for this episode ID

@@ -5,7 +5,6 @@ import logging
 from sqlalchemy.exc import IntegrityError

 from app.config import settings
 from sonarr.info import url_sonarr
 from subtitles.indexer.series import list_missing_subtitles
 from sonarr.rootfolder import check_sonarr_rootfolder
 from app.database import TableShows, TableLanguagesProfiles, database, insert, update, delete, select

@@ -20,10 +19,13 @@ from .utils import get_profile_list, get_tags, get_series_from_sonarr_api
 bool_map = {"True": True, "False": False}

 FEATURE_PREFIX = "SYNC_SERIES "


 def trace(message):
     if settings.general.debug:
         logging.debug(FEATURE_PREFIX + message)


 def get_series_monitored_table():
     series_monitored = database.execute(
         select(TableShows.tvdbId, TableShows.monitored))\

@@ -31,6 +33,7 @@ def get_series_monitored_table():
     series_dict = dict((x, y) for x, y in series_monitored)
     return series_dict


 def update_series(send_event=True):
     check_sonarr_rootfolder()
     apikey_sonarr = settings.sonarr.apikey

@@ -74,7 +77,7 @@ def update_series(send_event=True):
         series_monitored = get_series_monitored_table()
         skipped_count = 0
     trace(f"Starting sync for {series_count} shows")

     for i, show in enumerate(series):
         if send_event:
             show_progress(id='series_progress',

@@ -152,7 +155,7 @@ def update_series(send_event=True):
     removed_series = list(set(current_shows_db) - set(current_shows_sonarr))

     for series in removed_series:
         # try to avoid unnecessary database calls
         if settings.general.debug:
             series_title = database.execute(select(TableShows.title).where(TableShows.sonarrSeriesId == series)).first()[0]
             trace(f"Deleting {series_title}")

@@ -16,7 +16,7 @@ def _escape(in_str):


 def pp_replace(pp_command, episode, subtitles, language, language_code2, language_code3, episode_language,
                episode_language_code2, episode_language_code3, score, subtitle_id, provider, uploader,
                release_info, series_id, episode_id):
     pp_command = re.sub(r'[\'"]?{{directory}}[\'"]?', _escape(os.path.dirname(episode)), pp_command)
     pp_command = re.sub(r'[\'"]?{{episode}}[\'"]?', _escape(episode), pp_command)

@@ -270,7 +270,7 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=None):
     if not os.path.exists(file):
         logging.error(f'Video file "{file}" cannot be found for analysis')
         return None

     # if we have ffprobe available
     if ffprobe_path:
         try:

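The early return above is a guard clause: checking os.path.exists() first yields one clear log line instead of a later ffprobe failure on a missing path. Schematically (a simplified sketch, not the full function):

    import logging
    import os

    def parse_video_metadata(file):
        # Bail out early with an explicit message rather than letting the
        # downstream probe raise on a missing file.
        if not os.path.exists(file):
            logging.error(f'Video file "{file}" cannot be found for analysis')
            return None
        ...  # run the ffprobe / enzyme analysis here
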