Mirror of https://github.com/morpheus65535/bazarr.git (synced 2025-04-24 06:37:16 -04:00)
Improved stability by using Python f-strings as much as possible to prevent TypeError and improve code readability.
This commit is contained in:
parent 4521b11061
commit 2ad7ddf5a6
52 changed files with 284 additions and 294 deletions
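The whole diff applies one pattern: `+` concatenation and `str.format()` calls become f-strings. As a minimal sketch of the TypeError the commit message refers to (the names below are illustrative, not taken from Bazarr):

```python
# '+' concatenation requires both operands to already be str; any non-string
# value (here an int standing in for a database field) raises TypeError.
score = 270
try:
    label = "Score: " + score
except TypeError as err:
    print(err)  # can only concatenate str (not "int") to str

# The old code had to coerce explicitly:
label = "Score: " + str(score)

# An f-string formats any object through str()/format() implicitly, so the
# missed-coercion class of TypeErrors disappears:
label = f"Score: {score}"
print(label)  # Score: 270
```

In CPython an f-string is also generally evaluated faster than the equivalent `str.format()` or `%` expression, so the readability gain comes at no runtime cost.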
@@ -150,7 +150,7 @@ class EpisodesHistory(Resource):
 del item['external_subtitles']

 if item['score']:
-item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"
+item['score'] = f"{round((int(item['score']) * 100 / 360), 2)}%"

 # Make timestamp pretty
 if item['timestamp']:
@@ -141,7 +141,7 @@ class MoviesHistory(Resource):
 del item['external_subtitles']

 if item['score']:
-item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%"
+item['score'] = f"{round((int(item['score']) * 100 / 120), 2)}%"

 # Make timestamp pretty
 if item['timestamp']:
@@ -58,5 +58,6 @@ class SystemReleases(Resource):

 except Exception:
 logging.exception(
-'BAZARR cannot parse releases caching file: ' + os.path.join(args.config_dir, 'config', 'releases.txt'))
+f'BAZARR cannot parse releases caching file: '
+f'{os.path.join(args.config_dir, "config", "releases.txt")}')
 return marshal(filtered_releases, self.get_response_model, envelope='data')
@@ -62,7 +62,7 @@ class WebHooksPlex(Resource):
 if media_type == 'episode':
 try:
 episode_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
-r = requests.get('https://imdb.com/title/{}'.format(episode_imdb_id),
+r = requests.get(f'https://imdb.com/title/{episode_imdb_id}',
 headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
 soup = bso(r.content, "html.parser")
 script_tag = soup.find(id='__NEXT_DATA__')
@@ -34,7 +34,7 @@ def create_app():
 else:
 app.config["DEBUG"] = False

-socketio.init_app(app, path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*',
+socketio.init_app(app, path=f'{base_url.rstrip("/")}/api/socket.io', cors_allowed_origins='*',
 async_mode='threading', allow_upgrades=False, transports='polling')

 @app.errorhandler(404)
@@ -24,7 +24,7 @@ def check_releases():
 releases = []
 url_releases = 'https://api.github.com/repos/morpheus65535/Bazarr/releases?per_page=100'
 try:
-logging.debug('BAZARR getting releases from Github: {}'.format(url_releases))
+logging.debug(f'BAZARR getting releases from Github: {url_releases}')
 r = requests.get(url_releases, allow_redirects=True)
 r.raise_for_status()
 except requests.exceptions.HTTPError:
@@ -50,7 +50,7 @@ def check_releases():
 'download_link': download_link})
 with open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'w') as f:
 json.dump(releases, f)
-logging.debug('BAZARR saved {} releases to releases.txt'.format(len(r.json())))
+logging.debug(f'BAZARR saved {len(r.json())} releases to releases.txt')


 def check_if_new_update():
@@ -59,9 +59,9 @@ def check_if_new_update():
 elif settings.general.branch == 'development':
 use_prerelease = True
 else:
-logging.error('BAZARR unknown branch provided to updater: {}'.format(settings.general.branch))
+logging.error(f'BAZARR unknown branch provided to updater: {settings.general.branch}')
 return
-logging.debug('BAZARR updater is using {} branch'.format(settings.general.branch))
+logging.debug(f'BAZARR updater is using {settings.general.branch} branch')

 check_releases()

@@ -84,7 +84,7 @@ def check_if_new_update():
 release = next((item for item in data if not item["prerelease"]), None)

 if release and 'name' in release:
-logging.debug('BAZARR last release available is {}'.format(release['name']))
+logging.debug(f'BAZARR last release available is {release["name"]}')
 if deprecated_python_version():
 logging.warning('BAZARR is using a deprecated Python version, you must update Python to get latest '
 'version available.')
@@ -101,12 +101,12 @@ def check_if_new_update():

 # skip update process if latest release is v0.9.1.1 which is the latest pre-semver compatible release
 if new_version and release['name'] != 'v0.9.1.1':
-logging.debug('BAZARR newer release available and will be downloaded: {}'.format(release['name']))
+logging.debug(f'BAZARR newer release available and will be downloaded: {release["name"]}')
 download_release(url=release['download_link'])
 # rolling back from nightly to stable release
 elif current_version:
 if current_version.prerelease and not use_prerelease:
-logging.debug('BAZARR previous stable version will be downloaded: {}'.format(release['name']))
+logging.debug(f'BAZARR previous stable version will be downloaded: {release["name"]}')
 download_release(url=release['download_link'])
 else:
 logging.debug('BAZARR no newer release have been found')
@@ -122,9 +122,9 @@ def download_release(url):
 try:
 os.makedirs(update_dir, exist_ok=True)
 except Exception:
-logging.debug('BAZARR unable to create update directory {}'.format(update_dir))
+logging.debug(f'BAZARR unable to create update directory {update_dir}')
 else:
-logging.debug('BAZARR downloading release from Github: {}'.format(url))
+logging.debug(f'BAZARR downloading release from Github: {url}')
 r = requests.get(url, allow_redirects=True)
 if r:
 try:
@@ -145,7 +145,7 @@ def apply_update():

 if os.path.isdir(update_dir):
 if os.path.isfile(bazarr_zip):
-logging.debug('BAZARR is trying to unzip this release to {0}: {1}'.format(bazarr_dir, bazarr_zip))
+logging.debug(f'BAZARR is trying to unzip this release to {bazarr_dir}: {bazarr_zip}')
 try:
 with ZipFile(bazarr_zip, 'r') as archive:
 zip_root_directory = ''
@@ -195,7 +195,7 @@ def apply_update():
 def update_cleaner(zipfile, bazarr_dir, config_dir):
 with ZipFile(zipfile, 'r') as archive:
 file_in_zip = archive.namelist()
-logging.debug('BAZARR zip file contain {} directories and files'.format(len(file_in_zip)))
+logging.debug(f'BAZARR zip file contain {len(file_in_zip)} directories and files')
 separator = os.path.sep
 if os.path.sep == '\\':
 logging.debug('BAZARR upgrade leftover cleaner is running on Windows. We\'ll fix the zip file separator '
@@ -207,33 +207,33 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
 logging.debug('BAZARR upgrade leftover cleaner is running on something else than Windows. The zip file '
 'separator are fine.')

-dir_to_ignore = ['^.' + separator,
-'^bin' + separator,
-'^venv' + separator,
-'^WinPython' + separator,
-separator + '__pycache__' + separator + '$']
+dir_to_ignore = [f'^.{separator}',
+f'^bin{separator}',
+f'^venv{separator}',
+f'^WinPython{separator}',
+f'{separator}__pycache__{separator}$']
 if os.path.abspath(bazarr_dir).lower() == os.path.abspath(config_dir).lower():
 # for users who installed Bazarr inside the config directory (ie: `%programdata%\Bazarr` on windows)
-dir_to_ignore.append('^backup' + separator)
-dir_to_ignore.append('^cache' + separator)
-dir_to_ignore.append('^config' + separator)
-dir_to_ignore.append('^db' + separator)
-dir_to_ignore.append('^log' + separator)
-dir_to_ignore.append('^restore' + separator)
-dir_to_ignore.append('^update' + separator)
+dir_to_ignore.append(f'^backup{separator}')
+dir_to_ignore.append(f'^cache{separator}')
+dir_to_ignore.append(f'^config{separator}')
+dir_to_ignore.append(f'^db{separator}')
+dir_to_ignore.append(f'^log{separator}')
+dir_to_ignore.append(f'^restore{separator}')
+dir_to_ignore.append(f'^update{separator}')
 elif os.path.abspath(bazarr_dir).lower() in os.path.abspath(config_dir).lower():
 # when config directory is a child of Bazarr installation directory
-dir_to_ignore.append('^' + os.path.relpath(config_dir, bazarr_dir) + separator)
+dir_to_ignore.append(f'^{os.path.relpath(config_dir, bazarr_dir)}{separator}')
 dir_to_ignore_regex_string = '(?:% s)' % '|'.join(dir_to_ignore)
 logging.debug(f'BAZARR upgrade leftover cleaner will ignore directories matching this '
 f'regex: {dir_to_ignore_regex_string}')
 dir_to_ignore_regex = re.compile(dir_to_ignore_regex_string)

 file_to_ignore = ['nssm.exe', '7za.exe', 'unins000.exe', 'unins000.dat']
-logging.debug('BAZARR upgrade leftover cleaner will ignore those files: {}'.format(', '.join(file_to_ignore)))
+logging.debug(f'BAZARR upgrade leftover cleaner will ignore those files: {", ".join(file_to_ignore)}')
 extension_to_ignore = ['.pyc']
-logging.debug('BAZARR upgrade leftover cleaner will ignore files with those extensions: '
-'{}'.format(', '.join(extension_to_ignore)))
+logging.debug(
+f'BAZARR upgrade leftover cleaner will ignore files with those extensions: {", ".join(extension_to_ignore)}')

 file_on_disk = []
 folder_list = []
@@ -256,14 +256,14 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
 filepath = os.path.join(current_dir, file)
 if not dir_to_ignore_regex.findall(filepath):
 file_on_disk.append(filepath)
-logging.debug('BAZARR directory contain {} files'.format(len(file_on_disk)))
-logging.debug('BAZARR directory contain {} directories'.format(len(folder_list)))
+logging.debug(f'BAZARR directory contain {len(file_on_disk)} files')
+logging.debug(f'BAZARR directory contain {len(folder_list)} directories')
 file_on_disk += folder_list
-logging.debug('BAZARR directory contain {} directories and files'.format(len(file_on_disk)))
+logging.debug(f'BAZARR directory contain {len(file_on_disk)} directories and files')

 file_to_remove = list(set(file_on_disk) - set(file_in_zip))
-logging.debug('BAZARR will delete {} directories and files'.format(len(file_to_remove)))
-logging.debug('BAZARR will delete this: {}'.format(', '.join(file_to_remove)))
+logging.debug(f'BAZARR will delete {len(file_to_remove)} directories and files')
+logging.debug(f'BAZARR will delete this: {", ".join(file_to_remove)}')

 for file in file_to_remove:
 filepath = os.path.join(bazarr_dir, file)
@@ -273,4 +273,4 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
 else:
 os.remove(filepath)
 except Exception:
-logging.debug('BAZARR upgrade leftover cleaner cannot delete {}'.format(filepath))
+logging.debug(f'BAZARR upgrade leftover cleaner cannot delete {filepath}')
@@ -346,7 +346,7 @@ def convert_ini_to_yaml(config_file):
 output_dict[section].update({item[0]: item[1]})
 with open(os.path.join(os.path.dirname(config_file), 'config.yaml'), 'w') as file:
 yaml.dump(output_dict, file)
-os.rename(config_file, config_file + '.old')
+os.rename(config_file, f'{config_file}.old')


 config_yaml_file = os.path.join(args.config_dir, 'config', 'config.yaml')
@@ -761,10 +761,10 @@ def configure_captcha_func():
 def configure_proxy_func():
 if settings.proxy.type:
 if settings.proxy.username != '' and settings.proxy.password != '':
-proxy = settings.proxy.type + '://' + quote_plus(settings.proxy.username) + ':' + \
-quote_plus(settings.proxy.password) + '@' + settings.proxy.url + ':' + str(settings.proxy.port)
+proxy = (f'{settings.proxy.type}://{quote_plus(settings.proxy.username)}:'
+f'{quote_plus(settings.proxy.password)}@{settings.proxy.url}:{settings.proxy.port}')
 else:
-proxy = settings.proxy.type + '://' + settings.proxy.url + ':' + str(settings.proxy.port)
+proxy = f'{settings.proxy.type}://{settings.proxy.url}:{settings.proxy.port}'
 os.environ['HTTP_PROXY'] = str(proxy)
 os.environ['HTTPS_PROXY'] = str(proxy)
 exclude = ','.join(settings.proxy.exclude)
@@ -330,11 +330,11 @@ def get_exclusion_clause(exclusion_type):
 if exclusion_type == 'series':
 tagsList = settings.sonarr.excluded_tags
 for tag in tagsList:
-where_clause.append(~(TableShows.tags.contains("\'" + tag + "\'")))
+where_clause.append(~(TableShows.tags.contains(f"\'{tag}\'")))
 else:
 tagsList = settings.radarr.excluded_tags
 for tag in tagsList:
-where_clause.append(~(TableMovies.tags.contains("\'" + tag + "\'")))
+where_clause.append(~(TableMovies.tags.contains(f"\'{tag}\'")))

 if exclusion_type == 'series':
 monitoredOnly = settings.sonarr.only_monitored
@@ -160,7 +160,7 @@ class PatchedTimedRotatingFileHandler(TimedRotatingFileHandler):
 result = []
 # See bpo-44753: Don't use the extension when computing the prefix.
 n, e = os.path.splitext(baseName)
-prefix = n + '.'
+prefix = f'{n}.'
 plen = len(prefix)
 for fileName in fileNames:
 if self.namer is None:
@@ -24,7 +24,7 @@ def update_notifier():
 for x in results['schemas']:
 if x['service_name'] not in notifiers_in_db:
 notifiers_added.append({'name': str(x['service_name']), 'enabled': 0})
-logging.debug('Adding new notifier agent: ' + str(x['service_name']))
+logging.debug(f'Adding new notifier agent: {x["service_name"]}')
 else:
 notifiers_kept.append(x['service_name'])

@@ -60,7 +60,7 @@ def send_notifications(sonarr_series_id, sonarr_episode_id, message):
 series_title = series.title
 series_year = series.year
 if series_year not in [None, '', '0']:
-series_year = ' ({})'.format(series_year)
+series_year = f' ({series_year})'
 else:
 series_year = ''
 episode = database.execute(
@@ -80,8 +80,7 @@ def send_notifications(sonarr_series_id, sonarr_episode_id, message):

 apobj.notify(
 title='Bazarr notification',
-body="{}{} - S{:02d}E{:02d} - {} : {}".format(series_title, series_year, episode.season, episode.episode,
-episode.title, message),
+body=f"{series_title}{series_year} - S{episode.season:02d}E{episode.episode:02d} - {episode.title} : {message}",
 )


@@ -98,7 +97,7 @@ def send_notifications_movie(radarr_id, message):
 movie_title = movie.title
 movie_year = movie.year
 if movie_year not in [None, '', '0']:
-movie_year = ' ({})'.format(movie_year)
+movie_year = f' ({movie_year})'
 else:
 movie_year = ''

@@ -112,5 +111,5 @@ def send_notifications_movie(radarr_id, message):

 apobj.notify(
 title='Bazarr notification',
-body="{}{} : {}".format(movie_title, movie_year, message),
+body=f"{movie_title}{movie_year} : {message}",
 )
@@ -127,10 +127,10 @@ class Scheduler:
 if day == "*":
 text = "everyday"
 else:
-text = "every " + day_name[int(day)]
+text = f"every {day_name[int(day)]}"

 if hour != "*":
-text += " at " + hour + ":00"
+text += f" at {hour}:00"

 return text

@@ -149,7 +149,7 @@ class Scheduler:
 running = False

 if isinstance(job.trigger, IntervalTrigger):
-interval = "every " + get_time_from_interval(job.trigger.__getstate__()['interval'])
+interval = f"every {get_time_from_interval(job.trigger.__getstate__()['interval'])}"
 task_list.append({'name': job.name, 'interval': interval, 'next_run_in': next_run,
 'next_run_time': next_run, 'job_id': job.id, 'job_running': running})
 elif isinstance(job.trigger, CronTrigger):
@@ -77,13 +77,13 @@ class Server:
 try:
 self.server.close()
 except Exception as e:
-logging.error('BAZARR Cannot stop Waitress: ' + repr(e))
+logging.error(f'BAZARR Cannot stop Waitress: {repr(e)}')
 else:
 database.close()
 try:
 stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
 except Exception as e:
-logging.error('BAZARR Cannot create stop file: ' + repr(e))
+logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
 else:
 logging.info('Bazarr is being shutdown...')
 stop_file.write(str(''))
@@ -94,13 +94,13 @@ class Server:
 try:
 self.server.close()
 except Exception as e:
-logging.error('BAZARR Cannot stop Waitress: ' + repr(e))
+logging.error(f'BAZARR Cannot stop Waitress: {repr(e)}')
 else:
 database.close()
 try:
 restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
 except Exception as e:
-logging.error('BAZARR Cannot create restart file: ' + repr(e))
+logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
 else:
 logging.info('Bazarr is being restarted...')
 restart_file.write(str(''))
@@ -45,8 +45,9 @@ class SonarrSignalrClientLegacy:

 def start(self):
 if get_sonarr_info.is_legacy():
-logging.warning('BAZARR can only sync from Sonarr v3 SignalR feed to get real-time update. You should '
-'consider upgrading your version({}).'.format(get_sonarr_info.version()))
+logging.warning(
+f'BAZARR can only sync from Sonarr v3 SignalR feed to get real-time update. You should consider '
+f'upgrading your version({get_sonarr_info.version()}).')
 else:
 self.connected = False
 event_stream(type='badges')
@@ -98,7 +99,7 @@ class SonarrSignalrClientLegacy:

 def configure(self):
 self.apikey_sonarr = settings.sonarr.apikey
-self.connection = Connection(url_sonarr() + "/signalr", self.session)
+self.connection = Connection(f"{url_sonarr()}/signalr", self.session)
 self.connection.qs = {'apikey': self.apikey_sonarr}
 sonarr_hub = self.connection.register_hub('')  # Sonarr doesn't use named hub

@@ -158,7 +159,7 @@ class SonarrSignalrClient:
 def configure(self):
 self.apikey_sonarr = settings.sonarr.apikey
 self.connection = HubConnectionBuilder() \
-.with_url(url_sonarr() + "/signalr/messages?access_token={}".format(self.apikey_sonarr),
+.with_url(f"{url_sonarr()}/signalr/messages?access_token={self.apikey_sonarr}",
 options={
 "verify_ssl": False,
 "headers": headers
@@ -225,7 +226,7 @@ class RadarrSignalrClient:
 def configure(self):
 self.apikey_radarr = settings.radarr.apikey
 self.connection = HubConnectionBuilder() \
-.with_url(url_radarr() + "/signalr/messages?access_token={}".format(self.apikey_radarr),
+.with_url(f"{url_radarr()}/signalr/messages?access_token={self.apikey_radarr}",
 options={
 "verify_ssl": False,
 "headers": headers
@@ -306,7 +307,7 @@ def dispatcher(data):
 update_one_movie(movie_id=media_id, action=action,
 defer_search=settings.radarr.defer_search_signalr)
 except Exception as e:
-logging.debug('BAZARR an exception occurred while parsing SignalR feed: {}'.format(repr(e)))
+logging.debug(f'BAZARR an exception occurred while parsing SignalR feed: {repr(e)}')
 finally:
 event_stream(type='badges')
 return
@@ -110,11 +110,9 @@ def series_images(url):
 apikey = settings.sonarr.apikey
 baseUrl = settings.sonarr.base_url
 if get_sonarr_info.is_legacy():
-url_image = (url_sonarr() + '/api/' + url.lstrip(baseUrl) + '?apikey=' +
-apikey).replace('poster-250', 'poster-500')
+url_image = f'{url_sonarr()}/api/{url.lstrip(baseUrl)}?apikey={apikey}'.replace('poster-250', 'poster-500')
 else:
-url_image = (url_sonarr() + '/api/v3/' + url.lstrip(baseUrl) + '?apikey=' +
-apikey).replace('poster-250', 'poster-500')
+url_image = f'{url_sonarr()}/api/v3/{url.lstrip(baseUrl)}?apikey={apikey}'.replace('poster-250', 'poster-500')
 try:
 req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
 except Exception:
@@ -129,9 +127,9 @@ def movies_images(url):
 apikey = settings.radarr.apikey
 baseUrl = settings.radarr.base_url
 if get_radarr_info.is_legacy():
-url_image = url_radarr() + '/api/' + url.lstrip(baseUrl) + '?apikey=' + apikey
+url_image = f'{url_radarr()}/api/{url.lstrip(baseUrl)}?apikey={apikey}'
 else:
-url_image = url_radarr() + '/api/v3/' + url.lstrip(baseUrl) + '?apikey=' + apikey
+url_image = f'{url_radarr()}/api/v3/{url.lstrip(baseUrl)}?apikey={apikey}'
 try:
 req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
 except Exception:
@@ -171,7 +169,7 @@ def configured():
 def proxy(protocol, url):
 if protocol.lower() not in ['http', 'https']:
 return dict(status=False, error='Unsupported protocol')
-url = protocol + '://' + unquote(url)
+url = f'{protocol}://{unquote(url)}'
 params = request.args
 try:
 result = requests.get(url, params, allow_redirects=False, verify=False, timeout=5, headers=headers)
@@ -28,7 +28,7 @@ startTime = time.time()
 restore_from_backup()

 # set subliminal_patch user agent
-os.environ["SZ_USER_AGENT"] = "Bazarr/{}".format(os.environ["BAZARR_VERSION"])
+os.environ["SZ_USER_AGENT"] = f"Bazarr/{os.environ['BAZARR_VERSION']}"

 # Check if args.config_dir exist
 if not os.path.exists(args.config_dir):
@@ -96,7 +96,7 @@ if not args.no_update:
 pip_command.insert(4, '--user')
 subprocess.check_output(pip_command, stderr=subprocess.STDOUT)
 except subprocess.CalledProcessError as e:
-logging.exception('BAZARR requirements.txt installation result: {}'.format(e.stdout))
+logging.exception(f'BAZARR requirements.txt installation result: {e.stdout}')
 os._exit(1)
 else:
 logging.info('BAZARR requirements installed.')
@@ -104,7 +104,7 @@ if not args.no_update:
 try:
 restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
 except Exception as e:
-logging.error('BAZARR Cannot create restart file: ' + repr(e))
+logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
 else:
 logging.info('Bazarr is being restarted...')
 restart_file.write(str(''))
@@ -47,7 +47,7 @@ if args.create_db_revision:
 try:
 stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
 except Exception as e:
-logging.error('BAZARR Cannot create stop file: ' + repr(e))
+logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
 else:
 create_db_revision(app)
 logging.info('Bazarr is being shutdown...')
@@ -13,15 +13,16 @@ def browse_radarr_filesystem(path='#'):
 path = ''

 if get_radarr_info.is_legacy():
-url_radarr_api_filesystem = url_radarr() + "/api/filesystem?path=" + path + \
-"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
-settings.radarr.apikey
+url_radarr_api_filesystem = (f"{url_radarr()}/api/filesystem?path={path}&"
+f"allowFoldersWithoutTrailingSlashes=true&includeFiles=false&"
+f"apikey={settings.radarr.apikey}")
 else:
-url_radarr_api_filesystem = url_radarr() + "/api/v3/filesystem?path=" + path + \
-"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
-settings.radarr.apikey
+url_radarr_api_filesystem = (f"{url_radarr()}/api/v3/filesystem?path={path}&"
+f"allowFoldersWithoutTrailingSlashes=true&includeFiles=false&"
+f"apikey={settings.radarr.apikey}")
 try:
-r = requests.get(url_radarr_api_filesystem, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
+r = requests.get(url_radarr_api_filesystem, timeout=int(settings.radarr.http_timeout), verify=False,
+headers=headers)
 r.raise_for_status()
 except requests.exceptions.HTTPError:
 logging.exception("BAZARR Error trying to get series from Radarr. Http error.")
@@ -28,23 +28,25 @@ class GetRadarrInfo:
 radarr_version = ''
 if settings.general.use_radarr:
 try:
-rv = url_radarr() + "/api/system/status?apikey=" + settings.radarr.apikey
-radarr_json = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers).json()
+rv = f"{url_radarr()}/api/system/status?apikey={settings.radarr.apikey}"
+radarr_json = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False,
+headers=headers).json()
 if 'version' in radarr_json:
 radarr_version = radarr_json['version']
 else:
 raise json.decoder.JSONDecodeError
 except json.decoder.JSONDecodeError:
 try:
-rv = url_radarr() + "/api/v3/system/status?apikey=" + settings.radarr.apikey
-radarr_version = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers).json()['version']
+rv = f"{url_radarr()}/api/v3/system/status?apikey={settings.radarr.apikey}"
+radarr_version = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False,
+headers=headers).json()['version']
 except json.decoder.JSONDecodeError:
 logging.debug('BAZARR cannot get Radarr version')
 radarr_version = 'unknown'
 except Exception:
 logging.debug('BAZARR cannot get Radarr version')
 radarr_version = 'unknown'
-logging.debug('BAZARR got this Radarr version from its API: {}'.format(radarr_version))
+logging.debug(f'BAZARR got this Radarr version from its API: {radarr_version}')
 region.set("radarr_version", radarr_version)
 return radarr_version

@@ -83,7 +85,7 @@ def url_radarr():
 if settings.radarr.base_url == '':
 settings.radarr.base_url = "/"
 if not settings.radarr.base_url.startswith("/"):
-settings.radarr.base_url = "/" + settings.radarr.base_url
+settings.radarr.base_url = f"/{settings.radarr.base_url}"
 if settings.radarr.base_url.endswith("/"):
 settings.radarr.base_url = settings.radarr.base_url[:-1]

@@ -11,9 +11,9 @@ from constants import headers
 def notify_radarr(radarr_id):
 try:
 if get_radarr_info.is_legacy():
-url = url_radarr() + "/api/command?apikey=" + settings.radarr.apikey
+url = f"{url_radarr()}/api/command?apikey={settings.radarr.apikey}"
 else:
-url = url_radarr() + "/api/v3/command?apikey=" + settings.radarr.apikey
+url = f"{url_radarr()}/api/v3/command?apikey={settings.radarr.apikey}"
 data = {
 'name': 'RescanMovie',
 'movieId': int(radarr_id)
@@ -17,9 +17,9 @@ def get_radarr_rootfolder():

 # Get root folder data from Radarr
 if get_radarr_info.is_legacy():
-url_radarr_api_rootfolder = url_radarr() + "/api/rootfolder?apikey=" + apikey_radarr
+url_radarr_api_rootfolder = f"{url_radarr()}/api/rootfolder?apikey={apikey_radarr}"
 else:
-url_radarr_api_rootfolder = url_radarr() + "/api/v3/rootfolder?apikey=" + apikey_radarr
+url_radarr_api_rootfolder = f"{url_radarr()}/api/v3/rootfolder?apikey={apikey_radarr}"

 try:
 rootfolder = requests.get(url_radarr_api_rootfolder, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
@@ -155,7 +155,7 @@ def update_movies(send_event=True):


 def update_one_movie(movie_id, action, defer_search=False):
-logging.debug('BAZARR syncing this specific movie from Radarr: {}'.format(movie_id))
+logging.debug(f'BAZARR syncing this specific movie from Radarr: {movie_id}')

 # Check if there's a row in database for this movie ID
 existing_movie = database.execute(
@@ -175,8 +175,9 @@ def update_one_movie(movie_id, action, defer_search=False):
 f"because of {e}")
 else:
 event_stream(type='movie', action='delete', payload=int(movie_id))
-logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
-existing_movie.path)))
+logging.debug(
+f'BAZARR deleted this movie from the database: '
+f'{path_mappings.path_replace_movie(existing_movie.path)}')
 return

 movie_default_enabled = settings.general.movie_default_enabled
@@ -224,8 +225,8 @@ def update_one_movie(movie_id, action, defer_search=False):
 f"of {e}")
 else:
 event_stream(type='movie', action='delete', payload=int(movie_id))
-logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
-existing_movie.path)))
+logging.debug(
+f'BAZARR deleted this movie from the database:{path_mappings.path_replace_movie(existing_movie.path)}')
 return

 # Update existing movie in DB
@@ -240,8 +241,8 @@ def update_one_movie(movie_id, action, defer_search=False):
 f"of {e}")
 else:
 event_stream(type='movie', action='update', payload=int(movie_id))
-logging.debug('BAZARR updated this movie into the database:{}'.format(path_mappings.path_replace_movie(
-movie['path'])))
+logging.debug(
+f'BAZARR updated this movie into the database:{path_mappings.path_replace_movie(movie["path"])}')

 # Insert new movie in DB
 elif movie and not existing_movie:
@@ -254,19 +255,19 @@ def update_one_movie(movie_id, action, defer_search=False):
 f"of {e}")
 else:
 event_stream(type='movie', action='update', payload=int(movie_id))
-logging.debug('BAZARR inserted this movie into the database:{}'.format(path_mappings.path_replace_movie(
-movie['path'])))
+logging.debug(
+f'BAZARR inserted this movie into the database:{path_mappings.path_replace_movie(movie["path"])}')

 # Storing existing subtitles
-logging.debug('BAZARR storing subtitles for this movie: {}'.format(path_mappings.path_replace_movie(
-movie['path'])))
+logging.debug(f'BAZARR storing subtitles for this movie: {path_mappings.path_replace_movie(movie["path"])}')
 store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))

 # Downloading missing subtitles
 if defer_search:
-logging.debug('BAZARR searching for missing subtitles is deferred until scheduled task execution for this '
-'movie: {}'.format(path_mappings.path_replace_movie(movie['path'])))
+logging.debug(
+f'BAZARR searching for missing subtitles is deferred until scheduled task execution for this movie: '
+f'{path_mappings.path_replace_movie(movie["path"])}')
 else:
-logging.debug('BAZARR downloading missing subtitles for this movie: {}'.format(path_mappings.path_replace_movie(
-movie['path'])))
+logging.debug(
+f'BAZARR downloading missing subtitles for this movie: {path_mappings.path_replace_movie(movie["path"])}')
 movies_download_subtitles(movie_id)
@@ -25,7 +25,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
 overview = ""
 try:
 poster_big = movie['images'][0]['url']
-poster = os.path.splitext(poster_big)[0] + '-500' + os.path.splitext(poster_big)[1]
+poster = f'{os.path.splitext(poster_big)[0]}-500{os.path.splitext(poster_big)[1]}'
 except Exception:
 poster = ""
 try:
@@ -56,7 +56,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
 except Exception:
 format = movie['movieFile']['quality']['quality']['name']
 try:
-resolution = str(movie['movieFile']['quality']['quality']['resolution']) + 'p'
+resolution = f'{movie["movieFile"]["quality"]["quality"]["resolution"]}p'
 except Exception:
 resolution = None

@@ -13,9 +13,9 @@ def get_profile_list():
 profiles_list = []
 # Get profiles data from radarr
 if get_radarr_info.is_legacy():
-url_radarr_api_movies = url_radarr() + "/api/profile?apikey=" + apikey_radarr
+url_radarr_api_movies = f"{url_radarr()}/api/profile?apikey={apikey_radarr}"
 else:
-url_radarr_api_movies = url_radarr() + "/api/v3/qualityprofile?apikey=" + apikey_radarr
+url_radarr_api_movies = f"{url_radarr()}/api/v3/qualityprofile?apikey={apikey_radarr}"

 try:
 profiles_json = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
@@ -45,9 +45,9 @@ def get_tags():

 # Get tags data from Radarr
 if get_radarr_info.is_legacy():
-url_radarr_api_series = url_radarr() + "/api/tag?apikey=" + apikey_radarr
+url_radarr_api_series = f"{url_radarr()}/api/tag?apikey={apikey_radarr}"
 else:
-url_radarr_api_series = url_radarr() + "/api/v3/tag?apikey=" + apikey_radarr
+url_radarr_api_series = f"{url_radarr()}/api/v3/tag?apikey={apikey_radarr}"

 try:
 tagsDict = requests.get(url_radarr_api_series, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
@@ -72,11 +72,9 @@ def get_tags():

 def get_movies_from_radarr_api(url, apikey_radarr, radarr_id=None):
 if get_radarr_info.is_legacy():
-url_radarr_api_movies = url + "/api/movie" + ("/{}".format(radarr_id) if radarr_id else "") + "?apikey=" + \
-apikey_radarr
+url_radarr_api_movies = f'{url}/api/movie{f"/{radarr_id}" if radarr_id else ""}?apikey={apikey_radarr}'
 else:
-url_radarr_api_movies = url + "/api/v3/movie" + ("/{}".format(radarr_id) if radarr_id else "") + "?apikey=" + \
-apikey_radarr
+url_radarr_api_movies = f'{url}/api/v3/movie{f"/{radarr_id}" if radarr_id else ""}?apikey={apikey_radarr}'

 try:
 r = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
@@ -12,15 +12,16 @@ def browse_sonarr_filesystem(path='#'):
 if path == '#':
 path = ''
 if get_sonarr_info.is_legacy():
-url_sonarr_api_filesystem = url_sonarr() + "/api/filesystem?path=" + path + \
-"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
-settings.sonarr.apikey
+url_sonarr_api_filesystem = (f"{url_sonarr()}/api/filesystem?path={path}&"
+f"allowFoldersWithoutTrailingSlashes=true&includeFiles=false&"
+f"apikey={settings.sonarr.apikey}")
 else:
-url_sonarr_api_filesystem = url_sonarr() + "/api/v3/filesystem?path=" + path + \
-"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
-settings.sonarr.apikey
+url_sonarr_api_filesystem = (f"{url_sonarr()}/api/v3/filesystem?path={path}&"
+f"allowFoldersWithoutTrailingSlashes=true&includeFiles=false&"
+f"apikey={settings.sonarr.apikey}")
 try:
-r = requests.get(url_sonarr_api_filesystem, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
+r = requests.get(url_sonarr_api_filesystem, timeout=int(settings.sonarr.http_timeout), verify=False,
+headers=headers)
 r.raise_for_status()
 except requests.exceptions.HTTPError:
 logging.exception("BAZARR Error trying to get series from Sonarr. Http error.")
@@ -28,23 +28,25 @@ class GetSonarrInfo:
 sonarr_version = ''
 if settings.general.use_sonarr:
 try:
-sv = url_sonarr() + "/api/system/status?apikey=" + settings.sonarr.apikey
-sonarr_json = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers).json()
+sv = f"{url_sonarr()}/api/system/status?apikey={settings.sonarr.apikey}"
+sonarr_json = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False,
+headers=headers).json()
 if 'version' in sonarr_json:
 sonarr_version = sonarr_json['version']
 else:
 raise json.decoder.JSONDecodeError
 except json.decoder.JSONDecodeError:
 try:
-sv = url_sonarr() + "/api/v3/system/status?apikey=" + settings.sonarr.apikey
-sonarr_version = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers).json()['version']
+sv = f"{url_sonarr()}/api/v3/system/status?apikey={settings.sonarr.apikey}"
+sonarr_version = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False,
+headers=headers).json()['version']
 except json.decoder.JSONDecodeError:
 logging.debug('BAZARR cannot get Sonarr version')
 sonarr_version = 'unknown'
 except Exception:
 logging.debug('BAZARR cannot get Sonarr version')
 sonarr_version = 'unknown'
-logging.debug('BAZARR got this Sonarr version from its API: {}'.format(sonarr_version))
+logging.debug(f'BAZARR got this Sonarr version from its API: {sonarr_version}')
 region.set("sonarr_version", sonarr_version)
 return sonarr_version

@@ -83,7 +85,7 @@ def url_sonarr():
 if settings.sonarr.base_url == '':
 settings.sonarr.base_url = "/"
 if not settings.sonarr.base_url.startswith("/"):
-settings.sonarr.base_url = "/" + settings.sonarr.base_url
+settings.sonarr.base_url = f"/{settings.sonarr.base_url}"
 if settings.sonarr.base_url.endswith("/"):
 settings.sonarr.base_url = settings.sonarr.base_url[:-1]

@@ -11,9 +11,9 @@ from constants import headers
 def notify_sonarr(sonarr_series_id):
 try:
 if get_sonarr_info.is_legacy():
-url = url_sonarr() + "/api/command?apikey=" + settings.sonarr.apikey
+url = f"{url_sonarr()}/api/command?apikey={settings.sonarr.apikey}"
 else:
-url = url_sonarr() + "/api/v3/command?apikey=" + settings.sonarr.apikey
+url = f"{url_sonarr()}/api/v3/command?apikey={settings.sonarr.apikey}"
 data = {
 'name': 'RescanSeries',
 'seriesId': int(sonarr_series_id)
@@ -17,9 +17,9 @@ def get_sonarr_rootfolder():

 # Get root folder data from Sonarr
 if get_sonarr_info.is_legacy():
-url_sonarr_api_rootfolder = url_sonarr() + "/api/rootfolder?apikey=" + apikey_sonarr
+url_sonarr_api_rootfolder = f"{url_sonarr()}/api/rootfolder?apikey={apikey_sonarr}"
 else:
-url_sonarr_api_rootfolder = url_sonarr() + "/api/v3/rootfolder?apikey=" + apikey_sonarr
+url_sonarr_api_rootfolder = f"{url_sonarr()}/api/v3/rootfolder?apikey={apikey_sonarr}"

 try:
 rootfolder = requests.get(url_sonarr_api_rootfolder, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
@@ -130,7 +130,7 @@ def sync_episodes(series_id, send_event=True):


 def sync_one_episode(episode_id, defer_search=False):
-logging.debug('BAZARR syncing this specific episode from Sonarr: {}'.format(episode_id))
+logging.debug(f'BAZARR syncing this specific episode from Sonarr: {episode_id}')
 url = url_sonarr()
 apikey_sonarr = settings.sonarr.apikey

@@ -173,8 +173,8 @@ def sync_one_episode(episode_id, defer_search=False):
 logging.error(f"BAZARR cannot delete episode {existing_episode.path} because of {e}")
 else:
 event_stream(type='episode', action='delete', payload=int(episode_id))
-logging.debug('BAZARR deleted this episode from the database:{}'.format(path_mappings.path_replace(
-existing_episode['path'])))
+logging.debug(
+f'BAZARR deleted this episode from the database:{path_mappings.path_replace(existing_episode["path"])}')
 return

 # Update existing episodes in DB
@@ -188,8 +188,8 @@ def sync_one_episode(episode_id, defer_search=False):
 logging.error(f"BAZARR cannot update episode {episode['path']} because of {e}")
 else:
 event_stream(type='episode', action='update', payload=int(episode_id))
-logging.debug('BAZARR updated this episode into the database:{}'.format(path_mappings.path_replace(
-episode['path'])))
+logging.debug(
+f'BAZARR updated this episode into the database:{path_mappings.path_replace(episode["path"])}')

 # Insert new episodes in DB
 elif episode and not existing_episode:
@@ -201,19 +201,19 @@ def sync_one_episode(episode_id, defer_search=False):
 logging.error(f"BAZARR cannot insert episode {episode['path']} because of {e}")
 else:
 event_stream(type='episode', action='update', payload=int(episode_id))
-logging.debug('BAZARR inserted this episode into the database:{}'.format(path_mappings.path_replace(
-episode['path'])))
+logging.debug(
+f'BAZARR inserted this episode into the database:{path_mappings.path_replace(episode["path"])}')

 # Storing existing subtitles
-logging.debug('BAZARR storing subtitles for this episode: {}'.format(path_mappings.path_replace(
-episode['path'])))
+logging.debug(f'BAZARR storing subtitles for this episode: {path_mappings.path_replace(episode["path"])}')
 store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))

 # Downloading missing subtitles
 if defer_search:
-logging.debug('BAZARR searching for missing subtitles is deferred until scheduled task execution for this '
-'episode: {}'.format(path_mappings.path_replace(episode['path'])))
+logging.debug(
+f'BAZARR searching for missing subtitles is deferred until scheduled task execution for this episode: '
+f'{path_mappings.path_replace(episode["path"])}')
 else:
-logging.debug('BAZARR downloading missing subtitles for this episode: {}'.format(path_mappings.path_replace(
-episode['path'])))
+logging.debug(
+f'BAZARR downloading missing subtitles for this episode: {path_mappings.path_replace(episode["path"])}')
 episode_download_subtitles(episode_id)
@@ -18,7 +18,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
 for image in show['images']:
 if image['coverType'] == 'poster':
 poster_big = image['url'].split('?')[0]
-poster = os.path.splitext(poster_big)[0] + '-250' + os.path.splitext(poster_big)[1]
+poster = f'{os.path.splitext(poster_big)[0]}-250{os.path.splitext(poster_big)[1]}'

 if image['coverType'] == 'fanart':
 fanart = image['url'].split('?')[0]
@@ -144,7 +144,7 @@ def episodeParser(episode):
 except Exception:
 video_format = episode['episodeFile']['quality']['quality']['name']
 try:
-video_resolution = str(episode['episodeFile']['quality']['quality']['resolution']) + 'p'
+video_resolution = f'{episode["episodeFile"]["quality"]["quality"]["resolution"]}p'
 except Exception:
 video_resolution = None

@@ -117,7 +117,7 @@ def update_series(send_event=True):


 def update_one_series(series_id, action):
-logging.debug('BAZARR syncing this specific series from Sonarr: {}'.format(series_id))
+logging.debug(f'BAZARR syncing this specific series from Sonarr: {series_id}')

 # Check if there's a row in database for this series ID
 existing_series = database.execute(
@@ -180,8 +180,7 @@ def update_one_series(series_id, action):
 else:
 sync_episodes(series_id=int(series_id), send_event=False)
 event_stream(type='series', action='update', payload=int(series_id))
-logging.debug('BAZARR updated this series into the database:{}'.format(path_mappings.path_replace(
-series['path'])))
+logging.debug(f'BAZARR updated this series into the database:{path_mappings.path_replace(series["path"])}')

 # Insert new series in DB
 elif action == 'updated' and not existing_series:
@@ -193,5 +192,4 @@ def update_one_series(series_id, action):
 logging.error(f"BAZARR cannot insert series {series['path']} because of {e}")
 else:
 event_stream(type='series', action='update', payload=int(series_id))
-logging.debug('BAZARR inserted this series into the database:{}'.format(path_mappings.path_replace(
-series['path'])))
+logging.debug(f'BAZARR inserted this series into the database:{path_mappings.path_replace(series["path"])}')
@@ -14,15 +14,16 @@ def get_profile_list():

 # Get profiles data from Sonarr
 if get_sonarr_info.is_legacy():
-url_sonarr_api_series = url_sonarr() + "/api/profile?apikey=" + apikey_sonarr
+url_sonarr_api_series = f"{url_sonarr()}/api/profile?apikey={apikey_sonarr}"
 else:
 if not get_sonarr_info.version().startswith('3.'):
 # return an empty list when using Sonarr >= v4 that does not support series languages profiles anymore
 return profiles_list
-url_sonarr_api_series = url_sonarr() + "/api/v3/languageprofile?apikey=" + apikey_sonarr
+url_sonarr_api_series = f"{url_sonarr()}/api/v3/languageprofile?apikey={apikey_sonarr}"

 try:
-profiles_json = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
+profiles_json = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False,
+headers=headers)
 except requests.exceptions.ConnectionError:
 logging.exception("BAZARR Error trying to get profiles from Sonarr. Connection Error.")
 return None
@@ -50,9 +51,9 @@ def get_tags():

 # Get tags data from Sonarr
 if get_sonarr_info.is_legacy():
-url_sonarr_api_series = url_sonarr() + "/api/tag?apikey=" + apikey_sonarr
+url_sonarr_api_series = f"{url_sonarr()}/api/tag?apikey={apikey_sonarr}"
 else:
-url_sonarr_api_series = url_sonarr() + "/api/v3/tag?apikey=" + apikey_sonarr
+url_sonarr_api_series = f"{url_sonarr()}/api/v3/tag?apikey={apikey_sonarr}"

 try:
 tagsDict = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
@@ -70,8 +71,8 @@ def get_tags():


 def get_series_from_sonarr_api(url, apikey_sonarr, sonarr_series_id=None):
-url_sonarr_api_series = url + "/api/{0}series/{1}?apikey={2}".format(
-'' if get_sonarr_info.is_legacy() else 'v3/', sonarr_series_id if sonarr_series_id else "", apikey_sonarr)
+url_sonarr_api_series = (f"{url}/api/{'' if get_sonarr_info.is_legacy() else 'v3/'}series/"
+f"{sonarr_series_id if sonarr_series_id else ''}?apikey={apikey_sonarr}")
 try:
 r = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
 r.raise_for_status()
@@ -99,11 +100,11 @@ def get_series_from_sonarr_api(url, apikey_sonarr, sonarr_series_id=None):

 def get_episodes_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_id=None):
 if series_id:
-url_sonarr_api_episode = url + "/api/{0}episode?seriesId={1}&apikey={2}".format(
-'' if get_sonarr_info.is_legacy() else 'v3/', series_id, apikey_sonarr)
+url_sonarr_api_episode = (f"{url}/api/{'' if get_sonarr_info.is_legacy() else 'v3/'}episode?"
+f"seriesId={series_id}&apikey={apikey_sonarr}")
 elif episode_id:
-url_sonarr_api_episode = url + "/api/{0}episode/{1}?apikey={2}".format(
-'' if get_sonarr_info.is_legacy() else 'v3/', episode_id, apikey_sonarr)
+url_sonarr_api_episode = (f"{url}/api/{'' if get_sonarr_info.is_legacy() else 'v3/'}episode/{episode_id}?"
+f"apikey={apikey_sonarr}")
 else:
 return

@@ -128,15 +129,15 @@ def get_episodes_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_id=

 def get_episodesFiles_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_file_id=None):
 if series_id:
-url_sonarr_api_episodeFiles = url + "/api/v3/episodeFile?seriesId={0}&apikey={1}".format(series_id,
-apikey_sonarr)
+url_sonarr_api_episodeFiles = f"{url}/api/v3/episodeFile?seriesId={series_id}&apikey={apikey_sonarr}"
 elif episode_file_id:
-url_sonarr_api_episodeFiles = url + "/api/v3/episodeFile/{0}?apikey={1}".format(episode_file_id, apikey_sonarr)
+url_sonarr_api_episodeFiles = f"{url}/api/v3/episodeFile/{episode_file_id}?apikey={apikey_sonarr}"
 else:
 return

 try:
-r = requests.get(url_sonarr_api_episodeFiles, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
+r = requests.get(url_sonarr_api_episodeFiles, timeout=int(settings.sonarr.http_timeout), verify=False,
+headers=headers)
 r.raise_for_status()
 except requests.exceptions.HTTPError:
 logging.exception("BAZARR Error trying to get episodeFiles from Sonarr. Http error.")
@@ -29,7 +29,7 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
 if not languages:
 return None

-logging.debug('BAZARR Searching subtitles for this file: ' + path)
+logging.debug(f'BAZARR Searching subtitles for this file: {path}')

 if settings.general.utf8_encode:
 os.environ["SZ_KEEP_ENCODING"] = ""
@@ -97,7 +97,7 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
 )
 except Exception as e:
 logging.exception(
-'BAZARR Error saving Subtitles file to disk for this file:' + path + ': ' + repr(e))
+f'BAZARR Error saving Subtitles file to disk for this file {path}: {repr(e)}')
 pass
 else:
 saved_any = True
@@ -115,12 +115,12 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
 return None

 if not saved_any:
-logging.debug('BAZARR No Subtitles were found for this file: ' + path)
+logging.debug(f'BAZARR No Subtitles were found for this file: {path}')
 return None

 subliminal.region.backend.sync()

-logging.debug('BAZARR Ended searching Subtitles for file: ' + path)
+logging.debug(f'BAZARR Ended searching Subtitles for file: {path}')


 def _get_language_obj(languages):
@@ -22,7 +22,7 @@ gc.enable()


 def store_subtitles_movie(original_path, reversed_path, use_cache=True):
-logging.debug('BAZARR started subtitles indexing for this file: ' + reversed_path)
+logging.debug(f'BAZARR started subtitles indexing for this file: {reversed_path}')
 actual_subtitles = []
 if os.path.exists(reversed_path):
 if settings.general.use_embedded_subs:
@@ -52,18 +52,18 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
 if alpha2_from_alpha3(subtitle_language) is not None:
 lang = str(alpha2_from_alpha3(subtitle_language))
 if subtitle_forced:
-lang = lang + ':forced'
+lang = f'{lang}:forced'
 if subtitle_hi:
-lang = lang + ':hi'
-logging.debug("BAZARR embedded subtitles detected: " + lang)
+lang = f'{lang}:hi'
+logging.debug(f"BAZARR embedded subtitles detected: {lang}")
 actual_subtitles.append([lang, None, None])
 except Exception as error:
-logging.debug("BAZARR unable to index this unrecognized language: %s (%s)",
-subtitle_language, error)
+logging.debug(f"BAZARR unable to index this unrecognized language: {subtitle_language} "
+f"({error})")
 except Exception:
 logging.exception(
-"BAZARR error when trying to analyze this %s file: %s" % (os.path.splitext(reversed_path)[1],
-reversed_path))
+f"BAZARR error when trying to analyze this {os.path.splitext(reversed_path)[1]} file: "
+f"{reversed_path}")

 try:
 dest_folder = get_subtitle_destination_folder()
@@ -119,12 +119,12 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):

 elif str(language.basename) != 'und':
 if language.forced:
-language_str = str(language)
+language_str = f'{language}:forced'
 elif language.hi:
-language_str = str(language) + ':hi'
+language_str = f'{language}:hi'
 else:
 language_str = str(language)
-logging.debug("BAZARR external subtitles detected: " + language_str)
+logging.debug(f"BAZARR external subtitles detected: {language_str}")
 actual_subtitles.append([language_str, path_mappings.path_replace_reverse_movie(subtitle_path),
 os.stat(subtitle_path).st_size])

@@ -139,14 +139,14 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):

 for movie in matching_movies:
 if movie:
-logging.debug("BAZARR storing those languages to DB: " + str(actual_subtitles))
+logging.debug(f"BAZARR storing those languages to DB: {actual_subtitles}")
 list_missing_subtitles_movies(no=movie.radarrId)
 else:
-logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
+logging.debug(f"BAZARR haven't been able to update existing subtitles to DB: {actual_subtitles}")
 else:
 logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.")

-logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path)
+logging.debug(f'BAZARR ended subtitles indexing for this file: {reversed_path}')

 return actual_subtitles

@@ -22,7 +22,7 @@ gc.enable()


 def store_subtitles(original_path, reversed_path, use_cache=True):
-logging.debug('BAZARR started subtitles indexing for this file: ' + reversed_path)
+logging.debug(f'BAZARR started subtitles indexing for this file: {reversed_path}')
 actual_subtitles = []
 if os.path.exists(reversed_path):
 if settings.general.use_embedded_subs:
@@ -52,10 +52,10 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
 if alpha2_from_alpha3(subtitle_language) is not None:
 lang = str(alpha2_from_alpha3(subtitle_language))
 if subtitle_forced:
-lang = lang + ":forced"
+lang = f"{lang}:forced"
 if subtitle_hi:
-lang = lang + ":hi"
-logging.debug("BAZARR embedded subtitles detected: " + lang)
+lang = f"{lang}:hi"
+logging.debug(f"BAZARR embedded subtitles detected: {lang}")
 actual_subtitles.append([lang, None, None])
 except Exception as error:
 logging.debug("BAZARR unable to index this unrecognized language: %s (%s)", subtitle_language, error)
@@ -118,12 +118,12 @@ def store_subtitles(original_path, reversed_path, use_cache=True):

 elif str(language.basename) != 'und':
 if language.forced:
-language_str = str(language)
+language_str = f'{language}:forced'
 elif language.hi:
-language_str = str(language) + ':hi'
+language_str = f'{language}:hi'
 else:
 language_str = str(language)
-logging.debug("BAZARR external subtitles detected: " + language_str)
+logging.debug(f"BAZARR external subtitles detected: {language_str}")
 actual_subtitles.append([language_str, path_mappings.path_replace_reverse(subtitle_path),
 os.stat(subtitle_path).st_size])

@@ -138,14 +138,14 @@ def store_subtitles(original_path, reversed_path, use_cache=True):

 for episode in matching_episodes:
 if episode:
-logging.debug("BAZARR storing those languages to DB: " + str(actual_subtitles))
+logging.debug(f"BAZARR storing those languages to DB: {actual_subtitles}")
 list_missing_subtitles(epno=episode.sonarrEpisodeId)
 else:
-logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
+logging.debug(f"BAZARR haven't been able to update existing subtitles to DB: {actual_subtitles}")
 else:
 logging.debug("BAZARR this file doesn't seems to exist or isn't accessible.")

-logging.debug('BAZARR ended subtitles indexing for this file: ' + reversed_path)
+logging.debug(f'BAZARR ended subtitles indexing for this file: {reversed_path}')

 return actual_subtitles

@@ -69,8 +69,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
             # to improve performance, skip detection of files larger that 1M
             if os.path.getsize(subtitle_path) > 1 * 1024 * 1024:
-                logging.debug("BAZARR subtitles file is too large to be text based. Skipping this file: " +
-                              subtitle_path)
+                logging.debug(f"BAZARR subtitles file is too large to be text based. Skipping this file: "
+                              f"{subtitle_path}")
                 continue

             with open(subtitle_path, 'rb') as f:
@@ -80,8 +80,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
                 if encoding and 'encoding' in encoding and encoding['encoding']:
                     encoding = detect(text)['encoding']
                 else:
-                    logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "
-                                  "It's probably a binary file: " + subtitle_path)
+                    logging.debug(f"BAZARR skipping this subtitles because we can't guess the encoding. "
+                                  f"It's probably a binary file: {subtitle_path}")
                     continue
                 text = text.decode(encoding)

@@ -97,8 +97,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
                     detected_language = 'zt'

                 if detected_language:
-                    logging.debug("BAZARR external subtitles detected and guessed this language: " + str(
-                        detected_language))
+                    logging.debug(f"BAZARR external subtitles detected and guessed this language: {detected_language}")
                     try:
                         subtitles[subtitle] = Language.rebuild(Language.fromietf(detected_language), forced=forced,
                                                                hi=False)
@@ -121,8 +120,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
         if os.path.exists(subtitle_path) and os.path.splitext(subtitle_path)[1] in core.SUBTITLE_EXTENSIONS:
             # to improve performance, skip detection of files larger that 1M
             if os.path.getsize(subtitle_path) > 1 * 1024 * 1024:
-                logging.debug("BAZARR subtitles file is too large to be text based. Skipping this file: " +
-                              subtitle_path)
+                logging.debug(f"BAZARR subtitles file is too large to be text based. Skipping this file: "
+                              f"{subtitle_path}")
                 continue

             with open(subtitle_path, 'rb') as f:
@@ -132,8 +131,8 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
                 if encoding and 'encoding' in encoding and encoding['encoding']:
                     encoding = detect(text)['encoding']
                 else:
-                    logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "
-                                  "It's probably a binary file: " + subtitle_path)
+                    logging.debug(f"BAZARR skipping this subtitles because we can't guess the encoding. "
+                                  f"It's probably a binary file: {subtitle_path}")
                     continue
                 text = text.decode(encoding)

@@ -25,7 +25,7 @@ from .processing import process_subtitle

 @update_pools
 def manual_search(path, profile_id, providers, sceneName, title, media_type):
-    logging.debug('BAZARR Manually searching subtitles for this file: ' + path)
+    logging.debug(f'BAZARR Manually searching subtitles for this file: {path}')

     final_subtitles = []

@@ -64,7 +64,7 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
             logging.info("BAZARR All providers are throttled")
             return 'All providers are throttled'
     except Exception:
-        logging.exception("BAZARR Error trying to get Subtitle list from provider for this file: " + path)
+        logging.exception(f"BAZARR Error trying to get Subtitle list from provider for this file: {path}")
     else:
         subtitles_list = []
         minimum_score = settings.general.minimum_score
@@ -145,8 +145,8 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):

         final_subtitles = sorted(subtitles_list, key=lambda x: (x['orig_score'], x['score_without_hash']),
                                  reverse=True)
-        logging.debug('BAZARR ' + str(len(final_subtitles)) + " Subtitles have been found for this file: " + path)
-        logging.debug('BAZARR Ended searching Subtitles for this file: ' + path)
+        logging.debug(f'BAZARR {len(final_subtitles)} Subtitles have been found for this file: {path}')
+        logging.debug(f'BAZARR Ended searching Subtitles for this file: {path}')

     subliminal.region.backend.sync()

@@ -156,7 +156,7 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
 @update_pools
 def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provider, sceneName, title, media_type,
                              use_original_format, profile_id):
-    logging.debug('BAZARR Manually downloading Subtitles for this file: ' + path)
+    logging.debug(f'BAZARR Manually downloading Subtitles for this file: {path}')

     if settings.general.utf8_encode:
         os.environ["SZ_KEEP_ENCODING"] = ""
@@ -180,16 +180,16 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
     try:
         if provider:
             download_subtitles([subtitle], _get_pool(media_type, profile_id))
-            logging.debug('BAZARR Subtitles file downloaded for this file:' + path)
+            logging.debug(f'BAZARR Subtitles file downloaded for this file: {path}')
         else:
             logging.info("BAZARR All providers are throttled")
             return 'All providers are throttled'
     except Exception:
-        logging.exception('BAZARR Error downloading Subtitles for this file ' + path)
+        logging.exception(f'BAZARR Error downloading Subtitles for this file {path}')
         return 'Error downloading Subtitles'
     else:
         if not subtitle.is_valid():
-            logging.exception('BAZARR No valid Subtitles file found for this file: ' + path)
+            logging.exception(f'BAZARR No valid Subtitles file found for this file: {path}')
             return 'No valid Subtitles file found'
         try:
             chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
@@ -202,7 +202,7 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
                                                formats=(subtitle.format,),
                                                path_decoder=force_unicode)
         except Exception:
-            logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
+            logging.exception(f'BAZARR Error saving Subtitles file to disk for this file: {path}')
             return 'Error saving Subtitles file to disk'
         else:
             if saved_subtitles:
@@ -218,14 +218,14 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
                     continue
             else:
                 logging.error(
-                    "BAZARR Tried to manually download a Subtitles for file: " + path
-                    + " but we weren't able to do (probably throttled by " + str(subtitle.provider_name)
-                    + ". Please retry later or select a Subtitles from another provider.")
+                    f"BAZARR Tried to manually download a Subtitles for file: {path} but we weren't able to do "
+                    f"(probably throttled by {subtitle.provider_name}. Please retry later or select a Subtitles "
+                    f"from another provider.")
                 return 'Something went wrong, check the logs for error'

     subliminal.region.backend.sync()

-    logging.debug('BAZARR Ended manually downloading Subtitles for file: ' + path)
+    logging.debug(f'BAZARR Ended manually downloading Subtitles for file: {path}')


 def _get_language_obj(profile_id):
@@ -67,7 +67,7 @@ def movies_download_subtitles(no):
                 logging.info("BAZARR All providers are throttled")
                 break

-        show_progress(id='movie_search_progress_{}'.format(no),
+        show_progress(id=f'movie_search_progress_{no}',
                       header='Searching missing subtitles...',
                       name=movie.title,
                       value=0,
@@ -88,4 +88,4 @@ def movies_download_subtitles(no):
                 history_log_movie(1, no, result)
                 send_notifications_movie(no, result.message)

-    hide_progress(id='movie_search_progress_{}'.format(no))
+    hide_progress(id=f'movie_search_progress_{no}')
@@ -49,8 +49,8 @@ def series_download_subtitles(no):
                                      .where(reduce(operator.and_, conditions))) \
         .all()
     if not episodes_details:
-        logging.debug("BAZARR no episode for that sonarrSeriesId have been found in database or they have all been "
-                      "ignored because of monitored status, series type or series tags: {}".format(no))
+        logging.debug(f"BAZARR no episode for that sonarrSeriesId have been found in database or they have all been "
+                      f"ignored because of monitored status, series type or series tags: {no}")
        return

     count_episodes_details = len(episodes_details)
@@ -59,12 +59,9 @@ def series_download_subtitles(no):
         providers_list = get_providers()

         if providers_list:
-            show_progress(id='series_search_progress_{}'.format(no),
+            show_progress(id=f'series_search_progress_{no}',
                           header='Searching missing subtitles...',
-                          name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
-                                                                     episode.season,
-                                                                     episode.episode,
-                                                                     episode.episodeTitle),
+                          name=f'{episode.title} - S{episode.season:02d}E{episode.episode:02d} - {episode.episodeTitle}',
                           value=i,
                           count=count_episodes_details)

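The rewritten `name=` argument shows that `str.format` format specs carry over unchanged into f-strings: `{episode.season:02d}` zero-pads the integer to two digits, exactly as the old `'{1:02d}'.format(...)` did. A quick illustration with made-up values:

    season, episode_number = 1, 9
    print(f'S{season:02d}E{episode_number:02d}')  # -> S01E09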
@@ -101,7 +98,7 @@ def series_download_subtitles(no):
                     logging.info("BAZARR All providers are throttled")
                     break

-    hide_progress(id='series_search_progress_{}'.format(no))
+    hide_progress(id=f'series_search_progress_{no}')


 def episode_download_subtitles(no, send_progress=False):
@@ -134,12 +131,9 @@ def episode_download_subtitles(no, send_progress=False):

         if providers_list:
             if send_progress:
-                show_progress(id='episode_search_progress_{}'.format(no),
+                show_progress(id=f'episode_search_progress_{no}',
                               header='Searching missing subtitles...',
-                              name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
-                                                                         episode.season,
-                                                                         episode.episode,
-                                                                         episode.episodeTitle),
+                              name=f'{episode.title} - S{episode.season:02d}E{episode.episode:02d} - {episode.episodeTitle}',
                               value=0,
                               count=1)

@@ -174,7 +168,7 @@ def episode_download_subtitles(no, send_progress=False):
                         send_notifications(episode.sonarrSeriesId, episode.sonarrEpisodeId, result.message)

                 if send_progress:
-                    hide_progress(id='episode_search_progress_{}'.format(no))
+                    hide_progress(id=f'episode_search_progress_{no}')
             else:
                 logging.info("BAZARR All providers are throttled")
                 break
@@ -26,13 +26,13 @@ def postprocessing(command, path):
         out = out.replace('\n', ' ').replace('\r', ' ')

     except Exception as e:
-        logging.error('BAZARR Post-processing failed for file ' + path + ' : ' + repr(e))
+        logging.error(f'BAZARR Post-processing failed for file {path}: {repr(e)}')
     else:
         if err:
-            logging.error(
-                'BAZARR Post-processing result for file ' + path + ' : ' + err.replace('\n', ' ').replace('\r', ' '))
+            parsed_err = err.replace('\n', ' ').replace('\r', ' ')
+            logging.error(f'BAZARR Post-processing result for file {path}: {parsed_err}')
         elif out == "":
             logging.info(
-                'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
+                f'BAZARR Post-processing result for file {path}: Nothing returned from command execution')
         else:
-            logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
+            logging.info(f'BAZARR Post-processing result for file {path}: {out}')
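Introducing `parsed_err` is more than a readability tweak: before Python 3.12, an f-string expression could not contain a backslash, so the `'\n'` and `'\r'` escapes in `replace()` have to be evaluated outside the braces. A minimal sketch:

    err = "line one\nline two\r"
    # f"result: {err.replace('\n', ' ')}"  # SyntaxError on Python < 3.12
    parsed_err = err.replace('\n', ' ').replace('\r', ' ')
    print(f"result: {parsed_err}")         # portable across Python 3.6+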
@@ -31,9 +31,9 @@ class ProcessSubtitlesResult:
         self.not_matched = not_matched

         if hearing_impaired:
-            self.language_code = downloaded_language_code2 + ":hi"
+            self.language_code = f"{downloaded_language_code2}:hi"
         elif forced:
-            self.language_code = downloaded_language_code2 + ":forced"
+            self.language_code = f"{downloaded_language_code2}:forced"
         else:
             self.language_code = downloaded_language_code2

@@ -57,7 +57,7 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
         modifier_string = " forced"
     else:
         modifier_string = ""
-    logging.debug('BAZARR Subtitles file saved to disk: ' + downloaded_path)
+    logging.debug(f'BAZARR Subtitles file saved to disk: {downloaded_path}')
     if is_upgrade:
         action = "upgraded"
     elif is_manual:
@@ -66,8 +66,8 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
         action = "downloaded"

     percent_score = round(subtitle.score * 100 / max_score, 2)
-    message = downloaded_language + modifier_string + " subtitles " + action + " from " + \
-        downloaded_provider + " with a score of " + str(percent_score) + "%."
+    message = (f"{downloaded_language}{modifier_string} subtitles {action} from {downloaded_provider} with a score of "
+               f"{percent_score}%.")

     if media_type == 'series':
         episode_metadata = database.execute(
@@ -116,12 +116,12 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
                 pp_threshold = int(settings.general.postprocessing_threshold_movie)

             if not use_pp_threshold or (use_pp_threshold and percent_score < pp_threshold):
-                logging.debug("BAZARR Using post-processing command: {}".format(command))
+                logging.debug(f"BAZARR Using post-processing command: {command}")
                 postprocessing(command, path)
                 set_chmod(subtitles_path=downloaded_path)
             else:
-                logging.debug("BAZARR post-processing skipped because subtitles score isn't below this "
-                              "threshold value: " + str(pp_threshold) + "%")
+                logging.debug(f"BAZARR post-processing skipped because subtitles score isn't below this "
+                              f"threshold value: {pp_threshold}%")

     if media_type == 'series':
         reversed_path = path_mappings.path_replace_reverse(path)
@@ -33,7 +33,7 @@ def refine_from_ffprobe(path, video):
                                                     episode_file_id=file_id.episode_file_id)

     if not data or ('ffprobe' not in data and 'mediainfo' not in data):
-        logging.debug("No cache available for this file: {}".format(path))
+        logging.debug(f"No cache available for this file: {path}")
         return video

     if data['ffprobe']:
@@ -32,6 +32,6 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_s
             gc.collect()
             return True
         else:
-            logging.debug("BAZARR subsync skipped because subtitles score isn't below this "
-                          "threshold value: " + subsync_threshold + "%")
+            logging.debug(f"BAZARR subsync skipped because subtitles score isn't below this "
+                          f"threshold value: {subsync_threshold}%")
     return False
@@ -36,7 +36,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
         language_log += ':forced'
         language_string += ' forced'

-    result = ProcessSubtitlesResult(message=language_string + " subtitles deleted from disk.",
+    result = ProcessSubtitlesResult(message=f"{language_string} subtitles deleted from disk.",
                                     reversed_path=path_mappings.path_replace_reverse(media_path),
                                     downloaded_language_code2=language_log,
                                     downloaded_provider=None,
@@ -50,7 +50,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
         try:
             os.remove(path_mappings.path_replace(subtitles_path))
         except OSError:
-            logging.exception('BAZARR cannot delete subtitles file: ' + subtitles_path)
+            logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
             store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
             return False
         else:
@@ -64,7 +64,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
         try:
             os.remove(path_mappings.path_replace_movie(subtitles_path))
         except OSError:
-            logging.exception('BAZARR cannot delete subtitles file: ' + subtitles_path)
+            logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
             store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
             return False
         else:
@@ -26,7 +26,7 @@ def subtitles_apply_mods(language, subtitle_path, mods, use_original_format, vid
         sub.content = f.read()

     if not sub.is_valid():
-        logging.exception('BAZARR Invalid subtitle file: ' + subtitle_path)
+        logging.exception(f'BAZARR Invalid subtitle file: {subtitle_path}')
         return

     if use_original_format:
@@ -34,7 +34,7 @@ class SubSyncer:
                  radarr_id=None):
         self.reference = video_path
         self.srtin = srt_path
-        self.srtout = '{}.synced.srt'.format(os.path.splitext(self.srtin)[0])
+        self.srtout = f'{os.path.splitext(self.srtin)[0]}.synced.srt'
         self.args = None

         ffprobe_exe = get_binary('ffprobe')
@@ -68,8 +68,8 @@ class SubSyncer:
         try:
             result = run(self.args)
         except Exception:
-            logging.exception('BAZARR an exception occurs during the synchronization process for this subtitles: '
-                              '{0}'.format(self.srtin))
+            logging.exception(
+                f'BAZARR an exception occurs during the synchronization process for this subtitles: {self.srtin}')
             raise OSError
         else:
             if settings.subsync.debug:
@@ -81,9 +81,9 @@ class SubSyncer:

             offset_seconds = result['offset_seconds'] or 0
             framerate_scale_factor = result['framerate_scale_factor'] or 0
-            message = "{0} subtitles synchronization ended with an offset of {1} seconds and a framerate " \
-                      "scale factor of {2}.".format(language_from_alpha2(srt_lang), offset_seconds,
-                                                    "{:.2f}".format(framerate_scale_factor))
+            message = (f"{language_from_alpha2(srt_lang)} subtitles synchronization ended with an offset of "
+                       f"{offset_seconds} seconds and a framerate scale factor of "
+                       f"{f'{framerate_scale_factor:.2f}'}.")

             result = ProcessSubtitlesResult(message=message,
                                             reversed_path=path_mappings.path_replace_reverse(self.reference),
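The nested f-string `{f'{framerate_scale_factor:.2f}'}` is a mechanical translation of the old inner `"{:.2f}".format(...)` call; applying the format spec directly would produce the same text with one less level of nesting. Both forms for comparison:

    framerate_scale_factor = 1.0417
    nested = f"{f'{framerate_scale_factor:.2f}'}"  # as written in the diff
    flat = f"{framerate_scale_factor:.2f}"         # equivalent, simpler form
    assert nested == flat == '1.04'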
@@ -101,6 +101,6 @@ class SubSyncer:
                 else:
                     history_log_movie(action=5, radarr_id=radarr_id, result=result)
             else:
-                logging.error('BAZARR unable to sync subtitles: {0}'.format(self.srtin))
+                logging.error(f'BAZARR unable to sync subtitles: {self.srtin}')

         return result
@@ -31,7 +31,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
     if hi:
         lang_obj = Language.rebuild(lang_obj, hi=True)

-    logging.debug('BAZARR is translating in {0} this subtitles {1}'.format(lang_obj, source_srt_file))
+    logging.debug(f'BAZARR is translating in {lang_obj} this subtitles {source_srt_file}')

     max_characters = 5000

@@ -46,7 +46,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
     lines_list = [x.plaintext for x in subs]
     joined_lines_str = '\n\n\n'.join(lines_list)

-    logging.debug('BAZARR splitting subtitles into {} characters blocks'.format(max_characters))
+    logging.debug(f'BAZARR splitting subtitles into {max_characters} characters blocks')
     lines_block_list = []
     translated_lines_list = []
     while len(joined_lines_str):
@@ -60,7 +60,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
         lines_block_list.append(new_partial_lines_str)
         joined_lines_str = joined_lines_str.replace(new_partial_lines_str, '')

-    logging.debug('BAZARR is sending {} blocks to Google Translate'.format(len(lines_block_list)))
+    logging.debug(f'BAZARR is sending {len(lines_block_list)} blocks to Google Translate')
     for block_str in lines_block_list:
         try:
             translated_partial_srt_text = GoogleTranslator(source='auto',
@@ -74,7 +74,7 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
     translated_partial_srt_list = translated_partial_srt_text.split('\n\n\n')
     translated_lines_list += translated_partial_srt_list

-    logging.debug('BAZARR saving translated subtitles to {}'.format(dest_srt_file))
+    logging.debug(f'BAZARR saving translated subtitles to {dest_srt_file}')
     for i, line in enumerate(subs):
         try:
             line.plaintext = translated_lines_list[i]
@@ -87,10 +87,7 @@ def upgrade_subtitles():

             show_progress(id='upgrade_episodes_progress',
                           header='Upgrading episodes subtitles...',
-                          name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['seriesTitle'],
-                                                                     episode['season'],
-                                                                     episode['episode'],
-                                                                     episode['title']),
+                          name=f'{episode["seriesTitle"]} - S{episode["season"]:02d}E{episode["episode"]:02d} - {episode["title"]}',
                           value=i,
                           count=count_episode_to_upgrade)

@@ -323,10 +320,10 @@ def _language_from_items(items):
     results = []
     for item in items:
         if item['forced'] == 'True':
-            results.append(item['language'] + ':forced')
+            results.append(f'{item["language"]}:forced')
         elif item['hi'] == 'True':
-            results.append(item['language'] + ':hi')
+            results.append(f'{item["language"]}:hi')
         else:
             results.append(item['language'])
-            results.append(item['language'] + ':hi')
+            results.append(f'{item["language"]}:hi')
     return results
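Note the quote alternation in `f'{item["language"]}:forced'`: before Python 3.12, an f-string expression could not reuse the quote character delimiting the literal, so subscript keys inside the braces must use the other quote style. For example:

    item = {'language': 'fr', 'forced': 'True'}
    # f'{item['language']}:forced'       # SyntaxError on Python < 3.12
    print(f'{item["language"]}:forced')  # -> fr:forced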
@@ -84,7 +84,7 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud

     sub.content = subtitle.read()
     if not sub.is_valid():
-        logging.exception('BAZARR Invalid subtitle file: ' + subtitle.filename)
+        logging.exception(f'BAZARR Invalid subtitle file: {subtitle.filename}')
         sub.mods = None

     if settings.general.utf8_encode:
@@ -106,11 +106,11 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
                                          formats=(sub.format,) if use_original_format else ("srt",),
                                          path_decoder=force_unicode)
     except Exception:
-        logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
+        logging.exception(f'BAZARR Error saving Subtitles file to disk for this file: {path}')
         return

     if len(saved_subtitles) < 1:
-        logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
+        logging.exception(f'BAZARR Error saving Subtitles file to disk for this file: {path}')
         return

     subtitle_path = saved_subtitles[0].storage_path
@@ -168,8 +168,8 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
         event_stream(type='movie', action='update', payload=movie_metadata.radarrId)
         event_stream(type='movie-wanted', action='delete', payload=movie_metadata.radarrId)

-    result = ProcessSubtitlesResult(message=language_from_alpha3(language) + modifier_string + " Subtitles manually "
-                                                                                               "uploaded.",
+    result = ProcessSubtitlesResult(message=f"{language_from_alpha3(language)}{modifier_string} Subtitles manually "
+                                            "uploaded.",
                                     reversed_path=reversed_path,
                                     downloaded_language_code2=uploaded_language_code2,
                                     downloaded_provider=None,
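In the rebuilt `message=` argument, only the first of the two adjacent literals carries the `f` prefix. Python concatenates adjacent string literals at compile time, so the plain `"uploaded."` fragment is appended as-is without needing to be an f-string itself. Sketch with hypothetical values:

    language_name, modifier_string = 'French', ' forced'
    message = (f"{language_name}{modifier_string} Subtitles manually "
               "uploaded.")
    print(message)  # -> French forced Subtitles manually uploaded.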
@@ -109,10 +109,7 @@ def wanted_search_missing_subtitles_series():
     for i, episode in enumerate(episodes):
         show_progress(id='wanted_episodes_progress',
                       header='Searching subtitles...',
-                      name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
-                                                                 episode.season,
-                                                                 episode.episode,
-                                                                 episode.episodeTitle),
+                      name=f'{episode.title} - S{episode.season:02d}E{episode.episode:02d} - {episode.episodeTitle}',
                       value=i,
                       count=count_episodes)

@@ -44,7 +44,7 @@ def get_binary(name):
     installed_exe = which(name)

     if installed_exe and os.path.isfile(installed_exe):
-        logging.debug('BAZARR returning this binary: {}'.format(installed_exe))
+        logging.debug(f'BAZARR returning this binary: {installed_exe}')
         return installed_exe
     else:
         logging.debug('BAZARR binary not found in path, searching for it...')
@@ -72,27 +72,27 @@ def get_binary(name):
            logging.debug('BAZARR binary not found in binaries.json')
            raise BinaryNotFound
        else:
-           logging.debug('BAZARR found this in binaries.json: {}'.format(binary))
+           logging.debug(f'BAZARR found this in binaries.json: {binary}')

            if os.path.isfile(exe) and md5(exe) == binary['checksum']:
-               logging.debug('BAZARR returning this existing and up-to-date binary: {}'.format(exe))
+               logging.debug(f'BAZARR returning this existing and up-to-date binary: {exe}')
               return exe
            else:
                try:
-                   logging.debug('BAZARR creating directory tree for {}'.format(exe_dir))
+                   logging.debug(f'BAZARR creating directory tree for {exe_dir}')
                   os.makedirs(exe_dir, exist_ok=True)
-                   logging.debug('BAZARR downloading {0} from {1}'.format(name, binary['url']))
+                   logging.debug(f'BAZARR downloading {name} from {binary["url"]}')
                   r = requests.get(binary['url'])
-                   logging.debug('BAZARR saving {0} to {1}'.format(name, exe_dir))
+                   logging.debug(f'BAZARR saving {name} to {exe_dir}')
                   with open(exe, 'wb') as f:
                       f.write(r.content)
                   if system != 'Windows':
-                       logging.debug('BAZARR adding execute permission on {}'.format(exe))
+                       logging.debug(f'BAZARR adding execute permission on {exe}')
                       st = os.stat(exe)
                       os.chmod(exe, st.st_mode | stat.S_IEXEC)
               except Exception:
-                   logging.exception('BAZARR unable to download {0} to {1}'.format(name, exe_dir))
+                   logging.exception(f'BAZARR unable to download {name} to {exe_dir}')
                   raise BinaryNotFound
               else:
-                   logging.debug('BAZARR returning this new binary: {}'.format(exe))
+                   logging.debug(f'BAZARR returning this new binary: {exe}')
                   return exe
@@ -9,7 +9,7 @@ def browse_bazarr_filesystem(path='#'):
     if os.name == 'nt':
         dir_list = []
         for drive in string.ascii_uppercase:
-            drive_letter = drive + ':\\'
+            drive_letter = f'{drive}:\\'
             if os.path.exists(drive_letter):
                 dir_list.append(drive_letter)
     else:
@@ -52,7 +52,7 @@ def get_target_folder(file_path):
             try:
                 os.makedirs(fld)
             except Exception:
-                logging.error('BAZARR is unable to create directory to save subtitles: ' + fld)
+                logging.error(f'BAZARR is unable to create directory to save subtitles: {fld}')
                 fld = None
         else:
             fld = None