mirror of
https://github.com/morpheus65535/bazarr.git
synced 2025-04-24 06:37:16 -04:00
Added on demand subtitles synchronization.
This commit is contained in:
parent
c4d199dc5b
commit
9c9cbe8f19
14 changed files with 960 additions and 48 deletions
|
@ -31,6 +31,7 @@ from utils import history_log, history_log_movie, get_sonarr_version, get_radarr
|
|||
from get_providers import get_providers, get_providers_auth, list_throttled_providers, reset_throttled_providers
|
||||
from event_handler import event_stream
|
||||
from scheduler import scheduler
|
||||
from subsyncer import subsync
|
||||
|
||||
from subliminal_patch.core import SUBTITLE_EXTENSIONS
|
||||
|
||||
|
@ -467,7 +468,8 @@ class EpisodesSubtitlesDelete(Resource):
|
|||
try:
|
||||
os.remove(path_mappings.path_replace(subtitlesPath))
|
||||
result = language_from_alpha3(language) + " subtitles deleted from disk."
|
||||
history_log(0, sonarrSeriesId, sonarrEpisodeId, result, language=alpha2_from_alpha3(language))
|
||||
history_log(0, sonarrSeriesId, sonarrEpisodeId, result, language=alpha2_from_alpha3(language),
|
||||
video_path=path_mappings.path_replace_reverse(episodePath))
|
||||
store_subtitles(path_mappings.path_replace_reverse(episodePath), episodePath)
|
||||
return result, 202
|
||||
except OSError as e:
|
||||
|
@ -665,6 +667,34 @@ class EpisodesHistory(Resource):
|
|||
return jsonify(data=episode_history)
|
||||
|
||||
|
||||
class EpisodesTools(Resource):
|
||||
@authenticate
|
||||
def get(self):
|
||||
episodeid = request.args.get('episodeid')
|
||||
|
||||
episode_ext_subs = database.execute("SELECT path, subtitles FROM table_episodes WHERE sonarrEpisodeId=?",
|
||||
(episodeid,), only_one=True)
|
||||
try:
|
||||
all_subs = ast.literal_eval(episode_ext_subs['subtitles'])
|
||||
except:
|
||||
episode_external_subtitles = None
|
||||
else:
|
||||
episode_external_subtitles = []
|
||||
for subs in all_subs:
|
||||
if subs[1]:
|
||||
subtitle = subs[0].split(':')
|
||||
subs[0] = {"name": language_from_alpha2(subtitle[0]),
|
||||
"code2": subtitle[0],
|
||||
"code3": alpha3_from_alpha2(subtitle[0]),
|
||||
"forced": True if len(subtitle) > 1 else False}
|
||||
episode_external_subtitles.append({'language': subs[0],
|
||||
'path': path_mappings.path_replace(subs[1]),
|
||||
'filename': os.path.basename(subs[1]),
|
||||
'videopath': path_mappings.path_replace(episode_ext_subs['path'])})
|
||||
|
||||
return jsonify(data=episode_external_subtitles)
|
||||
|
||||
|
||||
class Movies(Resource):
|
||||
@authenticate
|
||||
def get(self):
|
||||
|
@ -834,7 +864,8 @@ class MovieSubtitlesDelete(Resource):
|
|||
try:
|
||||
os.remove(path_mappings.path_replace_movie(subtitlesPath))
|
||||
result = language_from_alpha3(language) + " subtitles deleted from disk."
|
||||
history_log_movie(0, radarrId, result, language=alpha2_from_alpha3(language))
|
||||
history_log_movie(0, radarrId, result, language=alpha2_from_alpha3(language),
|
||||
video_path=path_mappings.path_replace_reverse_movie(moviePath))
|
||||
store_subtitles_movie(path_mappings.path_replace_reverse_movie(moviePath), moviePath)
|
||||
return result, 202
|
||||
except OSError as e:
|
||||
|
@ -1030,6 +1061,34 @@ class MovieHistory(Resource):
|
|||
return jsonify(data=movie_history)
|
||||
|
||||
|
||||
class MovieTools(Resource):
|
||||
@authenticate
|
||||
def get(self):
|
||||
movieid = request.args.get('movieid')
|
||||
|
||||
movie_ext_subs = database.execute("SELECT path, subtitles FROM table_movies WHERE radarrId=?",
|
||||
(movieid,), only_one=True)
|
||||
try:
|
||||
all_subs = ast.literal_eval(movie_ext_subs['subtitles'])
|
||||
except:
|
||||
movie_external_subtitles = None
|
||||
else:
|
||||
movie_external_subtitles = []
|
||||
for subs in all_subs:
|
||||
if subs[1]:
|
||||
subtitle = subs[0].split(':')
|
||||
subs[0] = {"name": language_from_alpha2(subtitle[0]),
|
||||
"code2": subtitle[0],
|
||||
"code3": alpha3_from_alpha2(subtitle[0]),
|
||||
"forced": True if len(subtitle) > 1 else False}
|
||||
movie_external_subtitles.append({'language': subs[0],
|
||||
'path': path_mappings.path_replace_movie(subs[1]),
|
||||
'filename': os.path.basename(subs[1]),
|
||||
'videopath': path_mappings.path_replace_movie(movie_ext_subs['path'])})
|
||||
|
||||
return jsonify(data=movie_external_subtitles)
|
||||
|
||||
|
||||
class HistorySeries(Resource):
|
||||
@authenticate
|
||||
def get(self):
|
||||
|
@ -1300,6 +1359,30 @@ class SearchWantedMovies(Resource):
|
|||
return '', 200
|
||||
|
||||
|
||||
class SyncSubtitles(Resource):
|
||||
@authenticate
|
||||
def post(self):
|
||||
language = request.form.get('language')
|
||||
subtitles_path = request.form.get('subtitlesPath')
|
||||
video_path = request.form.get('videoPath')
|
||||
media_type = request.form.get('mediaType')
|
||||
|
||||
if media_type == 'series':
|
||||
episode_metadata = database.execute("SELECT sonarrSeriesId, sonarrEpisodeId FROM table_episodes"
|
||||
" WHERE path = ?", (path_mappings.path_replace_reverse(video_path),),
|
||||
only_one=True)
|
||||
subsync.sync(video_path=video_path, srt_path=subtitles_path,
|
||||
srt_lang=language, media_type=media_type, sonarr_series_id=episode_metadata['sonarrSeriesId'],
|
||||
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
|
||||
else:
|
||||
movie_metadata = database.execute("SELECT radarrId FROM table_movies WHERE path = ?",
|
||||
(path_mappings.path_replace_reverse_movie(video_path),), only_one=True)
|
||||
subsync.sync(video_path=video_path, srt_path=subtitles_path,
|
||||
srt_lang=language, media_type=media_type, radarr_id=movie_metadata['radarrId'])
|
||||
|
||||
return '', 200
|
||||
|
||||
|
||||
api.add_resource(Shutdown, '/shutdown')
|
||||
api.add_resource(Restart, '/restart')
|
||||
|
||||
|
@ -1330,6 +1413,7 @@ api.add_resource(EpisodesSubtitlesUpload, '/episodes_subtitles_upload')
|
|||
api.add_resource(EpisodesScanDisk, '/episodes_scan_disk')
|
||||
api.add_resource(EpisodesSearchMissing, '/episodes_search_missing')
|
||||
api.add_resource(EpisodesHistory, '/episodes_history')
|
||||
api.add_resource(EpisodesTools, '/episodes_tools')
|
||||
|
||||
api.add_resource(Movies, '/movies')
|
||||
api.add_resource(MoviesEditSave, '/movies_edit_save')
|
||||
|
@ -1341,6 +1425,7 @@ api.add_resource(MovieSubtitlesUpload, '/movie_subtitles_upload')
|
|||
api.add_resource(MovieScanDisk, '/movie_scan_disk')
|
||||
api.add_resource(MovieSearchMissing, '/movie_search_missing')
|
||||
api.add_resource(MovieHistory, '/movie_history')
|
||||
api.add_resource(MovieTools, '/movie_tools')
|
||||
|
||||
api.add_resource(HistorySeries, '/history_series')
|
||||
api.add_resource(HistoryMovies, '/history_movies')
|
||||
|
@ -1349,3 +1434,5 @@ api.add_resource(WantedSeries, '/wanted_series')
|
|||
api.add_resource(WantedMovies, '/wanted_movies')
|
||||
api.add_resource(SearchWantedSeries, '/search_wanted_series')
|
||||
api.add_resource(SearchWantedMovies, '/search_wanted_movies')
|
||||
|
||||
api.add_resource(SyncSubtitles, '/sync_subtitles')
|
||||
|
|
|
@ -220,12 +220,25 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
|
|||
message = downloaded_language + is_forced_string + " subtitles " + action + " from " + \
|
||||
downloaded_provider + " with a score of " + str(percent_score) + "%."
|
||||
|
||||
sync_result = sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
srt_lang=downloaded_language_code3, media_type=media_type,
|
||||
percent_score=percent_score)
|
||||
if sync_result:
|
||||
message += " The subtitles file have been synced."
|
||||
|
||||
if media_type == 'series':
|
||||
episode_metadata = database.execute("SELECT sonarrSeriesId, sonarrEpisodeId FROM "
|
||||
"table_episodes WHERE path = ?",
|
||||
(path_mappings.path_replace_reverse(path),),
|
||||
only_one=True)
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
srt_lang=downloaded_language_code3, media_type=media_type,
|
||||
percent_score=percent_score,
|
||||
sonarr_series_id=episode_metadata['sonarrSeriesId'],
|
||||
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
|
||||
else:
|
||||
movie_metadata = database.execute("SELECT radarrId FROM table_movies WHERE path = ?",
|
||||
(path_mappings.path_replace_reverse_movie(path),),
|
||||
only_one=True)
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
srt_lang=downloaded_language_code3, media_type=media_type,
|
||||
percent_score=percent_score,
|
||||
radarr_id=movie_metadata['radarrId'])
|
||||
|
||||
if use_postprocessing is True:
|
||||
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language,
|
||||
downloaded_language_code2, downloaded_language_code3, audio_language,
|
||||
|
@ -450,12 +463,24 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl
|
|||
message = downloaded_language + is_forced_string + " subtitles downloaded from " + \
|
||||
downloaded_provider + " with a score of " + str(score) + "% using manual search."
|
||||
|
||||
sync_result = sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
srt_lang=downloaded_language_code3, media_type=media_type,
|
||||
percent_score=score)
|
||||
if sync_result:
|
||||
message += " The subtitles file have been synced."
|
||||
|
||||
if media_type == 'series':
|
||||
episode_metadata = database.execute("SELECT sonarrSeriesId, sonarrEpisodeId FROM "
|
||||
"table_episodes WHERE path = ?",
|
||||
(path_mappings.path_replace_reverse(path),),
|
||||
only_one=True)
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
srt_lang=downloaded_language_code3, media_type=media_type,
|
||||
percent_score=score,
|
||||
sonarr_series_id=episode_metadata['sonarrSeriesId'],
|
||||
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
|
||||
else:
|
||||
movie_metadata = database.execute("SELECT radarrId FROM table_movies WHERE path = ?",
|
||||
(path_mappings.path_replace_reverse_movie(path),),
|
||||
only_one=True)
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
srt_lang=downloaded_language_code3, media_type=media_type,
|
||||
percent_score=score, radarr_id=movie_metadata['radarrId'])
|
||||
|
||||
if use_postprocessing is True:
|
||||
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language,
|
||||
downloaded_language_code2, downloaded_language_code3, audio_language,
|
||||
|
@ -570,10 +595,19 @@ def manual_upload_subtitle(path, language, forced, title, scene_name, media_type
|
|||
audio_language_code2 = alpha2_from_language(audio_language)
|
||||
audio_language_code3 = alpha3_from_language(audio_language)
|
||||
|
||||
sync_result = sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code3,
|
||||
media_type=media_type, percent_score=100)
|
||||
if sync_result:
|
||||
message += " The subtitles file have been synced."
|
||||
if media_type == 'series':
|
||||
episode_metadata = database.execute("SELECT sonarrSeriesId, sonarrEpisodeId FROM table_episodes WHERE path = ?",
|
||||
(path_mappings.path_replace_reverse(path),),
|
||||
only_one=True)
|
||||
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code3, media_type=media_type,
|
||||
percent_score=100, sonarr_series_id=episode_metadata['sonarrSeriesId'],
|
||||
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
|
||||
else:
|
||||
movie_metadata = database.execute("SELECT radarrId FROM table_movies WHERE path = ?",
|
||||
(path_mappings.path_replace_reverse_movie(path),),
|
||||
only_one=True)
|
||||
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code3, media_type=media_type,
|
||||
percent_score=100, radarr_id=movie_metadata['radarrId'])
|
||||
|
||||
if use_postprocessing is True:
|
||||
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language,
|
||||
|
@ -1219,8 +1253,9 @@ def postprocessing(command, path):
|
|||
logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
|
||||
|
||||
|
||||
def sync_subtitles(video_path, srt_path, srt_lang, media_type, percent_score):
|
||||
if settings.subsync.use_subsync:
|
||||
def sync_subtitles(video_path, srt_path, srt_lang, media_type, percent_score, sonarr_series_id=None,
|
||||
sonarr_episode_id=None, radarr_id=None):
|
||||
if settings.subsync.getboolean('use_subsync'):
|
||||
if media_type == 'series':
|
||||
use_subsync_threshold = settings.subsync.getboolean('use_subsync_threshold')
|
||||
subsync_threshold = settings.subsync.subsync_threshold
|
||||
|
@ -1229,7 +1264,8 @@ def sync_subtitles(video_path, srt_path, srt_lang, media_type, percent_score):
|
|||
subsync_threshold = settings.subsync.subsync_movie_threshold
|
||||
|
||||
if not use_subsync_threshold or (use_subsync_threshold and percent_score < float(subsync_threshold)):
|
||||
subsync.sync(video_path=video_path, srt_path=srt_path, srt_lang=srt_lang)
|
||||
subsync.sync(video_path=video_path, srt_path=srt_path, srt_lang=srt_lang, media_type=media_type,
|
||||
sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id, radarr_id=radarr_id)
|
||||
return True
|
||||
else:
|
||||
logging.debug("BAZARR subsync skipped because subtitles score isn't below this "
|
||||
|
|
|
@ -4,6 +4,9 @@ from ffsubsync.ffsubsync import run
|
|||
from ffsubsync.constants import *
|
||||
from knowit import api
|
||||
from utils import get_binary
|
||||
from utils import history_log, history_log_movie
|
||||
from get_languages import alpha2_from_alpha3, language_from_alpha3
|
||||
from helper import path_mappings
|
||||
|
||||
|
||||
class SubSyncer:
|
||||
|
@ -30,7 +33,8 @@ class SubSyncer:
|
|||
self.merge_with_reference = None
|
||||
self.output_encoding = 'same'
|
||||
|
||||
def sync(self, video_path, srt_path, srt_lang):
|
||||
def sync(self, video_path, srt_path, srt_lang, media_type, sonarr_series_id=None, sonarr_episode_id=None,
|
||||
radarr_id=None):
|
||||
self.reference = video_path
|
||||
self.srtin = srt_path
|
||||
self.srtout = None
|
||||
|
@ -83,7 +87,26 @@ class SubSyncer:
|
|||
logging.debug('BAZARR FFmpeg used is %s', ffmpeg_exe)
|
||||
|
||||
self.ffmpeg_path = os.path.dirname(ffmpeg_exe)
|
||||
run(self)
|
||||
result = run(self)
|
||||
|
||||
if result['sync_was_successful']:
|
||||
message = "{0} subtitles synchronization ended with an offset of {1} seconds and a framerate scale factor" \
|
||||
" of {2}.".format(language_from_alpha3(srt_lang), result['offset_seconds'],
|
||||
result['framerate_scale_factor'])
|
||||
|
||||
if media_type == 'series':
|
||||
history_log(action=5, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id,
|
||||
description=message, video_path=path_mappings.path_replace_reverse(self.reference),
|
||||
language=alpha2_from_alpha3(srt_lang))
|
||||
else:
|
||||
history_log_movie(action=5, radarr_id=radarr_id, description=message,
|
||||
video_path=path_mappings.path_replace_reverse_movie(self.reference),
|
||||
language=alpha2_from_alpha3(srt_lang))
|
||||
else:
|
||||
logging.error('BAZARR unable to sync subtitles: ' + str(result))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class NoAudioTrack(Exception):
|
||||
"""Exception raised if no audio track can be found in video file."""
|
||||
|
|
520
libs/ffsubsync/_version.py
Normal file
520
libs/ffsubsync/_version.py
Normal file
|
@ -0,0 +1,520 @@
|
|||
|
||||
# This file helps to compute a version number in source trees obtained from
|
||||
# git-archive tarball (such as those provided by githubs download-from-tag
|
||||
# feature). Distribution tarballs (built by setup.py sdist) and build
|
||||
# directories (produced by setup.py build) will contain a much shorter file
|
||||
# that just contains the computed version number.
|
||||
|
||||
# This file is released into the public domain. Generated by
|
||||
# versioneer-0.18 (https://github.com/warner/python-versioneer)
|
||||
|
||||
"""Git implementation of _version.py."""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def get_keywords():
|
||||
"""Get the keywords needed to look up the version information."""
|
||||
# these strings will be replaced by git during git-archive.
|
||||
# setup.py/versioneer.py will grep for the variable names, so they must
|
||||
# each be defined on a line of their own. _version.py will just call
|
||||
# get_keywords().
|
||||
git_refnames = "$Format:%d$"
|
||||
git_full = "$Format:%H$"
|
||||
git_date = "$Format:%ci$"
|
||||
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
|
||||
return keywords
|
||||
|
||||
|
||||
class VersioneerConfig:
|
||||
"""Container for Versioneer configuration parameters."""
|
||||
|
||||
|
||||
def get_config():
|
||||
"""Create, populate and return the VersioneerConfig() object."""
|
||||
# these strings are filled in when 'setup.py versioneer' creates
|
||||
# _version.py
|
||||
cfg = VersioneerConfig()
|
||||
cfg.VCS = "git"
|
||||
cfg.style = "pep440-pre"
|
||||
cfg.tag_prefix = ""
|
||||
cfg.parentdir_prefix = "ffsubsync-"
|
||||
cfg.versionfile_source = "ffsubsync/_version.py"
|
||||
cfg.verbose = False
|
||||
return cfg
|
||||
|
||||
|
||||
class NotThisMethod(Exception):
|
||||
"""Exception raised if a method is not valid for the current scenario."""
|
||||
|
||||
|
||||
LONG_VERSION_PY = {}
|
||||
HANDLERS = {}
|
||||
|
||||
|
||||
def register_vcs_handler(vcs, method): # decorator
|
||||
"""Decorator to mark a method as the handler for a particular VCS."""
|
||||
def decorate(f):
|
||||
"""Store f in HANDLERS[vcs][method]."""
|
||||
if vcs not in HANDLERS:
|
||||
HANDLERS[vcs] = {}
|
||||
HANDLERS[vcs][method] = f
|
||||
return f
|
||||
return decorate
|
||||
|
||||
|
||||
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
|
||||
env=None):
|
||||
"""Call the given command(s)."""
|
||||
assert isinstance(commands, list)
|
||||
p = None
|
||||
for c in commands:
|
||||
try:
|
||||
dispcmd = str([c] + args)
|
||||
# remember shell=False, so use git.cmd on windows, not just git
|
||||
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=(subprocess.PIPE if hide_stderr
|
||||
else None))
|
||||
break
|
||||
except EnvironmentError:
|
||||
e = sys.exc_info()[1]
|
||||
if e.errno == errno.ENOENT:
|
||||
continue
|
||||
if verbose:
|
||||
print("unable to run %s" % dispcmd)
|
||||
print(e)
|
||||
return None, None
|
||||
else:
|
||||
if verbose:
|
||||
print("unable to find command, tried %s" % (commands,))
|
||||
return None, None
|
||||
stdout = p.communicate()[0].strip()
|
||||
if sys.version_info[0] >= 3:
|
||||
stdout = stdout.decode()
|
||||
if p.returncode != 0:
|
||||
if verbose:
|
||||
print("unable to run %s (error)" % dispcmd)
|
||||
print("stdout was %s" % stdout)
|
||||
return None, p.returncode
|
||||
return stdout, p.returncode
|
||||
|
||||
|
||||
def versions_from_parentdir(parentdir_prefix, root, verbose):
|
||||
"""Try to determine the version from the parent directory name.
|
||||
|
||||
Source tarballs conventionally unpack into a directory that includes both
|
||||
the project name and a version string. We will also support searching up
|
||||
two directory levels for an appropriately named parent directory
|
||||
"""
|
||||
rootdirs = []
|
||||
|
||||
for i in range(3):
|
||||
dirname = os.path.basename(root)
|
||||
if dirname.startswith(parentdir_prefix):
|
||||
return {"version": dirname[len(parentdir_prefix):],
|
||||
"full-revisionid": None,
|
||||
"dirty": False, "error": None, "date": None}
|
||||
else:
|
||||
rootdirs.append(root)
|
||||
root = os.path.dirname(root) # up a level
|
||||
|
||||
if verbose:
|
||||
print("Tried directories %s but none started with prefix %s" %
|
||||
(str(rootdirs), parentdir_prefix))
|
||||
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
|
||||
|
||||
|
||||
@register_vcs_handler("git", "get_keywords")
|
||||
def git_get_keywords(versionfile_abs):
|
||||
"""Extract version information from the given file."""
|
||||
# the code embedded in _version.py can just fetch the value of these
|
||||
# keywords. When used from setup.py, we don't want to import _version.py,
|
||||
# so we do it with a regexp instead. This function is not used from
|
||||
# _version.py.
|
||||
keywords = {}
|
||||
try:
|
||||
f = open(versionfile_abs, "r")
|
||||
for line in f.readlines():
|
||||
if line.strip().startswith("git_refnames ="):
|
||||
mo = re.search(r'=\s*"(.*)"', line)
|
||||
if mo:
|
||||
keywords["refnames"] = mo.group(1)
|
||||
if line.strip().startswith("git_full ="):
|
||||
mo = re.search(r'=\s*"(.*)"', line)
|
||||
if mo:
|
||||
keywords["full"] = mo.group(1)
|
||||
if line.strip().startswith("git_date ="):
|
||||
mo = re.search(r'=\s*"(.*)"', line)
|
||||
if mo:
|
||||
keywords["date"] = mo.group(1)
|
||||
f.close()
|
||||
except EnvironmentError:
|
||||
pass
|
||||
return keywords
|
||||
|
||||
|
||||
@register_vcs_handler("git", "keywords")
|
||||
def git_versions_from_keywords(keywords, tag_prefix, verbose):
|
||||
"""Get version information from git keywords."""
|
||||
if not keywords:
|
||||
raise NotThisMethod("no keywords at all, weird")
|
||||
date = keywords.get("date")
|
||||
if date is not None:
|
||||
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
|
||||
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
|
||||
# -like" string, which we must then edit to make compliant), because
|
||||
# it's been around since git-1.5.3, and it's too difficult to
|
||||
# discover which version we're using, or to work around using an
|
||||
# older one.
|
||||
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
|
||||
refnames = keywords["refnames"].strip()
|
||||
if refnames.startswith("$Format"):
|
||||
if verbose:
|
||||
print("keywords are unexpanded, not using")
|
||||
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
|
||||
refs = set([r.strip() for r in refnames.strip("()").split(",")])
|
||||
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
|
||||
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
|
||||
TAG = "tag: "
|
||||
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
|
||||
if not tags:
|
||||
# Either we're using git < 1.8.3, or there really are no tags. We use
|
||||
# a heuristic: assume all version tags have a digit. The old git %d
|
||||
# expansion behaves like git log --decorate=short and strips out the
|
||||
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
|
||||
# between branches and tags. By ignoring refnames without digits, we
|
||||
# filter out many common branch names like "release" and
|
||||
# "stabilization", as well as "HEAD" and "master".
|
||||
tags = set([r for r in refs if re.search(r'\d', r)])
|
||||
if verbose:
|
||||
print("discarding '%s', no digits" % ",".join(refs - tags))
|
||||
if verbose:
|
||||
print("likely tags: %s" % ",".join(sorted(tags)))
|
||||
for ref in sorted(tags):
|
||||
# sorting will prefer e.g. "2.0" over "2.0rc1"
|
||||
if ref.startswith(tag_prefix):
|
||||
r = ref[len(tag_prefix):]
|
||||
if verbose:
|
||||
print("picking %s" % r)
|
||||
return {"version": r,
|
||||
"full-revisionid": keywords["full"].strip(),
|
||||
"dirty": False, "error": None,
|
||||
"date": date}
|
||||
# no suitable tags, so version is "0+unknown", but full hex is still there
|
||||
if verbose:
|
||||
print("no suitable tags, using unknown + full revision id")
|
||||
return {"version": "0+unknown",
|
||||
"full-revisionid": keywords["full"].strip(),
|
||||
"dirty": False, "error": "no suitable tags", "date": None}
|
||||
|
||||
|
||||
@register_vcs_handler("git", "pieces_from_vcs")
|
||||
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
|
||||
"""Get version from 'git describe' in the root of the source tree.
|
||||
|
||||
This only gets called if the git-archive 'subst' keywords were *not*
|
||||
expanded, and _version.py hasn't already been rewritten with a short
|
||||
version string, meaning we're inside a checked out source tree.
|
||||
"""
|
||||
GITS = ["git"]
|
||||
if sys.platform == "win32":
|
||||
GITS = ["git.cmd", "git.exe"]
|
||||
|
||||
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
|
||||
hide_stderr=True)
|
||||
if rc != 0:
|
||||
if verbose:
|
||||
print("Directory %s not under git control" % root)
|
||||
raise NotThisMethod("'git rev-parse --git-dir' returned error")
|
||||
|
||||
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
|
||||
# if there isn't one, this yields HEX[-dirty] (no NUM)
|
||||
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
|
||||
"--always", "--long",
|
||||
"--match", "%s*" % tag_prefix],
|
||||
cwd=root)
|
||||
# --long was added in git-1.5.5
|
||||
if describe_out is None:
|
||||
raise NotThisMethod("'git describe' failed")
|
||||
describe_out = describe_out.strip()
|
||||
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
|
||||
if full_out is None:
|
||||
raise NotThisMethod("'git rev-parse' failed")
|
||||
full_out = full_out.strip()
|
||||
|
||||
pieces = {}
|
||||
pieces["long"] = full_out
|
||||
pieces["short"] = full_out[:7] # maybe improved later
|
||||
pieces["error"] = None
|
||||
|
||||
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
|
||||
# TAG might have hyphens.
|
||||
git_describe = describe_out
|
||||
|
||||
# look for -dirty suffix
|
||||
dirty = git_describe.endswith("-dirty")
|
||||
pieces["dirty"] = dirty
|
||||
if dirty:
|
||||
git_describe = git_describe[:git_describe.rindex("-dirty")]
|
||||
|
||||
# now we have TAG-NUM-gHEX or HEX
|
||||
|
||||
if "-" in git_describe:
|
||||
# TAG-NUM-gHEX
|
||||
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
|
||||
if not mo:
|
||||
# unparseable. Maybe git-describe is misbehaving?
|
||||
pieces["error"] = ("unable to parse git-describe output: '%s'"
|
||||
% describe_out)
|
||||
return pieces
|
||||
|
||||
# tag
|
||||
full_tag = mo.group(1)
|
||||
if not full_tag.startswith(tag_prefix):
|
||||
if verbose:
|
||||
fmt = "tag '%s' doesn't start with prefix '%s'"
|
||||
print(fmt % (full_tag, tag_prefix))
|
||||
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
|
||||
% (full_tag, tag_prefix))
|
||||
return pieces
|
||||
pieces["closest-tag"] = full_tag[len(tag_prefix):]
|
||||
|
||||
# distance: number of commits since tag
|
||||
pieces["distance"] = int(mo.group(2))
|
||||
|
||||
# commit: short hex revision ID
|
||||
pieces["short"] = mo.group(3)
|
||||
|
||||
else:
|
||||
# HEX: no tags
|
||||
pieces["closest-tag"] = None
|
||||
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
|
||||
cwd=root)
|
||||
pieces["distance"] = int(count_out) # total number of commits
|
||||
|
||||
# commit date: see ISO-8601 comment in git_versions_from_keywords()
|
||||
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
|
||||
cwd=root)[0].strip()
|
||||
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
|
||||
|
||||
return pieces
|
||||
|
||||
|
||||
def plus_or_dot(pieces):
|
||||
"""Return a + if we don't already have one, else return a ."""
|
||||
if "+" in pieces.get("closest-tag", ""):
|
||||
return "."
|
||||
return "+"
|
||||
|
||||
|
||||
def render_pep440(pieces):
|
||||
"""Build up version string, with post-release "local version identifier".
|
||||
|
||||
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
|
||||
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
|
||||
|
||||
Exceptions:
|
||||
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
|
||||
"""
|
||||
if pieces["closest-tag"]:
|
||||
rendered = pieces["closest-tag"]
|
||||
if pieces["distance"] or pieces["dirty"]:
|
||||
rendered += plus_or_dot(pieces)
|
||||
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
|
||||
if pieces["dirty"]:
|
||||
rendered += ".dirty"
|
||||
else:
|
||||
# exception #1
|
||||
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
|
||||
pieces["short"])
|
||||
if pieces["dirty"]:
|
||||
rendered += ".dirty"
|
||||
return rendered
|
||||
|
||||
|
||||
def render_pep440_pre(pieces):
|
||||
"""TAG[.post.devDISTANCE] -- No -dirty.
|
||||
|
||||
Exceptions:
|
||||
1: no tags. 0.post.devDISTANCE
|
||||
"""
|
||||
if pieces["closest-tag"]:
|
||||
rendered = pieces["closest-tag"]
|
||||
if pieces["distance"]:
|
||||
rendered += ".post.dev%d" % pieces["distance"]
|
||||
else:
|
||||
# exception #1
|
||||
rendered = "0.post.dev%d" % pieces["distance"]
|
||||
return rendered
|
||||
|
||||
|
||||
def render_pep440_post(pieces):
|
||||
"""TAG[.postDISTANCE[.dev0]+gHEX] .
|
||||
|
||||
The ".dev0" means dirty. Note that .dev0 sorts backwards
|
||||
(a dirty tree will appear "older" than the corresponding clean one),
|
||||
but you shouldn't be releasing software with -dirty anyways.
|
||||
|
||||
Exceptions:
|
||||
1: no tags. 0.postDISTANCE[.dev0]
|
||||
"""
|
||||
if pieces["closest-tag"]:
|
||||
rendered = pieces["closest-tag"]
|
||||
if pieces["distance"] or pieces["dirty"]:
|
||||
rendered += ".post%d" % pieces["distance"]
|
||||
if pieces["dirty"]:
|
||||
rendered += ".dev0"
|
||||
rendered += plus_or_dot(pieces)
|
||||
rendered += "g%s" % pieces["short"]
|
||||
else:
|
||||
# exception #1
|
||||
rendered = "0.post%d" % pieces["distance"]
|
||||
if pieces["dirty"]:
|
||||
rendered += ".dev0"
|
||||
rendered += "+g%s" % pieces["short"]
|
||||
return rendered
|
||||
|
||||
|
||||
def render_pep440_old(pieces):
|
||||
"""TAG[.postDISTANCE[.dev0]] .
|
||||
|
||||
The ".dev0" means dirty.
|
||||
|
||||
Eexceptions:
|
||||
1: no tags. 0.postDISTANCE[.dev0]
|
||||
"""
|
||||
if pieces["closest-tag"]:
|
||||
rendered = pieces["closest-tag"]
|
||||
if pieces["distance"] or pieces["dirty"]:
|
||||
rendered += ".post%d" % pieces["distance"]
|
||||
if pieces["dirty"]:
|
||||
rendered += ".dev0"
|
||||
else:
|
||||
# exception #1
|
||||
rendered = "0.post%d" % pieces["distance"]
|
||||
if pieces["dirty"]:
|
||||
rendered += ".dev0"
|
||||
return rendered
|
||||
|
||||
|
||||
def render_git_describe(pieces):
|
||||
"""TAG[-DISTANCE-gHEX][-dirty].
|
||||
|
||||
Like 'git describe --tags --dirty --always'.
|
||||
|
||||
Exceptions:
|
||||
1: no tags. HEX[-dirty] (note: no 'g' prefix)
|
||||
"""
|
||||
if pieces["closest-tag"]:
|
||||
rendered = pieces["closest-tag"]
|
||||
if pieces["distance"]:
|
||||
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
|
||||
else:
|
||||
# exception #1
|
||||
rendered = pieces["short"]
|
||||
if pieces["dirty"]:
|
||||
rendered += "-dirty"
|
||||
return rendered
|
||||
|
||||
|
||||
def render_git_describe_long(pieces):
|
||||
"""TAG-DISTANCE-gHEX[-dirty].
|
||||
|
||||
Like 'git describe --tags --dirty --always -long'.
|
||||
The distance/hash is unconditional.
|
||||
|
||||
Exceptions:
|
||||
1: no tags. HEX[-dirty] (note: no 'g' prefix)
|
||||
"""
|
||||
if pieces["closest-tag"]:
|
||||
rendered = pieces["closest-tag"]
|
||||
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
|
||||
else:
|
||||
# exception #1
|
||||
rendered = pieces["short"]
|
||||
if pieces["dirty"]:
|
||||
rendered += "-dirty"
|
||||
return rendered
|
||||
|
||||
|
||||
def render(pieces, style):
|
||||
"""Render the given version pieces into the requested style."""
|
||||
if pieces["error"]:
|
||||
return {"version": "unknown",
|
||||
"full-revisionid": pieces.get("long"),
|
||||
"dirty": None,
|
||||
"error": pieces["error"],
|
||||
"date": None}
|
||||
|
||||
if not style or style == "default":
|
||||
style = "pep440" # the default
|
||||
|
||||
if style == "pep440":
|
||||
rendered = render_pep440(pieces)
|
||||
elif style == "pep440-pre":
|
||||
rendered = render_pep440_pre(pieces)
|
||||
elif style == "pep440-post":
|
||||
rendered = render_pep440_post(pieces)
|
||||
elif style == "pep440-old":
|
||||
rendered = render_pep440_old(pieces)
|
||||
elif style == "git-describe":
|
||||
rendered = render_git_describe(pieces)
|
||||
elif style == "git-describe-long":
|
||||
rendered = render_git_describe_long(pieces)
|
||||
else:
|
||||
raise ValueError("unknown style '%s'" % style)
|
||||
|
||||
return {"version": rendered, "full-revisionid": pieces["long"],
|
||||
"dirty": pieces["dirty"], "error": None,
|
||||
"date": pieces.get("date")}
|
||||
|
||||
|
||||
def get_versions():
    """Get version information or return default if unable to do so."""
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    # Strategy 1: expanded keywords baked into this file at archive time.
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    # Locate the project root by stripping one path component per segment of
    # versionfile_source (the relative path from the root to this file).
    try:
        root = os.path.realpath(__file__)
        for _ in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # __file__ is unavailable (frozen interpreter) -- nothing else to try.
        return {"version": "0+unknown",
                "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    # Strategy 2: ask git directly.
    try:
        return render(git_pieces_from_vcs(cfg.tag_prefix, root, verbose),
                      cfg.style)
    except NotThisMethod:
        pass

    # Strategy 3: parse the version out of the parent directory name.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    # All strategies exhausted.
    return {"version": "0+unknown",
            "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version",
            "date": None}
|
|
@ -13,7 +13,7 @@ DEFAULT_SCALE_FACTOR = 1
|
|||
DEFAULT_VAD = 'subs_then_webrtc'
|
||||
DEFAULT_MAX_OFFSET_SECONDS = 600
|
||||
|
||||
SUBTITLE_EXTENSIONS = ('srt', 'ass', 'ssa')
|
||||
SUBTITLE_EXTENSIONS = ('srt', 'ass', 'ssa', 'sub')
|
||||
|
||||
GITHUB_DEV_USER = 'smacke'
|
||||
PROJECT_NAME = 'FFsubsync'
|
||||
|
|
|
@ -8,10 +8,10 @@ import shutil
|
|||
import sys
|
||||
|
||||
import numpy as np
|
||||
from .sklearn_shim import Pipeline
|
||||
|
||||
from .aligners import FFTAligner, MaxScoreAligner, FailedToFindAlignmentException
|
||||
from .constants import *
|
||||
from .sklearn_shim import Pipeline
|
||||
from .speech_transformers import (
|
||||
VideoSpeechTransformer,
|
||||
DeserializeSpeechTransformer,
|
||||
|
@ -21,7 +21,6 @@ from .subtitle_parser import make_subtitle_parser
|
|||
from .subtitle_transformers import SubtitleMerger, SubtitleShifter
|
||||
from .version import __version__
|
||||
|
||||
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -32,23 +31,29 @@ def override(args, **kwargs):
|
|||
|
||||
|
||||
def run(args):
|
||||
retval = 0
|
||||
result = {'retval': 0,
|
||||
'offset_seconds': None,
|
||||
'framerate_scale_factor': None,
|
||||
'sync_was_successful': None}
|
||||
if args.vlc_mode:
|
||||
logger.setLevel(logging.CRITICAL)
|
||||
if args.make_test_case and not args.gui_mode: # this validation not necessary for gui mode
|
||||
if args.srtin is None or args.srtout is None:
|
||||
logger.error('need to specify input and output srt files for test cases')
|
||||
return 1
|
||||
result['retval'] = 1
|
||||
return result
|
||||
if args.overwrite_input:
|
||||
if args.srtin is None:
|
||||
logger.error('need to specify input srt if --overwrite-input is specified since we cannot overwrite stdin')
|
||||
return 1
|
||||
result['retval'] = 1
|
||||
return result
|
||||
if args.srtout is not None:
|
||||
logger.error('overwrite input set but output file specified; refusing to run in case this was not intended')
|
||||
return 1
|
||||
result['retval'] = 1
|
||||
return result
|
||||
args.srtout = args.srtin
|
||||
if args.gui_mode and args.srtout is None:
|
||||
args.srtout = '{}.synced.srt'.format(args.srtin[:-4])
|
||||
args.srtout = '{}.synced.srt'.format(os.path.splitext(args.srtin)[0])
|
||||
ref_format = args.reference[-3:]
|
||||
if args.merge_with_reference and ref_format not in SUBTITLE_EXTENSIONS:
|
||||
logger.error('merging synced output with reference only valid '
|
||||
|
@ -107,8 +112,8 @@ def run(args):
|
|||
logger.info('...done')
|
||||
if args.srtin is None:
|
||||
logger.info('unsynchronized subtitle file not specified; skipping synchronization')
|
||||
return retval
|
||||
parser = make_subtitle_parser(fmt=args.srtin[-3:], caching=True, **args.__dict__)
|
||||
return result
|
||||
parser = make_subtitle_parser(fmt=os.path.splitext(args.srtin)[-1][1:], caching=True, **args.__dict__)
|
||||
logger.info("extracting speech segments from subtitles '%s'...", args.srtin)
|
||||
srt_pipes = [
|
||||
make_subtitle_speech_pipeline(
|
||||
|
@ -147,6 +152,11 @@ def run(args):
|
|||
except FailedToFindAlignmentException as e:
|
||||
sync_was_successful = False
|
||||
logger.error(e)
|
||||
else:
|
||||
result['offset_seconds'] = offset_seconds
|
||||
result['framerate_scale_factor'] = scale_step.scale_factor
|
||||
finally:
|
||||
result['sync_was_successful'] = sync_was_successful
|
||||
if args.make_test_case:
|
||||
if npy_savename is None:
|
||||
raise ValueError('need non-null npy_savename')
|
||||
|
@ -176,11 +186,11 @@ def run(args):
|
|||
else:
|
||||
logger.error('failed to create test archive; no formats supported '
|
||||
'(this should not happen)')
|
||||
retval = 1
|
||||
result['retval'] = 1
|
||||
logger.info('...done')
|
||||
finally:
|
||||
shutil.rmtree(tar_dir)
|
||||
return retval
|
||||
return result
|
||||
|
||||
|
||||
def add_main_args_for_cli(parser):
|
||||
|
@ -200,7 +210,7 @@ def add_main_args_for_cli(parser):
|
|||
|
||||
def add_cli_only_args(parser):
|
||||
parser.add_argument('-v', '--version', action='version',
|
||||
version='%(prog)s {version}'.format(version=__version__))
|
||||
version='{package} {version}'.format(package=__package__, version=__version__))
|
||||
parser.add_argument('--overwrite-input', action='store_true',
|
||||
help='If specified, will overwrite the input srt instead of writing the output to a new file.')
|
||||
parser.add_argument('--encoding', default=DEFAULT_ENCODING,
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
import copy
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pysubs2
|
||||
import srt
|
||||
|
@ -121,15 +122,20 @@ class GenericSubtitlesFile(object):
|
|||
)
|
||||
|
||||
def write_file(self, fname):
|
||||
# TODO: converter to go between self.subs_format and out_format
|
||||
if fname is None:
|
||||
out_format = self._sub_format
|
||||
else:
|
||||
out_format = os.path.splitext(fname)[-1][1:]
|
||||
subs = list(self.gen_raw_resolved_subs())
|
||||
if self.sub_format == 'srt':
|
||||
if out_format == 'srt':
|
||||
to_write = srt.compose(subs)
|
||||
elif self.sub_format in ('ssa', 'ass'):
|
||||
elif out_format in ('ssa', 'ass'):
|
||||
ssaf = pysubs2.SSAFile()
|
||||
ssaf.events = subs
|
||||
to_write = ssaf.to_string(self.sub_format)
|
||||
to_write = ssaf.to_string(out_format)
|
||||
else:
|
||||
raise NotImplementedError('unsupported format: %s' % self.sub_format)
|
||||
raise NotImplementedError('unsupported output format: %s' % out_format)
|
||||
|
||||
to_write = to_write.encode(self.encoding)
|
||||
if six.PY3:
|
||||
|
|
|
@ -301,6 +301,8 @@ class VideoSpeechTransformer(TransformerMixin):
|
|||
if not in_bytes:
|
||||
break
|
||||
newstuff = len(in_bytes) / float(bytes_per_frame) / self.frame_rate
|
||||
if simple_progress + newstuff > total_duration:
|
||||
newstuff = total_duration - simple_progress
|
||||
simple_progress += newstuff
|
||||
pbar.update(newstuff)
|
||||
if self.vlc_mode and total_duration is not None:
|
||||
|
|
|
@ -80,13 +80,15 @@ class GenericSubtitleParser(SubsMixin, TransformerMixin):
|
|||
subs = f.read()
|
||||
if self.encoding == 'infer':
|
||||
encodings_to_try = (chardet.detect(subs)['encoding'],)
|
||||
self.detected_encoding_ = encodings_to_try[0]
|
||||
logger.info('detected encoding: %s' % self.detected_encoding_)
|
||||
exc = None
|
||||
for encoding in encodings_to_try:
|
||||
try:
|
||||
decoded_subs = subs.decode(encoding, errors='replace').strip()
|
||||
if self.sub_format == 'srt':
|
||||
parsed_subs = srt.parse(decoded_subs)
|
||||
elif self.sub_format in ('ass', 'ssa'):
|
||||
elif self.sub_format in ('ass', 'ssa', 'sub'):
|
||||
parsed_subs = pysubs2.SSAFile.from_string(decoded_subs)
|
||||
else:
|
||||
raise NotImplementedError('unsupported format: %s' % self.sub_format)
|
||||
|
@ -98,8 +100,9 @@ class GenericSubtitleParser(SubsMixin, TransformerMixin):
|
|||
encoding=encoding
|
||||
)
|
||||
self.fit_fname = fname
|
||||
self.detected_encoding_ = encoding
|
||||
logger.info('detected encoding: %s' % self.detected_encoding_)
|
||||
if len(encodings_to_try) > 1:
|
||||
self.detected_encoding_ = encoding
|
||||
logger.info('detected encoding: %s' % self.detected_encoding_)
|
||||
return self
|
||||
except Exception as e:
|
||||
exc = e
|
||||
|
|
|
@ -1,11 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
__version__ = '0.4.3'
|
||||
from ._version import get_versions
|
||||
__version__ = get_versions()['version']
|
||||
del get_versions
|
||||
|
||||
|
||||
def make_version_tuple(vstr):
|
||||
def make_version_tuple(vstr=None):
|
||||
if vstr is None:
|
||||
vstr = __version__
|
||||
if vstr[0] == 'v':
|
||||
vstr = vstr[1:]
|
||||
return tuple(map(int, vstr.split('.')))
|
||||
components = []
|
||||
for component in vstr.split('+')[0].split('.'):
|
||||
try:
|
||||
components.append(int(component))
|
||||
except ValueError:
|
||||
break
|
||||
return tuple(components)
|
||||
|
||||
|
||||
def update_available():
|
||||
|
|
|
@ -110,6 +110,7 @@
|
|||
<th>Missing Subtitles</th>
|
||||
<th>Manual Search</th>
|
||||
<th>Manual Upload</th>
|
||||
<th>Tools</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
|
@ -305,6 +306,35 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="episodeToolsModal" class="modal" tabindex="-1" role="dialog">
|
||||
<div class="modal-dialog modal-xl" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h5 class="modal-title"><span id="episode_tools_title_span"></span></h5><br>
|
||||
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
|
||||
<span aria-hidden="true">×</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<table id="episode_tools_result" class="table table-striped" style="width:100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="text-align: left;">Language:</th>
|
||||
<th style="text-align: left;">Filename:</th>
|
||||
<th style="text-align: left;">Sync:</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock body %}
|
||||
|
||||
{% block tail %}
|
||||
|
@ -441,6 +471,12 @@
|
|||
return ''
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
data: null,
|
||||
render: function (data) {
|
||||
return '<a href="" class="episode_tools badge badge-secondary" data-sonarrEpisodeId="' + data.sonarrEpisodeId + '" data-season="' + data.season + '" data-episode="' + data.episode + '" data-episode_title="' + data.title + '"><i class="fa fa-briefcase"></i></a>';
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
|
@ -700,7 +736,10 @@
|
|||
processData: false,
|
||||
contentType: false,
|
||||
type: 'POST',
|
||||
success: function () {
|
||||
beforeSend: function () {
|
||||
$('#upload_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
|
||||
},
|
||||
complete: function () {
|
||||
$('#uploadModal').modal('hide');
|
||||
}
|
||||
});
|
||||
|
@ -865,6 +904,8 @@
|
|||
return "<i class='fas fa-recycle' title='Subtitle file has been upgraded.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
} else if (data === 4) {
|
||||
return "<i class='fas fa-cloud-upload-alt' title='Subtitle file has been manually uploaded.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
} else if (data === 5) {
|
||||
return "<i class='fas fa-clock' title='Subtitle file has been synced.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -880,6 +921,73 @@
|
|||
focus: false
|
||||
});
|
||||
});
|
||||
|
||||
$('#episodes').on('click', '.episode_tools', function (e) {
|
||||
$(this).tooltip('dispose');
|
||||
e.preventDefault();
|
||||
|
||||
$("#episode_tools_title_span").html(seriesDetails['title'] + ' - ' + $(this).data("season") + 'x' + $(this).data("episode") + ' - ' + $(this).data("episode_title"));
|
||||
|
||||
sonarrEpisodeId = $(this).data("sonarrepisodeid");
|
||||
|
||||
$('#episode_tools_result').DataTable({
|
||||
destroy: true,
|
||||
language: {
|
||||
zeroRecords: 'No External Subtitles Found For This Episode'
|
||||
},
|
||||
paging: true,
|
||||
lengthChange: false,
|
||||
pageLength: 5,
|
||||
searching: true,
|
||||
ordering: false,
|
||||
scrollX: true,
|
||||
processing: false,
|
||||
serverSide: false,
|
||||
ajax: {
|
||||
url: '{{ url_for( 'api.episodestools' )}}?episodeid=' + sonarrEpisodeId
|
||||
},
|
||||
columns: [
|
||||
{data: 'language.name'},
|
||||
{data: 'filename'},
|
||||
{
|
||||
data: null,
|
||||
"render": function (data) {
|
||||
return '<a href="" class="subtitles_sync badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-videopath="' + data.videopath + '"><i class="far fa-play-circle"></i></a>';
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
$('#episodeToolsModal')
|
||||
.modal({
|
||||
focus: false
|
||||
});
|
||||
});
|
||||
|
||||
$('#episode_tools_result').on('click', '.subtitles_sync', function (e) {
|
||||
e.preventDefault();
|
||||
const values = {
|
||||
language: $(this).attr("data-language"),
|
||||
subtitlesPath: $(this).attr("data-path"),
|
||||
videoPath: $(this).attr("data-videopath"),
|
||||
mediaType: 'series'
|
||||
};
|
||||
|
||||
var cell = $(this).parent();
|
||||
|
||||
$.ajax({
|
||||
url: "{{ url_for('api.syncsubtitles') }}",
|
||||
type: "POST",
|
||||
dataType: "json",
|
||||
data: values,
|
||||
beforeSend: function () {
|
||||
cell.html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
|
||||
},
|
||||
complete: function () {
|
||||
$('#episodeToolsModal').modal('hide');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function seriesDetailsRefresh() {
|
||||
|
|
|
@ -53,6 +53,8 @@
|
|||
return "<i class='fas fa-recycle' title='Subtitle file has been upgraded.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
} else if (data === 4) {
|
||||
return "<i class='fas fa-cloud-upload-alt' title='Subtitle file has been manually uploaded.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
} else if (data === 5) {
|
||||
return "<i class='fas fa-clock' title='Subtitle file has been synced.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -55,6 +55,8 @@
|
|||
return "<i class='fas fa-recycle' title='Subtitle file has been upgraded.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
} else if (data === 4) {
|
||||
return "<i class='fas fa-cloud-upload-alt' title='Subtitle file has been manually uploaded.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
} else if (data === 5) {
|
||||
return "<i class='fas fa-clock' title='Subtitle file has been synced.' data-toggle='tooltip' data-placement='right'></i>";
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
105
views/movie.html
105
views/movie.html
|
@ -65,6 +65,10 @@
|
|||
<div><i class="fas fa-history align-top text-themecolor text-center font-20" aria-hidden="true"></i></div>
|
||||
<div class="align-bottom text-themecolor small text-center">History</div>
|
||||
</button>
|
||||
<button class="btn btn-outline" id="tools_button">
|
||||
<div><i class="fa fa-briefcase align-top text-themecolor text-center font-20" aria-hidden="true"></i></div>
|
||||
<div class="align-bottom text-themecolor small text-center">Tools</div>
|
||||
</button>
|
||||
</div>
|
||||
{% endblock bcleft %}
|
||||
|
||||
|
@ -310,6 +314,35 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="movieToolsModal" class="modal" tabindex="-1" role="dialog">
|
||||
<div class="modal-dialog modal-xl" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h5 class="modal-title"><span id="movie_tools_title_span"></span></h5><br>
|
||||
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
|
||||
<span aria-hidden="true">×</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<table id="movie_tools_result" class="table table-striped" style="width:100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="text-align: left;">Language:</th>
|
||||
<th style="text-align: left;">Filename:</th>
|
||||
<th style="text-align: left;">Sync:</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock body %}
|
||||
|
||||
{% block tail %}
|
||||
|
@ -564,7 +597,10 @@
|
|||
processData: false,
|
||||
contentType: false,
|
||||
type: 'POST',
|
||||
success: function(){
|
||||
beforeSend: function () {
|
||||
$('#upload_save_button').html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
|
||||
},
|
||||
complete: function(){
|
||||
$('#uploadModal').modal('hide');
|
||||
}
|
||||
});
|
||||
|
@ -690,6 +726,7 @@
|
|||
else if (data === 2) {return "<i class='fas fa-user' title='Subtitle file has been manually downloaded.' data-toggle='tooltip' data-placement='right'></i>";}
|
||||
else if (data === 3) {return "<i class='fas fa-recycle' title='Subtitle file has been upgraded.' data-toggle='tooltip' data-placement='right'></i>";}
|
||||
else if (data === 4) {return "<i class='fas fa-cloud-upload-alt' title='Subtitle file has been manually uploaded.' data-toggle='tooltip' data-placement='right'></i>";}
|
||||
else if (data === 5) {return "<i class='fas fa-clock' title='Subtitle file has been synced.' data-toggle='tooltip' data-placement='right'></i>";}
|
||||
}},
|
||||
{ data: 'language' },
|
||||
{ data: 'provider' },
|
||||
|
@ -703,6 +740,72 @@
|
|||
focus: false
|
||||
});
|
||||
});
|
||||
|
||||
$('#tools_button').on('click', function (e) {
|
||||
$(this).tooltip('dispose');
|
||||
e.preventDefault();
|
||||
|
||||
$("#movie_tools_title_span").html(movieDetails['title']);
|
||||
|
||||
radarrId = movieDetails['radarrId'];
|
||||
|
||||
$('#movie_tools_result').DataTable({
|
||||
destroy: true,
|
||||
language: {
|
||||
zeroRecords: 'No External Subtitles Found For This Movie'
|
||||
},
|
||||
paging: true,
|
||||
lengthChange: false,
|
||||
pageLength: 5,
|
||||
searching: true,
|
||||
ordering: false,
|
||||
scrollX: true,
|
||||
processing: false,
|
||||
serverSide: false,
|
||||
ajax: {
|
||||
url: '{{ url_for( 'api.movietools' )}}?movieid=' + radarrId
|
||||
},
|
||||
columns: [
|
||||
{data: 'language.name'},
|
||||
{data: 'filename'},
|
||||
{
|
||||
data: null,
|
||||
"render": function (data) {
|
||||
return '<a href="" class="subtitles_sync badge badge-secondary" data-language="' + data.language.code3 + '" data-path="' + data.path + '" data-videopath="' + data.videopath + '"><i class="far fa-play-circle"></i></a>';
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
$('#movieToolsModal')
|
||||
.modal({
|
||||
focus: false
|
||||
});
|
||||
});
|
||||
|
||||
$('#movie_tools_result').on('click', '.subtitles_sync', function (e) {
|
||||
e.preventDefault();
|
||||
const values = {
|
||||
language: $(this).attr("data-language"),
|
||||
subtitlesPath: $(this).attr("data-path"),
|
||||
videoPath: $(this).attr("data-videopath"),
|
||||
mediaType: 'movies'
|
||||
};
|
||||
var cell = $(this).parent()
|
||||
;
|
||||
$.ajax({
|
||||
url: "{{ url_for('api.syncsubtitles') }}",
|
||||
type: "POST",
|
||||
dataType: "json",
|
||||
data: values,
|
||||
beforeSend: function () {
|
||||
cell.html('<div class="spinner-border spinner-border-sm" role="status"><span class="sr-only">Loading...</span></div>');
|
||||
},
|
||||
complete: function (data) {
|
||||
$('#movieToolsModal').modal('hide');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function movieDetailsRefresh() {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue