Mirror of https://github.com/morpheus65535/bazarr.git (synced 2025-04-19 12:24:59 -04:00)

Merge branch 'development' of https://github.com/morpheus65535/bazarr into development

This commit is contained in commit c2a1e4d62c.
118 changed files with 2508 additions and 4524 deletions

@@ -62,6 +62,7 @@ If you need something that is not already part of Bazarr, feel free to create a
 - Karagarga.in
 - Ktuvit (Get `hashed_password` using method described [here](https://github.com/XBMCil/service.subtitles.ktuvit))
 - LegendasDivx
+- Legendas.net
 - Napiprojekt
 - Napisy24
 - Nekur

@@ -73,6 +73,7 @@ class SystemSettings(Resource):
 mustNotContain=str(item['mustNotContain']),
 originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
 None,
+tag=item['tag'] if 'tag' in item else None,
 )
 .where(TableLanguagesProfiles.profileId == item['profileId']))
 existing.remove(item['profileId'])

@@ -89,6 +90,7 @@ class SystemSettings(Resource):
 mustNotContain=str(item['mustNotContain']),
 originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
 None,
+tag=item['tag'] if 'tag' in item else None,
 ))
 for profileId in existing:
 # Remove deleted profiles

@@ -6,10 +6,12 @@ import logging

 from flask_restx import Resource, Namespace
 from tzlocal import get_localzone_name
+from alembic.migration import MigrationContext

 from radarr.info import get_radarr_info
 from sonarr.info import get_sonarr_info
 from app.get_args import args
 from app.database import engine, database, select
 from init import startTime

 from ..utils import authenticate

@@ -34,6 +36,16 @@ class SystemStatus(Resource):
 timezone = "Exception while getting time zone name."
 logging.exception("BAZARR is unable to get configured time zone name.")

+try:
+database_version = ".".join([str(x) for x in engine.dialect.server_version_info])
+except Exception:
+database_version = ""
+
+try:
+database_migration = MigrationContext.configure(engine.connect()).get_current_revision()
+except Exception:
+database_migration = "unknown"
+
 system_status = {}
 system_status.update({'bazarr_version': os.environ["BAZARR_VERSION"]})
 system_status.update({'package_version': package_version})

@@ -41,6 +53,8 @@ class SystemStatus(Resource):
 system_status.update({'radarr_version': get_radarr_info.version()})
 system_status.update({'operating_system': platform.platform()})
 system_status.update({'python_version': platform.python_version()})
+system_status.update({'database_engine': f'{engine.dialect.name.capitalize()} {database_version}'})
+system_status.update({'database_migration': database_migration})
 system_status.update({'bazarr_directory': os.path.dirname(os.path.dirname(os.path.dirname(
 os.path.dirname(__file__))))})
 system_status.update({'bazarr_config_directory': args.config_dir})

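The two new try/except blocks above are defensive: not every SQLAlchemy dialect exposes server_version_info (SQLite often does not), and a database Alembic has never stamped has no current revision. A minimal sketch of the same probing pattern against an in-memory SQLite engine (the engine and variable names here are illustrative, not Bazarr's):

# Sketch: probe engine version and Alembic revision defensively,
# mirroring the pattern added to SystemStatus above.
from alembic.migration import MigrationContext
from sqlalchemy import create_engine

engine = create_engine("sqlite://")  # throwaway in-memory database

try:
    # A tuple like (10, 4, 0) on engines that report it; otherwise this raises.
    database_version = ".".join(str(x) for x in engine.dialect.server_version_info)
except Exception:
    database_version = ""

try:
    # None when Alembic has never stamped this database.
    database_migration = MigrationContext.configure(engine.connect()).get_current_revision()
except Exception:
    database_migration = "unknown"

print(repr(database_version), repr(database_migration))
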
@@ -31,12 +31,20 @@ def base_url_slash_cleaner(uri):


 def validate_ip_address(ip_string):
+if ip_string == '*':
+return True
 try:
 ip_address(ip_string)
 return True
 except ValueError:
 return False
+
+def validate_tags(tags):
+if not tags:
+return True
+
+return all(re.match(r'^[a-z0-9_-]+$', item) for item in tags)


 ONE_HUNDRED_YEARS_IN_MINUTES = 52560000
 ONE_HUNDRED_YEARS_IN_HOURS = 876000

@@ -67,7 +75,7 @@ validators = [
 # general section
 Validator('general.flask_secret_key', must_exist=True, default=hexlify(os.urandom(16)).decode(),
 is_type_of=str),
-Validator('general.ip', must_exist=True, default='0.0.0.0', is_type_of=str, condition=validate_ip_address),
+Validator('general.ip', must_exist=True, default='*', is_type_of=str, condition=validate_ip_address),
 Validator('general.port', must_exist=True, default=6767, is_type_of=int, gte=1, lte=65535),
 Validator('general.base_url', must_exist=True, default='', is_type_of=str),
 Validator('general.path_mappings', must_exist=True, default=[], is_type_of=list),

@@ -88,6 +96,9 @@ validators = [
 Validator('general.use_sonarr', must_exist=True, default=False, is_type_of=bool),
 Validator('general.use_radarr', must_exist=True, default=False, is_type_of=bool),
 Validator('general.path_mappings_movie', must_exist=True, default=[], is_type_of=list),
+Validator('general.serie_tag_enabled', must_exist=True, default=False, is_type_of=bool),
+Validator('general.movie_tag_enabled', must_exist=True, default=False, is_type_of=bool),
+Validator('general.remove_profile_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
 Validator('general.serie_default_enabled', must_exist=True, default=False, is_type_of=bool),
 Validator('general.serie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
 Validator('general.movie_default_enabled', must_exist=True, default=False, is_type_of=bool),

@@ -176,7 +187,7 @@ validators = [
 Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
 Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
 is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
-Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
+Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
 Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
 Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
 Validator('sonarr.exclude_season_zero', must_exist=True, default=False, is_type_of=bool),

@@ -199,7 +210,7 @@ validators = [
 Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
 Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
 is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
-Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
+Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
 Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
 Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
 Validator('radarr.sync_only_monitored_movies', must_exist=True, default=False, is_type_of=bool),

@@ -271,6 +282,10 @@ validators = [
 Validator('legendasdivx.password', must_exist=True, default='', is_type_of=str, cast=str),
 Validator('legendasdivx.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
+
+# legendasnet section
+Validator('legendasnet.username', must_exist=True, default='', is_type_of=str, cast=str),
+Validator('legendasnet.password', must_exist=True, default='', is_type_of=str, cast=str),

 # ktuvit section
 Validator('ktuvit.email', must_exist=True, default='', is_type_of=str),
 Validator('ktuvit.hashed_password', must_exist=True, default='', is_type_of=str, cast=str),

@@ -298,6 +313,12 @@ validators = [

 # analytics section
 Validator('analytics.enabled', must_exist=True, default=True, is_type_of=bool),
+
+# jimaku section
+Validator('jimaku.api_key', must_exist=True, default='', is_type_of=str),
+Validator('jimaku.enable_name_search_fallback', must_exist=True, default=True, is_type_of=bool),
+Validator('jimaku.enable_archives_download', must_exist=True, default=False, is_type_of=bool),
+Validator('jimaku.enable_ai_subs', must_exist=True, default=False, is_type_of=bool),

 # titlovi section
 Validator('titlovi.username', must_exist=True, default='', is_type_of=str, cast=str),

@@ -454,6 +475,7 @@ array_keys = ['excluded_tags',
 'enabled_integrations',
 'path_mappings',
 'path_mappings_movie',
+'remove_profile_tags',
 'language_equals',
 'blacklisted_languages',
 'blacklisted_providers']

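The condition= callbacks given to the validators above are plain predicates: they receive the configured value and return a boolean. A quick standalone check of the two helpers, reproduced from the hunk above for illustration:

# Sketch: the two condition helpers used by the validators above,
# exercised against sample values.
import re
from ipaddress import ip_address

def validate_ip_address(ip_string):
    if ip_string == '*':        # '*' means "bind to all interfaces"
        return True
    try:
        ip_address(ip_string)
        return True
    except ValueError:
        return False

def validate_tags(tags):
    if not tags:                # an empty list is valid
        return True
    return all(re.match(r'^[a-z0-9_-]+$', item) for item in tags)

assert validate_ip_address('*') and validate_ip_address('127.0.0.1')
assert not validate_ip_address('not-an-ip')
assert validate_tags(['anime', 'kids_profile']) and not validate_tags(['Bad Tag!'])
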
@@ -379,6 +379,7 @@ def update_profile_id_list():
 'mustContain': ast.literal_eval(x.mustContain) if x.mustContain else [],
 'mustNotContain': ast.literal_eval(x.mustNotContain) if x.mustNotContain else [],
 'originalFormat': x.originalFormat,
+'tag': x.tag,
 } for x in database.execute(
 select(TableLanguagesProfiles.profileId,
 TableLanguagesProfiles.name,

@@ -386,7 +387,8 @@ def update_profile_id_list():
 TableLanguagesProfiles.items,
 TableLanguagesProfiles.mustContain,
 TableLanguagesProfiles.mustNotContain,
-TableLanguagesProfiles.originalFormat))
+TableLanguagesProfiles.originalFormat,
+TableLanguagesProfiles.tag))
 .all()
 ]

@@ -421,7 +423,7 @@ def get_profile_cutoff(profile_id):
 if profile_id and profile_id != 'null':
 cutoff_language = []
 for profile in profile_id_list:
-profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat = profile.values()
+profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat, tag = profile.values()
 if cutoff:
 if profileId == int(profile_id):
 for item in items:

@@ -511,7 +513,8 @@ def upgrade_languages_profile_hi_values():
 TableLanguagesProfiles.items,
 TableLanguagesProfiles.mustContain,
 TableLanguagesProfiles.mustNotContain,
-TableLanguagesProfiles.originalFormat)
+TableLanguagesProfiles.originalFormat,
+TableLanguagesProfiles.tag)
 ))\
 .all():
 items = json.loads(languages_profile.items)

@@ -525,3 +528,32 @@ def upgrade_languages_profile_hi_values():
 .values({"items": json.dumps(items)})
 .where(TableLanguagesProfiles.profileId == languages_profile.profileId)
 )
+
+
+def fix_languages_profiles_with_duplicate_ids():
+languages_profiles = database.execute(
+select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.items, TableLanguagesProfiles.cutoff)).all()
+for languages_profile in languages_profiles:
+if languages_profile.cutoff:
+# ignore profiles that have a cutoff set
+continue
+languages_profile_ids = []
+languages_profile_has_duplicate = False
+languages_profile_items = json.loads(languages_profile.items)
+for items in languages_profile_items:
+if items['id'] in languages_profile_ids:
+languages_profile_has_duplicate = True
+break
+else:
+languages_profile_ids.append(items['id'])
+
+if languages_profile_has_duplicate:
+item_id = 0
+for items in languages_profile_items:
+item_id += 1
+items['id'] = item_id
+database.execute(
+update(TableLanguagesProfiles)
+.values({"items": json.dumps(languages_profile_items)})
+.where(TableLanguagesProfiles.profileId == languages_profile.profileId)
+)

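fix_languages_profiles_with_duplicate_ids() renumbers a profile's items sequentially whenever the same id appears twice, but only for profiles without a cutoff (a cutoff references an item id, so renumbering would silently retarget it). The core renumbering step, isolated on plain data (the sample items are made up):

# Sketch: detect duplicate item ids and renumber sequentially,
# as the migration above does (database plumbing omitted).
import json

items = json.loads('[{"id": 1, "language": "en"}, {"id": 1, "language": "fr"}, {"id": 2, "language": "de"}]')

seen = set()
has_duplicate = False
for item in items:
    if item['id'] in seen:
        has_duplicate = True
        break
    seen.add(item['id'])

if has_duplicate:
    for new_id, item in enumerate(items, start=1):
        item['id'] = new_id  # renumber 1..n in list order

print([item['id'] for item in items])  # [1, 2, 3]
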
@@ -264,6 +264,10 @@ def get_providers_auth():
 'password': settings.legendasdivx.password,
 'skip_wrong_fps': settings.legendasdivx.skip_wrong_fps,
 },
+'legendasnet': {
+'username': settings.legendasnet.username,
+'password': settings.legendasnet.password,
+},
 'xsubs': {
 'username': settings.xsubs.username,
 'password': settings.xsubs.password,

@@ -285,6 +289,12 @@ def get_providers_auth():
 'username': settings.titlovi.username,
 'password': settings.titlovi.password,
 },
+'jimaku': {
+'api_key': settings.jimaku.api_key,
+'enable_name_search_fallback': settings.jimaku.enable_name_search_fallback,
+'enable_archives_download': settings.jimaku.enable_archives_download,
+'enable_ai_subs': settings.jimaku.enable_ai_subs,
+},
 'ktuvit': {
 'email': settings.ktuvit.email,
 'hashed_password': settings.ktuvit.hashed_password,

@@ -58,10 +58,13 @@ class NoExceptionFormatter(logging.Formatter):

 class UnwantedWaitressMessageFilter(logging.Filter):
 def filter(self, record):
-if settings.general.debug:
-# no filtering in debug mode
+if settings.general.debug or "BAZARR" in record.msg:
+# no filtering in debug mode or if originating from us
 return True
+
+if record.levelno < logging.ERROR:
+return False

 unwantedMessages = [
 "Exception while serving /api/socket.io/",
 ['Session is disconnected', 'Session not found'],

@@ -161,7 +164,7 @@ def configure_logging(debug=False):
 logging.getLogger("websocket").setLevel(logging.CRITICAL)
 logging.getLogger("ga4mp.ga4mp").setLevel(logging.ERROR)

-logging.getLogger("waitress").setLevel(logging.ERROR)
+logging.getLogger("waitress").setLevel(logging.INFO)
 logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
 logging.getLogger("knowit").setLevel(logging.CRITICAL)
 logging.getLogger("enzyme").setLevel(logging.CRITICAL)

@@ -169,9 +172,14 @@ def configure_logging(debug=False):
 logging.getLogger("rebulk").setLevel(logging.WARNING)
 logging.getLogger("stevedore.extension").setLevel(logging.CRITICAL)

+def empty_file(filename):
+# Open the log file in write mode to clear its contents
+with open(filename, 'w'):
+pass  # Just opening and closing the file will clear it
+
 def empty_log():
-fh.doRollover()
+empty_file(get_log_file_path())
 logging.info('BAZARR Log file emptied')

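The widened filter() now lets a record through if debug is on or the message is Bazarr's own ("BAZARR" in record.msg), drops anything below ERROR, and only then would consult the unwanted-message list. A self-contained sketch of that precedence (the settings object is stubbed with a module constant, and the class name is illustrative):

# Sketch: the precedence implemented by UnwantedWaitressMessageFilter above.
import logging

DEBUG_MODE = False  # stand-in for settings.general.debug

class OwnMessagesFirstFilter(logging.Filter):  # hypothetical name
    def filter(self, record):
        if DEBUG_MODE or "BAZARR" in str(record.msg):
            return True          # never filter our own messages
        if record.levelno < logging.ERROR:
            return False         # third-party noise below ERROR is dropped
        return True

logger = logging.getLogger("waitress-demo")
logger.addFilter(OwnMessagesFirstFilter())
logger.warning("task queue depth is 32")          # dropped: third-party, below ERROR
logger.warning("BAZARR is still serving")         # kept: our own message
logger.error("Exception while serving request")   # kept: at or above ERROR
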
@@ -50,7 +50,7 @@ class Server:
 self.connected = True
 except OSError as error:
 if error.errno == errno.EADDRNOTAVAIL:
-logging.exception("BAZARR cannot bind to specified IP, trying with default (0.0.0.0)")
+logging.exception("BAZARR cannot bind to specified IP, trying with 0.0.0.0")
 self.address = '0.0.0.0'
 self.connected = False
 super(Server, self).__init__()

@@ -76,8 +76,7 @@ class Server:
 self.shutdown(EXIT_INTERRUPT)

 def start(self):
-logging.info(f'BAZARR is started and waiting for request on http://{self.server.effective_host}:'
-f'{self.server.effective_port}')
+self.server.print_listen("BAZARR is started and waiting for requests on: http://{}:{}")
 signal.signal(signal.SIGINT, self.interrupt_handler)
 try:
 self.server.run()

@@ -5,7 +5,8 @@ import os

 from subzero.language import Language

-from app.database import database, insert
+from app.database import database, insert, update
+from sqlalchemy.exc import IntegrityError

 logger = logging.getLogger(__name__)

@@ -18,7 +19,7 @@ class CustomLanguage:
 language = "pt-BR"
 official_alpha2 = "pt"
 official_alpha3 = "por"
-name = "Brazilian Portuguese"
+name = "Portuguese (Brazil)"
 iso = "BR"
 _scripts = []
 _possible_matches = ("pt-br", "pob", "pb", "brazilian", "brasil", "brazil")

@@ -50,13 +51,19 @@ class CustomLanguage:
 """Register the custom language subclasses in the database."""

 for sub in cls.__subclasses__():
-database.execute(
-insert(table)
-.values(code3=sub.alpha3,
-code2=sub.alpha2,
-name=sub.name,
-enabled=0)
-.on_conflict_do_nothing())
+try:
+database.execute(
+insert(table)
+.values(code3=sub.alpha3,
+code2=sub.alpha2,
+name=sub.name,
+enabled=0))
+except IntegrityError:
+database.execute(
+update(table)
+.values(code2=sub.alpha2,
+name=sub.name)
+.where(table.code3 == sub.alpha3))

 @classmethod
 def found_external(cls, subtitle, subtitle_path):

@@ -212,7 +219,7 @@ class LatinAmericanSpanish(CustomLanguage):
 language = "es-MX"
 official_alpha2 = "es"
 official_alpha3 = "spa"
-name = "Latin American Spanish"
+name = "Spanish (Latino)"
 iso = "MX" # Not fair, but ok
 _scripts = ("419",)
 _possible_matches = (

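The new try/except replaces the dialect-specific on_conflict_do_nothing() with a portable insert-then-update: attempt the INSERT, and on IntegrityError (row already present) fall back to an UPDATE of the mutable columns. The same pattern on a throwaway table (table and values here are illustrative):

# Sketch: portable upsert via IntegrityError fallback, as in CustomLanguage.register().
from sqlalchemy import Column, MetaData, String, Table, create_engine, insert, update
from sqlalchemy.exc import IntegrityError

engine = create_engine("sqlite://")
meta = MetaData()
langs = Table("languages", meta,
              Column("code3", String, primary_key=True),
              Column("name", String))
meta.create_all(engine)

def register(code3, name):
    try:
        with engine.begin() as conn:
            conn.execute(insert(langs).values(code3=code3, name=name))
    except IntegrityError:
        # Row exists: refresh the mutable columns instead.
        with engine.begin() as conn:
            conn.execute(update(langs).values(name=name).where(langs.c.code3 == code3))

register("pob", "Brazilian Portuguese")
register("pob", "Portuguese (Brazil)")  # second call takes the UPDATE path

with engine.connect() as conn:
    print(conn.execute(langs.select()).all())  # [('pob', 'Portuguese (Brazil)')]
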
@@ -44,6 +44,12 @@ def create_languages_dict():
 .values(name='Chinese Simplified')
 .where(TableSettingsLanguages.code3 == 'zho'))

+# replace Modern Greek by Greek to match Sonarr and Radarr languages
+database.execute(
+update(TableSettingsLanguages)
+.values(name='Greek')
+.where(TableSettingsLanguages.code3 == 'ell'))
+
 languages_dict = [{
 'code3': x.code3,
 'code2': x.code2,

@@ -55,6 +61,19 @@ def create_languages_dict():
 .all()]


+def audio_language_from_name(lang):
+lang_map = {
+'Chinese': 'zh',
+}
+
+alpha2_code = lang_map.get(lang, None)
+
+if alpha2_code is None:
+return lang
+
+return language_from_alpha2(alpha2_code)
+
+
 def language_from_alpha2(lang):
 return next((item['name'] for item in languages_dict if item['code2'] == lang[:2]), None)

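audio_language_from_name() reconciles Sonarr/Radarr audio-language names with Bazarr's own table: a name in the map is first collapsed to an alpha-2 code and then looked up again, so 'Chinese' comes back as whatever name Bazarr stores for 'zh' (renamed to 'Chinese Simplified' above), while unknown names pass through untouched. A table-free sketch of the same round-trip, with the database lookup replaced by a plain dict:

# Sketch: the name -> alpha-2 -> canonical-name round-trip performed by
# audio_language_from_name() above; languages_dict stands in for the DB rows.
languages_dict = [
    {'code2': 'zh', 'name': 'Chinese Simplified'},
    {'code2': 'el', 'name': 'Greek'},
]

def language_from_alpha2(lang):
    return next((item['name'] for item in languages_dict if item['code2'] == lang[:2]), None)

def audio_language_from_name(lang):
    alpha2_code = {'Chinese': 'zh'}.get(lang)
    if alpha2_code is None:
        return lang  # no special mapping: pass the name through unchanged
    return language_from_alpha2(alpha2_code)

print(audio_language_from_name('Chinese'))  # -> 'Chinese Simplified'
print(audio_language_from_name('French'))   # -> 'French' (unmapped)
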
@@ -35,7 +35,8 @@ else:
 # there's missing embedded packages after a commit
 check_if_new_update()

-from app.database import System, database, update, migrate_db, create_db_revision, upgrade_languages_profile_hi_values # noqa E402
+from app.database import (System, database, update, migrate_db, create_db_revision, upgrade_languages_profile_hi_values,
+fix_languages_profiles_with_duplicate_ids) # noqa E402
 from app.notifier import update_notifier # noqa E402
 from languages.get_languages import load_language_in_db # noqa E402
 from app.signalr_client import sonarr_signalr_client, radarr_signalr_client # noqa E402

@@ -50,6 +51,7 @@ if args.create_db_revision:
 else:
 migrate_db(app)
 upgrade_languages_profile_hi_values()
+fix_languages_profiles_with_duplicate_ids()

 configure_proxy_func()

@@ -28,6 +28,11 @@ def trace(message):
 logging.debug(FEATURE_PREFIX + message)


+def get_language_profiles():
+return database.execute(
+select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.name, TableLanguagesProfiles.tag)).all()
+
+
 def update_all_movies():
 movies_full_scan_subtitles()
 logging.info('BAZARR All existing movie subtitles indexed from disk.')

@@ -59,7 +64,7 @@ def update_movie(updated_movie, send_event):
 def get_movie_monitored_status(movie_id):
 existing_movie_monitored = database.execute(
 select(TableMovies.monitored)
-.where(TableMovies.tmdbId == movie_id))\
+.where(TableMovies.tmdbId == str(movie_id)))\
 .first()
 if existing_movie_monitored is None:
 return True

@@ -108,6 +113,7 @@ def update_movies(send_event=True):
 else:
 audio_profiles = get_profile_list()
 tagsDict = get_tags()
+language_profiles = get_language_profiles()

 # Get movies data from radarr
 movies = get_movies_from_radarr_api(apikey_radarr=apikey_radarr)

@@ -178,6 +184,7 @@ def update_movies(send_event=True):
 if str(movie['tmdbId']) in current_movies_id_db:
 parsed_movie = movieParser(movie, action='update',
 tags_dict=tagsDict,
+language_profiles=language_profiles,
 movie_default_profile=movie_default_profile,
 audio_profiles=audio_profiles)
 if not any([parsed_movie.items() <= x for x in current_movies_db_kv]):

@@ -186,6 +193,7 @@ def update_movies(send_event=True):
 else:
 parsed_movie = movieParser(movie, action='insert',
 tags_dict=tagsDict,
+language_profiles=language_profiles,
 movie_default_profile=movie_default_profile,
 audio_profiles=audio_profiles)
 add_movie(parsed_movie, send_event)

@@ -247,6 +255,7 @@ def update_one_movie(movie_id, action, defer_search=False):

 audio_profiles = get_profile_list()
 tagsDict = get_tags()
+language_profiles = get_language_profiles()

 try:
 # Get movie data from radarr api

@@ -256,10 +265,10 @@ def update_one_movie(movie_id, action, defer_search=False):
 return
 else:
 if action == 'updated' and existing_movie:
-movie = movieParser(movie_data, action='update', tags_dict=tagsDict,
+movie = movieParser(movie_data, action='update', tags_dict=tagsDict, language_profiles=language_profiles,
 movie_default_profile=movie_default_profile, audio_profiles=audio_profiles)
 elif action == 'updated' and not existing_movie:
-movie = movieParser(movie_data, action='insert', tags_dict=tagsDict,
+movie = movieParser(movie_data, action='insert', tags_dict=tagsDict, language_profiles=language_profiles,
 movie_default_profile=movie_default_profile, audio_profiles=audio_profiles)
 except Exception:
 logging.exception('BAZARR cannot get movie returned by SignalR feed from Radarr API.')

@@ -3,7 +3,7 @@
 import os

 from app.config import settings
-from languages.get_languages import language_from_alpha2
+from languages.get_languages import audio_language_from_name
 from radarr.info import get_radarr_info
 from utilities.video_analyzer import embedded_audio_reader
 from utilities.path_mappings import path_mappings

@@ -11,7 +11,17 @@ from utilities.path_mappings import path_mappings
 from .converter import RadarrFormatAudioCodec, RadarrFormatVideoCodec


-def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles):
+def get_matching_profile(tags, language_profiles):
+matching_profile = None
+if len(tags) > 0:
+for profileId, name, tag in language_profiles:
+if tag in tags:
+matching_profile = profileId
+break
+return matching_profile
+
+
+def movieParser(movie, action, tags_dict, language_profiles, movie_default_profile, audio_profiles):
 if 'movieFile' in movie:
 try:
 overview = str(movie['overview'])

@@ -107,9 +117,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
 for item in movie['movieFile']['languages']:
 if isinstance(item, dict):
 if 'name' in item:
-language = item['name']
-if item['name'] == 'Portuguese (Brazil)':
-language = language_from_alpha2('pb')
+language = audio_language_from_name(item['name'])
 audio_language.append(language)

 tags = [d['label'] for d in tags_dict if d['id'] in movie['tags']]

@@ -140,6 +148,15 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
 parsed_movie['subtitles'] = '[]'
 parsed_movie['profileId'] = movie_default_profile

+if settings.general.movie_tag_enabled:
+tag_profile = get_matching_profile(tags, language_profiles)
+if tag_profile:
+parsed_movie['profileId'] = tag_profile
+remove_profile_tags_list = settings.general.remove_profile_tags
+if len(remove_profile_tags_list) > 0:
+if set(tags) & set(remove_profile_tags_list):
+parsed_movie['profileId'] = None
+
 return parsed_movie

|
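get_matching_profile() is a first-match scan: language profiles are checked in the order the database returned them, and the first profile whose tag appears in the movie's Radarr tags wins; remove_profile_tags then acts as an override that clears the profile entirely. Exercising that order-dependence on plain tuples (the profile rows and tags are illustrative):

# Sketch: first-match tag-to-profile resolution as in get_matching_profile() above,
# followed by the remove_profile_tags override.
language_profiles = [  # (profileId, name, tag) rows, as returned by the query
    (1, 'English Only', 'english'),
    (2, 'Anime', 'anime'),
]

def get_matching_profile(tags, language_profiles):
    matching_profile = None
    if len(tags) > 0:
        for profileId, name, tag in language_profiles:
            if tag in tags:
                matching_profile = profileId
                break  # first match wins; profile order matters
    return matching_profile

tags = ['anime', '4k']
profile_id = get_matching_profile(tags, language_profiles)  # -> 2

remove_profile_tags = ['no-subs']
if set(tags) & set(remove_profile_tags):
    profile_id = None  # tag-based opt-out beats any match

print(profile_id)  # 2
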
@@ -5,6 +5,7 @@ import os
 from app.config import settings
 from app.database import TableShows, database, select
 from constants import MINIMUM_VIDEO_SIZE
+from languages.get_languages import audio_language_from_name
 from utilities.path_mappings import path_mappings
 from utilities.video_analyzer import embedded_audio_reader
 from sonarr.info import get_sonarr_info

@@ -12,7 +13,17 @@ from sonarr.info import get_sonarr_info
 from .converter import SonarrFormatVideoCodec, SonarrFormatAudioCodec


-def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles):
+def get_matching_profile(tags, language_profiles):
+matching_profile = None
+if len(tags) > 0:
+for profileId, name, tag in language_profiles:
+if tag in tags:
+matching_profile = profileId
+break
+return matching_profile
+
+
+def seriesParser(show, action, tags_dict, language_profiles, serie_default_profile, audio_profiles):
 overview = show['overview'] if 'overview' in show else ''
 poster = ''
 fanart = ''

@@ -24,9 +35,11 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
 if image['coverType'] == 'fanart':
 fanart = image['url'].split('?')[0]

-alternate_titles = None
 if show['alternateTitles'] is not None:
-alternate_titles = str([item['title'] for item in show['alternateTitles']])
+alternate_titles = [item['title'] for item in show['alternateTitles'] if 'title' in item and item['title'] not
+in [None, ''] and item["title"] != show["title"]]
+else:
+alternate_titles = []

 tags = [d['label'] for d in tags_dict if d['id'] in show['tags']]

@@ -42,39 +55,37 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
 else:
 audio_language = []

-if action == 'update':
-return {'title': show["title"],
-'path': show["path"],
-'tvdbId': int(show["tvdbId"]),
-'sonarrSeriesId': int(show["id"]),
-'overview': overview,
-'poster': poster,
-'fanart': fanart,
-'audio_language': str(audio_language),
-'sortTitle': show['sortTitle'],
-'year': str(show['year']),
-'alternativeTitles': alternate_titles,
-'tags': str(tags),
-'seriesType': show['seriesType'],
-'imdbId': imdbId,
-'monitored': str(bool(show['monitored']))}
-else:
-return {'title': show["title"],
-'path': show["path"],
-'tvdbId': show["tvdbId"],
-'sonarrSeriesId': show["id"],
-'overview': overview,
-'poster': poster,
-'fanart': fanart,
-'audio_language': str(audio_language),
-'sortTitle': show['sortTitle'],
-'year': str(show['year']),
-'alternativeTitles': alternate_titles,
-'tags': str(tags),
-'seriesType': show['seriesType'],
-'imdbId': imdbId,
-'profileId': serie_default_profile,
-'monitored': str(bool(show['monitored']))}
+parsed_series = {
+'title': show["title"],
+'path': show["path"],
+'tvdbId': int(show["tvdbId"]),
+'sonarrSeriesId': int(show["id"]),
+'overview': overview,
+'poster': poster,
+'fanart': fanart,
+'audio_language': str(audio_language),
+'sortTitle': show['sortTitle'],
+'year': str(show['year']),
+'alternativeTitles': str(alternate_titles),
+'tags': str(tags),
+'seriesType': show['seriesType'],
+'imdbId': imdbId,
+'monitored': str(bool(show['monitored']))
+}
+
+if action == 'insert':
+parsed_series['profileId'] = serie_default_profile
+
+if settings.general.serie_tag_enabled:
+tag_profile = get_matching_profile(tags, language_profiles)
+if tag_profile:
+parsed_series['profileId'] = tag_profile
+remove_profile_tags_list = settings.general.remove_profile_tags
+if len(remove_profile_tags_list) > 0:
+if set(tags) & set(remove_profile_tags_list):
+parsed_series['profileId'] = None
+
+return parsed_series


 def profile_id_to_language(id_, profiles):

@@ -111,13 +122,13 @@ def episodeParser(episode):
 item = episode['episodeFile']['language']
 if isinstance(item, dict):
 if 'name' in item:
-audio_language.append(item['name'])
+audio_language.append(audio_language_from_name(item['name']))
 elif 'languages' in episode['episodeFile'] and len(episode['episodeFile']['languages']):
 items = episode['episodeFile']['languages']
 if isinstance(items, list):
 for item in items:
 if 'name' in item:
-audio_language.append(item['name'])
+audio_language.append(audio_language_from_name(item['name']))
 else:
 audio_language = database.execute(
 select(TableShows.audio_language)

@@ -26,6 +26,11 @@ def trace(message):
 logging.debug(FEATURE_PREFIX + message)


+def get_language_profiles():
+return database.execute(
+select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.name, TableLanguagesProfiles.tag)).all()
+
+
 def get_series_monitored_table():
 series_monitored = database.execute(
 select(TableShows.tvdbId, TableShows.monitored))\

@@ -58,6 +63,7 @@ def update_series(send_event=True):

 audio_profiles = get_profile_list()
 tagsDict = get_tags()
+language_profiles = get_language_profiles()

 # Get shows data from Sonarr
 series = get_series_from_sonarr_api(apikey_sonarr=apikey_sonarr)

@@ -111,6 +117,7 @@ def update_series(send_event=True):

 if show['id'] in current_shows_db:
 updated_series = seriesParser(show, action='update', tags_dict=tagsDict,
+language_profiles=language_profiles,
 serie_default_profile=serie_default_profile,
 audio_profiles=audio_profiles)

@@ -132,6 +139,7 @@ def update_series(send_event=True):
 event_stream(type='series', payload=show['id'])
 else:
 added_series = seriesParser(show, action='insert', tags_dict=tagsDict,
+language_profiles=language_profiles,
 serie_default_profile=serie_default_profile,
 audio_profiles=audio_profiles)

@@ -203,7 +211,7 @@ def update_one_series(series_id, action):

 audio_profiles = get_profile_list()
 tagsDict = get_tags()
-
+language_profiles = get_language_profiles()
 try:
 # Get series data from sonarr api
 series = None

@@ -215,10 +223,12 @@ def update_one_series(series_id, action):
 else:
 if action == 'updated' and existing_series:
 series = seriesParser(series_data[0], action='update', tags_dict=tagsDict,
+language_profiles=language_profiles,
 serie_default_profile=serie_default_profile,
 audio_profiles=audio_profiles)
 elif action == 'updated' and not existing_series:
 series = seriesParser(series_data[0], action='insert', tags_dict=tagsDict,
+language_profiles=language_profiles,
 serie_default_profile=serie_default_profile,
 audio_profiles=audio_profiles)
 except Exception:

@@ -216,7 +216,9 @@ def list_missing_subtitles_movies(no=None, send_event=True):

 if cutoff_temp_list:
 for cutoff_temp in cutoff_temp_list:
-cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
+cutoff_language = {'language': cutoff_temp['language'],
+'forced': cutoff_temp['forced'],
+'hi': cutoff_temp['hi']}
 if cutoff_temp['audio_exclude'] == 'True' and \
 any(x['code2'] == cutoff_temp['language'] for x in
 get_audio_profile_languages(movie_subtitles.audio_language)):

@@ -224,7 +226,10 @@ def list_missing_subtitles_movies(no=None, send_event=True):
 elif cutoff_language in actual_subtitles_list:
 cutoff_met = True
 # HI is considered as good as normal
-elif cutoff_language and [cutoff_language[0], 'False', 'True'] in actual_subtitles_list:
+elif (cutoff_language and
+{'language': cutoff_language['language'],
+'forced': 'False',
+'hi': 'True'} in actual_subtitles_list):
 cutoff_met = True

 if cutoff_met:

@@ -216,7 +216,9 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):

 if cutoff_temp_list:
 for cutoff_temp in cutoff_temp_list:
-cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
+cutoff_language = {'language': cutoff_temp['language'],
+'forced': cutoff_temp['forced'],
+'hi': cutoff_temp['hi']}
 if cutoff_temp['audio_exclude'] == 'True' and \
 any(x['code2'] == cutoff_temp['language'] for x in
 get_audio_profile_languages(episode_subtitles.audio_language)):

@@ -224,7 +226,10 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
 elif cutoff_language in actual_subtitles_list:
 cutoff_met = True
 # HI is considered as good as normal
-elif [cutoff_language[0], 'False', 'True'] in actual_subtitles_list:
+elif (cutoff_language and
+{'language': cutoff_language['language'],
+'forced': 'False',
+'hi': 'True'} in actual_subtitles_list):
 cutoff_met = True

 if cutoff_met:

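The switch from a three-element list to a dict makes the cutoff comparison keyed rather than positional; membership still works because == on dicts compares keys and values, so the dict test behaves exactly like the old list test minus the ordering trap. A short demonstration with made-up subtitle entries:

# Sketch: dict membership as used by the new cutoff test above.
actual_subtitles_list = [
    {'language': 'fr', 'forced': 'False', 'hi': 'False'},
    {'language': 'en', 'forced': 'False', 'hi': 'True'},
]

cutoff_language = {'language': 'en', 'forced': 'False', 'hi': 'False'}

cutoff_met = cutoff_language in actual_subtitles_list
# HI is considered as good as normal:
if not cutoff_met and {'language': cutoff_language['language'],
                       'forced': 'False',
                       'hi': 'True'} in actual_subtitles_list:
    cutoff_met = True

print(cutoff_met)  # True, satisfied by the HI English subtitle
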
@@ -2,7 +2,6 @@

 import os
 import logging
 import re

 from guess_language import guess_language
 from subliminal_patch import core

@@ -136,6 +135,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
 continue
 text = text.decode(encoding)

-if bool(re.search(core.HI_REGEX, text)):
+if core.parse_for_hi_regex(subtitle_text=text,
+alpha3_language=language.alpha3 if hasattr(language, 'alpha3') else None):
 subtitles[subtitle] = Language.rebuild(subtitles[subtitle], forced=False, hi=True)
 return subtitles

@@ -4,10 +4,12 @@ from .ffprobe import refine_from_ffprobe
 from .database import refine_from_db
 from .arr_history import refine_from_arr_history
 from .anidb import refine_from_anidb
+from .anilist import refine_from_anilist

 registered = {
 "database": refine_from_db,
 "ffprobe": refine_from_ffprobe,
 "arr_history": refine_from_arr_history,
 "anidb": refine_from_anidb,
+"anilist": refine_from_anilist,  # Must run AFTER AniDB
 }

@@ -20,7 +20,10 @@ except ImportError:
 except ImportError:
 import xml.etree.ElementTree as etree

-refined_providers = {'animetosho'}
+refined_providers = {'animetosho', 'jimaku'}
+providers_requiring_anidb_api = {'animetosho'}

 logger = logging.getLogger(__name__)

 api_url = 'http://api.anidb.net:9001/httpapi'

@@ -40,6 +43,10 @@ class AniDBClient(object):
 @property
 def is_throttled(self):
 return self.cache and self.cache.get('is_throttled')

+@property
+def has_api_credentials(self):
+return self.api_client_key != '' and self.api_client_key is not None
+
 @property
 def daily_api_request_count(self):

@@ -62,7 +69,9 @@ class AniDBClient(object):
 return r.content

 @region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
-def get_series_id(self, mappings, tvdb_series_season, tvdb_series_id, episode):
+def get_show_information(self, tvdb_series_id, tvdb_series_season, episode):
+mappings = etree.fromstring(self.get_series_mappings())

 # Enrich the collection of anime with the episode offset
 animes = [
 self.AnimeInfo(anime, int(anime.attrib.get('episodeoffset', 0)))

@@ -71,37 +80,72 @@ class AniDBClient(object):
 )
 ]

+is_special_entry = False
 if not animes:
-return None, None
+# Some entries will store TVDB seasons in a nested mapping list, identifiable by the value 'a' as the season
+special_entries = mappings.findall(
+f".//anime[@tvdbid='{tvdb_series_id}'][@defaulttvdbseason='a']"
+)

-# Sort the anime by offset in ascending order
-animes.sort(key=lambda a: a.episode_offset)
+if not special_entries:
+return None, None, None

-# Different from Tvdb, Anidb have different ids for the Parts of a season
-anidb_id = None
-offset = 0
+is_special_entry = True
+for special_entry in special_entries:
+mapping_list = special_entry.findall(f".//mapping[@tvdbseason='{tvdb_series_season}']")
+if len(mapping_list) > 0:
+anidb_id = int(special_entry.attrib.get('anidbid'))
+offset = int(mapping_list[0].attrib.get('offset', 0))

-for index, anime_info in enumerate(animes):
-anime, episode_offset = anime_info
-anidb_id = int(anime.attrib.get('anidbid'))
-if episode > episode_offset:
-anidb_id = anidb_id
-offset = episode_offset
+if not is_special_entry:
+# Sort the anime by offset in ascending order
+animes.sort(key=lambda a: a.episode_offset)

-return anidb_id, episode - offset
+# Different from Tvdb, Anidb have different ids for the Parts of a season
+anidb_id = None
+offset = 0
+
+for index, anime_info in enumerate(animes):
+anime, episode_offset = anime_info
+
+mapping_list = anime.find('mapping-list')
+
+# Handle mapping list for Specials
+if mapping_list:
+for mapping in mapping_list.findall("mapping"):
+if mapping.text is None:
+continue
+
+# Mapping values are usually like ;1-1;2-1;3-1;
+for episode_ref in mapping.text.split(';'):
+if not episode_ref:
+continue
+
+anidb_episode, tvdb_episode = map(int, episode_ref.split('-'))
+if tvdb_episode == episode:
+anidb_id = int(anime.attrib.get('anidbid'))
+
+return anidb_id, anidb_episode, 0
+
+if episode > episode_offset:
+anidb_id = int(anime.attrib.get('anidbid'))
+offset = episode_offset
+
+return anidb_id, episode - offset, offset

 @region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
-def get_series_episodes_ids(self, tvdb_series_id, season, episode):
-mappings = etree.fromstring(self.get_series_mappings())
-
-series_id, episode_no = self.get_series_id(mappings, season, tvdb_series_id, episode)
-
+def get_episode_ids(self, series_id, episode_no):
 if not series_id:
-return None, None
+return None

 episodes = etree.fromstring(self.get_episodes(series_id))

-return series_id, int(episodes.find(f".//episode[epno='{episode_no}']").attrib.get('id'))
+episode = episodes.find(f".//episode[epno='{episode_no}']")
+
+if not episode:
+return series_id, None
+
+return series_id, int(episode.attrib.get('id'))

 @region.cache_on_arguments(expiration_time=REFINER_EXPIRATION_TIME)
 def get_episodes(self, series_id):

@@ -156,8 +200,6 @@ class AniDBClient(object):

 def refine_from_anidb(path, video):
 if not isinstance(video, Episode) or not video.series_tvdb_id:
-logging.debug(f'Video is not an Anime TV series, skipping refinement for {video}')
-
 return

 if refined_providers.intersection(settings.general.enabled_providers) and video.series_anidb_id is None:

@@ -169,27 +211,35 @@ def refine_anidb_ids(video):

 season = video.season if video.season else 0

-if anidb_client.is_throttled:
-logging.warning(f'API daily limit reached. Skipping refinement for {video.series}')
-
-return video
-
-try:
-anidb_series_id, anidb_episode_id = anidb_client.get_series_episodes_ids(
-video.series_tvdb_id,
-season, video.episode,
-)
-except TooManyRequests:
-logging.error(f'API daily limit reached while refining {video.series}')
-
-anidb_client.mark_as_throttled()
-
-return video
-
-if not anidb_episode_id:
-logging.error(f'Could not find anime series {video.series}')
+anidb_series_id, anidb_episode_no, anidb_season_episode_offset = anidb_client.get_show_information(
+video.series_tvdb_id,
+season,
+video.episode,
+)

+if not anidb_series_id:
+logger.error(f'Could not find anime series {video.series}')
 return video

+anidb_episode_id = None
+if anidb_client.has_api_credentials:
+if anidb_client.is_throttled:
+logger.warning(f'API daily limit reached. Skipping episode ID refinement for {video.series}')
+else:
+try:
+anidb_episode_id = anidb_client.get_episode_ids(
+anidb_series_id,
+anidb_episode_no
+)
+except TooManyRequests:
+logger.error(f'API daily limit reached while refining {video.series}')
+anidb_client.mark_as_throttled()
+else:
+intersect = providers_requiring_anidb_api.intersection(settings.general.enabled_providers)
+if len(intersect) >= 1:
+logger.warn(f'AniDB API credentials are not fully set up, the following providers may not work: {intersect}')

 video.series_anidb_id = anidb_series_id
 video.series_anidb_episode_id = anidb_episode_id
+video.series_anidb_episode_no = anidb_episode_no
+video.series_anidb_season_episode_offset = anidb_season_episode_offset

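The nested mapping handling above hinges on the anime-lists mapping text format: a mapping element's text is a ;-separated list of anidb-tvdb episode pairs such as ';1-1;2-1;3-1;'. The parsing loop isolated as a helper (the function name is hypothetical, for illustration only):

# Sketch: parsing an anime-lists <mapping> text like ';1-1;2-1;3-1;'
# to find the AniDB episode for a given TVDB episode, as above.
def anidb_episode_for(mapping_text, tvdb_target):
    for episode_ref in mapping_text.split(';'):
        if not episode_ref:
            continue  # leading/trailing ';' produce empty chunks
        anidb_episode, tvdb_episode = map(int, episode_ref.split('-'))
        if tvdb_episode == tvdb_target:
            return anidb_episode
    return None

print(anidb_episode_for(';1-1;2-2;3-3;', 2))  # -> 2
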
bazarr/subtitles/refiners/anilist.py (new file, 79 lines)

@@ -0,0 +1,79 @@
+# coding=utf-8
+# fmt: off
+
+import logging
+import time
+import requests
+from collections import namedtuple
+from datetime import timedelta
+
+from app.config import settings
+from subliminal import Episode, region, __short_version__
+
+logger = logging.getLogger(__name__)
+refined_providers = {'jimaku'}
+
+
+class AniListClient(object):
+def __init__(self, session=None, timeout=10):
+self.session = session or requests.Session()
+self.session.timeout = timeout
+self.session.headers['Content-Type'] = 'application/json'
+self.session.headers['User-Agent'] = 'Subliminal/%s' % __short_version__
+
+@region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
+def get_series_mappings(self):
+r = self.session.get(
+'https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-mini.json'
+)
+
+r.raise_for_status()
+return r.json()
+
+def get_series_id(self, candidate_id_name, candidate_id_value):
+anime_list = self.get_series_mappings()
+
+tag_map = {
+"series_anidb_id": "anidb_id",
+"imdb_id": "imdb_id"
+}
+mapped_tag = tag_map.get(candidate_id_name, candidate_id_name)
+
+obj = [obj for obj in anime_list if mapped_tag in obj and str(obj[mapped_tag]) == str(candidate_id_value)]
+logger.debug(f"Based on '{mapped_tag}': '{candidate_id_value}', anime-list matched: {obj}")
+
+if len(obj) > 0:
+return obj[0]["anilist_id"]
+else:
+logger.debug(f"Could not find corresponding AniList ID with '{mapped_tag}': {candidate_id_value}")
+return None
+
+
+def refine_from_anilist(path, video):
+# Safety checks
+if isinstance(video, Episode):
+if not video.series_anidb_id:
+return
+
+if refined_providers.intersection(settings.general.enabled_providers) and video.anilist_id is None:
+refine_anilist_ids(video)
+
+
+def refine_anilist_ids(video):
+anilist_client = AniListClient()
+
+if isinstance(video, Episode):
+candidate_id_name = "series_anidb_id"
+else:
+candidate_id_name = "imdb_id"
+
+candidate_id_value = getattr(video, candidate_id_name, None)
+if not candidate_id_value:
+logger.error(f"Found no value for property {candidate_id_name} of video.")
+return video
+
+anilist_id = anilist_client.get_series_id(candidate_id_name, candidate_id_value)
+if not anilist_id:
+return video
+
+video.anilist_id = anilist_id

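The new refiner resolves an AniList ID by scanning Fribb's anime-list-mini.json, whose entries carry several IDs per title (anidb_id, imdb_id, anilist_id, ...). The lookup is a straight linear scan behind a key alias map; here is the same logic against an inline sample of that structure (the sample entries are made up):

# Sketch: get_series_id()'s scan over the Fribb anime-lists structure,
# using an inline sample instead of the downloaded JSON.
anime_list = [  # shape of anime-list-mini.json entries (sample data)
    {'anidb_id': 1, 'anilist_id': 101, 'imdb_id': 'tt0112159'},
    {'anidb_id': 2, 'anilist_id': 202},
]

def get_series_id(candidate_id_name, candidate_id_value):
    tag_map = {'series_anidb_id': 'anidb_id', 'imdb_id': 'imdb_id'}
    mapped_tag = tag_map.get(candidate_id_name, candidate_id_name)
    matches = [o for o in anime_list
               if mapped_tag in o and str(o[mapped_tag]) == str(candidate_id_value)]
    return matches[0]['anilist_id'] if matches else None

print(get_series_id('series_anidb_id', 2))    # -> 202
print(get_series_id('imdb_id', 'tt0112159'))  # -> 101
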
@@ -36,40 +36,47 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
 language_log += ':forced'
 language_string += ' forced'

+if media_type == 'series':
+pr = path_mappings.path_replace
+prr = path_mappings.path_replace_reverse
+else:
+pr = path_mappings.path_replace_movie
+prr = path_mappings.path_replace_reverse_movie
+
 result = ProcessSubtitlesResult(message=f"{language_string} subtitles deleted from disk.",
-reversed_path=path_mappings.path_replace_reverse(media_path),
+reversed_path=prr(media_path),
 downloaded_language_code2=language_log,
 downloaded_provider=None,
 score=None,
 forced=None,
 subtitle_id=None,
-reversed_subtitles_path=path_mappings.path_replace_reverse(subtitles_path),
+reversed_subtitles_path=prr(subtitles_path),
 hearing_impaired=None)

 if media_type == 'series':
 try:
-os.remove(path_mappings.path_replace(subtitles_path))
+os.remove(pr(subtitles_path))
 except OSError:
 logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
-store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
+store_subtitles(prr(media_path), media_path)
 return False
 else:
 history_log(0, sonarr_series_id, sonarr_episode_id, result)
-store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
+store_subtitles(prr(media_path), media_path)
 notify_sonarr(sonarr_series_id)
 event_stream(type='series', action='update', payload=sonarr_series_id)
 event_stream(type='episode-wanted', action='update', payload=sonarr_episode_id)
 return True
 else:
 try:
-os.remove(path_mappings.path_replace_movie(subtitles_path))
+os.remove(pr(subtitles_path))
 except OSError:
 logging.exception(f'BAZARR cannot delete subtitles file: {subtitles_path}')
-store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
+store_subtitles_movie(prr(media_path), media_path)
 return False
 else:
 history_log_movie(0, radarr_id, result)
-store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
+store_subtitles_movie(prr(media_path), media_path)
 notify_radarr(radarr_id)
 event_stream(type='movie-wanted', action='update', payload=radarr_id)
 return True

@@ -97,8 +97,7 @@ class SubSyncer:
 result = run(self.args)
 except Exception:
 logging.exception(
-f'BAZARR an exception occurs during the synchronization process for this subtitles: {self.srtin}')
-raise OSError
+f'BAZARR an exception occurs during the synchronization process for this subtitle file: {self.srtin}')
 else:
 if settings.subsync.debug:
 return result

@@ -113,14 +112,19 @@ class SubSyncer:
 f"{offset_seconds} seconds and a framerate scale factor of "
 f"{f'{framerate_scale_factor:.2f}'}.")

+if sonarr_series_id:
+prr = path_mappings.path_replace_reverse
+else:
+prr = path_mappings.path_replace_reverse_movie
+
 result = ProcessSubtitlesResult(message=message,
-reversed_path=path_mappings.path_replace_reverse(self.reference),
+reversed_path=prr(self.reference),
 downloaded_language_code2=srt_lang,
 downloaded_provider=None,
 score=None,
 forced=forced,
 subtitle_id=None,
-reversed_subtitles_path=srt_path,
+reversed_subtitles_path=prr(self.srtin),
 hearing_impaired=hi)

 if sonarr_episode_id:

@@ -6,12 +6,17 @@ import pysubs2
 from subliminal_patch.core import get_subtitle_path
 from subzero.language import Language
 from deep_translator import GoogleTranslator
+from deep_translator.exceptions import TooManyRequests, RequestError, TranslationNotFound
+from time import sleep
+from concurrent.futures import ThreadPoolExecutor

 from languages.custom_lang import CustomLanguage
 from languages.get_languages import alpha3_from_alpha2, language_from_alpha2, language_from_alpha3
 from radarr.history import history_log_movie
 from sonarr.history import history_log
 from subtitles.processing import ProcessSubtitlesResult
+from app.event_handler import show_progress, hide_progress
 from utilities.path_mappings import path_mappings


 def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, forced, hi, media_type, sonarr_series_id,

@@ -33,8 +38,6 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
 logging.debug(f'BAZARR is translating in {lang_obj} this subtitles {source_srt_file}')

-max_characters = 5000
-
 dest_srt_file = get_subtitle_path(video_path,
 language=lang_obj if isinstance(lang_obj, Language) else lang_obj.subzero_language(),
 extension='.srt',

@@ -44,40 +47,53 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
 subs = pysubs2.load(source_srt_file, encoding='utf-8')
 subs.remove_miscellaneous_events()
 lines_list = [x.plaintext for x in subs]
-joined_lines_str = '\n\n\n'.join(lines_list)
+lines_list_len = len(lines_list)

-logging.debug(f'BAZARR splitting subtitles into {max_characters} characters blocks')
-lines_block_list = []
-translated_lines_list = []
-while len(joined_lines_str):
-partial_lines_str = joined_lines_str[:max_characters]
-
-if len(joined_lines_str) > max_characters:
-new_partial_lines_str = partial_lines_str.rsplit('\n\n', 1)[0]
-else:
-new_partial_lines_str = partial_lines_str
-
-lines_block_list.append(new_partial_lines_str)
-joined_lines_str = joined_lines_str.replace(new_partial_lines_str, '')
-
-logging.debug(f'BAZARR is sending {len(lines_block_list)} blocks to Google Translate')
-for block_str in lines_block_list:
+def translate_line(id, line, attempt):
 try:
-translated_partial_srt_text = GoogleTranslator(source='auto',
-target=language_code_convert_dict.get(lang_obj.alpha2,
-lang_obj.alpha2)
-).translate(text=block_str)
-except Exception:
-logging.exception(f'BAZARR Unable to translate subtitles {source_srt_file}')
-return False
+translated_text = GoogleTranslator(
+source='auto',
+target=language_code_convert_dict.get(lang_obj.alpha2, lang_obj.alpha2)
+).translate(text=line)
+except TooManyRequests:
+if attempt <= 5:
+sleep(1)
+translate_line(id, line, attempt + 1)
+else:
+logging.debug(f'Too many requests while translating {line}')
+translated_lines.append({'id': id, 'line': line})
+except (RequestError, TranslationNotFound):
+logging.debug(f'Unable to translate line {line}')
+translated_lines.append({'id': id, 'line': line})
 else:
-translated_partial_srt_list = translated_partial_srt_text.split('\n\n')
-translated_lines_list += translated_partial_srt_list
+translated_lines.append({'id': id, 'line': translated_text})
+finally:
+show_progress(id=f'translate_progress_{dest_srt_file}',
+header=f'Translating subtitles lines to {language_from_alpha3(to_lang)}...',
+name='',
+value=len(translated_lines),
+count=lines_list_len)
+
+logging.debug(f'BAZARR is sending {lines_list_len} blocks to Google Translate')
+
+pool = ThreadPoolExecutor(max_workers=10)
+
+translated_lines = []
+
+for i, line in enumerate(lines_list):
+pool.submit(translate_line, i, line, 1)
+
+pool.shutdown(wait=True)
+
+for i, line in enumerate(translated_lines):
+lines_list[line['id']] = line['line']
+
+hide_progress(id=f'translate_progress_{dest_srt_file}')

 logging.debug(f'BAZARR saving translated subtitles to {dest_srt_file}')
 for i, line in enumerate(subs):
 try:
-line.plaintext = translated_lines_list[i]
+line.plaintext = lines_list[i]
 except IndexError:
 logging.error(f'BAZARR is unable to translate malformed subtitles: {source_srt_file}')
 return False

@@ -89,14 +105,19 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo

 message = f"{language_from_alpha2(from_lang)} subtitles translated to {language_from_alpha3(to_lang)}."

+if media_type == 'series':
+prr = path_mappings.path_replace_reverse
+else:
+prr = path_mappings.path_replace_reverse_movie
+
 result = ProcessSubtitlesResult(message=message,
-reversed_path=video_path,
+reversed_path=prr(video_path),
 downloaded_language_code2=to_lang,
 downloaded_provider=None,
 score=None,
 forced=forced,
 subtitle_id=None,
-reversed_subtitles_path=dest_srt_file,
+reversed_subtitles_path=prr(dest_srt_file),
 hearing_impaired=hi)

 if media_type == 'series':

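The rewritten translator submits one job per subtitle line to a ThreadPoolExecutor, and each worker records its result under its line index, so completion order never matters: after shutdown(wait=True) the results are written back by id, and the rate-limit retry simply recurses with attempt + 1. Those two mechanics without the Google Translate dependency (fake_translate is a stand-in):

# Sketch: index-keyed results from a thread pool, as in the rewritten
# translate_subtitles_file() above.
from concurrent.futures import ThreadPoolExecutor

lines_list = ['first line', 'second line', 'third line']
translated_lines = []  # list of {'id': ..., 'line': ...}, filled in any order

def fake_translate(line):
    return line.upper()  # stand-in for GoogleTranslator(...).translate(text=line)

def translate_line(id, line, attempt):
    try:
        translated = fake_translate(line)
    except Exception:
        if attempt <= 5:
            translate_line(id, line, attempt + 1)  # retry, bumping the counter
            return
        translated = line  # give up: keep the original text
    translated_lines.append({'id': id, 'line': translated})

pool = ThreadPoolExecutor(max_workers=10)
for i, line in enumerate(lines_list):
    pool.submit(translate_line, i, line, 1)
pool.shutdown(wait=True)  # block until every submitted job has finished

for entry in translated_lines:
    lines_list[entry['id']] = entry['line']  # id keys restore original order

print(lines_list)  # ['FIRST LINE', 'SECOND LINE', 'THIRD LINE']
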
@@ -1,7 +1,9 @@
 # coding=utf-8

+import json
+
 from app.config import settings
-from app.database import TableShowsRootfolder, TableMoviesRootfolder, database, select
+from app.database import TableShowsRootfolder, TableMoviesRootfolder, TableLanguagesProfiles, database, select
 from app.event_handler import event_stream
 from .path_mappings import path_mappings
 from sonarr.rootfolder import check_sonarr_rootfolder

@@ -47,4 +49,21 @@ def get_health_issues():
 health_issues.append({'object': path_mappings.path_replace_movie(item.path),
 'issue': item.error})

+# get languages profiles duplicate ids issues when there's a cutoff set
+languages_profiles = database.execute(
+select(TableLanguagesProfiles.items, TableLanguagesProfiles.name, TableLanguagesProfiles.cutoff)).all()
+for languages_profile in languages_profiles:
+if not languages_profile.cutoff:
+# ignore profiles that don't have a cutoff set
+continue
+languages_profile_ids = []
+for items in json.loads(languages_profile.items):
+if items['id'] in languages_profile_ids:
+health_issues.append({'object': languages_profile.name,
+'issue': 'This languages profile has duplicate IDs. You need to edit this profile'
+' and make sure to select the proper cutoff if required.'})
+break
+else:
+languages_profile_ids.append(items['id'])
+
 return health_issues

@@ -130,7 +130,8 @@ class Episode(Video):
 """
 def __init__(self, name, series, season, episode, title=None, year=None, original_series=True, tvdb_id=None,
 series_tvdb_id=None, series_imdb_id=None, alternative_series=None, series_anidb_id=None,
-series_anidb_episode_id=None, **kwargs):
+series_anidb_episode_id=None, series_anidb_season_episode_offset=None,
+anilist_id=None, **kwargs):
 super(Episode, self).__init__(name, **kwargs)

 #: Series of the episode

@@ -163,8 +164,11 @@ class Episode(Video):
 #: Alternative names of the series
 self.alternative_series = alternative_series or []

+#: Anime specific information
 self.series_anidb_episode_id = series_anidb_episode_id
 self.series_anidb_id = series_anidb_id
+self.series_anidb_season_episode_offset = series_anidb_season_episode_offset
+self.anilist_id = anilist_id

 @classmethod
 def fromguess(cls, name, guess):

@@ -207,10 +211,11 @@ class Movie(Video):
 :param str title: title of the movie.
 :param int year: year of the movie.
 :param list alternative_titles: alternative titles of the movie
+:param int anilist_id: AniList ID of movie (if Anime)
 :param \*\*kwargs: additional parameters for the :class:`Video` constructor.

 """
-def __init__(self, name, title, year=None, alternative_titles=None, **kwargs):
+def __init__(self, name, title, year=None, alternative_titles=None, anilist_id=None, **kwargs):
 super(Movie, self).__init__(name, **kwargs)

 #: Title of the movie

@@ -221,6 +226,9 @@ class Movie(Video):

 #: Alternative titles of the movie
 self.alternative_titles = alternative_titles or []
+
+#: AniList ID of the movie
+self.anilist_id = anilist_id

 @classmethod
 def fromguess(cls, name, guess):

@ -49,7 +49,17 @@ SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl', '

_POOL_LIFETIME = datetime.timedelta(hours=12)

HI_REGEX = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\(\{].{3,}[\]\)\}](?<!{\\an\d})')
HI_REGEX_WITHOUT_PARENTHESIS = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\{].{3,}[\]\}](?<!{\\an\d})')
HI_REGEX_WITH_PARENTHESIS = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\(\{].{3,}[\]\)\}](?<!{\\an\d})')

HI_REGEX_PARENTHESIS_EXCLUDED_LANGUAGES = ['ara']


def parse_for_hi_regex(subtitle_text, alpha3_language):
    if alpha3_language in HI_REGEX_PARENTHESIS_EXCLUDED_LANGUAGES:
        return bool(re.search(HI_REGEX_WITHOUT_PARENTHESIS, subtitle_text))
    else:
        return bool(re.search(HI_REGEX_WITH_PARENTHESIS, subtitle_text))


def remove_crap_from_fn(fn):

@ -1203,7 +1213,10 @@ def save_subtitles(file_path, subtitles, single=False, directory=None, chmod=Non
            continue

        # create subtitle path
        if subtitle.text and bool(re.search(HI_REGEX, subtitle.text)):
        if subtitle.text and parse_for_hi_regex(subtitle_text=subtitle.text,
                                                alpha3_language=subtitle.language.alpha3 if
                                                (hasattr(subtitle, 'language') and hasattr(subtitle.language, 'alpha3'))
                                                else None):
            subtitle.language.hi = True
        subtitle_path = get_subtitle_path(file_path, None if single else subtitle.language,
                                          forced_tag=subtitle.language.forced,

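Note: a minimal sketch (not part of the commit) of how the two HI patterns above differ; the subtitle lines are made up.

    import re

    HI_REGEX_WITHOUT_PARENTHESIS = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\{].{3,}[\]\}](?<!{\\an\d})')
    HI_REGEX_WITH_PARENTHESIS = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\(\{].{3,}[\]\)\}](?<!{\\an\d})')

    assert HI_REGEX_WITH_PARENTHESIS.search("(DOOR SLAMS)")         # parentheses count as HI cues
    assert not HI_REGEX_WITHOUT_PARENTHESIS.search("(DOOR SLAMS)")  # excluded languages (e.g. Arabic) ignore them
    assert HI_REGEX_WITHOUT_PARENTHESIS.search("[DOOR SLAMS]")      # bracketed cues always count
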
@ -141,7 +141,8 @@ class AnimeToshoProvider(Provider, ProviderSubtitleArchiveMixin):
            for subtitle_file in subtitle_files:
                hex_id = format(subtitle_file['id'], '08x')

                lang = Language.fromalpha3b(subtitle_file['info']['lang'])
                # AnimeTosho assumes English as a fallback when the language is not specified.
                lang = Language.fromalpha3b(subtitle_file['info'].get('lang', 'eng'))

                # For Portuguese and Portuguese Brazilian they both share the same code, the name is the only
                # identifier AnimeTosho provides. Also, some subtitles do not have a name; in this case it could

@ -5,7 +5,7 @@ from random import randint

import pycountry
from requests.cookies import RequestsCookieJar
from subliminal.exceptions import AuthenticationError
from subliminal.exceptions import AuthenticationError, ProviderError
from subliminal.providers import ParserBeautifulSoup
from subliminal_patch.http import RetryingCFSession
from subliminal_patch.pitcher import store_verification

@ -318,7 +318,7 @@ class AvistazNetworkProviderBase(Provider):
            release_name = release['Title'].get_text().strip()
            lang = lookup_lang(subtitle_cols['Language'].get_text().strip())
            download_link = subtitle_cols['Download'].a['href']
            uploader_name = subtitle_cols['Uploader'].get_text().strip()
            uploader_name = subtitle_cols['Uploader'].get_text().strip() if 'Uploader' in subtitle_cols else None

            if lang not in languages:
                continue

@ -354,7 +354,10 @@ class AvistazNetworkProviderBase(Provider):

    def _parse_release_table(self, html):
        release_data_table = (ParserBeautifulSoup(html, ['html.parser'])
                              .select_one('#content-area > div:nth-child(4) > div.table-responsive > table > tbody'))
                              .select_one('#content-area > div.block > div.table-responsive > table > tbody'))

        if release_data_table is None:
            raise ProviderError('Unexpected HTML page layout - no release data table found')

        rows = {}
        for tr in release_data_table.find_all('tr', recursive=False):

@ -112,7 +112,11 @@ class EmbeddedSubtitlesProvider(Provider):
        # Default is True
        container.FFMPEG_STATS = False

        tags.LANGUAGE_FALLBACK = self._fallback_lang if self._unknown_as_fallback and self._fallback_lang else None
        tags.LANGUAGE_FALLBACK = (
            self._fallback_lang
            if self._unknown_as_fallback and self._fallback_lang
            else None
        )
        logger.debug("Language fallback set: %s", tags.LANGUAGE_FALLBACK)

    def initialize(self):

@ -194,7 +198,7 @@ class EmbeddedSubtitlesProvider(Provider):
    def download_subtitle(self, subtitle: EmbeddedSubtitle):
        try:
            path = self._get_subtitle_path(subtitle)
        except KeyError: # TODO: add MustGetBlacklisted support
        except KeyError:  # TODO: add MustGetBlacklisted support
            logger.error("Couldn't get subtitle path")
            return None

@ -229,6 +233,7 @@ class EmbeddedSubtitlesProvider(Provider):
            timeout=self._timeout,
            fallback_to_convert=True,
            basename_callback=_basename_callback,
            progress_callback=lambda d: logger.debug("Progress: %s", d),
        )
        # Add the extracted paths to the container path key
        self._cached_paths[container.path] = extracted

@ -96,7 +96,12 @@ class HDBitsProvider(Provider):
            "https://hdbits.org/api/torrents", json={**self._def_params, **lookup}
        )
        response.raise_for_status()
        ids = [item["id"] for item in response.json()["data"]]

        try:
            ids = [item["id"] for item in response.json()["data"]]
        except KeyError:
            logger.debug("No data found")
            return []

        subtitles = []
        for torrent_id in ids:

custom_libs/subliminal_patch/providers/jimaku.py (new file, 419 lines)

@ -0,0 +1,419 @@
from __future__ import absolute_import

from datetime import timedelta
import logging
import os
import re
import time

from requests import Session
from subliminal import region, __short_version__
from subliminal.cache import REFINER_EXPIRATION_TIME
from subliminal.exceptions import ConfigurationError, AuthenticationError, ServiceUnavailable
from subliminal.utils import sanitize
from subliminal.video import Episode, Movie
from subliminal_patch.providers import Provider
from subliminal_patch.subtitle import Subtitle
from subliminal_patch.exceptions import APIThrottled
from subliminal_patch.providers.utils import get_subtitle_from_archive, get_archive_from_bytes
from urllib.parse import urlencode, urljoin
from guessit import guessit
from subzero.language import Language, FULL_LANGUAGE_LIST

logger = logging.getLogger(__name__)

# Unhandled formats, such files will always get filtered out
unhandled_archive_formats = (".7z",)
accepted_archive_formats = (".zip", ".rar")

class JimakuSubtitle(Subtitle):
    '''Jimaku Subtitle.'''
    provider_name = 'jimaku'

    hash_verifiable = False

    def __init__(self, language, video, download_url, filename):
        super(JimakuSubtitle, self).__init__(language, page_link=download_url)

        self.video = video
        self.download_url = download_url
        self.filename = filename
        self.release_info = filename
        self.is_archive = filename.endswith(accepted_archive_formats)

    @property
    def id(self):
        return self.download_url

    def get_matches(self, video):
        matches = set()

        # Episode/Movie specific matches
        if isinstance(video, Episode):
            if sanitize(video.series) and sanitize(self.video.series) in (
                    sanitize(name) for name in [video.series] + video.alternative_series):
                matches.add('series')

            if video.season and self.video.season is None or video.season and video.season == self.video.season:
                matches.add('season')
        elif isinstance(video, Movie):
            if sanitize(video.title) and sanitize(self.video.title) in (
                    sanitize(name) for name in [video.title] + video.alternative_titles):
                matches.add('title')

        # General matches
        if video.year and video.year == self.video.year:
            matches.add('year')

        video_type = 'movie' if isinstance(video, Movie) else 'episode'
        matches.add(video_type)

        guess = guessit(self.filename, {'type': video_type})
        for g in guess:
            if g[0] in ("release_group", "source"):
                if video.release_group == g[1]:
                    matches.add('release_group')
                    break

        # Prioritize .srt by repurposing the audio_codec match
        if self.filename.endswith(".srt"):
            matches.add('audio_codec')

        return matches

class JimakuProvider(Provider):
    '''Jimaku Provider.'''
    video_types = (Episode, Movie)

    api_url = 'https://jimaku.cc/api'
    api_ratelimit_max_delay_seconds = 5
    api_ratelimit_backoff_limit = 3

    corrupted_file_size_threshold = 500

    languages = {Language.fromietf("ja")}

    def __init__(self, enable_name_search_fallback, enable_archives_download, enable_ai_subs, api_key):
        if api_key:
            self.api_key = api_key
        else:
            raise ConfigurationError('Missing api_key.')

        self.enable_name_search_fallback = enable_name_search_fallback
        self.download_archives = enable_archives_download
        self.enable_ai_subs = enable_ai_subs
        self.session = None

    def initialize(self):
        self.session = Session()
        self.session.headers['Content-Type'] = 'application/json'
        self.session.headers['Authorization'] = self.api_key
        self.session.headers['User-Agent'] = os.environ.get("SZ_USER_AGENT")

    def terminate(self):
        self.session.close()

    def _query(self, video):
        if isinstance(video, Movie):
            media_name = video.title.lower()
        elif isinstance(video, Episode):
            media_name = video.series.lower()

            # With entries that have a season larger than 1, Jimaku appends the corresponding season number to the name.
            # We'll reassemble media_name here to account for cases where we can only search by name alone.
            season_addendum = str(video.season) if video.season > 1 else None
            media_name = f"{media_name} {season_addendum}" if season_addendum else media_name

        # Search for entry
        searching_for_entry_attempts = 0
        additional_url_params = {}
        while searching_for_entry_attempts < 2:
            searching_for_entry_attempts += 1
            url = self._assemble_jimaku_search_url(video, media_name, additional_url_params)
            if not url:
                return None

            searching_for_entry = "query" in url
            data = self._search_for_entry(url)

            if not data:
                if searching_for_entry and searching_for_entry_attempts < 2:
                    logger.info("Maybe this is live action media? Will retry search without anime parameter...")
                    additional_url_params = {'anime': "false"}
                else:
                    return None
            else:
                break

        # We only go for the first entry
        entry = data[0]

        entry_id = entry.get('id')
        anilist_id = entry.get('anilist_id', None)
        entry_name = entry.get('name')
        is_movie = entry.get('flags', {}).get('movie', False)

        if isinstance(video, Episode) and is_movie:
            logger.warning("Bazarr thinks this is a series, but Jimaku says this is a movie! May not be able to match subtitles...")

        logger.info(f"Matched entry: ID: '{entry_id}', anilist_id: '{anilist_id}', name: '{entry_name}', english_name: '{entry.get('english_name')}', movie: {is_movie}")
        if entry.get("flags").get("unverified"):
            logger.warning(f"This entry '{entry_id}' is unverified, subtitles might be incomplete or have quality issues!")

        # Get a list of subtitles for entry
        episode_number = video.episode if "episode" in dir(video) else None
        url_params = {'episode': episode_number} if isinstance(video, Episode) and not is_movie else {}
        only_look_for_archives = False

        has_offset = isinstance(video, Episode) and video.series_anidb_season_episode_offset is not None

        retry_count = 0
        adjusted_ep_num = None
        while retry_count <= 1:
            # Account for positive episode offset first
            if isinstance(video, Episode) and not is_movie and retry_count < 1:
                if video.season > 1 and has_offset:
                    offset_value = video.series_anidb_season_episode_offset
                    offset_value = offset_value if offset_value > 0 else -offset_value

                    if episode_number < offset_value:
                        adjusted_ep_num = episode_number + offset_value
                        logger.warning(f"Will try using adjusted episode number {adjusted_ep_num} first")
                        url_params = {'episode': adjusted_ep_num}

            url = f"entries/{entry_id}/files"
            data = self._search_for_subtitles(url, url_params)

            if not data:
                if isinstance(video, Episode) and not is_movie and has_offset and retry_count < 1:
                    logger.warning(f"Found no subtitles for adjusted episode number, but will retry with normal episode number {episode_number}")
                    url_params = {'episode': episode_number}
                elif isinstance(video, Episode) and not is_movie and retry_count < 1:
                    logger.warning(f"Found no subtitles for episode number {episode_number}, but will retry without 'episode' parameter")
                    url_params = {}
                    only_look_for_archives = True
                else:
                    return None

                retry_count += 1
            else:
                if adjusted_ep_num:
                    video.episode = adjusted_ep_num
                    logger.debug(f"This video's episode attribute has been updated to: {video.episode}")
                break

        # Filter subtitles
        list_of_subtitles = []

        data = [item for item in data if not item['name'].endswith(unhandled_archive_formats)]

        # Detect only archives being uploaded
        archive_entries = [item for item in data if item['name'].endswith(accepted_archive_formats)]
        subtitle_entries = [item for item in data if not item['name'].endswith(accepted_archive_formats)]
        has_only_archives = len(archive_entries) > 0 and len(subtitle_entries) == 0
        if has_only_archives:
            logger.warning("Have only found archived subtitles")

        elif only_look_for_archives:
            data = [item for item in data if item['name'].endswith(accepted_archive_formats)]

        for item in data:
            filename = item.get('name')
            download_url = item.get('url')
            is_archive = filename.endswith(accepted_archive_formats)

            # Archives will still be considered if they're the only files available, as is mostly the case for movies.
            if is_archive and not has_only_archives and not self.download_archives:
                logger.warning(f"Skipping archive '{filename}' because normal subtitles are available instead")
                continue

            if not self.enable_ai_subs:
                p = re.compile(r'[\[\(]?(whisperai)[\]\)]?|[\[\(]whisper[\]\)]', re.IGNORECASE)
                if p.search(filename):
                    logger.warning(f"Skipping subtitle '{filename}' as it's suspected of being AI generated")
                    continue

            sub_languages = self._try_determine_subtitle_languages(filename)
            if len(sub_languages) > 1:
                logger.warning(f"Skipping subtitle '{filename}' as it's suspected of containing multiple languages")
                continue

            # Check if file is obviously corrupt. If no size is returned, assume OK
            filesize = item.get('size', self.corrupted_file_size_threshold)
            if filesize < self.corrupted_file_size_threshold:
                logger.warning(f"Skipping possibly corrupt file '{filename}': Filesize is just {filesize} bytes")
                continue

            if not filename.endswith(unhandled_archive_formats):
                lang = sub_languages[0] if len(sub_languages) == 1 else Language("jpn")
                list_of_subtitles.append(JimakuSubtitle(lang, video, download_url, filename))
            else:
                logger.debug(f"Skipping archive '{filename}' as it's not a supported format")

        return list_of_subtitles

    def list_subtitles(self, video, languages=None):
        subtitles = self._query(video)
        if not subtitles:
            return []

        return [s for s in subtitles]

    def download_subtitle(self, subtitle: JimakuSubtitle):
        target_url = subtitle.download_url
        response = self.session.get(target_url, timeout=10)
        response.raise_for_status()

        if subtitle.is_archive:
            archive = get_archive_from_bytes(response.content)
            if archive:
                if isinstance(subtitle.video, Episode):
                    subtitle.content = get_subtitle_from_archive(
                        archive,
                        episode=subtitle.video.episode,
                        episode_title=subtitle.video.title
                    )
                else:
                    subtitle.content = get_subtitle_from_archive(
                        archive
                    )
            else:
                logger.warning("Archive seems to not be an archive! File possibly corrupt?")
                return None
        else:
            subtitle.content = response.content

    def _do_jimaku_request(self, url_path, url_params={}):
        url = urljoin(f"{self.api_url}/{url_path}", '?' + urlencode(url_params))

        retry_count = 0
        while retry_count < self.api_ratelimit_backoff_limit:
            response = self.session.get(url, timeout=10)

            if response.status_code == 429:
                reset_time = 5
                retry_count += 1

                logger.warning(f"Jimaku ratelimit hit, waiting for '{reset_time}' seconds ({retry_count}/{self.api_ratelimit_backoff_limit} tries)")
                time.sleep(reset_time)
                continue
            elif response.status_code == 401:
                raise AuthenticationError("Unauthorized. API key possibly invalid")
            else:
                response.raise_for_status()

            data = response.json()
            logger.debug(f"Length of response on {url}: {len(data)}")
            if len(data) == 0:
                logger.error(f"Jimaku returned no items for our query: {url}")
                return None
            elif 'error' in data:
                raise ServiceUnavailable(f"Jimaku returned an error: '{data.get('error')}', Code: '{data.get('code')}'")
            else:
                return data

        raise APIThrottled(f"Jimaku ratelimit max backoff limit of {self.api_ratelimit_backoff_limit} reached, aborting")

    # Wrapper functions to indirectly call _do_jimaku_request with different cache configs
    @region.cache_on_arguments(expiration_time=REFINER_EXPIRATION_TIME)
    def _search_for_entry(self, url_path, url_params={}):
        return self._do_jimaku_request(url_path, url_params)

    @region.cache_on_arguments(expiration_time=timedelta(minutes=1).total_seconds())
    def _search_for_subtitles(self, url_path, url_params={}):
        return self._do_jimaku_request(url_path, url_params)

    @staticmethod
    def _try_determine_subtitle_languages(filename):
        # This is more like a guess and not a 100% fool-proof way of detecting multi-lang subs:
        # It assumes that language codes, if present, are in the last metadata group of the subs filename.
        # If such codes are not present, or we failed to match any at all, then we'll just assume that the sub is purely Japanese.
        default_language = Language("jpn")

        dot_delimit = filename.split(".")
        bracket_delimit = re.split(r'[\[\]\(\)]+', filename)

        candidate_list = list()
        if len(dot_delimit) > 2:
            candidate_list = dot_delimit[-2]
        elif len(bracket_delimit) > 2:
            candidate_list = bracket_delimit[-2]

        candidates = [] if len(candidate_list) == 0 else re.split(r'[,\-\+\& ]+', candidate_list)

        # Discard match group if any candidate...
        # ...contains any numbers, as the group is likely encoding information
        if any(re.compile(r'\d').search(string) for string in candidates):
            return [default_language]
        # ...is >= 5 chars long, as the group is likely other unrelated metadata
        if any(len(string) >= 5 for string in candidates):
            return [default_language]

        languages = list()
        for candidate in candidates:
            candidate = candidate.lower()
            if candidate in ["ass", "srt"]:
                continue

            # Sometimes, languages are hidden in 4 character blocks, i.e. "JPSC"
            if len(candidate) == 4:
                for addendum in [candidate[:2], candidate[2:]]:
                    candidates.append(addendum)
                continue

            # Sometimes, language codes can have additional info such as 'cc' or 'sdh'. For example: "ja[cc]"
            if len(dot_delimit) > 2 and any(c in candidate for c in '[]()'):
                candidate = re.split(r'[\[\]\(\)]+', candidate)[0]

            try:
                language_squash = {
                    "jp": "ja",
                    "jap": "ja",
                    "chs": "zho",
                    "cht": "zho",
                    "zhi": "zho",
                    "cn": "zho"
                }

                candidate = language_squash[candidate] if candidate in language_squash else candidate
                if len(candidate) > 2:
                    language = Language(candidate)
                else:
                    language = Language.fromietf(candidate)

                if not any(l.alpha3 == language.alpha3 for l in languages):
                    languages.append(language)
            except Exception:
                if candidate in FULL_LANGUAGE_LIST:
                    # Create a dummy for the unknown language
                    languages.append(Language("zul"))

        if len(languages) > 1:
            # Sometimes a metadata group that actually contains info about codecs gets processed as valid languages.
            # To prevent false positives, we'll check if Japanese language codes are in the processed languages list.
            # If not, then it's likely that we didn't actually match language codes -> Assume Japanese only subtitle.
            contains_jpn = any([l for l in languages if l.alpha3 == "jpn"])

            return languages if contains_jpn else [Language("jpn")]
        else:
            return [default_language]

    def _assemble_jimaku_search_url(self, video, media_name, additional_params={}):
        endpoint = "entries/search"
        anilist_id = video.anilist_id

        params = {}
        if anilist_id:
            params = {'anilist_id': anilist_id}
        else:
            if self.enable_name_search_fallback or isinstance(video, Movie):
                params = {'query': media_name}
            else:
                logger.error(f"Skipping '{media_name}': Got no AniList ID and fuzzy matching using name is disabled")
                return None

        if additional_params:
            params.update(additional_params)

        logger.info(f"Will search for entry based on params: {params}")
        return urljoin(endpoint, '?' + urlencode(params))

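Note: a simplified sketch (not part of the commit) of the filename heuristic used by _try_determine_subtitle_languages above; language codes are expected in the last metadata group, and the sample filenames are made up.

    import re

    def last_metadata_group(filename):
        dot_delimit = filename.split(".")
        bracket_delimit = re.split(r'[\[\]\(\)]+', filename)
        if len(dot_delimit) > 2:
            return dot_delimit[-2]
        if len(bracket_delimit) > 2:
            return bracket_delimit[-2]
        return ""

    print(last_metadata_group("Show - 01.ja.srt"))               # -> "ja"
    print(last_metadata_group("[Group] Show - 01 [ja,en].srt"))  # -> "ja,en" (multi-language, gets skipped)
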
custom_libs/subliminal_patch/providers/legendasnet.py (new file, 264 lines)

@ -0,0 +1,264 @@
# -*- coding: utf-8 -*-
import logging
import os
import time
import io
import json

from zipfile import ZipFile, is_zipfile
from urllib.parse import urljoin
from requests import Session

from subzero.language import Language
from subliminal import Episode, Movie
from subliminal.exceptions import ConfigurationError, ProviderError, DownloadLimitExceeded
from subliminal_patch.exceptions import APIThrottled
from .mixins import ProviderRetryMixin
from subliminal_patch.subtitle import Subtitle
from subliminal.subtitle import fix_line_ending
from subliminal_patch.providers import Provider
from subliminal_patch.providers import utils

logger = logging.getLogger(__name__)

retry_amount = 3
retry_timeout = 5


class LegendasNetSubtitle(Subtitle):
    provider_name = 'legendasnet'
    hash_verifiable = False

    def __init__(self, language, forced, page_link, download_link, file_id, release_names, uploader,
                 season=None, episode=None):
        super().__init__(language)
        language = Language.rebuild(language, forced=forced)

        self.season = season
        self.episode = episode
        self.releases = release_names
        self.release_info = ', '.join(release_names)
        self.language = language
        self.forced = forced
        self.file_id = file_id
        self.page_link = page_link
        self.download_link = download_link
        self.uploader = uploader
        self.matches = None

    @property
    def id(self):
        return self.file_id

    def get_matches(self, video):
        matches = set()

        # handle movies and series separately
        if isinstance(video, Episode):
            # series
            matches.add('series')
            # season
            if video.season == self.season:
                matches.add('season')
            # episode
            if video.episode == self.episode:
                matches.add('episode')
            # imdb
            matches.add('series_imdb_id')
        else:
            # title
            matches.add('title')
            # imdb
            matches.add('imdb_id')

        utils.update_matches(matches, video, self.release_info)

        self.matches = matches

        return matches


class LegendasNetProvider(ProviderRetryMixin, Provider):
    """Legendas.Net Provider"""
    server_hostname = 'legendas.net/api'

    languages = {Language('por', 'BR')}
    video_types = (Episode, Movie)

    def __init__(self, username, password):
        self.session = Session()
        self.session.headers = {'User-Agent': os.environ.get("SZ_USER_AGENT", "Sub-Zero/2")}
        self.username = username
        self.password = password
        self.access_token = None
        self.video = None
        self._started = None
        self.login()

    def login(self):
        headersList = {
            "Accept": "*/*",
            "User-Agent": self.session.headers['User-Agent'],
            "Content-Type": "application/json"
        }

        payload = json.dumps({
            "email": self.username,
            "password": self.password
        })

        response = self.session.request("POST", self.server_url() + 'login', data=payload, headers=headersList)
        if response.status_code != 200:
            raise ConfigurationError('Failed to login and retrieve access token')
        self.access_token = response.json().get('access_token')
        if not self.access_token:
            raise ConfigurationError('Access token not found in login response')
        self.session.headers.update({'Authorization': f'Bearer {self.access_token}'})

    def initialize(self):
        self._started = time.time()

    def terminate(self):
        self.session.close()

    def server_url(self):
        return f'https://{self.server_hostname}/v1/'

    def query(self, languages, video):
        self.video = video

        # query the server
        if isinstance(self.video, Episode):
            res = self.retry(
                lambda: self.session.get(self.server_url() + 'search/tv',
                                         json={
                                             'name': video.series,
                                             'page': 1,
                                             'per_page': 25,
                                             'tv_episode': video.episode,
                                             'tv_season': video.season,
                                             'imdb_id': video.series_imdb_id
                                         },
                                         headers={'Content-Type': 'application/json'},
                                         timeout=30),
                amount=retry_amount,
                retry_timeout=retry_timeout
            )
        else:
            res = self.retry(
                lambda: self.session.get(self.server_url() + 'search/movie',
                                         json={
                                             'name': video.title,
                                             'page': 1,
                                             'per_page': 25,
                                             'imdb_id': video.imdb_id
                                         },
                                         headers={'Content-Type': 'application/json'},
                                         timeout=30),
                amount=retry_amount,
                retry_timeout=retry_timeout
            )

        if res.status_code == 404:
            logger.error(f"Endpoint not found: {res.url}")
            raise ProviderError("Endpoint not found")
        elif res.status_code == 429:
            raise APIThrottled("Too many requests")
        elif res.status_code == 403:
            raise ConfigurationError("Invalid access token")
        elif res.status_code != 200:
            res.raise_for_status()

        subtitles = []

        result = res.json()

        if ('success' in result and not result['success']) or ('status' in result and not result['status']):
            logger.debug(result["error"])
            return []

        if isinstance(self.video, Episode):
            if len(result['tv_shows']):
                for item in result['tv_shows']:
                    subtitle = LegendasNetSubtitle(
                        language=Language('por', 'BR'),
                        forced=self._is_forced(item),
                        page_link=f"https://legendas.net/tv_legenda?movie_id={result['tv_shows'][0]['tmdb_id']}&"
                                  f"legenda_id={item['id']}",
                        download_link=item['path'],
                        file_id=item['id'],
                        release_names=[item.get('release_name', '')],
                        uploader=item['uploader'],
                        season=item.get('season', ''),
                        episode=item.get('episode', '')
                    )
                    subtitle.get_matches(self.video)
                    if subtitle.language in languages:
                        subtitles.append(subtitle)
        else:
            if len(result['movies']):
                for item in result['movies']:
                    subtitle = LegendasNetSubtitle(
                        language=Language('por', 'BR'),
                        forced=self._is_forced(item),
                        page_link=f"https://legendas.net/legenda?movie_id={result['movies'][0]['tmdb_id']}&"
                                  f"legenda_id={item['id']}",
                        download_link=item['path'],
                        file_id=item['id'],
                        release_names=[item.get('release_name', '')],
                        uploader=item['uploader'],
                        season=None,
                        episode=None
                    )
                    subtitle.get_matches(self.video)
                    if subtitle.language in languages:
                        subtitles.append(subtitle)

        return subtitles

    @staticmethod
    def _is_forced(item):
        forced_tags = ['forced', 'foreign']
        for tag in forced_tags:
            if tag in item.get('comment', '').lower():
                return True

        # nothing matched, so we consider these normal subtitles
        return False

    def list_subtitles(self, video, languages):
        return self.query(languages, video)

    def download_subtitle(self, subtitle):
        logger.debug('Downloading subtitle %r', subtitle)
        download_link = urljoin("https://legendas.net", subtitle.download_link)

        r = self.retry(
            lambda: self.session.get(download_link, timeout=30),
            amount=retry_amount,
            retry_timeout=retry_timeout
        )

        if r.status_code == 429:
            raise DownloadLimitExceeded("Daily download limit exceeded")
        elif r.status_code == 403:
            raise ConfigurationError("Invalid access token")
        elif r.status_code != 200:
            r.raise_for_status()

        if not r:
            logger.error(f'Could not download subtitle from {download_link}')
            subtitle.content = None
            return
        else:
            archive_stream = io.BytesIO(r.content)
            if is_zipfile(archive_stream):
                archive = ZipFile(archive_stream)
                for name in archive.namelist():
                    subtitle_content = archive.read(name)
                    subtitle.content = fix_line_ending(subtitle_content)
                    return
            else:
                subtitle_content = r.content
                subtitle.content = fix_line_ending(subtitle_content)
                return

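Note: a minimal sketch (not part of the commit) of the token flow the provider uses: POST credentials to /v1/login, then send the returned access_token as a Bearer header. URL and field names mirror the code above; the credentials are fake.

    import json
    import requests

    session = requests.Session()
    resp = session.post("https://legendas.net/api/v1/login",
                        data=json.dumps({"email": "user@example.com", "password": "hunter2"}),
                        headers={"Content-Type": "application/json"})
    if resp.status_code == 200 and resp.json().get("access_token"):
        # every later request on this session is now authenticated
        session.headers.update({"Authorization": f"Bearer {resp.json()['access_token']}"})
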
@ -209,7 +209,8 @@ class PodnapisiProvider(_PodnapisiProvider, ProviderSubtitleArchiveMixin):
                    break

                # exit if no results
                if not xml.find('pagination/results') or not int(xml.find('pagination/results').text):
                if (not xml.find('pagination/results') or not xml.find('pagination/results').text or not
                        int(xml.find('pagination/results').text)):
                    logger.debug('No subtitles found')
                    break

@ -277,7 +277,11 @@ class SoustitreseuProvider(Provider, ProviderSubtitleArchiveMixin):
            release = name[:-4].lower().rstrip('tag').rstrip('en').rstrip('fr')
            _guess = guessit(release)
            if isinstance(video, Episode):
                if video.episode != _guess['episode'] or video.season != _guess['season']:
                try:
                    if video.episode != _guess['episode'] or video.season != _guess['season']:
                        continue
                except KeyError:
                    # episode or season are missing from guessit result
                    continue

                matches = set()

@ -172,7 +172,7 @@ class SubdivxSubtitlesProvider(Provider):

        logger.debug("Query: %s", query)

        response = self.session.post(search_link, data=payload)
        response = self.session.post(search_link, data=payload, timeout=30)

        if response.status_code == 500:
            logger.debug(

@ -17,8 +17,7 @@ from .mixins import ProviderRetryMixin
from subliminal_patch.subtitle import Subtitle
from subliminal.subtitle import fix_line_ending
from subliminal_patch.providers import Provider
from subliminal_patch.subtitle import guess_matches
from guessit import guessit
from subliminal_patch.providers import utils

logger = logging.getLogger(__name__)

@ -27,8 +26,6 @@ retry_timeout = 5

language_converters.register('subdl = subliminal_patch.converters.subdl:SubdlConverter')

supported_languages = list(language_converters['subdl'].to_subdl.keys())


class SubdlSubtitle(Subtitle):
    provider_name = 'subdl'

@ -59,7 +56,6 @@ class SubdlSubtitle(Subtitle):

    def get_matches(self, video):
        matches = set()
        type_ = "movie" if isinstance(video, Movie) else "episode"

        # handle movies and series separately
        if isinstance(video, Episode):

@ -79,8 +75,7 @@ class SubdlSubtitle(Subtitle):
            # imdb
            matches.add('imdb_id')

        # other properties
        matches |= guess_matches(video, guessit(self.release_info, {"type": type_}))
        utils.update_matches(matches, video, self.release_info)

        self.matches = matches

@ -91,7 +86,7 @@ class SubdlProvider(ProviderRetryMixin, Provider):
    """Subdl Provider"""
    server_hostname = 'api.subdl.com'

    languages = {Language(*lang) for lang in supported_languages}
    languages = {Language(*lang) for lang in list(language_converters['subdl'].to_subdl.keys())}
    languages.update(set(Language.rebuild(lang, forced=True) for lang in languages))
    languages.update(set(Language.rebuild(l, hi=True) for l in languages))

@ -130,7 +125,8 @@ class SubdlProvider(ProviderRetryMixin, Provider):
            imdb_id = self.video.imdb_id

        # be sure to remove duplicates using list(set())
        langs_list = sorted(list(set([lang.basename.upper() for lang in languages])))
        langs_list = sorted(list(set([language_converters['subdl'].convert(lang.alpha3, lang.country, lang.script) for
                                      lang in languages])))

        langs = ','.join(langs_list)
        logger.debug(f'Searching for those languages: {langs}')

@ -148,7 +144,9 @@ class SubdlProvider(ProviderRetryMixin, Provider):
                                             ('subs_per_page', 30),
                                             ('type', 'tv'),
                                             ('comment', 1),
                                             ('releases', 1)),
                                             ('releases', 1),
                                             ('bazarr', 1)),  # this argument filters out incompatible image-based or
                                                              # txt subtitles
                                         timeout=30),
                amount=retry_amount,
                retry_timeout=retry_timeout

@ -163,7 +161,9 @@ class SubdlProvider(ProviderRetryMixin, Provider):
                                             ('subs_per_page', 30),
                                             ('type', 'movie'),
                                             ('comment', 1),
                                             ('releases', 1)),
                                             ('releases', 1),
                                             ('bazarr', 1)),  # this argument filters out incompatible image-based or
                                                              # txt subtitles
                                         timeout=30),
                amount=retry_amount,
                retry_timeout=retry_timeout

@ -181,7 +181,8 @@ class SubdlProvider(ProviderRetryMixin, Provider):
        result = res.json()

        if ('success' in result and not result['success']) or ('status' in result and not result['status']):
            raise ProviderError(result['error'])
            logger.debug(result["error"])
            return []

        logger.debug(f"Query returned {len(result['subtitles'])} subtitles")

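Note: a sketch (not part of the commit) of the deduplicate-and-sort step above using a stub converter, since the real one lives in subliminal_patch.converters.subdl; the mapping shown is hypothetical.

    def convert(alpha3, country, script):
        # stub standing in for language_converters['subdl'].convert
        return {"eng": "EN", "por": "PT-BR" if country == "BR" else "PT"}[alpha3]

    class Lang:
        def __init__(self, alpha3, country=None, script=None):
            self.alpha3, self.country, self.script = alpha3, country, script

    languages = [Lang("eng"), Lang("eng"), Lang("por", "BR")]
    langs = ",".join(sorted(set(convert(l.alpha3, l.country, l.script) for l in languages)))
    print(langs)  # EN,PT-BR
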
@ -132,9 +132,9 @@ _DEFAULT_HEADERS = {
class Subf2mProvider(Provider):
    provider_name = "subf2m"

    _movie_title_regex = re.compile(r"^(.+?)( \((\d{4})\))?$")
    _movie_title_regex = re.compile(r"^(.+?)(\s+\((\d{4})\))?$")
    _tv_show_title_regex = re.compile(
        r"^(.+?) [-\(]\s?(.*?) (season|series)\)?( \((\d{4})\))?$"
        r"^(.+?)\s+[-\(]\s?(.*?)\s+(season|series)\)?(\s+\((\d{4})\))?$"
    )
    _tv_show_title_alt_regex = re.compile(r"(.+)\s(\d{1,2})(?:\s|$)")
    _supported_languages = {}

@ -220,7 +220,7 @@ class Subf2mProvider(Provider):

        results = []
        for result in self._gen_results(title):
            text = result.text.lower()
            text = result.text.strip().lower()
            match = self._movie_title_regex.match(text)
            if not match:
                continue

@ -254,7 +254,7 @@ class Subf2mProvider(Provider):

        results = []
        for result in self._gen_results(title):
            text = result.text.lower()
            text = result.text.strip().lower()

            match = self._tv_show_title_regex.match(text)
            if not match:

@ -455,7 +455,13 @@ class SuperSubtitlesProvider(Provider, ProviderSubtitleArchiveMixin):

        soup = ParserBeautifulSoup(r, ['lxml'])
        tables = soup.find_all("table")
        tables = tables[0].find_all("tr")

        try:
            tables = tables[0].find_all("tr")
        except IndexError:
            logger.debug("No tables found for %s", url)
            return []

        i = 0

        for table in tables:

@ -65,7 +65,7 @@ def _get_matching_sub(
        guess = guessit(sub_name, options=guess_options)

        matched_episode_num = guess.get("episode")
        if matched_episode_num:
        if not matched_episode_num:
            logger.debug("No episode number found in file: %s", sub_name)

        if episode_title is not None:

@ -86,11 +86,13 @@ def _get_matching_sub(
    return None


def _analize_sub_name(sub_name: str, title_):
    titles = re.split(r"[.-]", os.path.splitext(sub_name)[0])
def _analize_sub_name(sub_name: str, title_: str):
    titles = re.split(r"[\s_\.\+]?[.-][\s_\.\+]?", os.path.splitext(sub_name)[0])

    for title in titles:
        title = title.strip()
        ratio = SequenceMatcher(None, title, title_).ratio()
        ratio = SequenceMatcher(None, title.lower(), title_.lower()).ratio()

        if ratio > 0.85:
            logger.debug(
                "Episode title matched: '%s' -> '%s' [%s]", title, sub_name, ratio

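Note: a sketch (not part of the commit) of the case-insensitive ratio match that _analize_sub_name now performs; the 0.85 threshold comes from the code above, and the titles are made up.

    from difflib import SequenceMatcher

    def titles_match(title, episode_title, threshold=0.85):
        return SequenceMatcher(None, title.lower(), episode_title.lower()).ratio() > threshold

    print(titles_match("The Long Night", "the long night"))  # True: casing no longer breaks the match
    print(titles_match("The Long Night", "Winterfell"))      # False
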
@ -143,7 +143,7 @@ def encode_audio_stream(path, ffmpeg_path, audio_stream_language=None):
            logger.debug(f"Whisper will only use the {audio_stream_language} audio stream for {path}")
            inp = inp[f'a:m:language:{audio_stream_language}']

        out, _ = inp.output("-", format="s16le", acodec="pcm_s16le", ac=1, ar=16000) \
        out, _ = inp.output("-", format="s16le", acodec="pcm_s16le", ac=1, ar=16000, af="aresample=async=1") \
                    .run(cmd=[ffmpeg_path, "-nostdin"], capture_stdout=True, capture_stderr=True)

    except ffmpeg.Error as e:

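Note: a sketch (not part of the commit) of the same ffmpeg-python call pattern; "aresample=async=1" stretches or pads audio timestamps so Whisper receives a continuous 16 kHz mono PCM stream. The input path is hypothetical.

    import ffmpeg

    out, _ = (
        ffmpeg.input("episode.mkv")
        .output("-", format="s16le", acodec="pcm_s16le", ac=1, ar=16000, af="aresample=async=1")
        .run(cmd=["ffmpeg", "-nostdin"], capture_stdout=True, capture_stderr=True)
    )
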
@ -316,7 +316,7 @@ class ZimukuProvider(Provider):
        r = self.yunsuo_bypass(download_link, headers={'Referer': subtitle.page_link}, timeout=30)
        r.raise_for_status()
        try:
            filename = r.headers["Content-Disposition"]
            filename = r.headers["Content-Disposition"].lower()
        except KeyError:
            logger.debug("Unable to parse subtitles filename. Dropping these subtitles.")
            return

@ -12,6 +12,7 @@ import chardet
import pysrt
import pysubs2
from bs4 import UnicodeDammit
from copy import deepcopy
from pysubs2 import SSAStyle
from pysubs2.formats.subrip import parse_tags, MAX_REPRESENTABLE_TIME
from pysubs2.time import ms_to_times

@ -65,6 +66,11 @@ class Subtitle(Subtitle_):
    # format = "srt" # default format is srt

    def __init__(self, language, hearing_impaired=False, page_link=None, encoding=None, mods=None, original_format=False):
        # language needs to be cloned because it is actually a reference to the provider language object
        # if a new copy is not created then all subsequent subtitles for this provider will incorrectly be modified
        # at least until Bazarr is restarted or the provider language object is recreated somehow
        language = deepcopy(language)

        # set subtitle language to hi if it's hearing_impaired
        if hearing_impaired:
            language = Language.rebuild(language, hi=True)

@ -275,7 +281,7 @@ class Subtitle(Subtitle_):
        return encoding

    def is_valid(self):
        """Check if a :attr:`text` is a valid SubRip format. Note that orignal format will pypass the checking
        """Check if a :attr:`text` is a valid SubRip format. Note that original format will bypass the checking

        :return: whether or not the subtitle is valid.
        :rtype: bool

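Note: a sketch (not part of the commit) of the aliasing bug the deepcopy above prevents; without a copy, flagging one subtitle as HI would mutate the language object shared by every subtitle from the same provider.

    from copy import deepcopy

    class Lang:
        def __init__(self):
            self.hi = False

    provider_lang = Lang()
    sub_a = deepcopy(provider_lang)   # cloned: safe to mutate
    sub_b = provider_lang             # aliased: mutations leak

    sub_a.hi = True
    print(provider_lang.hi)  # False - the clone left the provider object intact
    sub_b.hi = True
    print(provider_lang.hi)  # True - the alias corrupted the shared object
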
@ -35,6 +35,8 @@ class Video(Video_):
            info_url=None,
            series_anidb_id=None,
            series_anidb_episode_id=None,
            series_anidb_season_episode_offset=None,
            anilist_id=None,
            **kwargs
    ):
        super(Video, self).__init__(

@ -61,3 +63,5 @@ class Video(Video_):
        self.info_url = info_url
        self.series_anidb_series_id = series_anidb_id,
        self.series_anidb_episode_id = series_anidb_episode_id,
        self.series_anidb_season_episode_offset = series_anidb_season_episode_offset,
        self.anilist_id = anilist_id,

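Note: the trailing commas in the four assignments above make each attribute a 1-tuple rather than a scalar. A sketch of the pitfall (not part of the commit):

    anidb_id = 42
    stored = anidb_id,        # the trailing comma builds a 1-tuple
    print(stored)             # (42,)
    print(stored == 42)       # False - callers comparing against ints will misfire
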
@ -162,14 +162,4 @@ class Language(Language_):
        return Language(*Language_.fromalpha3b(s).__getstate__())


IETF_MATCH = ".+\.([^-.]+)(?:-[A-Za-z]+)?$"
ENDSWITH_LANGUAGECODE_RE = re.compile("\.([^-.]{2,3})(?:-[A-Za-z]{2,})?$")


def match_ietf_language(s, ietf=False):
    language_match = re.match(".+\.([^\.]+)$" if not ietf
                              else IETF_MATCH, s)
    if language_match and len(language_match.groups()) == 1:
        language = language_match.groups()[0]
        return language
    return s
ENDSWITH_LANGUAGECODE_RE = re.compile(r"\.([^-.]{2,3})(?:-[A-Za-z]{2,})?$")

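Note: a sketch (not part of the commit) of what the retained pattern matches once the extension has been stripped; the filenames are made up.

    import os
    import re

    ENDSWITH_LANGUAGECODE_RE = re.compile(r"\.([^-.]{2,3})(?:-[A-Za-z]{2,})?$")

    for fn in ("Movie.2024.en.srt", "Movie.2024.pt-BR.srt"):
        stem = os.path.splitext(fn)[0]            # drop ".srt" first
        m = ENDSWITH_LANGUAGECODE_RE.search(stem)
        print(m.group(1) if m else None)          # -> en, then pt
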
@ -15,12 +15,11 @@
    "@typescript-eslint/no-unused-vars": "warn"
  },
  "extends": [
    "react-app",
    "plugin:react-hooks/recommended",
    "eslint:recommended",
    "plugin:react-hooks/recommended",
    "plugin:@typescript-eslint/recommended"
  ],
  "plugins": ["testing-library", "simple-import-sort"],
  "plugins": ["testing-library", "simple-import-sort", "react-refresh"],
  "overrides": [
    {
      "files": [

@ -63,6 +62,7 @@
      }
    }
  ],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "sourceType": "module",
    "ecmaVersion": "latest"

frontend/package-lock.json (generated, 1951 lines; diff suppressed because it is too large)

@ -13,12 +13,12 @@
  },
  "private": true,
  "dependencies": {
    "@mantine/core": "^7.11.0",
    "@mantine/dropzone": "^7.11.0",
    "@mantine/form": "^7.11.0",
    "@mantine/hooks": "^7.11.0",
    "@mantine/modals": "^7.11.0",
    "@mantine/notifications": "^7.11.0",
    "@mantine/core": "^7.12.2",
    "@mantine/dropzone": "^7.12.2",
    "@mantine/form": "^7.12.2",
    "@mantine/hooks": "^7.12.2",
    "@mantine/modals": "^7.12.2",
    "@mantine/notifications": "^7.12.2",
    "@tanstack/react-query": "^5.40.1",
    "@tanstack/react-table": "^8.19.2",
    "axios": "^1.6.8",

@ -30,10 +30,10 @@
  },
  "devDependencies": {
    "@fontsource/roboto": "^5.0.12",
    "@fortawesome/fontawesome-svg-core": "^6.5.2",
    "@fortawesome/free-brands-svg-icons": "^6.5.2",
    "@fortawesome/free-regular-svg-icons": "^6.5.2",
    "@fortawesome/free-solid-svg-icons": "^6.5.2",
    "@fortawesome/fontawesome-svg-core": "^6.6.0",
    "@fortawesome/free-brands-svg-icons": "^6.6.0",
    "@fortawesome/free-regular-svg-icons": "^6.6.0",
    "@fortawesome/free-solid-svg-icons": "^6.6.0",
    "@fortawesome/react-fontawesome": "^0.2.2",
    "@tanstack/react-query-devtools": "^5.40.1",
    "@testing-library/jest-dom": "^6.4.2",

@ -42,16 +42,18 @@
    "@types/jest": "^29.5.12",
    "@types/lodash": "^4.17.1",
    "@types/node": "^20.12.6",
    "@types/react": "^18.3.3",
    "@types/react": "^18.3.5",
    "@types/react-dom": "^18.3.0",
    "@typescript-eslint/eslint-plugin": "^7.16.0",
    "@typescript-eslint/parser": "^7.16.0",
    "@vite-pwa/assets-generator": "^0.2.4",
    "@vitejs/plugin-react": "^4.2.1",
    "@vitest/coverage-v8": "^1.4.0",
    "@vitest/ui": "^1.2.2",
    "clsx": "^2.1.0",
    "eslint": "^8.57.0",
    "eslint-config-react-app": "^7.0.1",
    "eslint-plugin-react-hooks": "^4.6.0",
    "eslint-plugin-react-refresh": "^0.4.7",
    "eslint-plugin-simple-import-sort": "^12.1.0",
    "eslint-plugin-testing-library": "^6.2.0",
    "husky": "^9.0.11",

@ -62,7 +64,7 @@
    "prettier": "^3.2.5",
    "prettier-plugin-organize-imports": "^3.2.4",
    "pretty-quick": "^4.0.0",
    "recharts": "^2.12.6",
    "recharts": "^2.12.7",
    "sass": "^1.74.1",
    "typescript": "^5.4.4",
    "vite": "^5.2.8",

@ -270,6 +270,7 @@ function useRoutes(): CustomRouteObject[] {
        {
          path: "status",
          name: "Status",
          badge: data?.status,
          element: (
            <Lazy>
              <SystemStatusView></SystemStatusView>

@ -309,6 +310,7 @@ function useRoutes(): CustomRouteObject[] {
      data?.sonarr_signalr,
      data?.radarr_signalr,
      data?.announcements,
      data?.status,
      radarr,
      sonarr,
    ],

@ -25,23 +25,6 @@ const cacheEpisodes = (client: QueryClient, episodes: Item.Episode[]) => {
  });
};

export function useEpisodesByIds(ids: number[]) {
  const client = useQueryClient();

  const query = useQuery({
    queryKey: [QueryKeys.Series, QueryKeys.Episodes, ids],
    queryFn: () => api.episodes.byEpisodeId(ids),
  });

  useEffect(() => {
    if (query.isSuccess && query.data) {
      cacheEpisodes(client, query.data);
    }
  }, [query.isSuccess, query.data, client]);

  return query;
}

export function useEpisodesBySeriesId(id: number) {
  const client = useQueryClient();

@ -87,10 +70,11 @@ export function useEpisodeAddBlacklist() {
    },

    onSuccess: (_, { seriesId }) => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.Series, seriesId],
      });
    },

@ -105,8 +89,8 @@ export function useEpisodeDeleteBlacklist() {
    mutationFn: (param: { all?: boolean; form?: FormType.DeleteBlacklist }) =>
      api.episodes.deleteBlacklist(param.all, param.form),

    onSuccess: (_) => {
      client.invalidateQueries({
    onSuccess: () => {
      void client.invalidateQueries({
        queryKey: [QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
      });
    },

@ -15,23 +15,6 @@ const cacheMovies = (client: QueryClient, movies: Item.Movie[]) => {
  });
};

export function useMoviesByIds(ids: number[]) {
  const client = useQueryClient();

  const query = useQuery({
    queryKey: [QueryKeys.Movies, ...ids],
    queryFn: () => api.movies.movies(ids),
  });

  useEffect(() => {
    if (query.isSuccess && query.data) {
      cacheMovies(client, query.data);
    }
  }, [query.isSuccess, query.data, client]);

  return query;
}

export function useMovieById(id: number) {
  return useQuery({
    queryKey: [QueryKeys.Movies, id],

@ -74,12 +57,13 @@ export function useMovieModification() {

    onSuccess: (_, form) => {
      form.id.forEach((v) => {
        client.invalidateQueries({
        void client.invalidateQueries({
          queryKey: [QueryKeys.Movies, v],
        });
      });

      // TODO: query less
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.Movies],
      });
    },

@ -93,7 +77,7 @@ export function useMovieAction() {
    mutationFn: (form: FormType.MoviesAction) => api.movies.action(form),

    onSuccess: () => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.Movies],
      });
    },

@ -125,10 +109,11 @@ export function useMovieAddBlacklist() {
    },

    onSuccess: (_, { id }) => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.Movies, QueryKeys.Blacklist],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.Movies, id],
      });
    },

@ -143,8 +128,8 @@ export function useMovieDeleteBlacklist() {
    mutationFn: (param: { all?: boolean; form?: FormType.DeleteBlacklist }) =>
      api.movies.deleteBlacklist(param.all, param.form),

    onSuccess: (_, param) => {
      client.invalidateQueries({
    onSuccess: () => {
      void client.invalidateQueries({
        queryKey: [QueryKeys.Movies, QueryKeys.Blacklist],
      });
    },

@ -54,22 +54,27 @@ export function useSettingsMutation() {
    mutationFn: (data: LooseObject) => api.system.updateSettings(data),

    onSuccess: () => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.System],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.Series],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.Episodes],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.Movies],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.Wanted],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.Badges],
      });
    },

@ -101,7 +106,7 @@ export function useDeleteLogs() {
    mutationFn: () => api.system.deleteLogs(),

    onSuccess: () => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Logs],
      });
    },

@ -128,11 +133,12 @@ export function useSystemAnnouncementsAddDismiss() {
      return api.system.addAnnouncementsDismiss(hash);
    },

    onSuccess: (_, { hash }) => {
      client.invalidateQueries({
    onSuccess: () => {
      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Announcements],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Badges],
      });
    },

@ -156,10 +162,11 @@ export function useRunTask() {
    mutationFn: (id: string) => api.system.runTask(id),

    onSuccess: () => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Tasks],
      });
      client.invalidateQueries({

      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Backups],
      });
    },

@ -180,7 +187,7 @@ export function useCreateBackups() {
    mutationFn: () => api.system.createBackups(),

    onSuccess: () => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Backups],
      });
    },

@ -194,7 +201,7 @@ export function useRestoreBackups() {
    mutationFn: (filename: string) => api.system.restoreBackups(filename),

    onSuccess: () => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Backups],
      });
    },

@ -208,7 +215,7 @@ export function useDeleteBackups() {
    mutationFn: (filename: string) => api.system.deleteBackups(filename),

    onSuccess: () => {
      client.invalidateQueries({
      void client.invalidateQueries({
        queryKey: [QueryKeys.System, QueryKeys.Backups],
      });
    },

@ -47,4 +47,8 @@
      }
    }
  }

  .label {
    overflow: visible;
  }
}

@ -3,6 +3,7 @@ import { useNavigate } from "react-router-dom";
import { Autocomplete, ComboboxItem, OptionsFilter, Text } from "@mantine/core";
import { faSearch } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { chain, includes } from "lodash";
import { useServerSearch } from "@/apis/hooks";
import { useDebouncedValue } from "@/utilities";

@ -15,23 +16,45 @@ function useSearch(query: string) {
  const debouncedQuery = useDebouncedValue(query, 500);
  const { data } = useServerSearch(debouncedQuery, debouncedQuery.length >= 0);

  const duplicates = chain(data)
    .groupBy((item) => `${item.title} (${item.year})`)
    .filter((group) => group.length > 1)
    .map((group) => `${group[0].title} (${group[0].year})`)
    .value();

  return useMemo<SearchResultItem[]>(
    () =>
      data?.map((v) => {
        let link: string;
        if (v.sonarrSeriesId) {
          link = `/series/${v.sonarrSeriesId}`;
        } else if (v.radarrId) {
          link = `/movies/${v.radarrId}`;
        } else {
        const { link, displayName } = (() => {
          const hasDuplicate = includes(duplicates, `${v.title} (${v.year})`);

          if (v.sonarrSeriesId) {
            return {
              link: `/series/${v.sonarrSeriesId}`,
              displayName: hasDuplicate
                ? `${v.title} (${v.year}) (S)`
                : `${v.title} (${v.year})`,
            };
          }

          if (v.radarrId) {
            return {
              link: `/movies/${v.radarrId}`,
              displayName: hasDuplicate
                ? `${v.title} (${v.year}) (M)`
                : `${v.title} (${v.year})`,
            };
          }

          throw new Error("Unknown search result");
        }
        })();

        return {
          value: `${v.title} (${v.year})`,
          value: displayName,
          link,
        };
      }) ?? [],
    [data],
    [data, duplicates],
  );
}

@ -25,7 +25,7 @@ const TextPopover: FunctionComponent<TextPopoverProps> = ({
      opened={hovered}
      label={text}
      {...tooltip}
      style={{ textWrap: "pretty" }}
      style={{ textWrap: "wrap" }}
    >
      <div ref={ref}>{children}</div>
    </Tooltip>

@ -16,7 +16,6 @@ type MutateActionProps<DATA, VAR> = Omit<
|
|||
|
||||
function MutateAction<DATA, VAR>({
|
||||
mutation,
|
||||
noReset,
|
||||
onSuccess,
|
||||
onError,
|
||||
args,
|
||||
|
|
|
@@ -15,7 +15,6 @@ type MutateButtonProps<DATA, VAR> = Omit<

 function MutateButton<DATA, VAR>({
   mutation,
   noReset,
   onSuccess,
   onError,
   args,
@@ -12,7 +12,7 @@ interface QueryOverlayProps {
 const QueryOverlay: FunctionComponent<QueryOverlayProps> = ({
   children,
   global = false,
-  result: { isLoading, isError, error },
+  result: { isLoading },
 }) => {
   return (
     <LoadingProvider value={isLoading}>
@@ -1,6 +1,7 @@
 import { FunctionComponent } from "react";
 import { Badge, BadgeProps, Group, GroupProps } from "@mantine/core";
 import { BuildKey } from "@/utilities";
+import { normalizeAudioLanguage } from "@/utilities/languages";

 export type AudioListProps = GroupProps & {
   audios: Language.Info[];

@@ -16,7 +17,7 @@ const AudioList: FunctionComponent<AudioListProps> = ({
     <Group gap="xs" {...group}>
       {audios.map((audio, idx) => (
         <Badge color="blue" key={BuildKey(idx, audio.code2)} {...badgeProps}>
-          {audio.name}
+          {normalizeAudioLanguage(audio.name)}
         </Badge>
       ))}
     </Group>
@@ -1,9 +1,9 @@
-import { FunctionComponent, useEffect, useMemo } from "react";
+import React, { FunctionComponent, useEffect, useMemo } from "react";
 import {
   Button,
   Checkbox,
   Divider,
   MantineColor,
   Select,
   Stack,
   Text,
 } from "@mantine/core";

@@ -17,8 +17,9 @@ import {
 } from "@fortawesome/free-solid-svg-icons";
 import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
 import { ColumnDef } from "@tanstack/react-table";
-import { isString } from "lodash";
+import { isString, uniqBy } from "lodash";
 import { useMovieSubtitleModification } from "@/apis/hooks";
+import { subtitlesTypeOptions } from "@/components/forms/uploadFormSelectorTypes";
 import { Action, Selector } from "@/components/inputs";
 import SimpleTable from "@/components/tables/SimpleTable";
 import TextPopover from "@/components/TextPopover";

@@ -88,7 +89,7 @@ const MovieUploadForm: FunctionComponent<Props> = ({
   const languages = useProfileItemsToLanguages(profile);
   const languageOptions = useSelectorOptions(
-    languages,
+    uniqBy(languages, "code2"),
     (v) => v.name,
     (v) => v.code2,
   );

@@ -207,34 +208,6 @@ const MovieUploadForm: FunctionComponent<Props> = ({
         return <Text className="table-primary">{file.name}</Text>;
       },
     },
-    {
-      header: "Forced",
-      accessorKey: "forced",
-      cell: ({ row: { original, index } }) => {
-        return (
-          <Checkbox
-            checked={original.forced}
-            onChange={({ currentTarget: { checked } }) => {
-              action.mutate(index, { ...original, forced: checked });
-            }}
-          ></Checkbox>
-        );
-      },
-    },
-    {
-      header: "HI",
-      accessorKey: "hi",
-      cell: ({ row: { original, index } }) => {
-        return (
-          <Checkbox
-            checked={original.hi}
-            onChange={({ currentTarget: { checked } }) => {
-              action.mutate(index, { ...original, hi: checked });
-            }}
-          ></Checkbox>
-        );
-      },
-    },
     {
       header: "Language",
       accessorKey: "language",

@@ -251,6 +224,61 @@ const MovieUploadForm: FunctionComponent<Props> = ({
         );
       },
     },
+    {
+      header: () => (
+        <Selector
+          options={subtitlesTypeOptions}
+          value={null}
+          placeholder="Type"
+          onChange={(value) => {
+            if (value) {
+              action.update((item) => {
+                switch (value) {
+                  case "hi":
+                    return { ...item, hi: true, forced: false };
+                  case "forced":
+                    return { ...item, hi: false, forced: true };
+                  case "normal":
+                    return { ...item, hi: false, forced: false };
+                  default:
+                    return item;
+                }
+              });
+            }
+          }}
+        ></Selector>
+      ),
+      accessorKey: "type",
+      cell: ({ row: { original, index } }) => {
+        return (
+          <Select
+            value={
+              subtitlesTypeOptions.find((s) => {
+                if (original.hi) {
+                  return s.value === "hi";
+                }
+
+                if (original.forced) {
+                  return s.value === "forced";
+                }
+
+                return s.value === "normal";
+              })?.value
+            }
+            data={subtitlesTypeOptions}
+            onChange={(value) => {
+              if (value) {
+                action.mutate(index, {
+                  ...original,
+                  hi: value === "hi",
+                  forced: value === "forced",
+                });
+              }
+            }}
+          ></Select>
+        );
+      },
+    },
     {
       id: "action",
       cell: ({ row: { index } }) => {
@@ -3,3 +3,11 @@
     padding: 0;
   }
 }
+
+.evenly {
+  flex-wrap: wrap;
+
+  & > div {
+    flex: 1;
+  }
+}
@@ -3,6 +3,7 @@ import {
   Accordion,
   Button,
   Checkbox,
+  Flex,
   Select,
   Stack,
   Switch,

@@ -72,9 +73,16 @@ const ProfileEditForm: FunctionComponent<Props> = ({
       (value) => value.length > 0,
       "Must have a name",
     ),
+    tag: FormUtils.validation((value) => {
+      if (!value) {
+        return true;
+      }
+
+      return /^[a-z_0-9-]+$/.test(value);
+    }, "Only lowercase alphanumeric characters, underscores (_) and hyphens (-) are allowed"),
     items: FormUtils.validation(
       (value) => value.length > 0,
-      "Must contain at lease 1 language",
+      "Must contain at least 1 language",
     ),
   },
 });

@@ -265,7 +273,24 @@ const ProfileEditForm: FunctionComponent<Props> = ({
     })}
   >
     <Stack>
-      <TextInput label="Name" {...form.getInputProps("name")}></TextInput>
+      <Flex
+        direction={{ base: "column", sm: "row" }}
+        gap="sm"
+        className={styles.evenly}
+      >
+        <TextInput label="Name" {...form.getInputProps("name")}></TextInput>
+        <TextInput
+          label="Tag"
+          {...form.getInputProps("tag")}
+          onBlur={() =>
+            form.setFieldValue(
+              "tag",
+              (prev) =>
+                prev?.toLowerCase().trim().replace(/\s+/g, "_") ?? undefined,
+            )
+          }
+        ></TextInput>
+      </Flex>
       <Accordion
         multiple
         chevronPosition="right"

@@ -274,7 +299,6 @@ const ProfileEditForm: FunctionComponent<Props> = ({
   >
     <Accordion.Item value="Languages">
       <Stack>
-        {form.errors.items}
         <SimpleTable
           columns={columns}
           data={form.values.items}

@@ -282,6 +306,7 @@ const ProfileEditForm: FunctionComponent<Props> = ({
       <Button fullWidth onClick={addItem}>
         Add Language
       </Button>
+      <Text c="var(--mantine-color-error)">{form.errors.items}</Text>
       <Selector
         clearable
         label="Cutoff"
@@ -1,9 +1,9 @@
-import { FunctionComponent, useEffect, useMemo } from "react";
+import React, { FunctionComponent, useEffect, useMemo } from "react";
 import {
   Button,
   Checkbox,
   Divider,
   MantineColor,
   Select,
   Stack,
   Text,
 } from "@mantine/core";

@@ -17,12 +17,13 @@ import {
 } from "@fortawesome/free-solid-svg-icons";
 import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
 import { ColumnDef } from "@tanstack/react-table";
-import { isString } from "lodash";
+import { isString, uniqBy } from "lodash";
 import {
   useEpisodesBySeriesId,
   useEpisodeSubtitleModification,
   useSubtitleInfos,
 } from "@/apis/hooks";
+import { subtitlesTypeOptions } from "@/components/forms/uploadFormSelectorTypes";
 import { Action, Selector } from "@/components/inputs";
 import SimpleTable from "@/components/tables/SimpleTable";
 import TextPopover from "@/components/TextPopover";

@@ -100,7 +101,7 @@ const SeriesUploadForm: FunctionComponent<Props> = ({
   const profile = useLanguageProfileBy(series.profileId);
   const languages = useProfileItemsToLanguages(profile);
   const languageOptions = useSelectorOptions(
-    languages,
+    uniqBy(languages, "code2"),
     (v) => v.name,
     (v) => v.code2,
   );

@@ -235,42 +236,6 @@ const SeriesUploadForm: FunctionComponent<Props> = ({
         return <Text className="table-primary">{name}</Text>;
       },
     },
-    {
-      header: "Forced",
-      accessorKey: "forced",
-      cell: ({ row: { original, index } }) => {
-        return (
-          <Checkbox
-            checked={original.forced}
-            onChange={({ currentTarget: { checked } }) => {
-              action.mutate(index, {
-                ...original,
-                forced: checked,
-                hi: checked ? false : original.hi,
-              });
-            }}
-          ></Checkbox>
-        );
-      },
-    },
-    {
-      header: "HI",
-      accessorKey: "hi",
-      cell: ({ row: { original, index } }) => {
-        return (
-          <Checkbox
-            checked={original.hi}
-            onChange={({ currentTarget: { checked } }) => {
-              action.mutate(index, {
-                ...original,
-                hi: checked,
-                forced: checked ? false : original.forced,
-              });
-            }}
-          ></Checkbox>
-        );
-      },
-    },
     {
       header: () => (
         <Selector

@@ -280,8 +245,7 @@ const SeriesUploadForm: FunctionComponent<Props> = ({
       onChange={(value) => {
         if (value) {
           action.update((item) => {
-            item.language = value;
-            return item;
+            return { ...item, language: value };
           });
         }
       }}

@@ -301,6 +265,61 @@ const SeriesUploadForm: FunctionComponent<Props> = ({
         );
       },
     },
+    {
+      header: () => (
+        <Selector
+          options={subtitlesTypeOptions}
+          value={null}
+          placeholder="Type"
+          onChange={(value) => {
+            if (value) {
+              action.update((item) => {
+                switch (value) {
+                  case "hi":
+                    return { ...item, hi: true, forced: false };
+                  case "forced":
+                    return { ...item, hi: false, forced: true };
+                  case "normal":
+                    return { ...item, hi: false, forced: false };
+                  default:
+                    return item;
+                }
+              });
+            }
+          }}
+        ></Selector>
+      ),
+      accessorKey: "type",
+      cell: ({ row: { original, index } }) => {
+        return (
+          <Select
+            value={
+              subtitlesTypeOptions.find((s) => {
+                if (original.hi) {
+                  return s.value === "hi";
+                }
+
+                if (original.forced) {
+                  return s.value === "forced";
+                }
+
+                return s.value === "normal";
+              })?.value
+            }
+            data={subtitlesTypeOptions}
+            onChange={(value) => {
+              if (value) {
+                action.mutate(index, {
+                  ...original,
+                  hi: value === "hi",
+                  forced: value === "forced",
+                });
+              }
+            }}
+          ></Select>
+        );
+      },
+    },
     {
       id: "episode",
       header: "Episode",
frontend/src/components/forms/uploadFormSelectorTypes.tsx (new file, 16 lines)
@@ -0,0 +1,16 @@
+import { SelectorOption } from "@/components";
+
+export const subtitlesTypeOptions: SelectorOption<string>[] = [
+  {
+    label: "Normal",
+    value: "normal",
+  },
+  {
+    label: "Hearing-Impaired",
+    value: "hi",
+  },
+  {
+    label: "Forced",
+    value: "forced",
+  },
+];
@@ -7,7 +7,7 @@ import {
   Select,
   SelectProps,
 } from "@mantine/core";
-import { isNull, isUndefined, noop } from "lodash";
+import { isNull, isUndefined } from "lodash";
 import { LOG } from "@/utilities/console";

 export type SelectorOption<T> = Override<

@@ -49,10 +49,7 @@ export type GroupedSelectorProps<T> = Override<
 >;

 export function GroupedSelector<T>({
-  value,
-  options,
   getkey = DefaultKeyBuilder,
-  onOptionSubmit = noop,
   ...select
 }: GroupedSelectorProps<T>) {
   return (
@@ -5,11 +5,8 @@ import { ModalSettings } from "@mantine/modals/lib/context";
 import { ModalComponent, ModalIdContext } from "./WithModal";

 export function useModals() {
-  const {
-    openContextModal: openMantineContextModal,
-    closeContextModal: closeContextModalRaw,
-    ...rest
-  } = useMantineModals();
+  const { openContextModal: openMantineContextModal, ...rest } =
+    useMantineModals();

   const openContextModal = useCallback(
     <ARGS extends {}>(

@@ -26,7 +23,7 @@ export function useModals() {
     [openMantineContextModal],
   );

-  const closeContextModal = useCallback(
+  const closeContext = useCallback(
     (modal: ModalComponent) => {
       rest.closeModal(modal.modalKey);
     },

@@ -43,7 +40,7 @@ export function useModals() {

   // TODO: Performance
   return useMemo(
-    () => ({ openContextModal, closeContextModal, closeSelf, ...rest }),
-    [closeContextModal, closeSelf, openContextModal, rest],
+    () => ({ openContextModal, closeContext, closeSelf, ...rest }),
+    [closeContext, closeSelf, openContextModal, rest],
   );
 }
@@ -40,13 +40,17 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
       update: (ids) => {
         LOG("info", "Invalidating series", ids);
         ids.forEach((id) => {
-          queryClient.invalidateQueries({ queryKey: [QueryKeys.Series, id] });
+          void queryClient.invalidateQueries({
+            queryKey: [QueryKeys.Series, id],
+          });
         });
       },
       delete: (ids) => {
         LOG("info", "Invalidating series", ids);
         ids.forEach((id) => {
-          queryClient.invalidateQueries({ queryKey: [QueryKeys.Series, id] });
+          void queryClient.invalidateQueries({
+            queryKey: [QueryKeys.Series, id],
+          });
         });
       },
     },

@@ -55,13 +59,17 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
       update: (ids) => {
         LOG("info", "Invalidating movies", ids);
         ids.forEach((id) => {
-          queryClient.invalidateQueries({ queryKey: [QueryKeys.Movies, id] });
+          void queryClient.invalidateQueries({
+            queryKey: [QueryKeys.Movies, id],
+          });
         });
       },
       delete: (ids) => {
         LOG("info", "Invalidating movies", ids);
         ids.forEach((id) => {
-          queryClient.invalidateQueries({ queryKey: [QueryKeys.Movies, id] });
+          void queryClient.invalidateQueries({
+            queryKey: [QueryKeys.Movies, id],
+          });
        });
       },
     },

@@ -78,7 +86,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
           id,
         ]);
         if (episode !== undefined) {
-          queryClient.invalidateQueries({
+          void queryClient.invalidateQueries({
             queryKey: [QueryKeys.Series, episode.sonarrSeriesId],
           });
         }

@@ -92,7 +100,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
           id,
         ]);
         if (episode !== undefined) {
-          queryClient.invalidateQueries({
+          void queryClient.invalidateQueries({
             queryKey: [QueryKeys.Series, episode.sonarrSeriesId],
           });
         }

@@ -101,28 +109,28 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     },
     {
       key: "episode-wanted",
-      update: (ids) => {
+      update: () => {
         // Find a better way to update wanted
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Episodes, QueryKeys.Wanted],
         });
       },
       delete: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Episodes, QueryKeys.Wanted],
         });
       },
     },
     {
       key: "movie-wanted",
-      update: (ids) => {
+      update: () => {
         // Find a better way to update wanted
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Movies, QueryKeys.Wanted],
         });
       },
       delete: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Movies, QueryKeys.Wanted],
         });
       },

@@ -130,13 +138,13 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "settings",
       any: () => {
-        queryClient.invalidateQueries({ queryKey: [QueryKeys.System] });
+        void queryClient.invalidateQueries({ queryKey: [QueryKeys.System] });
       },
     },
     {
       key: "languages",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.System, QueryKeys.Languages],
         });
       },

@@ -144,7 +152,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "badges",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.System, QueryKeys.Badges],
         });
       },

@@ -152,7 +160,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "movie-history",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Movies, QueryKeys.History],
         });
       },

@@ -160,7 +168,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "movie-blacklist",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Movies, QueryKeys.Blacklist],
         });
       },

@@ -168,7 +176,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "episode-history",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Episodes, QueryKeys.History],
         });
       },

@@ -176,7 +184,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "episode-blacklist",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Episodes, QueryKeys.Blacklist],
         });
       },

@@ -184,7 +192,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "reset-episode-wanted",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Episodes, QueryKeys.Wanted],
         });
       },

@@ -192,7 +200,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "reset-movie-wanted",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.Movies, QueryKeys.Wanted],
         });
       },

@@ -200,7 +208,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
     {
       key: "task",
       any: () => {
-        queryClient.invalidateQueries({
+        void queryClient.invalidateQueries({
           queryKey: [QueryKeys.System, QueryKeys.Tasks],
         });
       },
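A note on the recurring change in this file: `queryClient.invalidateQueries` returns a Promise, and the added `void` operator marks that promise as intentionally unawaited, which is the usual way to satisfy a floating-promises lint rule such as `@typescript-eslint/no-floating-promises`. A tiny self-contained sketch of the pattern (the function here is a stand-in, not Bazarr code):

// Stand-in for any fire-and-forget async call such as invalidateQueries.
async function invalidate(key: string): Promise<void> {
  console.log(`invalidating ${key}`);
}

function onSocketEvent(): void {
  // `void` documents that the returned promise is deliberately discarded.
  void invalidate("series");
}

onSocketEvent();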
@@ -6,6 +6,7 @@ import { faBookmark as farBookmark } from "@fortawesome/free-regular-svg-icons";
 import { faBookmark, faWrench } from "@fortawesome/free-solid-svg-icons";
 import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
 import { ColumnDef } from "@tanstack/react-table";
+import { uniqueId } from "lodash";
 import { useMovieModification, useMoviesPagination } from "@/apis/hooks";
 import { Action } from "@/components";
 import { AudioList } from "@/components/bazarr";

@@ -95,7 +96,7 @@ const MovieView: FunctionComponent = () => {
   <Badge
     mr="xs"
     color="yellow"
-    key={BuildKey(v.code2, v.hi, v.forced)}
+    key={uniqueId(`${BuildKey(v.code2, v.hi, v.forced)}_`)}
   >
     <Language.Text value={v}></Language.Text>
   </Badge>
@@ -65,25 +65,34 @@ const SeriesView: FunctionComponent = () => {
   cell: (row) => {
     const { episodeFileCount, episodeMissingCount, profileId, title } =
       row.row.original;
-    let progress = 0;
-    let label = "";
-    if (episodeFileCount === 0 || !profileId) {
-      progress = 0.0;
-    } else {
-      progress = (1.0 - episodeMissingCount / episodeFileCount) * 100.0;
-      label = `${
-        episodeFileCount - episodeMissingCount
-      }/${episodeFileCount}`;
-    }
+
+    const label = `${episodeFileCount - episodeMissingCount}/${episodeFileCount}`;
     return (
       <Progress.Root key={title} size="xl">
         <Progress.Section
-          value={progress}
+          value={
+            episodeFileCount === 0 || !profileId
+              ? 0
+              : (1.0 - episodeMissingCount / episodeFileCount) * 100.0
+          }
           color={episodeMissingCount === 0 ? "brand" : "yellow"}
         >
           <Progress.Label>{label}</Progress.Label>
         </Progress.Section>
+        {episodeMissingCount === episodeFileCount && (
+          <Progress.Label
+            styles={{
+              label: {
+                position: "absolute",
+                top: "3px",
+                left: "50%",
+                transform: "translateX(-50%)",
+              },
+            }}
+          >
+            {label}
+          </Progress.Label>
+        )}
       </Progress.Root>
     );
   },
@@ -43,10 +43,10 @@ const SettingsGeneralView: FunctionComponent = () => {
   <Section header="Host">
     <Text
       label="Address"
-      placeholder="0.0.0.0"
+      placeholder="*"
       settingKey="settings-general-ip"
     ></Text>
-    <Message>Valid IPv4 address or '0.0.0.0' for all interfaces</Message>
+    <Message>Valid IP address or '*' for all interfaces</Message>
     <Number
       label="Port"
       placeholder="6767"
@@ -1,7 +1,9 @@
 import { FunctionComponent } from "react";
+import { Text as MantineText } from "@mantine/core";
 import { useLanguageProfiles, useLanguages } from "@/apis/hooks";
 import {
   Check,
+  Chips,
   CollapseBox,
   Layout,
   Message,

@@ -115,6 +117,50 @@ const SettingsLanguagesView: FunctionComponent = () => {
   <Section header="Languages Profile">
     <Table></Table>
   </Section>
+  <Section header="Tag-Based Automatic Language Profile Selection Settings">
+    <Message>
+      If enabled, Bazarr will look at the names of all tags of a Series from
+      Sonarr (or a Movie from Radarr) to find a matching Bazarr language
+      profile tag. It will use as the language profile the FIRST tag from
+      Sonarr/Radarr that matches the tag of a Bazarr language profile
+      EXACTLY. If multiple tags match, there is no guarantee as to which one
+      will be used, so choose your tag names carefully. Also, if you update
+      the tag names in Sonarr/Radarr, Bazarr will detect this and repeat the
+      matching process for the affected shows. However, if a show's only
+      matching tag is removed from Sonarr/Radarr, Bazarr will NOT remove the
+      show's existing language profile for that reason. But if you wish to
+      have language profiles removed automatically by tag value, simply
+      enter a list of one or more tags in the{" "}
+      <MantineText fw={700} span>
+        Remove Profile Tags
+      </MantineText>{" "}
+      entry list below. If your video tag matches one of the tags in that
+      list, then Bazarr will remove the language profile for that video. If
+      there is a conflict between profile selection and profile removal,
+      then profile removal wins out and is performed.
+    </Message>
+    <Check
+      label="Series"
+      settingKey="settings-general-serie_tag_enabled"
+    ></Check>
+    <Check
+      label="Movies"
+      settingKey="settings-general-movie_tag_enabled"
+    ></Check>
+    <Chips
+      label="Remove Profile Tags"
+      settingKey="settings-general-remove_profile_tags"
+      sanitizeFn={(values: string[] | null) =>
+        values?.map((item) =>
+          item.replace(/[^a-z0-9_-]/gi, "").toLowerCase(),
+        )
+      }
+    ></Chips>
+    <Message>
+      Enter tag values that will trigger a language profile removal. Leave
+      empty if you don't want Bazarr to remove language profiles.
+    </Message>
+  </Section>
   <Section header="Default Settings">
     <Check
       label="Series"
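As a reading aid for the long message above, here is a small sketch of the selection/removal rule it describes. This is not Bazarr's actual implementation (the real logic lives in the backend); the names and types below are illustrative only:

interface Profile {
  profileId: number;
  tag?: string;
}

// Returns a profileId to assign, null to remove the current profile,
// or undefined to leave the video untouched.
function resolveProfile(
  videoTags: string[],
  profiles: Profile[],
  removeTags: string[],
): number | null | undefined {
  // Removal wins over selection when both would apply.
  if (videoTags.some((t) => removeTags.includes(t))) {
    return null;
  }

  // The FIRST video tag that EXACTLY matches a profile tag wins.
  for (const tag of videoTags) {
    const match = profiles.find((p) => p.tag === tag);
    if (match) {
      return match.profileId;
    }
  }

  return undefined;
}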
@@ -2,7 +2,7 @@ import { FunctionComponent, useCallback, useMemo } from "react";
 import { Badge, Button, Group } from "@mantine/core";
 import { faTrash, faWrench } from "@fortawesome/free-solid-svg-icons";
 import { ColumnDef } from "@tanstack/react-table";
-import { cloneDeep } from "lodash";
+import { cloneDeep, includes, maxBy } from "lodash";
 import { Action } from "@/components";
 import {
   anyCutoff,

@@ -65,6 +65,10 @@ const Table: FunctionComponent = () => {
   header: "Name",
   accessorKey: "name",
 },
+{
+  header: "Tag",
+  accessorKey: "tag",
+},
 {
   header: "Languages",
   accessorKey: "items",

@@ -75,10 +79,10 @@ const Table: FunctionComponent = () => {
 }) => {
   return (
     <Group gap="xs" wrap="nowrap">
-      {items.map((v) => {
+      {items.map((v, i) => {
         const isCutoff = v.id === cutoff || cutoff === anyCutoff;
         return (
-          <ItemBadge key={v.id} cutoff={isCutoff} item={v}></ItemBadge>
+          <ItemBadge key={i} cutoff={isCutoff} item={v}></ItemBadge>
         );
       })}
     </Group>

@@ -144,9 +148,45 @@ const Table: FunctionComponent = () => {
   icon={faWrench}
   c="gray"
   onClick={() => {
+    const lastId = maxBy(profile.items, "id")?.id || 0;
+
+    // We once had an issue in the past where there were duplicated
+    // item ids that need to become unique upon editing.
+    const sanitizedProfile = {
+      ...cloneDeep(profile),
+      items: profile.items.reduce(
+        (acc, value) => {
+          const { ids, duplicatedIds, items } = acc;
+
+          if (includes(ids, value.id)) {
+            duplicatedIds.push(value.id);
+            items.push({
+              ...value,
+              id: lastId + duplicatedIds.length,
+            });
+
+            return acc;
+          }
+
+          ids.push(value.id);
+          items.push(value);
+
+          return acc;
+        },
+        {
+          ids: [] as number[],
+          duplicatedIds: [] as number[],
+          items: [] as typeof profile.items,
+        },
+      ).items,
+      tag: profile.tag || undefined,
+    };
+
     modals.openContextModal(ProfileEditModal, {
       languages,
-      profile: cloneDeep(profile),
+      profile: sanitizedProfile,
       onComplete: updateProfile,
     });
   }}

@@ -178,6 +218,7 @@ const Table: FunctionComponent = () => {
   const profile = {
     profileId: nextProfileId,
     name: "",
+    tag: undefined,
     items: [],
     cutoff: null,
     mustContain: [],
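To see what the duplicate-id sanitation in the hunk above does, a tiny standalone reproduction (the sample items are hypothetical; it mirrors the reduce's renumbering behavior):

import { includes, maxBy } from "lodash";

interface Item {
  id: number;
  language: string;
}

const items: Item[] = [
  { id: 1, language: "en" },
  { id: 1, language: "fr" }, // duplicated id left over from an old bug
  { id: 2, language: "de" },
];

const lastId = maxBy(items, "id")?.id || 0;

const seen: number[] = [];
let bumped = 0;
const fixed = items.map((item) => {
  if (includes(seen, item.id)) {
    bumped += 1;
    return { ...item, id: lastId + bumped }; // renumber past the max id
  }
  seen.push(item.id);
  return item;
});
// fixed ids: 1, 3, 2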
@@ -108,10 +108,12 @@ export const ProviderView: FunctionComponent<ProviderViewProps> = ({
   })
   .map((v, idx) => (
     <Card
+      titleStyles={{ overflow: "hidden", textOverflow: "ellipsis" }}
       key={BuildKey(v.key, idx)}
       header={v.name ?? capitalize(v.key)}
       description={v.description}
       onClick={() => select(v)}
+      lineClamp={2}
     ></Card>
   ));
 } else {
@@ -218,6 +218,35 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
     },
   ],
 },
+{
+  key: "jimaku",
+  name: "Jimaku.cc",
+  description: "Japanese Subtitles Provider",
+  message:
+    "API key required. Subtitles stem from various sources and might have quality/timing issues.",
+  inputs: [
+    {
+      type: "password",
+      key: "api_key",
+      name: "API key",
+    },
+    {
+      type: "switch",
+      key: "enable_name_search_fallback",
+      name: "Search by name if no AniList ID was determined (Less accurate, required for live action)",
+    },
+    {
+      type: "switch",
+      key: "enable_archives_download",
+      name: "Also consider archives alongside uncompressed subtitles",
+    },
+    {
+      type: "switch",
+      key: "enable_ai_subs",
+      name: "Download AI generated subtitles",
+    },
+  ],
+},
 { key: "hosszupuska", description: "Hungarian Subtitles Provider" },
 {
   key: "karagarga",

@@ -276,6 +305,21 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
   { type: "switch", key: "skip_wrong_fps", name: "Skip Wrong FPS" },
   ],
 },
+{
+  key: "legendasnet",
+  name: "Legendas.net",
+  description: "Brazilian Subtitles Provider",
+  inputs: [
+    {
+      type: "text",
+      key: "username",
+    },
+    {
+      type: "password",
+      key: "password",
+    },
+  ],
+},
 { key: "napiprojekt", description: "Polish Subtitles Provider" },
 {
   key: "napisy24",
@@ -54,6 +54,11 @@ const SettingsRadarrView: FunctionComponent = () => {
   <Chips
     label="Excluded Tags"
     settingKey="settings-radarr-excluded_tags"
+    sanitizeFn={(values: string[] | null) =>
+      values?.map((item) =>
+        item.replace(/[^a-z0-9_-]/gi, "").toLowerCase(),
+      )
+    }
   ></Chips>
   <Message>
     Movies with those tags (case sensitive) in Radarr will be excluded
@@ -56,6 +56,11 @@ const SettingsSonarrView: FunctionComponent = () => {
   <Chips
     label="Excluded Tags"
     settingKey="settings-sonarr-excluded_tags"
+    sanitizeFn={(values: string[] | null) =>
+      values?.map((item) =>
+        item.replace(/[^a-z0-9_-]/gi, "").toLowerCase(),
+      )
+    }
   ></Chips>
   <Message>
     Episodes from series with those tags (case sensitive) in Sonarr will
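The `sanitizeFn` added in the two hunks above normalizes excluded tags as they are entered. A quick standalone illustration of what the regex does (the inputs are made up):

const sanitize = (values: string[] | null) =>
  values?.map((item) => item.replace(/[^a-z0-9_-]/gi, "").toLowerCase());

console.log(sanitize(["My Tag!", "4K-HDR", "anime_2024"]));
// ["mytag", "4k-hdr", "anime_2024"]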
@@ -1,5 +1,5 @@
-import { FunctionComponent } from "react";
-import { Code, Space, Table } from "@mantine/core";
+import React, { FunctionComponent } from "react";
+import { Code, Space, Table, Text as MantineText } from "@mantine/core";
 import {
   Check,
   CollapseBox,

@@ -115,14 +115,16 @@ const commandOptions: CommandOption[] = [
   },
 ];

-const commandOptionElements: JSX.Element[] = commandOptions.map((op, idx) => (
-  <tr key={idx}>
-    <td>
-      <Code>{op.option}</Code>
-    </td>
-    <td>{op.description}</td>
-  </tr>
-));
+const commandOptionElements: React.JSX.Element[] = commandOptions.map(
+  (op, idx) => (
+    <tr key={idx}>
+      <td>
+        <Code>{op.option}</Code>
+      </td>
+      <td>{op.description}</td>
+    </tr>
+  ),
+);

 const SettingsSubtitlesView: FunctionComponent = () => {
   return (

@@ -436,8 +438,11 @@ const SettingsSubtitlesView: FunctionComponent = () => {
   <Slider settingKey="settings-subsync-subsync_threshold"></Slider>
   <Space />
   <Message>
-    Only series subtitles with scores <b>below</b> this value will be
-    automatically synchronized.
+    Only series subtitles with scores{" "}
+    <MantineText fw={700} span>
+      below
+    </MantineText>{" "}
+    this value will be automatically synchronized.
   </Message>
 </CollapseBox>
 <Check

@@ -451,8 +456,11 @@ const SettingsSubtitlesView: FunctionComponent = () => {
   <Slider settingKey="settings-subsync-subsync_movie_threshold"></Slider>
   <Space />
   <Message>
-    Only movie subtitles with scores <b>below</b> this value will be
-    automatically synchronized.
+    Only movie subtitles with scores{" "}
+    <MantineText fw={700} span>
+      below
+    </MantineText>{" "}
+    this value will be automatically synchronized.
   </Message>
 </CollapseBox>
 </CollapseBox>

@@ -478,8 +486,11 @@ const SettingsSubtitlesView: FunctionComponent = () => {
   <Slider settingKey="settings-general-postprocessing_threshold"></Slider>
   <Space />
   <Message>
-    Only series subtitles with scores <b>below</b> this value will be
-    automatically post-processed.
+    Only series subtitles with scores{" "}
+    <MantineText fw={700} span>
+      below
+    </MantineText>{" "}
+    this value will be automatically post-processed.
   </Message>
 </CollapseBox>
 <Check

@@ -493,8 +504,11 @@ const SettingsSubtitlesView: FunctionComponent = () => {
   <Slider settingKey="settings-general-postprocessing_threshold_movie"></Slider>
   <Space />
   <Message>
-    Only movie subtitles with scores <b>below</b> this value will be
-    automatically post-processed.
+    Only movie subtitles with scores{" "}
+    <MantineText fw={700} span>
+      below
+    </MantineText>{" "}
+    this value will be automatically post-processed.
   </Message>
 </CollapseBox>
 <Text
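For clarity on the four threshold messages above: synchronization and post-processing are gated on the subtitle score being below the configured value. A trivial sketch of that gate (the function name is hypothetical):

function shouldProcess(score: number, threshold: number): boolean {
  // Only subtitles scoring below the threshold are touched;
  // higher-scoring matches are assumed good enough as-is.
  return score < threshold;
}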
@@ -1,14 +1,23 @@
 import { FunctionComponent } from "react";
-import { Center, Stack, Text, UnstyledButton } from "@mantine/core";
+import {
+  Center,
+  MantineStyleProp,
+  Stack,
+  Text,
+  UnstyledButton,
+} from "@mantine/core";
 import { faPlus } from "@fortawesome/free-solid-svg-icons";
 import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
+import TextPopover from "@/components/TextPopover";
 import styles from "./Card.module.scss";

 interface CardProps {
-  header?: string;
   description?: string;
-  plus?: boolean;
+  header?: string;
+  lineClamp?: number | undefined;
   onClick?: () => void;
+  plus?: boolean;
+  titleStyles?: MantineStyleProp | undefined;
 }

 export const Card: FunctionComponent<CardProps> = ({

@@ -16,6 +25,8 @@ export const Card: FunctionComponent<CardProps> = ({
   description,
   plus,
   onClick,
+  lineClamp,
+  titleStyles,
 }) => {
   return (
     <UnstyledButton p="lg" onClick={onClick} className={styles.card}>

@@ -24,9 +35,15 @@ export const Card: FunctionComponent<CardProps> = ({
       <FontAwesomeIcon size="2x" icon={faPlus}></FontAwesomeIcon>
     </Center>
   ) : (
-    <Stack h="100%" gap={0} align="flex-start">
-      <Text fw="bold">{header}</Text>
-      <Text hidden={description === undefined}>{description}</Text>
+    <Stack h="100%" gap={0}>
+      <Text fw="bold" style={titleStyles}>
+        {header}
+      </Text>
+      <TextPopover text={description}>
+        <Text hidden={description === undefined} lineClamp={lineClamp}>
+          {description}
+        </Text>
+      </TextPopover>
     </Stack>
   )}
 </UnstyledButton>
@@ -2,7 +2,7 @@ import { FunctionComponent, PropsWithChildren, ReactElement } from "react";
 import { useForm } from "@mantine/form";
 import { describe, it } from "vitest";
 import { FormContext, FormValues } from "@/pages/Settings/utilities/FormValues";
-import { render, RenderOptions, screen } from "@/tests";
+import { render, screen } from "@/tests";
 import { Number, Text } from "./forms";

 const FormSupport: FunctionComponent<PropsWithChildren> = ({ children }) => {

@@ -15,10 +15,8 @@ const FormSupport: FunctionComponent<PropsWithChildren> = ({ children }) => {
   return <FormContext.Provider value={form}>{children}</FormContext.Provider>;
 };

-const formRender = (
-  ui: ReactElement,
-  options?: Omit<RenderOptions, "wrapper">,
-) => render(<FormSupport>{ui}</FormSupport>);
+const formRender = (ui: ReactElement) =>
+  render(<FormSupport>{ui}</FormSupport>);

 describe("Settings form", () => {
   describe("number component", () => {
@@ -1,4 +1,4 @@
-import { FunctionComponent, ReactNode, ReactText } from "react";
+import { FunctionComponent, ReactNode } from "react";
 import {
   Input,
   NumberInput,

@@ -49,7 +49,7 @@ export const Number: FunctionComponent<NumberProps> = (props) => {
   );
 };

-export type TextProps = BaseInput<ReactText> & TextInputProps;
+export type TextProps = BaseInput<string | number> & TextInputProps;

 export const Text: FunctionComponent<TextProps> = (props) => {
   const { value, update, rest } = useBaseInput(props);

@@ -86,11 +86,7 @@ export interface CheckProps extends BaseInput<boolean> {
   inline?: boolean;
 }

-export const Check: FunctionComponent<CheckProps> = ({
-  label,
-  inline,
-  ...props
-}) => {
+export const Check: FunctionComponent<CheckProps> = ({ label, ...props }) => {
   const { value, update, rest } = useBaseInput(props);

   return (

@@ -160,13 +156,25 @@ export const Slider: FunctionComponent<SliderProps> = (props) => {
 };

 type ChipsProp = BaseInput<string[]> &
-  Omit<ChipInputProps, "onChange" | "data">;
+  Omit<ChipInputProps, "onChange" | "data"> & {
+    sanitizeFn?: (values: string[] | null) => string[] | undefined;
+  };

 export const Chips: FunctionComponent<ChipsProp> = (props) => {
   const { value, update, rest } = useBaseInput(props);

+  const handleChange = (value: string[] | null) => {
+    const sanitizedValues = props.sanitizeFn?.(value) ?? value;
+
+    update(sanitizedValues || null);
+  };
+
   return (
-    <ChipInput {...rest} value={value ?? []} onChange={update}></ChipInput>
+    <ChipInput
+      {...rest}
+      value={value ?? []}
+      onChange={handleChange}
+    ></ChipInput>
   );
 };
@@ -19,7 +19,7 @@ const Table: FunctionComponent<Props> = ({ announcements }) => {
   () => [
     {
       header: "Since",
-      accessor: "timestamp",
+      accessorKey: "timestamp",
       cell: ({
         row: {
           original: { timestamp },

@@ -30,7 +30,7 @@ const Table: FunctionComponent<Props> = ({ announcements }) => {
     },
     {
       header: "Announcement",
-      accessor: "text",
+      accessorKey: "text",
       cell: ({
         row: {
           original: { text },

@@ -41,7 +41,7 @@ const Table: FunctionComponent<Props> = ({ announcements }) => {
     },
     {
       header: "More Info",
-      accessor: "link",
+      accessorKey: "link",
       cell: ({
         row: {
           original: { link },

@@ -56,7 +56,7 @@ const Table: FunctionComponent<Props> = ({ announcements }) => {
     },
     {
       header: "Dismiss",
-      accessor: "hash",
+      accessorKey: "hash",
       cell: ({
         row: {
           original: { dismissible, hash },
@@ -144,6 +144,8 @@ const SystemStatusView: FunctionComponent = () => {
   <Row title="Radarr Version">{status?.radarr_version}</Row>
   <Row title="Operating System">{status?.operating_system}</Row>
   <Row title="Python Version">{status?.python_version}</Row>
+  <Row title="Database Engine">{status?.database_engine}</Row>
+  <Row title="Database Version">{status?.database_migration}</Row>
   <Row title="Bazarr Directory">{status?.bazarr_directory}</Row>
   <Row title="Bazarr Config Directory">
     {status?.bazarr_config_directory}
@@ -17,7 +17,7 @@ const Table: FunctionComponent<Props> = ({ tasks }) => {
   () => [
     {
       header: "Name",
-      accessor: "name",
+      accessorKey: "name",
       cell: ({
         row: {
           original: { name },

@@ -28,7 +28,7 @@ const Table: FunctionComponent<Props> = ({ tasks }) => {
     },
     {
       header: "Interval",
-      accessor: "interval",
+      accessorKey: "interval",
       cell: ({
         row: {
           original: { interval },

@@ -39,11 +39,11 @@ const Table: FunctionComponent<Props> = ({ tasks }) => {
     },
     {
       header: "Next Execution",
-      accessor: "next_run_in",
+      accessorKey: "next_run_in",
     },
     {
       header: "Run",
-      accessor: "job_running",
+      accessorKey: "job_running",
       cell: ({
         row: {
           original: { job_id: jobId, job_running: jobRunning },
@@ -21,7 +21,7 @@ const WantedMoviesView: FunctionComponent = () => {
   () => [
     {
       header: "Name",
-      accessor: "title",
+      accessorKey: "title",
       cell: ({
         row: {
           original: { title, radarrId },

@@ -37,7 +37,7 @@ const WantedMoviesView: FunctionComponent = () => {
     },
     {
       header: "Missing",
-      accessor: "missing_subtitles",
+      accessorKey: "missing_subtitles",
       cell: ({
         row: {
           original: { radarrId, missing_subtitles: missingSubtitles },
@@ -31,6 +31,7 @@ import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
 import { Language } from "@/components/bazarr";
 import { BuildKey } from "@/utilities";
 import {
+  normalizeAudioLanguage,
   useLanguageProfileBy,
   useProfileItemsToLanguages,
 } from "@/utilities/languages";

@@ -87,7 +88,7 @@ const ItemOverview: FunctionComponent<Props> = (props) => {
   icon={faMusic}
   title="Audio Language"
 >
-  {v.name}
+  {normalizeAudioLanguage(v.name)}
 </ItemBadge>
 )) ?? [],
 [item?.audio_language],

@@ -142,12 +143,7 @@ const ItemOverview: FunctionComponent<Props> = (props) => {
   }}
 >
   <Grid.Col span={3} visibleFrom="sm">
-    <Image
-      src={item?.poster}
-      mx="auto"
-      maw="250px"
-      fallbackSrc="https://placehold.co/250x250?text=Placeholder"
-    ></Image>
+    <Image src={item?.poster} mx="auto" maw="250px"></Image>
   </Grid.Col>
   <Grid.Col span={8} maw="100%" style={{ overflow: "hidden" }}>
     <Stack align="flex-start" gap="xs" mx={6}>
frontend/src/types/api.d.ts (vendored)
@@ -40,6 +40,7 @@ declare namespace Language {
     mustContain: string[];
     mustNotContain: string[];
     originalFormat: boolean | null;
+    tag: string | undefined;
   }
 }
frontend/src/types/settings.d.ts (vendored)
@@ -62,6 +62,7 @@ declare namespace Settings {
     postprocessing_cmd?: string;
     postprocessing_threshold: number;
     postprocessing_threshold_movie: number;
+    remove_profile_tags: string[];
     single_language: boolean;
     subfolder: string;
     subfolder_custom?: string;
frontend/src/types/system.d.ts (vendored)
@@ -20,6 +20,8 @@ declare namespace System {
     bazarr_config_directory: string;
     bazarr_directory: string;
     bazarr_version: string;
+    database_engine: string;
+    database_migration: string;
     operating_system: string;
     package_version: string;
     python_version: string;
@@ -51,3 +51,7 @@ export function useLanguageFromCode3(code3: string) {
     [data, code3],
   );
 }
+
+export const normalizeAudioLanguage = (name: string) => {
+  return name === "Chinese Simplified" ? "Chinese" : name;
+};
@@ -36,6 +36,9 @@ export default defineConfig(async ({ mode, command }) => {
       enableBuild: false,
     }),
     VitePWA({
+      workbox: {
+        globIgnores: ["index.html"],
+      },
       registerType: "autoUpdate",
       includeAssets: [
         `${imagesFolder}/favicon.ico`,
argparse-1.4.0.dist-info/LICENSE.txt (deleted)
@@ -1,20 +0,0 @@
-argparse is (c) 2006-2009 Steven J. Bethard <steven.bethard@gmail.com>.
-
-The argparse module was contributed to Python as of Python 2.7 and thus
-was licensed under the Python license. Same license applies to all files in
-the argparse package project.
-
-For details about the Python License, please see doc/Python-License.txt.
-
-History
--------
-
-Before (and including) argparse 1.1, the argparse package was licensed under
-Apache License v2.0.
-
-After argparse 1.1, all project files from the argparse project were deleted
-due to license compatibility issues between Apache License 2.0 and GNU GPL v2.
-
-The project repository then had a clean start with some files taken from
-Python 2.7.1, so definitely all files are under Python License now.
argparse-1.4.0.dist-info/METADATA (deleted)
@@ -1,84 +0,0 @@
-Metadata-Version: 2.1
-Name: argparse
-Version: 1.4.0
-Summary: Python command-line parsing library
-Home-page: https://github.com/ThomasWaldmann/argparse/
-Author: Thomas Waldmann
-Author-email: tw@waldmann-edv.de
-License: Python Software Foundation License
-Keywords: argparse command line parser parsing
-Platform: any
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Console
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Python Software Foundation License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 2.3
-Classifier: Programming Language :: Python :: 2.4
-Classifier: Programming Language :: Python :: 2.5
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3.0
-Classifier: Programming Language :: Python :: 3.1
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Topic :: Software Development
-License-File: LICENSE.txt
-
-The argparse module makes it easy to write user friendly command line
-interfaces.
-
-The program defines what arguments it requires, and argparse will figure out
-how to parse those out of sys.argv. The argparse module also automatically
-generates help and usage messages and issues errors when users give the
-program invalid arguments.
-
-As of Python >= 2.7 and >= 3.2, the argparse module is maintained within the
-Python standard library. For users who still need to support Python < 2.7 or
-< 3.2, it is also provided as a separate package, which tries to stay
-compatible with the module in the standard library, but also supports older
-Python versions.
-
-Also, we can fix bugs here for users who are stuck on some non-current python
-version, like e.g. 3.2.3 (which has bugs that were fixed in a later 3.2.x
-release).
-
-argparse is licensed under the Python license, for details see LICENSE.txt.
-
-
-Compatibility
--------------
-
-argparse should work on Python >= 2.3, it was tested on:
-
-* 2.3, 2.4, 2.5, 2.6 and 2.7
-* 3.1, 3.2, 3.3, 3.4
-
-
-Installation
-------------
-
-Try one of these:
-
-    python setup.py install
-
-    easy_install argparse
-
-    pip install argparse
-
-putting argparse.py in some directory listed in sys.path should also work
-
-
-Bugs
-----
-
-If you find a bug in argparse (pypi), please try to reproduce it with latest
-python 2.7 and 3.4 (and use argparse from stdlib).
-
-If it happens there also, please file a bug in the python.org issue tracker.
-If it does not happen there, file a bug in the argparse package issue tracker.
argparse-1.4.0.dist-info/RECORD (deleted)
@@ -1,8 +0,0 @@
-argparse-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-argparse-1.4.0.dist-info/LICENSE.txt,sha256=bVBNRcTRCfkl7wWJYLbRzicSu2tXk-kmv8FRcWrHQEg,741
-argparse-1.4.0.dist-info/METADATA,sha256=yZGPMA4uvkui2P7qaaiI89zqwjDbyFcehJG4j5Pk8Yk,2816
-argparse-1.4.0.dist-info/RECORD,,
-argparse-1.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-argparse-1.4.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110
-argparse-1.4.0.dist-info/top_level.txt,sha256=TgiWrQsF0mKWwqS2KHLORD0ZtqYHPRGdCAAzKwtVvJ4,9
-argparse.py,sha256=0ksYqisQDQvhoiuo19JERCSpg51tc641GFJIx7pTA0g,89214
argparse-1.4.0.dist-info/WHEEL (deleted)
@@ -1,6 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.3)
-Root-Is-Purelib: true
-Tag: py2-none-any
-Tag: py3-none-any
argparse-1.4.0.dist-info/top_level.txt (deleted)
@@ -1 +0,0 @@
-argparse

libs/argparse.py (2392 lines): diff suppressed because the file is too large.
fese-0.2.9.dist-info/INSTALLER (deleted)
@@ -1 +0,0 @@
-pip
fese-0.2.9.dist-info/RECORD (deleted)
@@ -1,13 +0,0 @@
-fese-0.2.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-fese-0.2.9.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-fese-0.2.9.dist-info/METADATA,sha256=nJz9q6FwX7fqmsO3jgM0ZgV0gsCeILWoxVRUqCbJkFI,655
-fese-0.2.9.dist-info/RECORD,,
-fese-0.2.9.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fese-0.2.9.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-fese-0.2.9.dist-info/top_level.txt,sha256=ra2BuARVEUZpk76YpHnjVoqjR2FxvzhCdmW2OyBWGzE,5
-fese/__init__.py,sha256=_YUpx7sq26ioEp5LZOEKa-0MrRHQUuRuDCs0EQ6Amv4,150
-fese/container.py,sha256=sLuxP0vlba4iGVohGfYtd-QcjQ-YxMU6lqMOM-Wtqlc,10340
-fese/disposition.py,sha256=hv4YmXpsvKmUdpeWvSrZkhKgtZLZ8t56dmwMddsqxus,2156
-fese/exceptions.py,sha256=VZaubpq8SPpkUGp28Ryebsf9YzqbKK62nni6YZgDPYI,372
-fese/stream.py,sha256=Hgf6-amksHpuhSoY6SL6C3q4YtGCuRHl4fusBWE9nBE,4866
-fese/tags.py,sha256=qKkcjJmCKgnXIbZ9x-nngCNYAfv5cbJZ4A6EP0ckZME,5454
Some files were not shown because too many files have changed in this diff.