update subzero/subliminal_patch/fcache to current versions

parent fc06af6c41
commit 9ac3b0009b

18 changed files with 128 additions and 65 deletions
@@ -5,6 +5,7 @@ import pickle
 import shutil
 import tempfile
 import traceback
+import hashlib

 import appdirs
@@ -89,7 +90,7 @@ class FileCache(MutableMapping):
     """

     def __init__(self, appname, flag='c', mode=0o666, keyencoding='utf-8',
-                 serialize=True, app_cache_dir=None):
+                 serialize=True, app_cache_dir=None, key_file_ext=".txt"):
         """Initialize a :class:`FileCache` object."""
         if not isinstance(flag, str):
             raise TypeError("flag must be str not '{}'".format(type(flag)))
@@ -130,6 +131,7 @@ class FileCache(MutableMapping):
         self._mode = mode
         self._keyencoding = keyencoding
         self._serialize = serialize
+        self.key_file_ext = key_file_ext

     def _parse_appname(self, appname):
         """Splits an appname into the appname and subcache components."""
@@ -188,6 +190,11 @@ class FileCache(MutableMapping):
             except:
                 logger.error("Couldn't write content from %r to cache file: %r: %s", ekey, filename,
                              traceback.format_exc())
+            try:
+                self.__write_to_file(filename + self.key_file_ext, ekey)
+            except:
+                logger.error("Couldn't write content from %r to cache file: %r: %s", ekey, filename,
+                             traceback.format_exc())
         self._buffer.clear()
         self._sync = False

@@ -196,8 +203,7 @@ class FileCache(MutableMapping):
             raise ValueError("invalid operation on closed cache")

     def _encode_key(self, key):
-        """Encode key using *hex_codec* for constructing a cache filename.
-
+        """
         Keys are implicitly converted to :class:`bytes` if passed as
         :class:`str`.

@@ -206,16 +212,15 @@ class FileCache(MutableMapping):
             key = key.encode(self._keyencoding)
         elif not isinstance(key, bytes):
             raise TypeError("key must be bytes or str")
-        return codecs.encode(key, 'hex_codec').decode(self._keyencoding)
+        return key.decode(self._keyencoding)

     def _decode_key(self, key):
-        """Decode key using hex_codec to retrieve the original key.
-
+        """
         Keys are returned as :class:`str` if serialization is enabled.
         Keys are returned as :class:`bytes` if serialization is disabled.

         """
-        bkey = codecs.decode(key.encode(self._keyencoding), 'hex_codec')
+        bkey = key.encode(self._keyencoding)
         return bkey.decode(self._keyencoding) if self._serialize else bkey

     def _dumps(self, value):
@@ -226,18 +231,24 @@ class FileCache(MutableMapping):

     def _key_to_filename(self, key):
         """Convert an encoded key to an absolute cache filename."""
-        return os.path.join(self.cache_dir, key)
+        if isinstance(key, unicode):
+            key = key.encode(self._keyencoding)
+        return os.path.join(self.cache_dir, hashlib.md5(key).hexdigest())

     def _filename_to_key(self, absfilename):
         """Convert an absolute cache filename to a key name."""
-        return os.path.split(absfilename)[1]
+        hkey_hdr_fn = absfilename + self.key_file_ext
+        if os.path.isfile(hkey_hdr_fn):
+            with open(hkey_hdr_fn, 'rb') as f:
+                key = f.read()
+                return key.decode(self._keyencoding) if self._serialize else key

     def _all_filenames(self, scandir_generic=True):
         """Return a list of absolute cache filenames"""
         _scandir = _scandir_generic if scandir_generic else scandir
         try:
             for entry in _scandir(self.cache_dir):
-                if entry.is_file(follow_symlinks=False):
+                if entry.is_file(follow_symlinks=False) and not entry.name.endswith(self.key_file_ext):
                     yield os.path.join(self.cache_dir, entry.name)
         except (FileNotFoundError, OSError):
             raise StopIteration
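Taken together with the hashlib import above, these hunks change the on-disk layout: a cache filename is now the MD5 digest of the key rather than a hex-encoding of it, and the original key is preserved in a sidecar file named after the value file plus key_file_ext. A minimal sketch of the resulting layout (hypothetical cache_dir and key, not code from the diff):

    import hashlib
    import os

    cache_dir = "/tmp/demo-cache"                                     # hypothetical
    key = u"provider|series|s01e02".encode("utf-8")

    value_fn = os.path.join(cache_dir, hashlib.md5(key).hexdigest())  # pickled value
    key_fn = value_fn + ".txt"                                        # sidecar holding the raw key

Hashing keeps filenames short and filesystem-safe for arbitrarily long keys; the sidecar is what lets _filename_to_key recover the original key, and _all_filenames skips sidecars so iteration only sees value files. MD5 is acceptable here because the digest is only a filename, not a security boundary.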
@@ -250,14 +261,17 @@ class FileCache(MutableMapping):
         else:
             return set(file_keys + list(self._buffer))

-    def _write_to_file(self, filename, bytesvalue):
+    def __write_to_file(self, filename, value):
         """Write bytesvalue to filename."""
         fh, tmp = tempfile.mkstemp()
         with os.fdopen(fh, self._flag) as f:
-            f.write(self._dumps(bytesvalue))
+            f.write(value)
         rename(tmp, filename)
         os.chmod(filename, self._mode)

+    def _write_to_file(self, filename, bytesvalue):
+        self.__write_to_file(filename, self._dumps(bytesvalue))
+
     def _read_from_file(self, filename):
         """Read data from filename."""
         try:
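__write_to_file keeps the existing write-to-temp-then-rename pattern, so a concurrent reader never observes a partially written cache file, while the public _write_to_file is reduced to serialize-and-delegate. The pattern in isolation (a sketch, not the module's code; os.replace is the Python 3 equivalent of the module's own rename helper, and the rename is only atomic when the temp file lands on the same filesystem):

    import os
    import tempfile

    def atomic_write(filename, data, mode=0o666):
        fh, tmp = tempfile.mkstemp()
        with os.fdopen(fh, "wb") as f:
            f.write(data)          # write the full payload to the temp file first
        os.replace(tmp, filename)  # then move it into place in one step
        os.chmod(filename, mode)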
@@ -274,6 +288,7 @@ class FileCache(MutableMapping):
         else:
             filename = self._key_to_filename(ekey)
             self._write_to_file(filename, value)
+            self.__write_to_file(filename + self.key_file_ext, ekey)

     def __getitem__(self, key):
         ekey = self._encode_key(key)
@@ -283,8 +298,9 @@ class FileCache(MutableMapping):
             except KeyError:
                 pass
         filename = self._key_to_filename(ekey)
-        if filename not in self._all_filenames():
+        if not os.path.isfile(filename):
             raise KeyError(key)
+
         return self._read_from_file(filename)

     def __delitem__(self, key):
@@ -301,6 +317,11 @@ class FileCache(MutableMapping):
         except (IOError, OSError):
             pass

+        try:
+            os.remove(filename + self.key_file_ext)
+        except (IOError, OSError):
+            pass
+
     def __iter__(self):
         for key in self._all_keys():
             yield self._decode_key(key)
@@ -310,4 +331,10 @@ class FileCache(MutableMapping):

     def __contains__(self, key):
         ekey = self._encode_key(key)
-        return ekey in self._all_keys()
+        if not self._sync:
+            try:
+                return ekey in self._buffer
+            except KeyError:
+                pass
+        filename = self._key_to_filename(ekey)
+        return os.path.isfile(filename)
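With __getitem__ and __contains__ both resolving a key directly to its hashed filename, lookups become a single os.path.isfile stat instead of a scan of every cache entry. Illustrative round trip, assuming the patched fcache API:

    from fcache.cache import FileCache

    cache = FileCache("bazarr", flag="cs")     # 'c' create if needed, 's' sync writes
    cache[u"series:episode"] = {"subtitles": []}
    assert u"series:episode" in cache          # one stat call, no directory walk
    cache.close()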
@@ -514,8 +514,7 @@ def scan_video(path, dont_use_actual_file=False, hints=None, providers=None, ski

     # guess
     hints["single_value"] = True
-    if video_type == "movie":
-        hints["expected_title"] = [hints["title"]]
+    hints["expected_title"] = [hints["title"]]

     guessed_result = guessit(guess_from, options=hints)
     logger.debug('GuessIt found: %s', json.dumps(guessed_result, cls=GuessitEncoder, indent=4, ensure_ascii=False))
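expected_title is now fed to GuessIt for every video type, not just movies, biasing parsing toward the already-known title for series as well. Roughly, using guessit's documented options (filename and title illustrative):

    from guessit import guessit

    hints = {"single_value": True, "expected_title": ["Show Name"]}
    guessed = guessit("Show.Name.S01E02.720p.HDTV.mkv", options=hints)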
@@ -16,9 +16,7 @@ from exceptions import APIThrottled
 from subzero.lib.io import get_viable_encoding

 logger = logging.getLogger(__name__)
-pem_file = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(unicode(__file__, get_viable_encoding()))),
-                                         "..", "..", certifi.where()))
-
+pem_file = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(unicode(__file__, get_viable_encoding()))), "..", certifi.where()))
 try:
     default_ssl_context = ssl.create_default_context(cafile=pem_file)
 except AttributeError:
@@ -33,10 +31,17 @@ custom_resolver.nameservers = ['8.8.8.8', '1.1.1.1']


 class CertifiSession(Session):
+    timeout = 10
+
     def __init__(self):
         super(CertifiSession, self).__init__()
         self.verify = pem_file

+    def request(self, *args, **kwargs):
+        if kwargs.get('timeout') is None:
+            kwargs['timeout'] = self.timeout
+        return super(CertifiSession, self).request(*args, **kwargs)
+

 class RetryingSession(CertifiSession):
     proxied_functions = ("get", "post")
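CertifiSession now applies a 10-second default timeout to every request unless the caller supplies one, closing the common requests footgun of waiting forever by default. The same idea as a standalone sketch:

    import requests

    class TimeoutSession(requests.Session):
        timeout = 10

        def request(self, *args, **kwargs):
            # fill in the default only; an explicit timeout always wins
            if kwargs.get("timeout") is None:
                kwargs["timeout"] = self.timeout
            return super(TimeoutSession, self).request(*args, **kwargs)

    s = TimeoutSession()
    s.get("https://example.com")              # implicit 10 s timeout
    s.get("https://example.com", timeout=30)  # explicit override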
@@ -33,8 +33,9 @@ def fix_inconsistent_naming(title):
     :rtype: str

     """
-    return _fix_inconsistent_naming(title, {"DC's Legends of Tomorrow": "Legends of Tomorrow",
-                                            "Marvel's Jessica Jones": "Jessica Jones"})
+    return _fix_inconsistent_naming(title, {"Stargate Origins": "Stargate: Origins",
+                                            "Marvel's Agents of S.H.I.E.L.D.": "Marvels+Agents+of+S.H.I.E.L.D",
+                                            "Mayans M.C.": "Mayans MC"}, True )


 logger = logging.getLogger(__name__)
@@ -89,7 +90,7 @@ class HosszupuskaSubtitle(Subtitle):
     def get_matches(self, video):
         matches = set()
         # series
-        if video.series and sanitize(self.series) == sanitize(video.series):
+        if video.series and ( sanitize(self.series) == sanitize(fix_inconsistent_naming(video.series)) or sanitize(self.series) == sanitize(video.series)):
             matches.add('series')
         # season
         if video.season and self.season == video.season:
@@ -150,7 +151,7 @@ class HosszupuskaProvider(Provider, ProviderSubtitleArchiveMixin):
         seasona = "%02d" % season
         episodea = "%02d" % episode
         series = fix_inconsistent_naming(series)
-        seriesa = series.replace(' ', '+').replace('\'', '')
+        seriesa = series.replace(' ', '+')

         # get the episode page
         logger.info('Getting the page for episode %s', episode)
@@ -7,7 +7,8 @@ from subliminal.exceptions import ConfigurationError
 from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, \
     LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize, region, type_map, \
     raise_for_status, json, SHOW_EXPIRATION_TIME, title_re, season_re, datetime, pytz, NO_VALUE, releases_key, \
-    SUBTITLE_EXTENSIONS
+    SUBTITLE_EXTENSIONS, language_converters
+from subzero.language import Language

 logger = logging.getLogger(__name__)
@@ -63,6 +64,7 @@ class LegendasTVSubtitle(_LegendasTVSubtitle):


 class LegendasTVProvider(_LegendasTVProvider):
+    languages = {Language(*l) for l in language_converters['legendastv'].to_legendastv.keys()}
     subtitle_class = LegendasTVSubtitle

     def __init__(self, username=None, password=None):
@@ -3,6 +3,7 @@ import logging

 from subliminal.providers.napiprojekt import NapiProjektProvider as _NapiProjektProvider, \
     NapiProjektSubtitle as _NapiProjektSubtitle, get_subhash
+from subzero.language import Language

 logger = logging.getLogger(__name__)
@@ -18,6 +19,7 @@ class NapiProjektSubtitle(_NapiProjektSubtitle):


 class NapiProjektProvider(_NapiProjektProvider):
+    languages = {Language.fromalpha2(l) for l in ['pl']}
     subtitle_class = NapiProjektSubtitle

     def query(self, language, hash):
@@ -163,12 +163,13 @@ class OpenSubtitlesProvider(ProviderRetryMixin, _OpenSubtitlesProvider):
         token = region.get("os_token", expiration_time=3600)
         if token is not NO_VALUE:
             try:
-                logger.debug('Trying previous token')
+                logger.debug('Trying previous token: %r', token[:10]+"X"*(len(token)-10))
                 checked(lambda: self.server.NoOperation(token))
                 self.token = token
-                logger.debug("Using previous login token: %s", self.token)
+                logger.debug("Using previous login token: %r", token[:10]+"X"*(len(token)-10))
                 return
             except:
+                logger.debug('Token not valid.')
                 pass

         try:
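The session token now appears in the log only in masked form: the first ten characters survive, the rest become X. In isolation:

    token = "abcdef0123456789"
    masked = token[:10] + "X" * (len(token) - 10)   # 'abcdef0123XXXXXX'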
@@ -299,6 +300,9 @@ class OpenSubtitlesProvider(ProviderRetryMixin, _OpenSubtitlesProvider):
             elif also_foreign and foreign_parts_only:
                 language = Language.rebuild(language, forced=True)

+            if language not in languages:
+                continue
+
             query_parameters = _subtitle_item.get("QueryParameters")

             subtitle = self.subtitle_class(language, hearing_impaired, page_link, subtitle_id, matched_by,
@@ -175,7 +175,7 @@ class PodnapisiProvider(_PodnapisiProvider, ProviderSubtitleArchiveMixin):
                 if pid in pids:
                     continue

-                language = Language.fromietf(subtitle_xml.find('language').text)
+                _language = Language.fromietf(subtitle_xml.find('language').text)
                 hearing_impaired = 'n' in (subtitle_xml.find('flags').text or '')
                 foreign = 'f' in (subtitle_xml.find('flags').text or '')
                 if only_foreign and not foreign:
@@ -185,7 +185,10 @@ class PodnapisiProvider(_PodnapisiProvider, ProviderSubtitleArchiveMixin):
                     continue

                 elif also_foreign and foreign:
-                    language = Language.rebuild(language, forced=True)
+                    _language = Language.rebuild(_language, forced=True)
+
+                if language != _language:
+                    continue

                 page_link = subtitle_xml.find('url').text
                 releases = []
@@ -198,12 +201,12 @@ class PodnapisiProvider(_PodnapisiProvider, ProviderSubtitleArchiveMixin):
                     r_year = int(subtitle_xml.find('year').text)

                 if is_episode:
-                    subtitle = self.subtitle_class(language, hearing_impaired, page_link, pid, releases, title,
+                    subtitle = self.subtitle_class(_language, hearing_impaired, page_link, pid, releases, title,
                                                    season=r_season, episode=r_episode, year=r_year,
                                                    asked_for_release_group=video.release_group,
                                                    asked_for_episode=episode)
                 else:
-                    subtitle = self.subtitle_class(language, hearing_impaired, page_link, pid, releases, title,
+                    subtitle = self.subtitle_class(_language, hearing_impaired, page_link, pid, releases, title,
                                                    year=r_year, asked_for_release_group=video.release_group)

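The renamed _language keeps the language parsed from the result separate from the language the caller asked for; after the optional forced=True rebuild, any result whose effective language no longer equals the requested one is skipped. Both this provider and opensubtitles now apply the same guard. The comparison idea, assuming subzero's Language.rebuild as used in the diff:

    from subzero.language import Language

    requested = Language.fromietf("hu")
    parsed = Language.fromietf("hu")
    parsed = Language.rebuild(parsed, forced=True)  # foreign-parts-only result
    if requested != parsed:
        pass  # skip: the caller did not ask for the forced variant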
@@ -2,6 +2,7 @@

 from subliminal.providers.subscenter import SubsCenterProvider as _SubsCenterProvider, \
     SubsCenterSubtitle as _SubsCenterSubtitle
+from subzero.language import Language


 class SubsCenterSubtitle(_SubsCenterSubtitle):
@@ -21,6 +22,7 @@ class SubsCenterSubtitle(_SubsCenterSubtitle):


 class SubsCenterProvider(_SubsCenterProvider):
+    languages = {Language.fromalpha2(l) for l in ['he']}
     subtitle_class = SubsCenterSubtitle
     hearing_impaired_verifiable = True
     server_url = 'http://www.subscenter.info/he/'
@@ -21,6 +21,10 @@ class TVsubtitlesSubtitle(_TVsubtitlesSubtitle):


 class TVsubtitlesProvider(_TVsubtitlesProvider):
+    languages = {Language('por', 'BR')} | {Language(l) for l in [
+        'ara', 'bul', 'ces', 'dan', 'deu', 'ell', 'eng', 'fin', 'fra', 'hun', 'ita', 'jpn', 'kor', 'nld', 'pol', 'por',
+        'ron', 'rus', 'spa', 'swe', 'tur', 'ukr', 'zho'
+    ]}
     subtitle_class = TVsubtitlesSubtitle

     @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME)
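legendastv, napiprojekt, subscenter and tvsubtitles all gain an explicit languages set on the patched provider class, pinning the supported languages there rather than inheriting them. subzero's Language is babelfish-compatible, so both constructor forms appear; in brief:

    from subzero.language import Language

    Language.fromalpha2('pl')   # ISO 639-1 two-letter code
    Language('por', 'BR')       # ISO 639-3 code plus country variant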
@@ -63,12 +63,12 @@ class DroneAPIClient(object):
             out[key] = quote(value)
         return out

-    def get(self, endpoint, **params):
+    def get(self, endpoint, requests_kwargs=None, **params):
         url = urljoin(self.api_url, endpoint)
         params = self.build_params(params)

         # perform the request
-        r = self.session.get(url, params=params)
+        r = self.session.get(url, params=params, **(requests_kwargs or {}))
         r.raise_for_status()

         # get the response as json
@@ -79,8 +79,8 @@ class DroneAPIClient(object):
             return j
         return []

-    def status(self):
-        return self.get("system/status")
+    def status(self, **kwargs):
+        return self.get("system/status", requests_kwargs=kwargs)

     def update_video(self, video, scene_name):
         """
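requests_kwargs gives callers a channel to pass transport options (typically a timeout) through to the underlying requests session without widening every method's signature. Hypothetical usage, with an illustrative constructor argument:

    client = DroneAPIClient("http://localhost:8989/api/")  # hypothetical args
    client.status(timeout=5)  # forwarded as session.get(..., timeout=5)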
@@ -35,11 +35,12 @@ def sanitize(string, ignore_characters=None, default_characters={'-', ':', '(',
     return string.strip().lower()


-def fix_inconsistent_naming(title, inconsistent_titles_dict=None):
+def fix_inconsistent_naming(title, inconsistent_titles_dict=None, no_sanitize=False):
     """Fix titles with inconsistent naming using dictionary and sanitize them.

     :param str title: original title.
     :param dict inconsistent_titles_dict: dictionary of titles with inconsistent naming.
+    :param bool no_sanitize: indication to not sanitize title.
     :return: new title.
     :rtype: str
@@ -54,5 +55,9 @@ def fix_inconsistent_naming(title, inconsistent_titles_dict=None):
     pattern = re.compile('|'.join(re.escape(key) for key in inconsistent_titles_dict.keys()))
     title = pattern.sub(lambda x: inconsistent_titles_dict[x.group()], title)

+    if no_sanitize:
+        return title
+    else:
+        return sanitize(title)
     # return fixed and sanitized title
-    return sanitize(title)
+    return title
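The no_sanitize escape hatch exists for callers like the hosszupuska provider above, which needs the mapped title verbatim for building search URLs rather than the lower-cased, punctuation-stripped form. Illustrative behavior with the patched signature (outputs approximate):

    mapping = {"Mayans M.C.": "Mayans MC"}
    fix_inconsistent_naming("Mayans M.C.", mapping)                    # sanitized: "mayans mc"
    fix_inconsistent_naming("Mayans M.C.", mapping, no_sanitize=True)  # verbatim:  "Mayans MC"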
@@ -12,6 +12,7 @@ class Video(Video_):
     hints = None
     season_fully_aired = None
     audio_languages = None
+    external_subtitle_languages = None

     def __init__(self, name, format=None, release_group=None, resolution=None, video_codec=None, audio_codec=None,
                  imdb_id=None, hashes=None, size=None, subtitle_languages=None, audio_languages=None):
@@ -22,3 +23,4 @@ class Video(Video_):
         self.plexapi_metadata = {}
         self.hints = {}
         self.audio_languages = audio_languages or set()
+        self.external_subtitle_languages = set()
@@ -1,6 +1,7 @@
 # coding=utf-8
-from babelfish.exceptions import LanguageError
+import types

+from babelfish.exceptions import LanguageError
 from babelfish import Language as Language_, basestr

@@ -34,7 +35,12 @@ def wrap_forced(f):
     cls = args[0]
     args = args[1:]
     s = args.pop(0)
-    base, forced = s.split(":") if ":" in s else (s, False)
+    forced = None
+    if isinstance(s, types.StringTypes):
+        base, forced = s.split(":") if ":" in s else (s, False)
+    else:
+        base = s
+
     instance = f(cls, base, *args, **kwargs)
     if isinstance(instance, Language):
         instance.forced = forced == "forced"
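The isinstance guard matters because the wrapped constructors are sometimes handed an existing Language object instead of a string, and only strings can carry a ":forced" suffix. The parsing rule in isolation (a Python 2 idiom, matching the module's types.StringTypes check):

    import types

    def split_forced(s):
        # "hu:forced" -> ("hu", "forced"); "hu" -> ("hu", False); non-strings pass through
        if isinstance(s, types.StringTypes):
            return s.split(":") if ":" in s else (s, False)
        return s, None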
File diff suppressed because one or more lines are too long
@@ -117,10 +117,6 @@ SZ_FIX_DATA = {
 }

 SZ_FIX_DATA_GLOBAL = {
-    "PartialWordsAlways": {
-        u"¶¶": u"♫",
-        u"¶": u"♪"
-    }
 }

 if __name__ == "__main__":
@@ -33,11 +33,10 @@ class CommonFixes(SubtitleTextModification):
         # line = : text
         NReProcessor(re.compile(r'(?u)(^\W*:\s*(?=\w+))'), "", name="CM_empty_colon_start"),

-        # multi space
-        NReProcessor(re.compile(r'(?u)(\s{2,})'), " ", name="CM_multi_space"),
-
         # fix music symbols
-        NReProcessor(re.compile(ur'(?u)(?:^[-\s]*[*#¶]+(?![^\s\-*#¶]))|(?:[*#¶]+\s*$)'), u"♪", name="CM_music_symbols"),
+        NReProcessor(re.compile(ur'(?u)(^[-\s]*[*#¶]+\s*)|(\s*[*#¶]+\s*$)'),
+                     lambda x: u"♪ " if x.group(1) else u" ♪",
+                     name="CM_music_symbols"),

         # '' = "
         NReProcessor(re.compile(ur'(?u)([\'’ʼ❜‘‛][\'’ʼ❜‘‛]+)'), u'"', name="CM_double_apostrophe"),
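The rewritten rule normalizes leading and trailing runs of *, # or ¶ into a note glyph while keeping the spacing on the correct side: a leading run becomes "♪ ", a trailing one " ♪". Demonstrated in isolation (Python 2, as in the mod):

    # -*- coding: utf-8 -*-
    import re

    pattern = re.compile(ur'(?u)(^[-\s]*[*#¶]+\s*)|(\s*[*#¶]+\s*$)')
    repl = lambda m: u"♪ " if m.group(1) else u" ♪"

    pattern.sub(repl, u"# dancing in the moonlight #")  # u'♪ dancing in the moonlight ♪'

This also explains the previous hunk: with the positional rule here, the global ¶ -> ♪ entry in SZ_FIX_DATA_GLOBAL became redundant and was emptied out.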
@@ -17,17 +17,23 @@ def has_external_subtitle(part_id, stored_subs, language):


 def set_existing_languages(video, video_info, external_subtitles=False, embedded_subtitles=False, known_embedded=None,
-                           stored_subs=None, languages=None, only_one=False):
+                           stored_subs=None, languages=None, only_one=False, known_metadata_subs=None):
     logger.debug(u"Determining existing subtitles for %s", video.name)

+    external_langs_found = set()
     # scan for external subtitles
-    external_langs_found = set(search_external_subtitles(video.name, languages=languages,
-                                                         only_one=only_one).values())
+    if known_metadata_subs:
+        # existing metadata subtitles
+        external_langs_found = known_metadata_subs
+
+    external_langs_found.update(set(search_external_subtitles(video.name, languages=languages,
+                                                              only_one=only_one).values()))

     # found external subtitles should be considered?
     if external_subtitles:
         # |= is update, thanks plex
         video.subtitle_languages.update(external_langs_found)
+        video.external_subtitle_languages.update(external_langs_found)

     else:
         # did we already download subtitles for this?
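known_metadata_subs lets languages already discovered as embedded metadata subtitles seed the set before the filesystem scan, and the new video.external_subtitle_languages keeps externally found languages distinguishable from the merged total. Hypothetical call shape:

    set_existing_languages(video, video_info,
                           external_subtitles=True,
                           languages={Language.fromietf("en")},
                           known_metadata_subs={Language.fromietf("en")})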