mirror of https://github.com/morpheus65535/bazarr.git
WIP
This commit is contained in:
parent f389c38a9c
commit e7cb2a71e2

18 changed files with 149 additions and 274 deletions
@@ -29,12 +29,12 @@ def track_event(category=None, action=None, label=None):
 
     try:
         if settings.analytics.visitor:
-            visitor = pickle.loads(base64.b64decode(settings.analytics.visitor))
+            visitor = pickle.loads(base64.b64decode(settings.analytics.visitor), encoding='utf-8')
             if visitor.unique_id > int(0x7fffffff):
                 visitor.unique_id = random.randint(0, 0x7fffffff)
     except:
         visitor = Visitor()
-        visitor.unique_id = long(random.randint(0, 0x7fffffff))
+        visitor.unique_id = random.randint(0, 0x7fffffff)
 
     session = Session()
     event = Event(category=category, action=action, label=label, value=1)

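Note on the pickle change above: `encoding='utf-8'` is the Python 3 hook for reading pickles that were written by Python 2, where the visitor state may contain 8-bit `str` data. A minimal sketch of the round trip (the dict stands in for pyga's visitor object and is hypothetical):

import base64
import pickle

# protocol-2 pickle, as Python 2 would have produced
blob = base64.b64encode(pickle.dumps({"unique_id": 123}, protocol=2))
# under Python 3, 'encoding' controls how old 8-bit str data is decoded;
# it is harmless for pickles that were written by Python 3 itself
visitor = pickle.loads(base64.b64decode(blob), encoding='utf-8')
print(visitor["unique_id"])  # 123
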
@@ -430,7 +430,7 @@ def manual_download_subtitle(path, language, hi, forced, subtitle, provider, pro
                                           directory=fld,
                                           chmod=chmod,
                                           # formats=("srt", "vtt")
-                                          path_decoder=force_unicode)
+                                          path_decoder=None)
 
     except Exception as e:
         logging.exception('BAZARR Error saving subtitles file to disk for this file:' + path)

@@ -291,9 +291,9 @@ def list_missing_subtitles(no=None):
 
 
 def list_missing_subtitles_movies(no=None):
-    movies_subtitles_clause = {TableMovies.radarr_id.is_null(False)}
+    movies_subtitles_clause = "TableMovies.radarr_id.is_null(False)"
     if no is not None:
-        movies_subtitles_clause = {TableMovies.radarr_id ** no}
+        movies_subtitles_clause = "TableMovies.radarr_id ** no"
 
     movies_subtitles = TableMovies.select(
         TableMovies.radarr_id,

@@ -13,7 +13,7 @@ See <http://github.com/ActiveState/appdirs> for details and usage.
 # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
 # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
 
-__version_info__ = (1, 4, 0)
+__version_info__ = (1, 4, 3)
 __version__ = '.'.join(map(str, __version_info__))
 
 

@@ -117,7 +117,7 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
             returned, or '/usr/local/share/<AppName>',
             if XDG_DATA_DIRS is not set
 
-    Typical user data directories are:
+    Typical site data directories are:
         Mac OS X:   /Library/Application Support/<AppName>
         Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
         Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>

@@ -184,13 +184,13 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
             <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
             for a discussion of issues.
 
-    Typical user data directories are:
+    Typical user config directories are:
         Mac OS X:   same as user_data_dir
         Unix:       ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
         Win *:      same as user_data_dir
 
     For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
-    That means, by deafult "~/.config/<AppName>".
+    That means, by default "~/.config/<AppName>".
     """
     if system in ["win32", "darwin"]:
         path = user_data_dir(appname, appauthor, None, roaming)

@@ -222,7 +222,7 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False)
             returned. By default, the first item from XDG_CONFIG_DIRS is
             returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
 
-    Typical user data directories are:
+    Typical site config directories are:
         Mac OS X:   same as site_data_dir
         Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                     $XDG_CONFIG_DIRS

@@ -311,6 +311,48 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
     return path
 
 
+def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
+    r"""Return full path to the user-specific state dir for this application.
+
+        "appname" is the name of application.
+            If None, just the system directory is returned.
+        "appauthor" (only used on Windows) is the name of the
+            appauthor or distributing body for this application. Typically
+            it is the owning company name. This falls back to appname. You may
+            pass False to disable it.
+        "version" is an optional version path element to append to the
+            path. You might want to use this if you want multiple versions
+            of your app to be able to run independently. If used, this
+            would typically be "<major>.<minor>".
+            Only applied when appname is present.
+        "roaming" (boolean, default False) can be set True to use the Windows
+            roaming appdata directory. That means that for users on a Windows
+            network setup for roaming profiles, this user data will be
+            sync'd on login. See
+            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+            for a discussion of issues.
+
+    Typical user state directories are:
+        Mac OS X:   same as user_data_dir
+        Unix:       ~/.local/state/<AppName>    # or in $XDG_STATE_HOME, if defined
+        Win *:      same as user_data_dir
+
+    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
+    to extend the XDG spec and support $XDG_STATE_HOME.
+
+    That means, by default "~/.local/state/<AppName>".
+    """
+    if system in ["win32", "darwin"]:
+        path = user_data_dir(appname, appauthor, None, roaming)
+    else:
+        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
+        if appname:
+            path = os.path.join(path, appname)
+    if appname and version:
+        path = os.path.join(path, version)
+    return path
+
+
 def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
     r"""Return full path to the user-specific log dir for this application.
 

@@ -329,7 +371,7 @@ def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
             "Logs" to the base app data dir for Windows, and "log" to the
             base cache dir for Unix. See discussion below.
 
-    Typical user cache directories are:
+    Typical user log directories are:
         Mac OS X:   ~/Library/Logs/<AppName>
         Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
         Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs

@@ -364,8 +406,8 @@ def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
 
 class AppDirs(object):
     """Convenience wrapper for getting application dirs."""
-    def __init__(self, appname, appauthor=None, version=None, roaming=False,
-                 multipath=False):
+    def __init__(self, appname=None, appauthor=None, version=None,
+                 roaming=False, multipath=False):
         self.appname = appname
         self.appauthor = appauthor
         self.version = version

@@ -397,6 +439,11 @@ class AppDirs(object):
         return user_cache_dir(self.appname, self.appauthor,
                               version=self.version)
 
+    @property
+    def user_state_dir(self):
+        return user_state_dir(self.appname, self.appauthor,
+                              version=self.version)
+
     @property
     def user_log_dir(self):
         return user_log_dir(self.appname, self.appauthor,

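For orientation, the new user_state_dir property mirrors the existing convenience accessors, so callers reach the state directory the same way as the others. A short usage sketch (appdirs 1.4.3 semantics, hypothetical app name):

from appdirs import AppDirs

dirs = AppDirs("SuperApp", "Acme", version="1.0")
# Linux follows the Debian/XDG proposal referenced in the docstring:
# ~/.local/state/SuperApp/1.0, or $XDG_STATE_HOME/SuperApp/1.0 if set
print(dirs.user_state_dir)
# Windows and macOS fall back to the user data dir, as documented above
print(dirs.user_data_dir)
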
@@ -410,7 +457,10 @@ def _get_win_folder_from_registry(csidl_name):
     registry for this guarantees us the correct answer for all CSIDL_*
     names.
     """
-    import _winreg
+    if PY3:
+        import winreg as _winreg
+    else:
+        import _winreg
 
     shell_folder_name = {
         "CSIDL_APPDATA": "AppData",

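Note: `_winreg` became `winreg` in Python 3; the hunk keys the import off the `PY3` flag this vendored appdirs defines elsewhere. The same guard can be written against sys directly (a sketch; winreg only exists on Windows):

import sys

if sys.platform == "win32":
    if sys.version_info[0] >= 3:
        import winreg as _winreg   # Python 3 name
    else:
        import _winreg             # Python 2 name
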
@@ -500,7 +550,7 @@ def _get_win_folder_with_jna(csidl_name):
     if has_high_char:
         buf = array.zeros('c', buf_size)
         kernel = win32.Kernel32.INSTANCE
-        if kernal.GetShortPathName(dir, buf, buf_size):
+        if kernel.GetShortPathName(dir, buf, buf_size):
             dir = jna.Native.toString(buf.tostring()).rstrip("\0")
 
     return dir

@@ -527,9 +577,15 @@ if __name__ == "__main__":
     appname = "MyApp"
     appauthor = "MyCompany"
-    props = ("user_data_dir", "site_data_dir",
-             "user_config_dir", "site_config_dir",
-             "user_cache_dir", "user_log_dir")
+
+    props = ("user_data_dir",
+             "user_config_dir",
+             "user_cache_dir",
+             "user_state_dir",
+             "user_log_dir",
+             "site_data_dir",
+             "site_config_dir")
+
     print("-- app dirs %s --" % __version__)
 
     print("-- app dirs (with optional 'version')")
     dirs = AppDirs(appname, appauthor, version="1.0")

@@ -112,7 +112,7 @@ class Lock(object):
         if not self._is_expired(createdtime):
             return NOT_REGENERATED
 
-        async = False
+        _async = False
 
         if self._has_value(createdtime):
             if not self.mutex.acquire(False):

@@ -138,14 +138,14 @@ class Lock(object):
                 elif self.async_creator:
                     log.debug("Passing creation lock to async runner")
                     self.async_creator(self.mutex)
-                    async = True
+                    _async = True
                     return value, createdtime
 
             log.debug("Calling creation function")
             created = self.creator()
             return created
         finally:
-            if not async:
+            if not _async:
                 self.mutex.release()
                 log.debug("Released creation lock")
 

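The `async` → `_async` renames in this and the following dogpile hunks are required, not stylistic: `async` is a hard keyword from Python 3.7 on, so the old code no longer even compiles there. A quick demonstration:

import ast

# one line of the old dogpile code, reduced to its essence
try:
    ast.parse("async = False")
except SyntaxError as exc:       # raised on Python >= 3.7
    print("needs the _async rename:", exc)
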
@@ -23,7 +23,7 @@ class ReadWriteMutex(object):
 
     def __init__(self):
         # counts how many asynchronous methods are executing
-        self.async = 0
+        self._async = 0
 
         # pointer to thread that is the current sync operation
         self.current_sync_operation = None

@@ -45,7 +45,7 @@ class ReadWriteMutex(object):
             if self.current_sync_operation is not None:
                 return False
 
-            self.async += 1
+            self._async += 1
             log.debug("%s acquired read lock", self)
         finally:
             self.condition.release()

@@ -57,16 +57,16 @@ class ReadWriteMutex(object):
         """Release the 'read' lock."""
         self.condition.acquire()
         try:
-            self.async -= 1
+            self._async -= 1
 
             # check if we are the last asynchronous reader thread
             # out the door.
-            if self.async == 0:
+            if self._async == 0:
                 # yes. so if a sync operation is waiting, notifyAll to wake
                 # it up
                 if self.current_sync_operation is not None:
                     self.condition.notifyAll()
-            elif self.async < 0:
+            elif self._async < 0:
                 raise LockError("Synchronizer error - too many "
                                 "release_read_locks called")
             log.debug("%s released read lock", self)

@@ -96,7 +96,7 @@ class ReadWriteMutex(object):
             self.current_sync_operation = threading.currentThread()
 
             # now wait again for asyncs to finish
-            if self.async > 0:
+            if self._async > 0:
                 if wait:
                     # wait
                     self.condition.wait()

@@ -4,13 +4,9 @@ import os
 import pickle
 import shutil
 import tempfile
-import traceback
-import hashlib
 
 import appdirs
 
-from scandir import scandir, scandir_generic as _scandir_generic
-
 try:
     from collections.abc import MutableMapping
     unicode = str

@@ -90,7 +86,7 @@ class FileCache(MutableMapping):
     """
 
     def __init__(self, appname, flag='c', mode=0o666, keyencoding='utf-8',
-                 serialize=True, app_cache_dir=None, key_file_ext=".txt"):
+                 serialize=True, app_cache_dir=None):
         """Initialize a :class:`FileCache` object."""
         if not isinstance(flag, str):
             raise TypeError("flag must be str not '{}'".format(type(flag)))

@@ -131,7 +127,6 @@ class FileCache(MutableMapping):
         self._mode = mode
         self._keyencoding = keyencoding
         self._serialize = serialize
-        self.key_file_ext = key_file_ext
 
     def _parse_appname(self, appname):
         """Splits an appname into the appname and subcache components."""

@@ -185,16 +180,7 @@ class FileCache(MutableMapping):
         self._sync = True
         for ekey in self._buffer:
             filename = self._key_to_filename(ekey)
-            try:
-                self._write_to_file(filename, self._buffer[ekey])
-            except:
-                logger.error("Couldn't write content from %r to cache file: %r: %s", ekey, filename,
-                             traceback.format_exc())
-            try:
-                self.__write_to_file(filename + self.key_file_ext, ekey)
-            except:
-                logger.error("Couldn't write content from %r to cache file: %r: %s", ekey, filename,
-                             traceback.format_exc())
+            self._write_to_file(filename, self._buffer[ekey])
         self._buffer.clear()
         self._sync = False
 

@@ -203,7 +189,8 @@ class FileCache(MutableMapping):
             raise ValueError("invalid operation on closed cache")
 
     def _encode_key(self, key):
-        """
+        """Encode key using *hex_codec* for constructing a cache filename.
+
         Keys are implicitly converted to :class:`bytes` if passed as
         :class:`str`.
 

@@ -212,15 +199,16 @@ class FileCache(MutableMapping):
             key = key.encode(self._keyencoding)
         elif not isinstance(key, bytes):
             raise TypeError("key must be bytes or str")
-        return key.decode(self._keyencoding)
+        return codecs.encode(key, 'hex_codec').decode(self._keyencoding)
 
     def _decode_key(self, key):
-        """
+        """Decode key using hex_codec to retrieve the original key.
+
         Keys are returned as :class:`str` if serialization is enabled.
         Keys are returned as :class:`bytes` if serialization is disabled.
 
         """
-        bkey = key.encode(self._keyencoding)
+        bkey = codecs.decode(key.encode(self._keyencoding), 'hex_codec')
         return bkey.decode(self._keyencoding) if self._serialize else bkey
 
     def _dumps(self, value):

@@ -231,27 +219,19 @@ class FileCache(MutableMapping):
 
     def _key_to_filename(self, key):
         """Convert an encoded key to an absolute cache filename."""
-        if isinstance(key, unicode):
-            key = key.encode(self._keyencoding)
-        return os.path.join(self.cache_dir, hashlib.md5(key).hexdigest())
+        return os.path.join(self.cache_dir, key)
 
     def _filename_to_key(self, absfilename):
         """Convert an absolute cache filename to a key name."""
-        hkey_hdr_fn = absfilename + self.key_file_ext
-        if os.path.isfile(hkey_hdr_fn):
-            with open(hkey_hdr_fn, 'rb') as f:
-                key = f.read()
-            return key.decode(self._keyencoding) if self._serialize else key
+        return os.path.split(absfilename)[1]
 
-    def _all_filenames(self, scandir_generic=True):
+    def _all_filenames(self):
         """Return a list of absolute cache filenames"""
-        _scandir = _scandir_generic if scandir_generic else scandir
         try:
-            for entry in _scandir(self.cache_dir):
-                if entry.is_file(follow_symlinks=False) and not entry.name.endswith(self.key_file_ext):
-                    yield os.path.join(self.cache_dir, entry.name)
+            return [os.path.join(self.cache_dir, filename) for filename in
+                    os.listdir(self.cache_dir)]
         except (FileNotFoundError, OSError):
-            raise StopIteration
+            return []
 
     def _all_keys(self):
         """Return a list of all encoded key names."""

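The two hunks above return filecache to its upstream scheme: the hex-encoded key itself is the filename, which is reversible, instead of the previous md5-digest filename plus a sidecar key file. The round trip is plain codecs:

import codecs

key = "movie:tt0133093"   # hypothetical cache key
ekey = codecs.encode(key.encode("utf-8"), "hex_codec").decode("utf-8")
print(ekey)               # hex digits only, safe as a filename
orig = codecs.decode(ekey.encode("utf-8"), "hex_codec").decode("utf-8")
assert orig == key        # fully reversible, no sidecar file needed
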
@@ -261,17 +241,14 @@ class FileCache(MutableMapping):
         else:
             return set(file_keys + list(self._buffer))
 
-    def __write_to_file(self, filename, value):
+    def _write_to_file(self, filename, bytesvalue):
         """Write bytesvalue to filename."""
         fh, tmp = tempfile.mkstemp()
         with os.fdopen(fh, self._flag) as f:
-            f.write(value)
+            f.write(self._dumps(bytesvalue))
         rename(tmp, filename)
         os.chmod(filename, self._mode)
 
-    def _write_to_file(self, filename, bytesvalue):
-        self.__write_to_file(filename, self._dumps(bytesvalue))
-
     def _read_from_file(self, filename):
         """Read data from filename."""
         try:

|
|||
else:
|
||||
filename = self._key_to_filename(ekey)
|
||||
self._write_to_file(filename, value)
|
||||
self.__write_to_file(filename + self.key_file_ext, ekey)
|
||||
|
||||
def __getitem__(self, key):
|
||||
ekey = self._encode_key(key)
|
||||
|
@ -298,9 +274,8 @@ class FileCache(MutableMapping):
|
|||
except KeyError:
|
||||
pass
|
||||
filename = self._key_to_filename(ekey)
|
||||
if not os.path.isfile(filename):
|
||||
if filename not in self._all_filenames():
|
||||
raise KeyError(key)
|
||||
|
||||
return self._read_from_file(filename)
|
||||
|
||||
def __delitem__(self, key):
|
||||
|
@@ -317,11 +292,6 @@ class FileCache(MutableMapping):
         except (IOError, OSError):
             pass
 
-        try:
-            os.remove(filename + self.key_file_ext)
-        except (IOError, OSError):
-            pass
-
     def __iter__(self):
         for key in self._all_keys():
             yield self._decode_key(key)

@@ -331,10 +301,4 @@ class FileCache(MutableMapping):
 
     def __contains__(self, key):
         ekey = self._encode_key(key)
-        if not self._sync:
-            try:
-                return ekey in self._buffer
-            except KeyError:
-                pass
-        filename = self._key_to_filename(ekey)
-        return os.path.isfile(filename)
+        return ekey in self._all_keys()

@@ -10,8 +10,8 @@ __license__ = "WTFPL"
 from datetime import datetime
 
 def _df(seconds, denominator=1, text='', past=True):
-    if past: return str((seconds + denominator/2)/ denominator) + text + ' ago'
-    else: return 'in ' + str((seconds + denominator/2)/ denominator) + text
+    if past: return str(round(seconds / denominator)) + text + ' ago'
+    else: return 'in ' + str(round(seconds / denominator)) + text
 
 def date(time=False, asdays=False, short=False):
     '''Returns a pretty formatted date.

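The `_df` rewrite replaces the Python 2 idiom `(seconds + denominator/2) / denominator` — integer division biased by half the denominator to get rounding — with an explicit `round()`, since `/` became true division in Python 3. Equivalent arithmetic:

seconds, denominator = 90, 60

py2_style = (seconds + denominator // 2) // denominator  # floor after biasing: 2
py3_style = round(seconds / denominator)                 # round(1.5) == 2
print(py2_style, py3_style)
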
@@ -1,4 +1,2 @@
 
-from pyprobe.pyprobe import VideoFileParser
-from pyprobe.helpers import timeToTuple, sizeStr
-
+from .pyprobe import VideoFileParser

@@ -1,6 +1,7 @@
+from __future__ import absolute_import
 from os import path
 
-from pyprobe.baseparser import BaseParser
+from .baseparser import BaseParser
 
 
 class StreamParser(BaseParser):

|
|||
"""Returns a tuple (width, height)"""
|
||||
width = data.get("width", None)
|
||||
height = data.get("height", None)
|
||||
if width == None and height == None:
|
||||
if width is None and height is None:
|
||||
return None, (0, 0)
|
||||
try:
|
||||
return (width, height), (int(float(width)), int(float(height)))
|
||||
|
@@ -67,7 +68,7 @@ class VideoStreamParser(BaseParser):
         input_str = data.get("avg_frame_rate", None)
         try:
             num, den = input_str.split("/")
-            return input_str, float(num) / float(den)
+            return input_str, round(float(num) / float(den), 3)
         except (ValueError, ZeroDivisionError, AttributeError):
             info = cls.average_framerate(data)
             return input_str, info

@@ -125,6 +126,15 @@ class SubtitleStreamParser(BaseParser):
             return info, (info or "null")
         return None, "null"
 
+    @staticmethod
+    def value_forced(data):
+        """Returns a bool """
+        disposition = data.get("disposition", None)
+        if disposition:
+            info = disposition.get("forced", None)
+            return bool(info), (bool(info) or False)
+        return None, "null"
+
 
 class ChapterParser(BaseParser):
     @staticmethod

@@ -182,7 +192,7 @@ class RootParser(BaseParser):
     def value_size(data):
         """Returns an int"""
         info = data.get("size", None)
-        if info == None:
+        if info is None:
             file_path = data.get("filename", "")
             if path.isfile(file_path):
                 info = str(path.getsize(file_path))

@@ -195,7 +205,7 @@ class RootParser(BaseParser):
     def value_bit_rate(cls, data):
         """Returns an int"""
         info = data.get("bit_rate", None)
-        if info == None:
+        if info is None:
             _, size = cls.value_size(data)
             _, duration = cls.value_duration(data)
             if size and duration:

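When ffprobe omits `bit_rate`, the parser falls back to deriving it from the size and duration it just fetched — presumably the usual size-in-bits over seconds. Worked numbers (hypothetical file):

size_bytes = 1_470_000_000   # ~1.47 GB
duration_s = 5_400           # 90 minutes

bit_rate = int(size_bytes * 8 / duration_s)
print(bit_rate)              # 2177777 -> about 2.2 Mbit/s
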
@@ -1,51 +1,26 @@
+from __future__ import absolute_import
 import json
 import subprocess
-import xml.etree
-import xml.etree.ElementTree
-from io import StringIO
 from os import path
 import re
-from sys import getfilesystemencoding
 
-from pyprobe import ffprobeparsers, mediainfoparsers
+from . import ffprobeparsers
+
 
 class VideoFileParser:
     def __init__(
         self,
         ffprobe="ffprobe",
-        mediainfo="mediainfo",
         includeMissing=True,
         rawMode=False,
     ):
         self._ffprobe = ffprobe
-        self._mediainfo = mediainfo
         self._includeMissing = includeMissing
         self._rawMode = rawMode
 
     ########################################
     # Main Method
 
-    def parseMediainfo(self, inputFile):
-        """Takes an input file and returns the parsed data using mediainfo.
-
-        Args:
-            inputFile (str): Video file path
-
-        Returns:
-            dict<str, dict<str, var>>: Parsed video info
-
-        Raises:
-            FileNotFoundError: The input video file or input executable was not found
-            IOError: Execution failed
-
-        """
-        if not path.isfile(inputFile):
-            raise FileNotFoundError(inputFile + " not found")
-        self._checkExecutable(self._mediainfo)
-        self._checkMediainfoVersion(self._mediainfo)
-        xmlData = self._executeMediainfo(inputFile)
-        return self._parseMediainfo(xmlData, inputFile)
-
     def parseFfprobe(self, inputFile):
         """Takes an input file and returns the parsed data using ffprobe.
 

@@ -66,122 +41,6 @@ class VideoFileParser:
         fdict = self._executeFfprobe(inputFile)
         return self._parseFfprobe(fdict, inputFile)
 
-    ########################################
-    # Mediainfo Parsing
-
-    def _executeMediainfo(self, inputFile):
-        """Executes mediainfo program on input file to get raw info
-
-        Args:
-            inputFile (str): Video file path
-
-        Returns:
-            xml.ElementTree.etree: Mediainfo output
-
-        Raises:
-            IOError: Mediainfo output could not be parsed as XML data
-
-        """
-        commandArgs = ["-f", "--Language=raw", "--Output=XML"]
-        outputXml = self._executeParser(self._mediainfo, commandArgs, inputFile)
-        try:
-            xmlRoot = self._decodeMediainfoOutput(outputXml)
-        except xml.etree.ElementTree.ParseError:
-            raise IOError("Could not decode mediainfo output for file " + inputFile)
-        return xmlRoot
-
-    def _parseMediainfo(self, xmlRoot, inputFile):
-        """Parse mediainfo output into an organized data structure
-
-        Args:
-            xmlRoot (xml.ElementTree.etree): Mediainfo output
-            inputFile (str): Video file path
-
-        Returns:
-            dict<str, dict<str, var>>: Parsed video data
-
-        """
-        videoInfo = {}
-        videoInfo["path"] = path.abspath(inputFile)
-        videoInfo.update(
-            mediainfoparsers.RootParser.parse(
-                xmlRoot.find(".//track[@type='General']"),
-                self._rawMode,
-                self._includeMissing,
-            )
-        )
-        videoInfo.update(self._parseMediainfoStreams(xmlRoot))
-        videoInfo.update(self._parseMediainfoChapters(xmlRoot, videoInfo["duration"]))
-        return videoInfo
-
-    @staticmethod
-    def _decodeMediainfoOutput(xmlData):
-        # Strip namespaces from xml string
-        # Code used from https://stackoverflow.com/a/25920989
-        it = xml.etree.ElementTree.iterparse(StringIO(xmlData))
-        for _, el in it:
-            if "}" in el.tag:
-                el.tag = el.tag.split("}", 1)[1]
-        return it.root
-
-    def _parseMediainfoStreams(self, xmlData):
-        """Parses video, audio, and subtitle streams
-
-        Args:
-            xmlData (dict): Stream data from mediainfo
-
-        Returns:
-            dict<str, dict<str, var>>: Parsed streams - video, audio, and subtitle
-
-        """
-        parsedInfo = {"videos": [], "audios": [], "subtitles": []}
-        for stream in xmlData.findall(".//track"):
-            streamType = stream.attrib["type"]
-            if streamType == "Video":
-                parsedInfo["videos"].append(
-                    mediainfoparsers.VideoStreamParser.parse(
-                        stream, self._rawMode, self._includeMissing
-                    )
-                )
-            elif streamType == "Audio":
-                parsedInfo["audios"].append(
-                    mediainfoparsers.AudioStreamParser.parse(
-                        stream, self._rawMode, self._includeMissing
-                    )
-                )
-            elif streamType == "Text":
-                parsedInfo["subtitles"].append(
-                    mediainfoparsers.SubtitleStreamParser.parse(
-                        stream, self._rawMode, self._includeMissing
-                    )
-                )
-        return parsedInfo
-
-    def _parseMediainfoChapters(self, xmlData, duration):
-        """Since mediainfo does not give end times for each chapter,
-        start times for the following chapter are added to the previous chapter.
-
-        Args:
-            xmlData (dict): Stream data from mediainfo
-            duration (int): Video duration
-
-        Returns:
-            dict<str, dict<str, var>>: Parsed chapters
-
-        """
-        parsedInfo = {"chapters": []}
-        for extra in xmlData.find(".//track[@type='Menu']/extra"):
-            match = re.fullmatch(r"_\d*_\d\d_\d\d_\d\d\d", extra.tag)
-            if match:
-                parsedInfo["chapters"].append(
-                    mediainfoparsers.ChapterParser.parse(
-                        extra, self._rawMode, self._includeMissing
-                    )
-                )
-        if not self._rawMode:
-            mediainfoparsers.ChapterParser.addEndTimes(parsedInfo["chapters"], duration)
-        return parsedInfo
-
     ########################################
     # ffprobe Parsing
 

@@ -198,6 +57,8 @@ class VideoFileParser:
 
         """
         commandArgs = [
             "-v",
             "quiet",
+            "-hide_banner",
+            "-show_error",
             "-show_format",

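`-hide_banner` keeps ffprobe's build banner out of the output and `-show_error` makes failures show up in the parsed result itself. Assuming ffprobe is on PATH, the invocation being assembled looks roughly like this (the remaining arguments here are an assumption based on typical ffprobe JSON usage, not a quote of the file):

import json
import subprocess

cmd = [
    "ffprobe",
    "-v", "quiet",
    "-hide_banner",
    "-show_error",
    "-show_format",
    "-show_streams",
    "-print_format", "json",
    "movie.mkv",             # hypothetical input file
]
out = subprocess.check_output(cmd)
print(json.loads(out.decode("utf-8")).get("format", {}))
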
@@ -288,7 +149,7 @@ class VideoFileParser:
 
         """
         parsedInfo = {"chapters": []}
-        if fOutput["chapters"] == None:
+        if fOutput["chapters"] is None:
             return parsedInfo
         for chapter in fOutput["chapters"]:
             parsedInfo["chapters"].append(

@@ -336,31 +197,17 @@ class VideoFileParser:
 
         """
         try:
-            subprocess.run(
+            subprocess.check_output(
                 [executable, "--help"],
-                stdout=subprocess.DEVNULL,
-                stderr=subprocess.DEVNULL,
+                stderr=subprocess.STDOUT
             )
-        except FileNotFoundError:
+        except OSError:
             raise FileNotFoundError(executable + " not found")
 
-    @staticmethod
-    def _checkMediainfoVersion(executable):
-        """Checks if the Mediainfo version is >=17.10
-        In the version jump from 0.7.97 to 17.10 came lots of changes
-        to the way Mediainfo outputs data. Therefore, this will
-        only support versions >=17.10.
-
-        Some linux software repositories still distribute old
-        versions of mediainfo, so the user must install
-        using packages from mediainfo's website.
-
-        """
-        command = [executable, "--version"]
-        completedProcess = subprocess.run(
-            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8"
-        )
-        match = re.search(r"v\d*(\.\d*)*", completedProcess.stdout)
-        version = match.group()[1:]
-        if version.split(".")[0] == "0":
-            raise IOError("Mediainfo version is <17.10 - (v" + version + ")")
+
+class FileNotFoundError(Exception):
+    pass
+
+
+class IOError(Exception):
+    pass

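`subprocess.run` and the builtin `FileNotFoundError` only exist on Python 3, hence the fallback to `check_output` and `OSError`, which both Python lines share. A standalone sketch of the same check (using RuntimeError in place of the module's own FileNotFoundError class):

import subprocess

def check_executable(executable):
    # OSError covers "no such file" on Python 2 and 3 alike
    try:
        subprocess.check_output([executable, "--help"],
                                stderr=subprocess.STDOUT)
    except OSError:
        raise RuntimeError(executable + " not found")

try:
    check_executable("ffprobe")
    print("ffprobe available")
except RuntimeError as exc:
    print(exc)
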
@@ -855,7 +855,7 @@ def save_subtitles(file_path, subtitles, single=False, directory=None, chmod=Non
                 content = subtitle.get_modified_content(format=format, debug=debug_mods)
                 if content:
                     with open(subtitle_path, 'w') as f:
-                        f.write(content)
+                        f.write(content.decode('utf-8'))
                     subtitle.storage_path = subtitle_path
                 else:
                     logger.error(u"Something went wrong when getting modified subtitle for %s", subtitle)

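The file here is opened in text mode ('w') while `get_modified_content` now hands back bytes under Python 3, hence the explicit decode. The alternative is a binary-mode handle; on POSIX both write the same UTF-8 file (text mode additionally translates newlines on Windows):

content = "1\n00:00:01,000 --> 00:00:02,000\nHello\n".encode("utf-8")

# what the hunk does: text-mode handle, so decode the bytes first
with open("example.srt", "w") as f:
    f.write(content.decode("utf-8"))

# equivalent on POSIX: keep bytes and write through a binary-mode handle
with open("example.srt", "wb") as f:
    f.write(content)
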
@@ -148,7 +148,7 @@ class CFSession(CloudScraper):
         cache_key = "cf_data3_%s" % domain
 
         if not self.cookies.get("cf_clearance", "", domain=domain):
-            cf_data = str(region.get(cache_key))
+            cf_data = region.get(cache_key)
             if cf_data is not NO_VALUE:
                 cf_cookies, hdrs = cf_data
                 logger.debug("Trying to use old cf data for %s: %s", domain, cf_data)

@@ -165,9 +165,9 @@ class CFSession(CloudScraper):
                     pass
                 else:
                     if cf_data and "cf_clearance" in cf_data[0] and cf_data[0]["cf_clearance"]:
-                        if cf_data != str(region.get(cache_key)):
+                        if cf_data != region.get(cache_key):
                             logger.debug("Storing cf data for %s: %s", domain, cf_data)
-                            region.set(cache_key, bytes(cf_data)
+                            region.set(cache_key, cf_data)
                         elif cf_data[0]["cf_clearance"]:
                             logger.debug("CF Live tokens not updated")
 

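This and the following provider hunks drop the str()/bytes() wrappers around region.get/region.set. dogpile.cache serializes values itself, so native objects go in and come out, and a cache miss is the NO_VALUE sentinel — which `str(region.get(...))` would have flattened into a plain string, breaking the comparison. A minimal sketch:

from dogpile.cache import make_region
from dogpile.cache.api import NO_VALUE

region = make_region().configure("dogpile.cache.memory")

cached = region.get("cf_data3_example.org")   # hypothetical key
if cached is NO_VALUE:                        # miss: compare the sentinel
    region.set("cf_data3_example.org", ({"cf_clearance": "tok"}, {"UA": "x"}))
    cached = region.get("cf_data3_example.org")
cookies, headers = cached                     # native tuple comes back intact
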
@@ -243,7 +243,7 @@ class DBCPitcher(DBCProxyLessPitcher):
 
 
 def load_verification(site_name, session, callback=lambda x: None):
-    ccks = str(region.get("%s_data" % site_name, expiration_time=15552000))  # 6m
+    ccks = region.get("%s_data" % site_name, expiration_time=15552000)  # 6m
     if ccks != NO_VALUE:
         cookies, user_agent = ccks
         logger.debug("%s: Re-using previous user agent: %s", site_name.capitalize(), user_agent)

@@ -257,4 +257,4 @@ def load_verification(site_name, session):
 
 
 def store_verification(site_name, session):
-    region.set("%s_data" % site_name, bytes(session.cookies._cookies, session.headers["User-Agent"]))
+    region.set("%s_data" % site_name, session.cookies._cookies, session.headers["User-Agent"])

@@ -199,7 +199,7 @@ class LegendasTVProvider(_LegendasTVProvider):
 
             # attempt to get the releases from the cache
             cache_key = releases_key.format(archive_id=a.id, archive_name=a.name)
-            releases = str(region.get(cache_key, expiration_time=expiration_time))
+            releases = region.get(cache_key, expiration_time=expiration_time)
 
             # the releases are not in cache or cache is expired
             if releases == NO_VALUE:

@@ -226,7 +226,7 @@ class LegendasTVProvider(_LegendasTVProvider):
                     releases.append(name)
 
             # cache the releases
-            region.set(cache_key, bytes(releases)
+            region.set(cache_key, releases)
 
             # iterate over releases
             for r in releases:

@@ -154,7 +154,7 @@ class OpenSubtitlesProvider(ProviderRetryMixin, _OpenSubtitlesProvider):
             self.token = response['token']
         logger.debug('Logged in with token %r', self.token[:10]+"X"*(len(self.token)-10))
 
-        region.set("os_token", bytes(self.token))
+        region.set("os_token", bytearray(self.token, encoding='utf-8'))
 
     def use_token_or_login(self, func):
         if not self.token:

@@ -141,7 +141,7 @@ class SubsceneProvider(Provider, ProviderSubtitleArchiveMixin):
         logger.info("Creating session")
         self.session = RetryingCFSession()
 
-        prev_cookies = str(region.get("subscene_cookies2"))
+        prev_cookies = region.get("subscene_cookies2")
         if prev_cookies != NO_VALUE:
             logger.debug("Re-using old subscene cookies: %r", prev_cookies)
             self.session.cookies.update(prev_cookies)

@@ -194,7 +194,7 @@ class SubsceneProvider(Provider, ProviderSubtitleArchiveMixin):
                     del cj[cn]
 
                 logger.debug("Storing cookies: %r", cj)
-                region.set("subscene_cookies2", bytes(cj)
+                region.set("subscene_cookies2", cj)
                 return
         raise ProviderError("Something went wrong when trying to log in #1")
 

@@ -219,9 +219,9 @@ class SubsceneProvider(Provider, ProviderSubtitleArchiveMixin):
             acc_filters["SelectedIds"] = selected_ids
         self.filters["LanguageFilter"] = ",".join(acc_filters["SelectedIds"])
 
-        last_filters = str(region.get("subscene_filters"))
+        last_filters = region.get("subscene_filters")
         if last_filters != acc_filters:
-            region.set("subscene_filters", bytes(acc_filters)
+            region.set("subscene_filters", acc_filters)
             logger.debug("Setting account filters to %r", acc_filters)
             self.session.post("https://u.subscene.com/filter", acc_filters, allow_redirects=False)
 

@@ -104,7 +104,7 @@ class Subtitle(Subtitle_):
             self.set_encoding("utf-8")
 
         # normalize line endings
-        self.content = self.content.replace("\r\n", "\n").replace('\r', '\n')
+        self.content = self.content.replace(b"\r\n", b"\n").replace(b'\r', b'\n')
 
     def guess_encoding(self):
         """Guess encoding using the language, falling back on chardet.

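With Python 3, `Subtitle.content` is bytes, so the normalization needs bytes literals on both sides; mixing str patterns into bytes raises TypeError:

content = b"1\r\n00:00:01,000 --> 00:00:02,000\r\nHello\r"

normalized = content.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
assert b"\r" not in normalized

try:
    content.replace("\r\n", "\n")   # str pattern against bytes content
except TypeError as exc:
    print("Python 3 rejects mixed types:", exc)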