Mirror of https://github.com/morpheus65535/bazarr.git (synced 2025-04-24 06:37:16 -04:00)

Commit 24c075051d (parent 762cd61b21): Updated guessit and rebulk to latest version

48 changed files with 253 additions and 336 deletions
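The most visible API addition in this update is the `output_input_string` option (wired through `guessit/api.py`, the new `-i`/`--output-input-string` CLI flag and the new tests further down). A minimal, hedged sketch of how a caller might exercise it, assuming the vendored guessit 3.3.1 is importable as usual:

```python
# Hedged sketch: `output_input_string` and the `input_string` key come from
# this diff; guessit() itself is the library's standard entry point.
from guessit import guessit

result = guessit('Show.Name.S10E01.576p.RTE.WEBRip.AAC2.0.H.264-RTN',
                 {'output_input_string': True})
print(result['title'], result.get('input_string'))
```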
@@ -4,14 +4,12 @@
Entry point module
"""
# pragma: no cover
from __future__ import print_function

import json
import logging
import os
import sys

import six
from collections import OrderedDict

from rebulk.__version__ import __version__ as __rebulk_version__

from guessit import api
@@ -20,12 +18,6 @@ from guessit.jsonutils import GuessitEncoder
from guessit.options import argument_parser, parse_options, load_config, merge_options


try:
from collections import OrderedDict
except ImportError: # pragma: no-cover
from ordereddict import OrderedDict # pylint:disable=import-error


def guess_filename(filename, options):
"""
Guess a single filename using given options
@@ -48,6 +40,7 @@ def guess_filename(filename, options):
if options.get('json'):
print(json.dumps(guess, cls=GuessitEncoder, ensure_ascii=False))
elif options.get('yaml'):
# pylint:disable=import-outside-toplevel
import yaml
from guessit import yamlutils

@@ -78,6 +71,7 @@ def display_properties(options):
else:
print(json.dumps(list(properties.keys()), cls=GuessitEncoder, ensure_ascii=False))
elif options.get('yaml'):
# pylint:disable=import-outside-toplevel
import yaml
from guessit import yamlutils
if options.get('values'):
@@ -97,24 +91,10 @@ def display_properties(options):
print(4 * ' ' + '[!] %s' % (property_value,))


def fix_argv_encoding():
"""
Fix encoding of sys.argv on windows Python 2
"""
if six.PY2 and os.name == 'nt': # pragma: no cover
# see http://bugs.python.org/issue2128
import locale

for i, j in enumerate(sys.argv):
sys.argv[i] = j.decode(locale.getpreferredencoding())


def main(args=None): # pylint:disable=too-many-branches
"""
Main function for entry point
"""
fix_argv_encoding()

if args is None: # pragma: no cover
options = parse_options()
else:
@@ -142,7 +122,7 @@ def main(args=None): # pylint:disable=too-many-branches

if options.get('yaml'):
try:
import yaml # pylint:disable=unused-variable,unused-import
import yaml # pylint:disable=unused-variable,unused-import,import-outside-toplevel
except ImportError: # pragma: no cover
del options['yaml']
print('PyYAML is not installed. \'--yaml\' option will be ignored ...', file=sys.stderr)
@@ -156,10 +136,7 @@ def main(args=None): # pylint:disable=too-many-branches
for filename in options.get('filename'):
filenames.append(filename)
if options.get('input_file'):
if six.PY2:
input_file = open(options.get('input_file'), 'r')
else:
input_file = open(options.get('input_file'), 'r', encoding='utf-8')
input_file = open(options.get('input_file'), 'r', encoding='utf-8')
try:
filenames.extend([line.strip() for line in input_file.readlines()])
finally:

@@ -4,4 +4,4 @@
Version module
"""
# pragma: no cover
__version__ = '3.1.1'
__version__ = '3.3.1'

@@ -4,15 +4,12 @@
API functions that can be used by external software
"""

try:
from collections import OrderedDict
except ImportError: # pragma: no-cover
from ordereddict import OrderedDict # pylint:disable=import-error
from collections import OrderedDict

from pathlib import Path
import os
import traceback

import six
from rebulk.introspector import introspect

from .__version__ import __version__
@@ -26,18 +23,18 @@ class GuessitException(Exception):
"""

def __init__(self, string, options):
super(GuessitException, self).__init__("An internal error has occured in guessit.\n"
"===================== Guessit Exception Report =====================\n"
"version=%s\n"
"string=%s\n"
"options=%s\n"
"--------------------------------------------------------------------\n"
"%s"
"--------------------------------------------------------------------\n"
"Please report at "
"https://github.com/guessit-io/guessit/issues.\n"
"====================================================================" %
(__version__, str(string), str(options), traceback.format_exc()))
super().__init__("An internal error has occured in guessit.\n"
"===================== Guessit Exception Report =====================\n"
"version=%s\n"
"string=%s\n"
"options=%s\n"
"--------------------------------------------------------------------\n"
"%s"
"--------------------------------------------------------------------\n"
"Please report at "
"https://github.com/guessit-io/guessit/issues.\n"
"====================================================================" %
(__version__, str(string), str(options), traceback.format_exc()))

self.string = string
self.options = options
@@ -113,9 +110,7 @@ class GuessItApi(object):
return [cls._fix_encoding(item) for item in value]
if isinstance(value, dict):
return {cls._fix_encoding(k): cls._fix_encoding(v) for k, v in value.items()}
if six.PY2 and isinstance(value, six.text_type):
return value.encode('utf-8')
if six.PY3 and isinstance(value, six.binary_type):
if isinstance(value, bytes):
return value.decode('ascii')
return value

@@ -175,16 +170,12 @@ class GuessItApi(object):
:return:
:rtype:
"""
try:
from pathlib import Path
if isinstance(string, Path):
try:
# Handle path-like object
string = os.fspath(string)
except AttributeError:
string = str(string)
except ImportError:
pass
if isinstance(string, Path):
try:
# Handle path-like object
string = os.fspath(string)
except AttributeError:
string = str(string)

try:
options = parse_options(options, True)
@@ -194,32 +185,27 @@ class GuessItApi(object):
result_decode = False
result_encode = False

if six.PY2:
if isinstance(string, six.text_type):
string = string.encode("utf-8")
result_decode = True
elif isinstance(string, six.binary_type):
string = six.binary_type(string)
if six.PY3:
if isinstance(string, six.binary_type):
string = string.decode('ascii')
result_encode = True
elif isinstance(string, six.text_type):
string = six.text_type(string)
if isinstance(string, bytes):
string = string.decode('ascii')
result_encode = True

matches = self.rebulk.matches(string, options)
if result_decode:
for match in matches:
if isinstance(match.value, six.binary_type):
if isinstance(match.value, bytes):
match.value = match.value.decode("utf-8")
if result_encode:
for match in matches:
if isinstance(match.value, six.text_type):
if isinstance(match.value, str):
match.value = match.value.encode("ascii")
return matches.to_dict(options.get('advanced', False), options.get('single_value', False),
options.get('enforce_list', False))
except:
raise GuessitException(string, options)
matches_dict = matches.to_dict(options.get('advanced', False), options.get('single_value', False),
options.get('enforce_list', False))
output_input_string = options.get('output_input_string', False)
if output_input_string:
matches_dict['input_string'] = matches.input_string
return matches_dict
except Exception as err:
raise GuessitException(string, options) from err

def properties(self, options=None):
"""
@@ -235,8 +221,8 @@ class GuessItApi(object):
options = merge_options(config, options)
unordered = introspect(self.rebulk, options).properties
ordered = OrderedDict()
for k in sorted(unordered.keys(), key=six.text_type):
ordered[k] = list(sorted(unordered[k], key=six.text_type))
for k in sorted(unordered.keys(), key=str):
ordered[k] = list(sorted(unordered[k], key=str))
if hasattr(self.rebulk, 'customize_properties'):
ordered = self.rebulk.customize_properties(ordered)
return ordered

@@ -1,27 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Backports
"""
# pragma: no-cover
# pylint: skip-file

def cmp_to_key(mycmp):
"""functools.cmp_to_key backport"""
class KeyClass(object):
"""Key class"""
def __init__(self, obj, *args): # pylint: disable=unused-argument
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
def __ne__(self, other):
return mycmp(self.obj, other.obj) != 0
return KeyClass

@@ -416,6 +416,10 @@
"Animal Planet": "ANPL",
"AnimeLab": "ANLB",
"AOL": "AOL",
"AppleTV": [
"ATVP",
"ATV+"
],
"ARD": "ARD",
"BBC iPlayer": [
"iP",
@@ -482,6 +486,7 @@
"HBO",
"re:HBO-?Go"
],
"HBO Max": "HMAX",
"HGTV": "HGTV",
"History": [
"HIST",
@@ -490,7 +495,10 @@
"Hulu": "HULU",
"Investigation Discovery": "ID",
"IFC": "IFC",
"iTunes": "iTunes",
"iTunes": [
"iTunes",
{"pattern": "iT", "ignore_case": false}
],
"ITV": "ITV",
"Knowledge Network": "KNOW",
"Lifetime": "LIFE",
@@ -537,6 +545,7 @@
"SeeSo"
],
"Shomi": "SHMI",
"Showtime": "SHO",
"Spike": "SPIK",
"Spike TV": [
"SPKE",

@@ -4,10 +4,7 @@
Monkeypatch initialisation functions
"""

try:
from collections import OrderedDict
except ImportError: # pragma: no-cover
from ordereddict import OrderedDict # pylint:disable=import-error
from collections import OrderedDict

from rebulk.match import Match

@@ -11,8 +11,6 @@ import shlex

from argparse import ArgumentParser

import six


def build_argument_parser():
"""
@@ -68,6 +66,8 @@ def build_argument_parser():
help='Display information for filename guesses as json output')
output_opts.add_argument('-y', '--yaml', dest='yaml', action='store_true', default=None,
help='Display information for filename guesses as yaml output')
output_opts.add_argument('-i', '--output-input-string', dest='output_input_string', action='store_true',
default=False, help='Add input_string property in the output')

conf_opts = opts.add_argument_group("Configuration")
conf_opts.add_argument('-c', '--config', dest='config', action='append', default=None,
@@ -108,7 +108,7 @@ def parse_options(options=None, api=False):
:return:
:rtype:
"""
if isinstance(options, six.string_types):
if isinstance(options, str):
args = shlex.split(options)
options = vars(argument_parser.parse_args(args))
elif options is None:
@@ -153,7 +153,7 @@ def load_config(options):
cwd = os.getcwd()
yaml_supported = False
try:
import yaml # pylint:disable=unused-variable,unused-import
import yaml # pylint:disable=unused-variable,unused-import,import-outside-toplevel
yaml_supported = True
except ImportError:
pass
@@ -225,7 +225,7 @@ def merge_option_value(option, value, merged):
if value is not None and option != 'pristine':
if option in merged.keys() and isinstance(merged[option], list):
for val in value:
if val not in merged[option]:
if val not in merged[option] and val is not None:
merged[option].append(val)
elif option in merged.keys() and isinstance(merged[option], dict):
merged[option] = merge_options(merged[option], value)
@@ -250,13 +250,13 @@ def load_config_file(filepath):
return json.load(config_file_data)
if filepath.endswith('.yaml') or filepath.endswith('.yml'):
try:
import yaml
import yaml # pylint:disable=import-outside-toplevel
with open(filepath) as config_file_data:
return yaml.load(config_file_data, yaml.SafeLoader)
except ImportError: # pragma: no cover
except ImportError as err: # pragma: no cover
raise ConfigurationException('Configuration file extension is not supported. '
'PyYAML should be installed to support "%s" file' % (
filepath,))
filepath,)) from err

try:
# Try to load input as JSON

@@ -3,7 +3,7 @@
"""
Common module
"""
import re
from rebulk.remodule import re

seps = r' [](){}+*|=-_~#/\\.,;:' # list of tags/words separators
seps_no_groups = seps.replace('[](){}', '')

@@ -3,10 +3,8 @@
"""
Comparators
"""
try:
from functools import cmp_to_key
except ImportError:
from ...backports import cmp_to_key

from functools import cmp_to_key


def marker_comparator_predicate(match):
@@ -14,10 +12,10 @@ def marker_comparator_predicate(match):
Match predicate used in comparator
"""
return (
not match.private
and match.name not in ('proper_count', 'title')
and not (match.name == 'container' and 'extension' in match.tags)
and not (match.name == 'other' and match.value == 'Rip')
not match.private
and match.name not in ('proper_count', 'title')
and not (match.name == 'container' and 'extension' in match.tags)
and not (match.name == 'other' and match.value == 'Rip')
)

@@ -3,7 +3,7 @@
"""
Expected property factory
"""
import re
from rebulk.remodule import re

from rebulk import Rebulk
from rebulk.utils import find_all

@@ -3,10 +3,9 @@
"""
Quantities: Size
"""
import re
from abc import abstractmethod

import six
from rebulk.remodule import re

from ..common import seps

@@ -50,7 +49,7 @@ class Quantity(object):
return hash(str(self))

def __eq__(self, other):
if isinstance(other, six.string_types):
if isinstance(other, str):
return str(self) == other
if not isinstance(other, self.__class__):
return NotImplemented

@@ -5,6 +5,7 @@ Groups markers (...), [...] and {...}
"""
from rebulk import Rebulk

from ...options import ConfigurationException

def groups(config):
"""
@@ -21,6 +22,9 @@ def groups(config):
starting = config['starting']
ending = config['ending']

if len(starting) != len(ending):
raise ConfigurationException("Starting and ending groups must have the same length")

def mark_groups(input_string):
"""
Functional pattern to mark groups (...), [...] and {...}.
@@ -28,7 +32,7 @@ def groups(config):
:param input_string:
:return:
"""
openings = ([], [], [])
openings = ([], ) * len(starting)
i = 0

ret = []

@@ -6,8 +6,6 @@ Processors
from collections import defaultdict
import copy

import six

from rebulk import Rebulk, Rule, CustomRule, POST_PROCESS, PRE_PROCESS, AppendMatch, RemoveMatch

from .common import seps_no_groups
@@ -68,7 +66,7 @@ class EquivalentHoles(Rule):
for name in matches.names:
for hole in list(holes):
for current_match in matches.named(name):
if isinstance(current_match.value, six.string_types) and \
if isinstance(current_match.value, str) and \
hole.value.lower() == current_match.value.lower():
if 'equivalent-ignore' in current_match.tags:
continue
@@ -96,7 +94,7 @@ class RemoveAmbiguous(Rule):
consequence = RemoveMatch

def __init__(self, sort_function=marker_sorted, predicate=None):
super(RemoveAmbiguous, self).__init__()
super().__init__()
self.sort_function = sort_function
self.predicate = predicate

@@ -131,7 +129,7 @@ class RemoveLessSpecificSeasonEpisode(RemoveAmbiguous):
keep the one tagged as 'SxxExx' or in the rightmost filepart.
"""
def __init__(self, name):
super(RemoveLessSpecificSeasonEpisode, self).__init__(
super().__init__(
sort_function=(lambda markers, matches:
marker_sorted(list(reversed(markers)), matches,
lambda match: match.name == name and 'SxxExx' in match.tags)),

@@ -130,7 +130,7 @@ class AudioProfileRule(Rule):
consequence = RemoveMatch

def __init__(self, codec):
super(AudioProfileRule, self).__init__()
super().__init__()
self.codec = codec

def enabled(self, context):
@@ -166,7 +166,7 @@ class DtsHDRule(AudioProfileRule):
"""

def __init__(self):
super(DtsHDRule, self).__init__('DTS-HD')
super().__init__('DTS-HD')


class DtsRule(AudioProfileRule):
@@ -175,7 +175,7 @@ class DtsRule(AudioProfileRule):
"""

def __init__(self):
super(DtsRule, self).__init__('DTS')
super().__init__('DTS')


class AacRule(AudioProfileRule):
@@ -184,7 +184,7 @@ class AacRule(AudioProfileRule):
"""

def __init__(self):
super(AacRule, self).__init__('AAC')
super().__init__('AAC')


class DolbyDigitalRule(AudioProfileRule):
@@ -193,7 +193,7 @@ class DolbyDigitalRule(AudioProfileRule):
"""

def __init__(self):
super(DolbyDigitalRule, self).__init__('Dolby Digital')
super().__init__('Dolby Digital')


class HqConflictRule(Rule):

@@ -3,7 +3,7 @@
"""
video_bit_rate and audio_bit_rate properties
"""
import re
from rebulk.remodule import re

from rebulk import Rebulk
from rebulk.rules import Rule, RemoveMatch, RenameMatch

@@ -47,7 +47,7 @@ class RemoveConflictsWithEpisodeTitle(Rule):
consequence = RemoveMatch

def __init__(self, previous_names):
super(RemoveConflictsWithEpisodeTitle, self).__init__()
super().__init__()
self.previous_names = previous_names
self.next_names = ('streaming_service', 'screen_size', 'source',
'video_codec', 'audio_codec', 'other', 'container')
@@ -129,7 +129,7 @@ class EpisodeTitleFromPosition(TitleBaseRule):
dependency = TitleToEpisodeTitle

def __init__(self, previous_names):
super(EpisodeTitleFromPosition, self).__init__('episode_title', ['title'])
super().__init__('episode_title', ['title'])
self.previous_names = previous_names

def hole_filter(self, hole, matches):
@@ -150,12 +150,12 @@ class EpisodeTitleFromPosition(TitleBaseRule):
def should_remove(self, match, matches, filepart, hole, context):
if match.name == 'episode_details':
return False
return super(EpisodeTitleFromPosition, self).should_remove(match, matches, filepart, hole, context)
return super().should_remove(match, matches, filepart, hole, context)

def when(self, matches, context): # pylint:disable=inconsistent-return-statements
if matches.named('episode_title'):
return
return super(EpisodeTitleFromPosition, self).when(matches, context)
return super().when(matches, context)


class AlternativeTitleReplace(Rule):
@@ -166,7 +166,7 @@ class AlternativeTitleReplace(Rule):
consequence = RenameMatch

def __init__(self, previous_names):
super(AlternativeTitleReplace, self).__init__()
super().__init__()
self.previous_names = previous_names

def when(self, matches, context): # pylint:disable=inconsistent-return-statements

@@ -479,7 +479,7 @@ class SeePatternRange(Rule):
consequence = [RemoveMatch, AppendMatch]

def __init__(self, range_separators):
super(SeePatternRange, self).__init__()
super().__init__()
self.range_separators = range_separators

def when(self, matches, context):
@@ -516,7 +516,7 @@ class AbstractSeparatorRange(Rule):
consequence = [RemoveMatch, AppendMatch]

def __init__(self, range_separators, property_name):
super(AbstractSeparatorRange, self).__init__()
super().__init__()
self.range_separators = range_separators
self.property_name = property_name

@@ -608,7 +608,7 @@ class EpisodeNumberSeparatorRange(AbstractSeparatorRange):
"""

def __init__(self, range_separators):
super(EpisodeNumberSeparatorRange, self).__init__(range_separators, "episode")
super().__init__(range_separators, "episode")


class SeasonSeparatorRange(AbstractSeparatorRange):
@@ -617,7 +617,7 @@ class SeasonSeparatorRange(AbstractSeparatorRange):
"""

def __init__(self, range_separators):
super(SeasonSeparatorRange, self).__init__(range_separators, "season")
super().__init__(range_separators, "season")


class RemoveWeakIfMovie(Rule):
@@ -662,7 +662,7 @@ class RemoveWeak(Rule):
consequence = RemoveMatch, AppendMatch

def __init__(self, episode_words):
super(RemoveWeak, self).__init__()
super().__init__()
self.episode_words = episode_words

def when(self, matches, context):

@@ -396,7 +396,7 @@ class SubtitlePrefixLanguageRule(Rule):

def then(self, matches, when_response, context):
to_rename, to_remove = when_response
super(SubtitlePrefixLanguageRule, self).then(matches, to_remove, context)
super().then(matches, to_remove, context)
for prefix, match in to_rename:
# Remove suffix equivalent of prefix.
suffix = copy.copy(prefix)
@@ -435,7 +435,7 @@ class SubtitleSuffixLanguageRule(Rule):

def then(self, matches, when_response, context):
to_rename, to_remove = when_response
super(SubtitleSuffixLanguageRule, self).then(matches, to_remove, context)
super().then(matches, to_remove, context)
for match in to_rename:
matches.remove(match)
match.name = 'subtitle_language'
@@ -488,7 +488,7 @@ class RemoveInvalidLanguages(Rule):

def __init__(self, common_words):
"""Constructor."""
super(RemoveInvalidLanguages, self).__init__()
super().__init__()
self.common_words = common_words

def when(self, matches, context):

@@ -86,7 +86,7 @@ def other(config): # pylint:disable=unused-argument,too-many-statements
rebulk.regex('(HD)(?P<another>Rip)', value={'other': 'HD', 'another': 'Rip'},
private_parent=True, children=True, validator={'__parent__': seps_surround}, validate_all=True)

for value in ('Screener', 'Remux', 'PAL', 'SECAM', 'NTSC', 'XXX'):
for value in ('Screener', 'Remux', 'Hybrid', 'PAL', 'SECAM', 'NTSC', 'XXX'):
rebulk.string(value, value=value)
rebulk.string('3D', value='3D', tags='has-neighbor')

@@ -98,7 +98,7 @@ class DashSeparatedReleaseGroup(Rule):

def __init__(self, value_formatter):
"""Default constructor."""
super(DashSeparatedReleaseGroup, self).__init__()
super().__init__()
self.value_formatter = value_formatter

@classmethod
@@ -212,7 +212,7 @@ class SceneReleaseGroup(Rule):

def __init__(self, value_formatter):
"""Default constructor."""
super(SceneReleaseGroup, self).__init__()
super().__init__()
self.value_formatter = value_formatter

@staticmethod
@@ -321,7 +321,6 @@ class AnimeReleaseGroup(Rule):

for filepart in marker_sorted(matches.markers.named('path'), matches):

# pylint:disable=bad-continuation
empty_group = matches.markers.range(filepart.start,
filepart.end,
lambda marker: (marker.name == 'group'

@@ -69,7 +69,7 @@ class PostProcessScreenSize(Rule):
consequence = AppendMatch

def __init__(self, standard_heights, min_ar, max_ar):
super(PostProcessScreenSize, self).__init__()
super().__init__()
self.standard_heights = standard_heights
self.min_ar = min_ar
self.max_ar = max_ar

@@ -3,7 +3,7 @@
"""
size property
"""
import re
from rebulk.remodule import re

from rebulk import Rebulk

@@ -3,7 +3,7 @@
"""
streaming_service property
"""
import re
from rebulk.remodule import re

from rebulk import Rebulk
from rebulk.rules import Rule, RemoveMatch
@@ -25,13 +25,22 @@ def streaming_service(config): # pylint: disable=too-many-statements,unused-arg
rebulk = rebulk.string_defaults(ignore_case=True).regex_defaults(flags=re.IGNORECASE, abbreviations=[dash])
rebulk.defaults(name='streaming_service', tags=['source-prefix'])

regex_prefix = 're:'

for value, items in config.items():
patterns = items if isinstance(items, list) else [items]
for pattern in patterns:
if pattern.startswith('re:'):
rebulk.regex(pattern, value=value)
if isinstance(pattern, dict):
pattern_value = pattern.pop('pattern')
kwargs = pattern
pattern = pattern_value
else:
rebulk.string(pattern, value=value)
kwargs = {}
regex = kwargs.pop('regex', False)
if regex or pattern.startswith(regex_prefix):
rebulk.regex(pattern[len(regex_prefix):], value=value, **kwargs)
else:
rebulk.string(pattern, value=value, **kwargs)

rebulk.rules(ValidateStreamingService)

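The streaming service loop above now accepts three pattern forms in the configuration: a plain string, a `re:`-prefixed regex, and a dict carrying extra keyword arguments (as used for the case-sensitive `iT` iTunes pattern in the JSON config earlier in this diff). A small standalone sketch of that dispatch logic, not the actual guessit entry point:

```python
# Hedged sketch mirroring the dispatch above; the print() calls stand in for
# rebulk.regex() / rebulk.string().
config = {"iTunes": ["iTunes", {"pattern": "iT", "ignore_case": False}],
          "HBO Go": ["HBO", "re:HBO-?Go"]}

regex_prefix = 're:'
for value, items in config.items():
    patterns = items if isinstance(items, list) else [items]
    for pattern in patterns:
        if isinstance(pattern, dict):
            kwargs = dict(pattern)           # copy so the config is not mutated
            pattern = kwargs.pop('pattern')
        else:
            kwargs = {}
        regex = kwargs.pop('regex', False)
        if regex or pattern.startswith(regex_prefix):
            print('regex  ', pattern[len(regex_prefix):], '->', value, kwargs)
        else:
            print('string ', pattern, '->', value, kwargs)
```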
@@ -53,7 +53,7 @@ class TitleBaseRule(Rule):
consequence = [AppendMatch, RemoveMatch]

def __init__(self, match_name, match_tags=None, alternative_match_name=None):
super(TitleBaseRule, self).__init__()
super().__init__()
self.match_name = match_name
self.match_tags = match_tags
self.alternative_match_name = alternative_match_name
@@ -299,7 +299,7 @@ class TitleFromPosition(TitleBaseRule):
properties = {'title': [None], 'alternative_title': [None]}

def __init__(self):
super(TitleFromPosition, self).__init__('title', ['title'], 'alternative_title')
super().__init__('title', ['title'], 'alternative_title')

def enabled(self, context):
return not is_disabled(context, 'alternative_title')

@@ -27,7 +27,7 @@ def website(config):
rebulk = rebulk.regex_defaults(flags=re.IGNORECASE).string_defaults(ignore_case=True)
rebulk.defaults(name="website")

with resource_stream('guessit', 'tlds-alpha-by-domain.txt') as tld_file:
with resource_stream('guessit', 'data/tlds-alpha-by-domain.txt') as tld_file:
tlds = [
tld.strip().decode('utf-8')
for tld in tld_file.readlines()

@@ -1752,6 +1752,7 @@
year: 2018
other:
- 3D
- Hybrid
- Proper
- Remux
proper_count: 1

@@ -80,6 +80,9 @@
? Remux
: other: Remux

? Hybrid
: other: Hybrid

? 3D.2019
: other: 3D

@@ -577,13 +577,13 @@
release_group: BTW
type: episode

# Streaming service: RTÉ One
# Streaming service: RTE One
? Show.Name.S10E01.576p.RTE.WEBRip.AAC2.0.H.264-RTN
: title: Show Name
season: 10
episode: 1
screen_size: 576p
streaming_service: RTÉ One
streaming_service: RTE One
source: Web
other: Rip
audio_codec: AAC
@@ -818,7 +818,6 @@
episode: 0
episode_details: Pilot
episode_title: Pilot
language: zh
other:
- Proper
- Rip
@@ -862,7 +861,6 @@
? What.The.Fuck.France.S01E01.Le.doublage.CNLP.WEBRip.AAC2.0.x264-TURTLE
: audio_channels: '2.0'
audio_codec: AAC
country: FR
episode: 1
episode_title: Le doublage
other: Rip
@@ -870,7 +868,7 @@
season: 1
source: Web
streaming_service: Canal+
title: What The Fuck
title: What The Fuck France
type: episode
video_codec: H.264

@@ -943,14 +941,13 @@
? The.Amazing.Race.Canada.S03.720p.CTV.WEBRip.AAC2.0.H.264-BTW
: audio_channels: '2.0'
audio_codec: AAC
country: CA
other: Rip
release_group: BTW
screen_size: 720p
season: 3
source: Web
streaming_service: CTV
title: The Amazing Race
title: The Amazing Race Canada
type: episode
video_codec: H.264

@@ -1240,13 +1237,12 @@
? Big.Brother.Canada.S05.GLBL.WEBRip.AAC2.0.H.264-RTN
: audio_channels: '2.0'
audio_codec: AAC
country: CA
other: Rip
release_group: RTN
season: 5
source: Web
streaming_service: Global
title: Big Brother
title: Big Brother Canada
type: episode
video_codec: H.264

@@ -1330,7 +1326,6 @@
? Handmade.in.Japan.S01E01.720p.iP.WEBRip.AAC2.0.H.264-SUP
: audio_channels: '2.0'
audio_codec: AAC
country: JP
episode: 1
other: Rip
release_group: SUP
@@ -1338,7 +1333,7 @@
season: 1
source: Web
streaming_service: BBC iPlayer
title: Handmade in
title: Handmade in Japan
type: episode
video_codec: H.264

@@ -1463,9 +1458,8 @@
? Bunsen.is.a.Beast.S01E23.Guinea.Some.Lovin.1080p.NICK.WEBRip.AAC2.0.x264-TVSmash
: audio_channels: '2.0'
audio_codec: AAC
country: GN
episode: 23
episode_title: Some Lovin
episode_title: Guinea Some Lovin
other: Rip
release_group: TVSmash
screen_size: 1080p
@@ -1538,13 +1532,14 @@
episode_title: The Masquerade
other: Rip
part: 2
release_group: VP9-BTW
release_group: BTW
screen_size: 1080p
season: 2
source: Web
streaming_service: YouTube Red
title: Escape The Night
type: episode
video_codec: VP9

? Escape.The.Night.S02E02.The.Masquerade.Part.II.2160p.RED.WEBRip.AAC5.1.VP9-BTW
: audio_channels: '5.1'
@@ -1553,13 +1548,14 @@
episode_title: The Masquerade
other: Rip
part: 2
release_group: VP9-BTW
release_group: BTW
screen_size: 2160p
season: 2
source: Web
streaming_service: YouTube Red
title: Escape The Night
type: episode
video_codec: VP9

? Escape.The.Night.S02E02.The.Masquerade.Part.II.720p.RED.WEBRip.AAC5.1.VP9-BTW
: audio_channels: '5.1'
@@ -1568,13 +1564,14 @@
episode_title: The Masquerade
other: Rip
part: 2
release_group: VP9-BTW
release_group: BTW
screen_size: 720p
season: 2
source: Web
streaming_service: YouTube Red
title: Escape The Night
type: episode
video_codec: VP9

? The.Family.Law.S02E01.720p.SBS.WEB-DL.AAC2.0.H.264-BTN
: audio_channels: '2.0'
@@ -1892,7 +1889,7 @@
season: 1
source: Web
streaming_service: Vimeo
title: '555'
# title: '555'
type: episode
video_codec: H.264

@@ -3,10 +3,9 @@
# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name, pointless-string-statement
import json
import os
import sys
from pathlib import Path

import pytest
import six

from ..api import guessit, properties, suggested_expected, GuessitException

@@ -19,25 +18,19 @@ def test_default():


def test_forced_unicode():
ret = guessit(u'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
assert ret and 'title' in ret and isinstance(ret['title'], six.text_type)
ret = guessit('Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
assert ret and 'title' in ret and isinstance(ret['title'], str)


def test_forced_binary():
ret = guessit(b'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
assert ret and 'title' in ret and isinstance(ret['title'], six.binary_type)
assert ret and 'title' in ret and isinstance(ret['title'], bytes)


@pytest.mark.skipif(sys.version_info < (3, 4), reason="Path is not available")
def test_pathlike_object():
try:
from pathlib import Path

path = Path('Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
ret = guessit(path)
assert ret and 'title' in ret
except ImportError: # pragma: no-cover
pass
path = Path('Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
ret = guessit(path)
assert ret and 'title' in ret


def test_unicode_japanese():
@@ -51,16 +44,8 @@ def test_unicode_japanese_options():


def test_forced_unicode_japanese_options():
ret = guessit(u"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [u"阿维达"]})
assert ret and 'title' in ret and ret['title'] == u"阿维达"

# TODO: This doesn't compile on python 3, but should be tested on python 2.
"""
if six.PY2:
def test_forced_binary_japanese_options():
ret = guessit(b"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [b"阿维达"]})
assert ret and 'title' in ret and ret['title'] == b"阿维达"
"""
ret = guessit("[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": ["阿维达"]})
assert ret and 'title' in ret and ret['title'] == "阿维达"


def test_properties():

@@ -3,12 +3,9 @@
# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name, pointless-string-statement


from __future__ import unicode_literals

import os

import pytest
import six

from ..api import guessit, properties, GuessitException

@@ -21,13 +18,13 @@ def test_default():


def test_forced_unicode():
ret = guessit(u'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
assert ret and 'title' in ret and isinstance(ret['title'], six.text_type)
ret = guessit('Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
assert ret and 'title' in ret and isinstance(ret['title'], str)


def test_forced_binary():
ret = guessit(b'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
assert ret and 'title' in ret and isinstance(ret['title'], six.binary_type)
assert ret and 'title' in ret and isinstance(ret['title'], bytes)


def test_unicode_japanese():
@@ -41,24 +38,18 @@ def test_unicode_japanese_options():


def test_forced_unicode_japanese_options():
ret = guessit(u"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [u"阿维达"]})
assert ret and 'title' in ret and ret['title'] == u"阿维达"

# TODO: This doesn't compile on python 3, but should be tested on python 2.
"""
if six.PY2:
def test_forced_binary_japanese_options():
ret = guessit(b"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [b"阿维达"]})
assert ret and 'title' in ret and ret['title'] == b"阿维达"
"""
ret = guessit("[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": ["阿维达"]})
assert ret and 'title' in ret and ret['title'] == "阿维达"


def test_ensure_standard_string_class():
def test_ensure_custom_string_class():
class CustomStr(str):
pass

ret = guessit(CustomStr('1080p'), options={'advanced': True})
assert ret and 'screen_size' in ret and not isinstance(ret['screen_size'].input_string, CustomStr)
ret = guessit(CustomStr('some.title.1080p.mkv'), options={'advanced': True})
assert ret and 'screen_size' in ret and isinstance(ret['screen_size'].input_string, CustomStr)
assert ret and 'title' in ret and isinstance(ret['title'].input_string, CustomStr)
assert ret and 'container' in ret and isinstance(ret['container'].input_string, CustomStr)


def test_properties():

@@ -1,16 +1,25 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name

import json
import os
import sys

import pytest
from _pytest.capture import CaptureFixture

from ..__main__ import main

__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))


# Prevent output from spamming the console
@pytest.fixture(scope="function", autouse=True)
def no_stdout(monkeypatch):
with open(os.devnull, "w") as f:
monkeypatch.setattr(sys, "stdout", f)
yield

def test_main_no_args():
main([])

@@ -24,7 +33,7 @@ def test_main_unicode():


def test_main_forced_unicode():
main([u'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv'])
main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv'])


def test_main_verbose():
@@ -70,3 +79,22 @@ def test_main_help():

def test_main_version():
main(['--version'])


def test_json_output_input_string(capsys: CaptureFixture):
main(['--json', '--output-input-string', 'test.avi'])

outerr = capsys.readouterr()
data = json.loads(outerr.out)

assert 'input_string' in data
assert data['input_string'] == 'test.avi'


def test_json_no_output_input_string(capsys: CaptureFixture):
main(['--json', 'test.avi'])

outerr = capsys.readouterr()
data = json.loads(outerr.out)

assert 'input_string' not in data

@@ -7,7 +7,6 @@ import os
from io import open # pylint: disable=redefined-builtin

import babelfish
import six # pylint:disable=wrong-import-order
import yaml # pylint:disable=wrong-import-order
from rebulk.remodule import re
from rebulk.utils import is_iterable
@@ -53,16 +52,16 @@ class EntryResult(object):
if self.ok:
return self.string + ': OK!'
if self.warning:
return '%s%s: WARNING! (valid=%i, extra=%i)' % ('-' if self.negates else '', self.string, len(self.valid),
len(self.extra))
return '%s%s: WARNING! (valid=%i, extra=%s)' % ('-' if self.negates else '', self.string, len(self.valid),
self.extra)
if self.error:
return '%s%s: ERROR! (valid=%i, missing=%i, different=%i, extra=%i, others=%i)' % \
('-' if self.negates else '', self.string, len(self.valid), len(self.missing), len(self.different),
len(self.extra), len(self.others))
return '%s%s: ERROR! (valid=%i, extra=%s, missing=%s, different=%s, others=%s)' % \
('-' if self.negates else '', self.string, len(self.valid), self.extra, self.missing,
self.different, self.others)

return '%s%s: UNKOWN! (valid=%i, missing=%i, different=%i, extra=%i, others=%i)' % \
('-' if self.negates else '', self.string, len(self.valid), len(self.missing), len(self.different),
len(self.extra), len(self.others))
return '%s%s: UNKOWN! (valid=%i, extra=%s, missing=%s, different=%s, others=%s)' % \
('-' if self.negates else '', self.string, len(self.valid), self.extra, self.missing, self.different,
self.others)

@property
def details(self):
@@ -110,7 +109,7 @@ def files_and_ids(predicate=None):
for filename in filenames:
name, ext = os.path.splitext(filename)
filepath = os.path.join(dirpath_rel, filename)
if ext == '.yml' and (not predicate or predicate(filepath)):
if ext in ['.yml', '.yaml'] and (not predicate or predicate(filepath)):
files.append(filepath)
ids.append(os.path.join(dirpath_rel, name))

@@ -161,7 +160,7 @@ class TestYml(object):

for string, expected in data.items():
TestYml.set_default(expected, default)
string = TestYml.fix_encoding(string, expected)
string = TestYml.fix_encoding(string)

entries.append((filename, string, expected))
unique_id = self._get_unique_id(entry_set, '[' + filename + '] ' + str(string))
@@ -178,17 +177,7 @@ class TestYml(object):
expected[k] = v

@classmethod
def fix_encoding(cls, string, expected):
if six.PY2:
if isinstance(string, six.text_type):
string = string.encode('utf-8')
converts = []
for k, v in expected.items():
if isinstance(v, six.text_type):
v = v.encode('utf-8')
converts.append((k, v))
for k, v in converts:
expected[k] = v
def fix_encoding(cls, string):
if not isinstance(string, str):
string = str(string)
return string

@@ -4,10 +4,7 @@
Options
"""

try:
from collections import OrderedDict
except ImportError: # pragma: no-cover
from ordereddict import OrderedDict # pylint:disable=import-error
from collections import OrderedDict
import babelfish

import yaml # pylint:disable=wrong-import-order
@@ -24,8 +21,8 @@ class OrderedDictYAMLLoader(yaml.SafeLoader):
def __init__(self, *args, **kwargs):
yaml.SafeLoader.__init__(self, *args, **kwargs)

self.add_constructor(u'tag:yaml.org,2002:map', type(self).construct_yaml_map)
self.add_constructor(u'tag:yaml.org,2002:omap', type(self).construct_yaml_map)
self.add_constructor('tag:yaml.org,2002:map', type(self).construct_yaml_map)
self.add_constructor('tag:yaml.org,2002:omap', type(self).construct_yaml_map)

def construct_yaml_map(self, node):
data = OrderedDict()

@@ -7,4 +7,4 @@ Define simple search patterns in bulk to perform advanced matching on any string
from .rebulk import Rebulk
from .rules import Rule, CustomRule, AppendMatch, RemoveMatch, RenameMatch, AppendTags, RemoveTags
from .processors import ConflictSolver, PrivateRemover, POST_PROCESS, PRE_PROCESS
from .pattern import REGEX_AVAILABLE
from .pattern import REGEX_ENABLED

@@ -4,4 +4,4 @@
Version module
"""
# pragma: no cover
__version__ = '2.0.1'
__version__ = '3.0.1'

@@ -7,16 +7,13 @@ from abc import ABCMeta, abstractmethod
from copy import deepcopy
from logging import getLogger

from six import add_metaclass

from .loose import set_defaults
from .pattern import RePattern, StringPattern, FunctionalPattern

log = getLogger(__name__).log


@add_metaclass(ABCMeta)
class Builder(object):
class Builder(metaclass=ABCMeta):
"""
Base builder class for patterns
"""
@@ -147,7 +144,7 @@ class Builder(object):
:return:
:rtype:
"""
from .chain import Chain
from .chain import Chain # pylint:disable=import-outside-toplevel
set_defaults(self._chain_defaults, kwargs)
set_defaults(self._defaults, kwargs)
chain = Chain(self, **kwargs)

@@ -125,7 +125,7 @@ class Chain(Pattern, Builder):
:rtype:
"""
# pylint: disable=too-many-locals
ret = super(Chain, self)._process_match(match, match_index, child=child)
ret = super()._process_match(match, match_index, child=child)
if ret:
return True

@@ -144,7 +144,7 @@ class Chain(Pattern, Builder):
for last_match in last_matches:
match.children.remove(last_match)
match.end = match.children[-1].end if match.children else match.start
ret = super(Chain, self)._process_match(match, match_index, child=child)
ret = super()._process_match(match, match_index, child=child)
if ret:
return True

@@ -6,13 +6,11 @@ Introspect rebulk object to retrieve capabilities.
from abc import ABCMeta, abstractmethod
from collections import defaultdict

import six
from .pattern import StringPattern, RePattern, FunctionalPattern
from .utils import extend_safe


@six.add_metaclass(ABCMeta)
class Description(object):
class Description(metaclass=ABCMeta):
"""
Abstract class for a description.
"""

@@ -15,7 +15,6 @@ try:
from collections import OrderedDict # pylint:disable=ungrouped-imports
except ImportError: # pragma: no cover
from ordereddict import OrderedDict # pylint:disable=import-error
import six

from .loose import ensure_list, filter_index
from .utils import is_iterable
@@ -28,7 +27,7 @@ class MatchesDict(OrderedDict):
"""

def __init__(self):
super(MatchesDict, self).__init__()
super().__init__()
self.matches = defaultdict(list)
self.values_list = defaultdict(list)

@@ -67,7 +66,7 @@ class _BaseMatches(MutableSequence):
def _start_dict(self):
if self.__start_dict is None:
self.__start_dict = defaultdict(_BaseMatches._base)
for start, values in itertools.groupby([m for m in self._delegate], lambda item: item.start):
for start, values in itertools.groupby(list(self._delegate), lambda item: item.start):
_BaseMatches._base_extend(self.__start_dict[start], values)

return self.__start_dict
@@ -76,7 +75,7 @@ class _BaseMatches(MutableSequence):
def _end_dict(self):
if self.__end_dict is None:
self.__end_dict = defaultdict(_BaseMatches._base)
for start, values in itertools.groupby([m for m in self._delegate], lambda item: item.end):
for start, values in itertools.groupby(list(self._delegate), lambda item: item.end):
_BaseMatches._base_extend(self.__end_dict[start], values)

return self.__end_dict
@@ -534,13 +533,6 @@ class _BaseMatches(MutableSequence):
ret[match.name] = value
return ret

if six.PY2: # pragma: no cover
def clear(self):
"""
Python 3 backport
"""
del self[:]

def __len__(self):
return len(self._delegate)

@@ -583,11 +575,11 @@ class Matches(_BaseMatches):

def __init__(self, matches=None, input_string=None):
self.markers = Markers(input_string=input_string)
super(Matches, self).__init__(matches=matches, input_string=input_string)
super().__init__(matches=matches, input_string=input_string)

def _add_match(self, match):
assert not match.marker, "A marker match should not be added to <Matches> object"
super(Matches, self)._add_match(match)
super()._add_match(match)


class Markers(_BaseMatches):
@@ -596,11 +588,11 @@ class Markers(_BaseMatches):
"""

def __init__(self, matches=None, input_string=None):
super(Markers, self).__init__(matches=None, input_string=input_string)
super().__init__(matches=None, input_string=input_string)

def _add_match(self, match):
assert match.marker, "A non-marker match should not be added to <Markers> object"
super(Markers, self)._add_match(match)
super()._add_match(match)


class Match(object):

@@ -7,19 +7,16 @@ Abstract pattern class definition along with various implementations (regexp, st

from abc import ABCMeta, abstractmethod, abstractproperty

import six

from . import debug
from .formatters import default_formatter
from .loose import call, ensure_list, ensure_dict
from .match import Match
from .remodule import re, REGEX_AVAILABLE
from .remodule import re, REGEX_ENABLED
from .utils import find_all, is_iterable, get_first_defined
from .validators import allways_true


@six.add_metaclass(ABCMeta)
class BasePattern(object):
class BasePattern(metaclass=ABCMeta):
"""
Base class for Pattern like objects
"""
@@ -41,8 +38,7 @@ class BasePattern(object):
pass


@six.add_metaclass(ABCMeta)
class Pattern(BasePattern):
class Pattern(BasePattern, metaclass=ABCMeta):
"""
Definition of a particular pattern to search for.
"""
@@ -396,7 +392,7 @@ class StringPattern(Pattern):
"""

def __init__(self, *patterns, **kwargs):
super(StringPattern, self).__init__(**kwargs)
super().__init__(**kwargs)
self._patterns = patterns
self._kwargs = kwargs
self._match_kwargs = filter_match_kwargs(kwargs)
@@ -422,11 +418,11 @@ class RePattern(Pattern):
"""

def __init__(self, *patterns, **kwargs):
super(RePattern, self).__init__(**kwargs)
self.repeated_captures = REGEX_AVAILABLE
super().__init__(**kwargs)
self.repeated_captures = REGEX_ENABLED
if 'repeated_captures' in kwargs:
self.repeated_captures = kwargs.get('repeated_captures')
if self.repeated_captures and not REGEX_AVAILABLE: # pragma: no cover
if self.repeated_captures and not REGEX_ENABLED: # pragma: no cover
raise NotImplementedError("repeated_capture is available only with regex module.")
self.abbreviations = kwargs.get('abbreviations', [])
self._kwargs = kwargs
@@ -434,7 +430,7 @@ class RePattern(Pattern):
self._children_match_kwargs = filter_match_kwargs(kwargs, children=True)
self._patterns = []
for pattern in patterns:
if isinstance(pattern, six.string_types):
if isinstance(pattern, str):
if self.abbreviations and pattern:
for key, replacement in self.abbreviations:
pattern = pattern.replace(key, replacement)
@@ -494,7 +490,7 @@ class FunctionalPattern(Pattern):
"""

def __init__(self, *patterns, **kwargs):
super(FunctionalPattern, self).__init__(**kwargs)
super().__init__(**kwargs)
self._patterns = patterns
self._kwargs = kwargs
self._match_kwargs = filter_match_kwargs(kwargs)

@@ -53,7 +53,7 @@ class Rebulk(Builder):
:return:
:rtype:
"""
super(Rebulk, self).__init__()
super().__init__()
if not callable(disabled):
self.disabled = lambda context: disabled
else:

@@ -5,13 +5,17 @@ Uniform re module
"""
# pylint: disable-all
import os
import logging

REGEX_AVAILABLE = False
if os.environ.get('REGEX_DISABLED') in ["1", "true", "True", "Y"]:
import re
else:
log = logging.getLogger(__name__).log

REGEX_ENABLED = False
if os.environ.get('REBULK_REGEX_ENABLED') in ["1", "true", "True", "Y"]:
try:
import regex as re
REGEX_AVAILABLE = True
REGEX_ENABLED = True
except ImportError:
log.warning('regex module is not available. Unset REBULK_REGEX_ENABLED environment variable, or install regex module to enabled it.')
import re
else:
import re

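The hunk above flips rebulk's regex handling from opt-out (`REGEX_DISABLED`) to opt-in: the third-party `regex` module is only used when `REBULK_REGEX_ENABLED` is set and the module is installed; otherwise the standard `re` module is used. A hedged sketch of enabling it; the environment variable must be set before rebulk is first imported:

```python
# Hedged sketch: REBULK_REGEX_ENABLED and REGEX_ENABLED come from the hunk
# above; `pip install regex` is assumed, otherwise rebulk logs a warning and
# falls back to the standard re module.
import os
os.environ['REBULK_REGEX_ENABLED'] = '1'

from rebulk import remodule
print(remodule.REGEX_ENABLED)
```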
@@ -8,7 +8,6 @@ import inspect
from itertools import groupby
from logging import getLogger

import six
from .utils import is_iterable

from .toposort import toposort
@@ -18,8 +17,7 @@ from . import debug
log = getLogger(__name__).log


@six.add_metaclass(ABCMeta)
class Consequence(object):
class Consequence(metaclass=ABCMeta):
"""
Definition of a consequence to apply.
"""
@@ -40,8 +38,7 @@ class Consequence(object):
pass


@six.add_metaclass(ABCMeta)
class Condition(object):
class Condition(metaclass=ABCMeta):
"""
Definition of a condition to check.
"""
@@ -60,8 +57,7 @@ class Condition(object):
pass


@six.add_metaclass(ABCMeta)
class CustomRule(Condition, Consequence):
class CustomRule(Condition, Consequence, metaclass=ABCMeta):
"""
Definition of a rule to apply
"""
@@ -243,7 +239,7 @@ class Rules(list):
"""

def __init__(self, *rules):
super(Rules, self).__init__()
super().__init__()
self.load(*rules)

def load(self, *rules):

@@ -3,7 +3,6 @@
# pylint: disable=no-self-use, pointless-statement, missing-docstring, unneeded-not, len-as-condition

import pytest
import six

from ..match import Match, Matches
from ..pattern import StringPattern, RePattern
@@ -72,23 +71,18 @@ class TestMatchClass(object):
assert match2 > match1
assert match2 >= match1

if six.PY3:
with pytest.raises(TypeError):
match1 < other
with pytest.raises(TypeError):
match1 < other

with pytest.raises(TypeError):
match1 <= other
with pytest.raises(TypeError):
match1 <= other

with pytest.raises(TypeError):
match1 > other
with pytest.raises(TypeError):
match1 > other

with pytest.raises(TypeError):
match1 >= other

with pytest.raises(TypeError):
match1 >= other
else:
assert match1 < other
assert match1 <= other
assert not match1 > other
assert not match1 >= other

def test_value(self):
match1 = Match(1, 3)

@@ -1,11 +1,11 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=no-self-use, pointless-statement, missing-docstring, unbalanced-tuple-unpacking, len-as-condition
# pylint: disable=no-self-use, pointless-statement, missing-docstring, unbalanced-tuple-unpacking, len-as-condition, no-member

import re
import pytest

from ..pattern import StringPattern, RePattern, FunctionalPattern, REGEX_AVAILABLE
from ..pattern import StringPattern, RePattern, FunctionalPattern, REGEX_ENABLED
from ..match import Match

class TestStringPattern(object):
@@ -706,7 +706,7 @@ class TestFormatter(object):
assert len(matches) == 1

match = matches[0]
if REGEX_AVAILABLE:
if REGEX_ENABLED:
assert len(match.children) == 5
assert [child.value for child in match.children] == ["02", "03", "04", "05", "06"]
else:

@@ -22,7 +22,7 @@ from functools import reduce
class CyclicDependency(ValueError):
def __init__(self, cyclic):
s = 'Cyclic dependencies exist among these items: {0}'.format(', '.join(repr(x) for x in cyclic.items()))
super(CyclicDependency, self).__init__(s)
super().__init__(s)
self.cyclic = cyclic

@@ -12,7 +12,7 @@ ffsubsync=2020-08-04
Flask=1.1.1
gevent-websocker=0.10.1
gitpython=2.1.9
guessit=3.1.1
guessit=3.3.1
guess_language-spirit=0.5.3
Js2Py=0.63 <-- modified: manually merged from upstream: https://github.com/PiotrDabkowski/Js2Py/pull/192/files
knowit=0.3.0-dev
@@ -22,7 +22,7 @@ pyga=2.6.1
pysrt=1.1.1
pytz=2018.4
rarfile=3.0
rebulk=2.0.1
rebulk=3.0.1
requests=2.18.4
semver=2.13.0
SimpleConfigParser=0.1.0 <-- modified version: do not update!!!