mirror of
https://github.com/morpheus65535/bazarr.git
synced 2025-04-24 22:57:13 -04:00
Added AvistaZ and CinemaZ subtitles provider
This commit is contained in:
parent
43d313a31b
commit
b933eb87b9
11 changed files with 543 additions and 1 deletions
|
@ -215,6 +215,14 @@ validators = [
|
||||||
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
|
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
|
||||||
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
|
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
|
||||||
|
|
||||||
|
# avistaz section
|
||||||
|
Validator('avistaz.cookies', must_exist=True, default='', is_type_of=str),
|
||||||
|
Validator('avistaz.user_agent', must_exist=True, default='', is_type_of=str),
|
||||||
|
|
||||||
|
# cinemaz section
|
||||||
|
Validator('cinemaz.cookies', must_exist=True, default='', is_type_of=str),
|
||||||
|
Validator('cinemaz.user_agent', must_exist=True, default='', is_type_of=str),
|
||||||
|
|
||||||
# podnapisi section
|
# podnapisi section
|
||||||
Validator('podnapisi.verify_ssl', must_exist=True, default=True, is_type_of=bool),
|
Validator('podnapisi.verify_ssl', must_exist=True, default=True, is_type_of=bool),
|
||||||
|
|
||||||
|
|
|
@ -229,6 +229,14 @@ def get_providers_auth():
|
||||||
'user_agent': settings.addic7ed.user_agent,
|
'user_agent': settings.addic7ed.user_agent,
|
||||||
'is_vip': settings.addic7ed.vip,
|
'is_vip': settings.addic7ed.vip,
|
||||||
},
|
},
|
||||||
|
'avistaz': {
|
||||||
|
'cookies': settings.avistaz.cookies,
|
||||||
|
'user_agent': settings.avistaz.user_agent,
|
||||||
|
},
|
||||||
|
'cinemaz': {
|
||||||
|
'cookies': settings.cinemaz.cookies,
|
||||||
|
'user_agent': settings.cinemaz.user_agent,
|
||||||
|
},
|
||||||
'opensubtitles': {
|
'opensubtitles': {
|
||||||
'username': settings.opensubtitles.username,
|
'username': settings.opensubtitles.username,
|
||||||
'password': settings.opensubtitles.password,
|
'password': settings.opensubtitles.password,
|
||||||
|
|
|
@ -93,3 +93,32 @@ def get_movies_from_radarr_api(apikey_radarr, radarr_id=None):
|
||||||
return r.json()
|
return r.json()
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
|
def get_history_from_radarr_api(apikey_radarr, movie_id):
    """Fetch the grab history (eventType=1) for one movie from the Radarr API.

    Args:
        apikey_radarr: Radarr API key.
        movie_id: Radarr movie id to query history for.

    Returns:
        The decoded JSON payload on success, or None on any request failure
        (all errors are logged via logging.exception, never raised).
    """
    url_radarr_api_history = f"{url_api_radarr()}history?eventType=1&movieIds={movie_id}&apikey={apikey_radarr}"

    try:
        # Fix: read the Radarr timeout setting; the original used
        # settings.sonarr.http_timeout here — a copy/paste slip from the
        # matching Sonarr helper.
        r = requests.get(url_radarr_api_history, timeout=int(settings.radarr.http_timeout), verify=False,
                         headers=headers)
        r.raise_for_status()
    except requests.exceptions.HTTPError:
        logging.exception("BAZARR Error trying to get history from Radarr. Http error.")
        return
    except requests.exceptions.ConnectionError:
        logging.exception("BAZARR Error trying to get history from Radarr. Connection Error.")
        return
    except requests.exceptions.Timeout:
        logging.exception("BAZARR Error trying to get history from Radarr. Timeout Error.")
        return
    except requests.exceptions.RequestException:
        logging.exception("BAZARR Error trying to get history from Radarr.")
        return
    except Exception as e:
        logging.exception(f"Exception raised while getting history from Radarr API: {e}")
        return
    else:
        # raise_for_status() already rejected 4xx/5xx; this re-check keeps the
        # module's established "200 or None" convention.
        if r.status_code == 200:
            return r.json()
        else:
            return
|
||||||
|
|
|
@ -166,3 +166,32 @@ def get_episodesFiles_from_sonarr_api(apikey_sonarr, series_id=None, episode_fil
|
||||||
return r.json()
|
return r.json()
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
|
def get_history_from_sonarr_api(apikey_sonarr, episode_id):
    """Return the grab history (eventType=1) for one episode from Sonarr.

    On any request failure the error is logged and None is returned; callers
    never see an exception from this helper.
    """
    history_url = f"{url_api_sonarr()}history?eventType=1&episodeId={episode_id}&apikey={apikey_sonarr}"

    try:
        response = requests.get(history_url,
                                timeout=int(settings.sonarr.http_timeout),
                                verify=False,
                                headers=headers)
        response.raise_for_status()
    except requests.exceptions.HTTPError:
        logging.exception("BAZARR Error trying to get history from Sonarr. Http error.")
    except requests.exceptions.ConnectionError:
        logging.exception("BAZARR Error trying to get history from Sonarr. Connection Error.")
    except requests.exceptions.Timeout:
        logging.exception("BAZARR Error trying to get history from Sonarr. Timeout Error.")
    except requests.exceptions.RequestException:
        logging.exception("BAZARR Error trying to get history from Sonarr.")
    except Exception as e:
        logging.exception(f"Exception raised while getting history from Sonarr API: {e}")
    else:
        # Success path: only a 200 yields a payload, matching the module's
        # "200 or None" convention.
        if response.status_code == 200:
            return response.json()
    return None
|
||||||
|
|
|
@ -2,5 +2,10 @@
|
||||||
|
|
||||||
from .ffprobe import refine_from_ffprobe
|
from .ffprobe import refine_from_ffprobe
|
||||||
from .database import refine_from_db
|
from .database import refine_from_db
|
||||||
|
from .arr_history import refine_from_arr_history
|
||||||
|
|
||||||
registered = {"database": refine_from_db, "ffprobe": refine_from_ffprobe}
|
# Registry of available refiners: maps a refiner key to the callable that
# enriches a video object before provider search. "arr_history" is the new
# Sonarr/Radarr grab-history refiner added alongside the AvistaZ providers.
registered = {
    "database": refine_from_db,
    "ffprobe": refine_from_ffprobe,
    "arr_history": refine_from_arr_history,
}
|
||||||
|
|
32
bazarr/subtitles/refiners/arr_history.py
Normal file
32
bazarr/subtitles/refiners/arr_history.py
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
# coding=utf-8
|
||||||
|
# fmt: off
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from app.config import settings
|
||||||
|
from radarr.sync.utils import get_history_from_radarr_api
|
||||||
|
from sonarr.sync.utils import get_history_from_sonarr_api
|
||||||
|
from subliminal import Episode, Movie
|
||||||
|
|
||||||
|
refined_providers = {'avistaz', 'cinemaz'}
|
||||||
|
|
||||||
|
|
||||||
|
def refine_from_arr_history(path, video):
    """Refiner entry point: fill video.info_url from *arr grab history.

    Runs only when at least one provider that needs info_url (AvistaZ/CinemaZ)
    is enabled and the video has not been refined yet.
    """
    wants_refining = video.info_url is None
    provider_enabled = bool(refined_providers.intersection(settings.general.enabled_providers))
    if provider_enabled and wants_refining:
        refine_info_url(video)
|
||||||
|
|
||||||
|
|
||||||
|
def refine_info_url(video):
    """Set video.info_url from the first usable grab record in *arr history.

    Queries Sonarr history for episodes and Radarr history for movies, then
    copies the first record's non-empty nzbInfoUrl onto the video. Does
    nothing when the video has no Sonarr/Radarr id or the lookup failed.
    """
    if isinstance(video, Episode) and video.sonarrEpisodeId:
        history = get_history_from_sonarr_api(settings.sonarr.apikey, video.sonarrEpisodeId)
    elif isinstance(video, Movie) and video.radarrId:
        history = get_history_from_radarr_api(settings.radarr.apikey, video.radarrId)
    else:
        return

    # Fix: the history helpers return None on any API error; without this
    # guard the subscription below raises TypeError and breaks refining.
    if not history or 'records' not in history:
        return

    for grab in history['records']:
        # take the latest grab for the episode (assumes the *arr API returns
        # records newest-first — TODO confirm against the API docs)
        if 'nzbInfoUrl' in grab['data'] and grab['data']['nzbInfoUrl']:
            video.info_url = grab['data']['nzbInfoUrl']
            logging.debug(f'Refining {video} with Info URL: {video.info_url}')
            break
|
|
@ -68,6 +68,24 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
|
||||||
name: "Argenteam Dump",
|
name: "Argenteam Dump",
|
||||||
description: "Subtitles dump of the now extinct Argenteam",
|
description: "Subtitles dump of the now extinct Argenteam",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
key: "avistaz",
|
||||||
|
name: "AvistaZ",
|
||||||
|
description:
|
||||||
|
"avistaz.to - AvistaZ is an Asian torrent tracker for HD movies, TV shows and music",
|
||||||
|
inputs: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
key: "cookies",
|
||||||
|
name: "Cookies, e.g., PHPSESSID=abc; wikisubtitlesuser=xyz; wikisubtitlespass=efg",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
key: "user_agent",
|
||||||
|
name: "User-Agent, e.g., Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:95.0) Gecko/20100101 Firefox/95.0",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
{
|
{
|
||||||
key: "assrt",
|
key: "assrt",
|
||||||
description: "Chinese Subtitles Provider",
|
description: "Chinese Subtitles Provider",
|
||||||
|
@ -96,6 +114,24 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
|
||||||
description:
|
description:
|
||||||
"Provider removed from Bazarr because it was causing too many issues.\nIt will always return no subtitles.",
|
"Provider removed from Bazarr because it was causing too many issues.\nIt will always return no subtitles.",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
key: "cinemaz",
|
||||||
|
name: "CinemaZ",
|
||||||
|
description:
|
||||||
|
"cinemaz.to - CinemaZ is a private torrent tracker which is dedicated to little-known and cult films that you will not find on other popular torrent resources.",
|
||||||
|
inputs: [
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
key: "cookies",
|
||||||
|
name: "Cookies, e.g., PHPSESSID=abc; wikisubtitlesuser=xyz; wikisubtitlespass=efg",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "text",
|
||||||
|
key: "user_agent",
|
||||||
|
name: "User-Agent, e.g., Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:95.0) Gecko/20100101 Firefox/95.0",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
{
|
{
|
||||||
key: "embeddedsubtitles",
|
key: "embeddedsubtitles",
|
||||||
name: "Embedded Subtitles",
|
name: "Embedded Subtitles",
|
||||||
|
|
10
libs/subliminal_patch/providers/avistaz.py
Normal file
10
libs/subliminal_patch/providers/avistaz.py
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
from .avistaz_network import AvistazNetworkProviderBase
|
||||||
|
|
||||||
|
|
||||||
|
class AvistazProvider(AvistazNetworkProviderBase):
    """AvistaZ.to Provider."""
    # Concrete AvistaZ-network site: only the base URL and provider key
    # differ from the shared AvistazNetworkProviderBase implementation.
    server_url = 'https://avistaz.to/'
    provider_name = 'avistaz'
|
373
libs/subliminal_patch/providers/avistaz_network.py
Normal file
373
libs/subliminal_patch/providers/avistaz_network.py
Normal file
|
@ -0,0 +1,373 @@
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from http.cookies import SimpleCookie
|
||||||
|
from random import randint
|
||||||
|
|
||||||
|
import pycountry
|
||||||
|
from requests.cookies import RequestsCookieJar
|
||||||
|
from subliminal.exceptions import AuthenticationError
|
||||||
|
from subliminal.providers import ParserBeautifulSoup
|
||||||
|
from subliminal_patch.http import RetryingCFSession
|
||||||
|
from subliminal_patch.pitcher import store_verification
|
||||||
|
from subliminal_patch.providers import Provider
|
||||||
|
from subliminal_patch.subtitle import Subtitle
|
||||||
|
from subzero.language import Language
|
||||||
|
from .utils import get_archive_from_bytes, get_subtitle_from_archive, FIRST_THOUSAND_OR_SO_USER_AGENTS as AGENT_LIST
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Human-readable language names offered by the AvistaZ-network sites. Each
# name is resolved to a subzero Language via pycountry in
# AvistazNetworkProviderBase.languages; names pycountry cannot resolve are
# silently dropped by lookup_lang (it returns None and filter() removes them).
supported_languages_names = [
    "Abkhazian", "Afar", "Afrikaans", "Akan", "Albanian", "Amharic",
    "Arabic", "Aragonese", "Armenian", "Assamese", "Avaric", "Avestan",
    "Aymara", "Azerbaijani", "Bambara", "Bashkir", "Basque", "Belarusian",
    "Bengali", "Bihari languages", "Bislama", "Bokmål, Norwegian", "Bosnian",
    "Brazilian Portuguese", "Breton", "Bulgarian", "Burmese", "Cantonese",
    "Catalan", "Central Khmer", "Chamorro", "Chechen", "Chichewa", "Chinese",
    "Church Slavic", "Chuvash", "Cornish", "Corsican", "Cree", "Croatian",
    "Czech", "Danish", "Dhivehi", "Dutch", "Dzongkha", "English",
    "Esperanto", "Estonian", "Ewe", "Faroese", "Fijian", "Filipino",
    "Finnish", "French", "Fulah", "Gaelic", "Galician", "Ganda",
    "Georgian", "German", "Greek", "Guarani", "Gujarati", "Haitian",
    "Hausa", "Hebrew", "Herero", "Hindi", "Hiri Motu", "Hungarian",
    "Icelandic", "Ido", "Igbo", "Indonesian", "Interlingua", "Interlingue",
    "Inuktitut", "Inupiaq", "Irish", "Italian", "Japanese", "Javanese",
    "Kalaallisut", "Kannada", "Kanuri", "Kashmiri", "Kazakh", "Kikuyu",
    "Kinyarwanda", "Kirghiz", "Komi", "Kongo", "Korean", "Kuanyama",
    "Kurdish", "Lao", "Latin", "Latvian", "Limburgan", "Lingala",
    "Lithuanian", "Luba-Katanga", "Luxembourgish", "Macedonian", "Malagasy",
    "Malay", "Malayalam", "Maltese", "Mandarin", "Manx", "Maori",
    "Marathi", "Marshallese", "Mongolian", "Moore", "Nauru", "Navajo",
    "Ndebele, North", "Ndebele, South", "Ndonga", "Nepali", "Northern Sami",
    "Norwegian", "Norwegian Nynorsk", "Occitan (post 1500)", "Ojibwa",
    "Oriya", "Oromo", "Ossetian", "Pali", "Panjabi", "Persian",
    "Polish", "Portuguese", "Pushto", "Quechua", "Romanian", "Romansh",
    "Rundi", "Russian", "Samoan", "Sango", "Sanskrit", "Sardinian",
    "Serbian", "Shona", "Sichuan Yi", "Sindhi", "Sinhala", "Slovak",
    "Slovenian", "Somali", "Sotho, Southern", "Spanish", "Sundanese",
    "Swahili", "Swati", "Swedish", "Tagalog", "Tahitian", "Tajik",
    "Tamil", "Tatar", "Telugu", "Thai", "Tibetan", "Tigrinya",
    "Tongan", "Tsonga", "Tswana", "Turkish", "Turkmen", "Twi",
    "Uighur", "Ukrainian", "Urdu", "Uzbek", "Venda", "Vietnamese",
    "Volapük", "Walloon", "Welsh", "Western Frisian", "Wolof", "Xhosa",
    "Yiddish", "Yoruba", "Zhuang", "Zulu"
]
|
||||||
|
|
||||||
|
|
||||||
|
class AvistazNetworkSubtitle(Subtitle):
    """A subtitle scraped from an AvistaZ-network release page."""
    provider_name = None

    def __init__(self, provider_name, page_link, download_link, language, video, filename, release, uploader):
        super().__init__(language, page_link=page_link)
        # origin / identity
        self.provider_name = provider_name
        self.page_link = page_link
        self.download_link = download_link
        self.uploader = uploader
        # subtitle metadata
        self.language = language
        self.filename = filename
        self.release_info = release
        self.video = video
        # filled in later (or deliberately left unknown)
        self.hearing_impaired = None
        self.matches = None
        self.content = None
        self.encoding = None

    @property
    def id(self):
        # The download file name serves as the unique subtitle id.
        return self.filename

    def get_matches(self, video):
        # Subtitles come straight from the release page the video was grabbed
        # from, so they are always treated as a perfect ('hash') match.
        self.matches = {'hash'}
        return self.matches
|
||||||
|
|
||||||
|
|
||||||
|
def lookup_lang(name):
    """Resolve a human-readable language name to a subzero Language.

    Returns None when the name cannot be resolved, so callers can drop
    unresolvable entries with filter(None, ...).
    """
    try:
        return Language(pycountry.languages.lookup(name).alpha_3)
    except Exception:
        # Fix: was a bare `except:`, which also traps SystemExit and
        # KeyboardInterrupt. `Exception` keeps the "unknown name -> None"
        # contract without swallowing interpreter-exit signals.
        return None
|
||||||
|
|
||||||
|
|
||||||
|
class AvistazNetworkProviderBase(Provider):
    """AvistaZ Network base provider.

    Shared implementation for AvistaZ-network trackers (AvistaZ, CinemaZ).
    Subclasses only set `server_url` and `provider_name`. Authentication is
    cookie-based: the user supplies a raw cookie string, no login is
    performed. Subtitles are scraped from the release page the video was
    originally grabbed from (video.info_url, set by the arr_history refiner).
    """
    subtitle_class = AvistazNetworkSubtitle
    # Resolve the site's language names to Language objects; unresolvable
    # names are dropped (lookup_lang returns None, filtered out here).
    languages = set(filter(None, map(lookup_lang, supported_languages_names)))
    # Also advertise hearing-impaired variants of every supported language.
    languages.update(set(Language.rebuild(L, hi=True) for L in languages))

    server_url = None      # set by subclass, e.g. 'https://avistaz.to/'
    provider_name = None   # set by subclass, e.g. 'avistaz'
    hash_verifiable = True

    def __init__(self, cookies, user_agent=None):
        # `cookies` is a raw cookie header string (e.g. "PHPSESSID=...; ...");
        # `user_agent` optionally pins the UA, otherwise a random one is used.
        self.session = None
        self.cookies = cookies
        self.user_agent = user_agent

    def initialize(self):
        # Cloudflare-aware session with retries.
        self.session = RetryingCFSession()

        if self.user_agent:
            self.session.headers['User-Agent'] = self.user_agent
        else:
            # No UA configured: pick a random one from the bundled list.
            self.session.headers['User-Agent'] = AGENT_LIST[randint(0, len(AGENT_LIST) - 1)]

        if self.cookies:
            # Parse the raw cookie string into the session's cookie jar.
            self.session.cookies = RequestsCookieJar()
            simple_cookie = SimpleCookie()
            simple_cookie.load(self.cookies)

            for k, v in simple_cookie.items():
                self.session.cookies.set(k, v.value)

            # Probe a logged-in-only page to validate the cookies; a redirect
            # or 403/404 means the session is not authenticated anymore.
            rr = self.session.get(self.server_url + 'rules', allow_redirects=False, timeout=10,
                                  headers={"Referer": self.server_url})
            if rr.status_code in [302, 404, 403]:
                logger.info('Cookies expired')
                raise AuthenticationError("cookies not valid anymore")

            store_verification(self.provider_name, self.session)
            logger.debug('Cookies valid')
            # Small delay before the first real request - presumably to avoid
            # tripping the tracker's rate limiting (TODO confirm).
            time.sleep(2)
            return True

    def terminate(self):
        self.session.close()

    def list_subtitles(self, video, languages):
        # Only videos grabbed from this very tracker can be matched: the
        # release page URL must point at our server.
        if video.info_url is None or not video.info_url.startswith(self.server_url):
            logger.debug('%s not downloaded from %s. Skipped', video, self.server_url)
            return []

        html = self._query_info_url(video.info_url)

        if html is None:
            logger.debug('%s release page not found. Release might have been removed', video)
            return []

        release = self._parse_release_table(html)

        # The 'Subtitles' cell contains a nested table only when subtitles exist.
        if release['Subtitles'].table is None:
            logger.debug('No subtitles found for %s', video)
            return []

        # Header row gives the column names used to key each subtitle row.
        subtitle_columns = list(map(lambda x: x.get_text(), release['Subtitles'].thead.find_all('th')))

        subtitles = []
        for row in release['Subtitles'].tbody.find_all('tr', recursive=False):

            subtitle_cols = self._parse_subtitle_row(row, subtitle_columns)

            release_name = release['Title'].get_text().strip()
            lang = lookup_lang(subtitle_cols['Language'].get_text().strip())
            download_link = subtitle_cols['Download'].a['href']
            uploader_name = subtitle_cols['Uploader'].get_text().strip()

            # lookup_lang may return None; `None not in languages` skips it too.
            if lang not in languages:
                continue

            subtitles.append(self.subtitle_class(
                provider_name=self.provider_name,
                page_link=video.info_url,
                download_link=download_link,
                language=lang,
                video=video,
                filename=download_link.split('/')[-1],
                release=release_name,
                uploader=uploader_name,
            ))

        return subtitles

    def _query_info_url(self, info_url):
        """Fetch the release page; None on 404, decoded HTML otherwise."""
        response = self.session.get(info_url, timeout=30)

        if response.status_code == 404:
            return None
        else:
            response.raise_for_status()

        return response.content.decode('utf-8', 'ignore')

    def _parse_subtitle_row(self, row, subtitle_columns):
        """Map one <tr> of the subtitles table to {column name: <td> cell}."""
        columns = {}
        for i, data in enumerate(row.find_all('td', recursive=False)):
            columns[subtitle_columns[i]] = data
        return columns

    def _parse_release_table(self, html):
        """Parse the release-info table into {row label: second <td> cell}."""
        # NOTE(review): the positional CSS selector is tightly coupled to the
        # site's current page layout and will break silently if it changes.
        release_data_table = (ParserBeautifulSoup(html, ['html.parser'])
                              .select_one('#content-area > div:nth-child(4) > div.table-responsive > table > tbody'))

        rows = {}
        for tr in release_data_table.find_all('tr', recursive=False):
            rows[tr.td.get_text()] = tr.select_one('td:nth-child(2)', recursive=False)
        return rows

    def download_subtitle(self, subtitle):
        response = self.session.get(subtitle.download_link)
        response.raise_for_status()
        # Archives are unpacked; anything else is assumed to be the subtitle
        # file itself.
        if subtitle.filename.endswith((".zip", ".rar")):
            archive = get_archive_from_bytes(response.content)
            subtitle.content = get_subtitle_from_archive(
                archive, episode=subtitle.video.episode
            )
        else:
            subtitle.content = response.content
|
9
libs/subliminal_patch/providers/cinemaz.py
Normal file
9
libs/subliminal_patch/providers/cinemaz.py
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
from subliminal_patch.providers.avistaz_network import AvistazNetworkProviderBase
|
||||||
|
|
||||||
|
|
||||||
|
class CinemazProvider(AvistazNetworkProviderBase):
    """CinemaZ.to Provider."""
    # Concrete AvistaZ-network site: only the base URL and provider key
    # differ from the shared AvistazNetworkProviderBase implementation.
    server_url = 'https://cinemaz.to/'
    provider_name = 'cinemaz'
|
|
@ -14,6 +14,7 @@ class Video(Video_):
|
||||||
season_fully_aired = None
|
season_fully_aired = None
|
||||||
audio_languages = None
|
audio_languages = None
|
||||||
external_subtitle_languages = None
|
external_subtitle_languages = None
|
||||||
|
info_url = None
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
|
@ -31,6 +32,7 @@ class Video(Video_):
|
||||||
streaming_service=None,
|
streaming_service=None,
|
||||||
edition=None,
|
edition=None,
|
||||||
other=None,
|
other=None,
|
||||||
|
info_url=None,
|
||||||
**kwargs
|
**kwargs
|
||||||
):
|
):
|
||||||
super(Video, self).__init__(
|
super(Video, self).__init__(
|
||||||
|
@ -54,3 +56,4 @@ class Video(Video_):
|
||||||
self.edition = edition
|
self.edition = edition
|
||||||
self.original_path = name
|
self.original_path = name
|
||||||
self.other = other
|
self.other = other
|
||||||
|
self.info_url = info_url
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue