Mirror of https://github.com/morpheus65535/bazarr.git
Merge branch 'development' into radarr
Commit efb1b94247
34 changed files with 524 additions and 1392 deletions
@@ -29,7 +29,6 @@ You can also reach me on [Discord](https://discord.gg/MH2e2eb).
* opensubtitles
* podnapisi
* shooter
* subscenter
* thesubdb
* tvsubtitles
@@ -48,6 +48,7 @@ def configure_logging():
    logging.getLogger("enzyme").setLevel(logging.CRITICAL)
    logging.getLogger("apscheduler").setLevel(logging.WARNING)
    logging.getLogger("subliminal").setLevel(logging.CRITICAL)
    logging.getLogger("stevedore.extension").setLevel(logging.CRITICAL)
    root = logging.getLogger()
    root.setLevel(log_level)
    root.addHandler(fh)
@@ -165,12 +166,16 @@ def series():
    offset = (int(page) - 1) * 15
    max_page = int(math.ceil(missing_count / 15.0))

    c.execute("SELECT tvdbId, title, path_substitution(path), languages, hearing_impaired, sonarrSeriesId, poster, audio_language FROM table_shows ORDER BY title ASC LIMIT 15 OFFSET ?", (offset,))
    c.execute("SELECT tvdbId, title, path_substitution(path), languages, hearing_impaired, sonarrSeriesId, poster, audio_language FROM table_shows ORDER BY sortTitle ASC LIMIT 15 OFFSET ?", (offset,))
    data = c.fetchall()
    c.execute("SELECT code2, name FROM table_settings_languages WHERE enabled = 1")
    languages = c.fetchall()
    c.execute("SELECT table_shows.sonarrSeriesId, COUNT(table_episodes.missing_subtitles) FROM table_shows LEFT JOIN table_episodes ON table_shows.sonarrSeriesId=table_episodes.sonarrSeriesId WHERE table_episodes.missing_subtitles IS NOT '[]' GROUP BY table_shows.sonarrSeriesId")
    missing_subtitles_list = c.fetchall()
    c.execute("SELECT table_shows.sonarrSeriesId, COUNT(table_episodes.missing_subtitles) FROM table_shows LEFT JOIN table_episodes ON table_shows.sonarrSeriesId=table_episodes.sonarrSeriesId GROUP BY table_shows.sonarrSeriesId")
    total_subtitles_list = c.fetchall()
    c.close()
    output = template('series', __file__=__file__, bazarr_version=bazarr_version, rows=data, languages=languages, missing_count=missing_count, page=page, max_page=max_page, base_url=base_url, single_language=single_language)
    output = template('series', __file__=__file__, bazarr_version=bazarr_version, rows=data, missing_subtitles_list=missing_subtitles_list, total_subtitles_list=total_subtitles_list, languages=languages, missing_count=missing_count, page=page, max_page=max_page, base_url=base_url, single_language=single_language)
    return output

@route(base_url + 'serieseditor')
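For context on the numbers used above: the view shows 15 series per page, and the two new COUNT queries return (sonarrSeriesId, count) pairs that the template looks up per show. A minimal illustrative sketch of that math (the helper name and the dict form are ours, not Bazarr's):

    import math

    def page_window(page, missing_count, per_page=15):
        # same 15-rows-per-page convention as the series() view above
        offset = (int(page) - 1) * per_page
        max_page = int(math.ceil(missing_count / float(per_page)))
        return offset, max_page

    # missing_subtitles_list / total_subtitles_list are (sonarrSeriesId, count) rows,
    # so a dict makes the per-show lookup in the template trivial
    missing_by_series = dict(missing_subtitles_list)
    total_by_series = dict(total_subtitles_list)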
@@ -9,6 +9,15 @@ providers_list = sorted(provider_manager.names())
db = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'data/db/bazarr.db'), timeout=30)
c = db.cursor()

# Remove unsupported providers
providers_in_db = c.execute('SELECT name FROM table_settings_providers').fetchall()
for provider_in_db in providers_in_db:
    if provider_in_db[0] not in providers_list:
        c.execute('DELETE FROM table_settings_providers WHERE name = ?', (provider_in_db[0], ))

# Commit changes to database table
db.commit()

# Insert providers in database table
for provider_name in providers_list:
    c.execute('''INSERT OR IGNORE INTO table_settings_providers(name) VALUES(?)''', (provider_name, ))
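The block above reconciles the provider table with whatever subliminal currently exposes: drop rows for providers that no longer exist, then add any new ones without clobbering saved settings. A standalone sketch of the same pattern (the function and its name are illustrative, not part of Bazarr):

    import sqlite3

    def sync_provider_table(db_path, supported_names):
        db = sqlite3.connect(db_path, timeout=30)
        c = db.cursor()
        # drop rows for providers subliminal no longer ships
        existing = [row[0] for row in c.execute('SELECT name FROM table_settings_providers')]
        c.executemany('DELETE FROM table_settings_providers WHERE name = ?',
                      [(name,) for name in existing if name not in supported_names])
        # add rows for newly available providers, keeping existing settings untouched
        c.executemany('INSERT OR IGNORE INTO table_settings_providers(name) VALUES(?)',
                      [(name,) for name in supported_names])
        db.commit()
        db.close()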
@@ -48,9 +48,9 @@ def update_series():
        # Update or insert shows list in database table
        try:
            c.execute('''INSERT INTO table_shows(title, path, tvdbId, languages,`hearing_impaired`, sonarrSeriesId, overview, poster, fanart, `audio_language`) VALUES (?,?,?,(SELECT languages FROM table_shows WHERE tvdbId = ?),(SELECT `hearing_impaired` FROM table_shows WHERE tvdbId = ?), ?, ?, ?, ?, ?)''', (show["title"], show["path"], show["tvdbId"], show["tvdbId"], show["tvdbId"], show["id"], overview, poster, fanart, profile_id_to_language(show['qualityProfileId'])))
            c.execute('''INSERT INTO table_shows(title, path, tvdbId, languages,`hearing_impaired`, sonarrSeriesId, overview, poster, fanart, `audio_language`, sortTitle) VALUES (?,?,?,(SELECT languages FROM table_shows WHERE tvdbId = ?),(SELECT `hearing_impaired` FROM table_shows WHERE tvdbId = ?), ?, ?, ?, ?, ?, ?)''', (show["title"], show["path"], show["tvdbId"], show["tvdbId"], show["tvdbId"], show["id"], overview, poster, fanart, profile_id_to_language(show['qualityProfileId']), show['sortTitle']))
        except:
            c.execute('''UPDATE table_shows SET title = ?, path = ?, tvdbId = ?, sonarrSeriesId = ?, overview = ?, poster = ?, fanart = ?, `audio_language` = ? WHERE tvdbid = ?''', (show["title"],show["path"],show["tvdbId"],show["id"],overview,poster,fanart,profile_id_to_language((show['qualityProfileId'] if sonarr_version == 2 else show['languageProfileId'])),show["tvdbId"]))
            c.execute('''UPDATE table_shows SET title = ?, path = ?, tvdbId = ?, sonarrSeriesId = ?, overview = ?, poster = ?, fanart = ?, `audio_language` = ? , sortTitle = ? WHERE tvdbid = ?''', (show["title"],show["path"],show["tvdbId"],show["id"],overview,poster,fanart,profile_id_to_language((show['qualityProfileId'] if sonarr_version == 2 else show['languageProfileId'])),show['sortTitle'],show["tvdbId"]))

    # Delete shows not in Sonarr anymore
    deleted_items = []
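The try/except pair above is an upsert: INSERT for new shows, UPDATE when the tvdbId already exists. On SQLite 3.24+ the same intent can be written in one statement, assuming tvdbId carries a UNIQUE constraint; a trimmed, illustrative sketch only (not the actual Bazarr code):

    c.execute('''INSERT INTO table_shows(tvdbId, title, path, sonarrSeriesId, sortTitle)
                 VALUES (?, ?, ?, ?, ?)
                 ON CONFLICT(tvdbId) DO UPDATE SET
                     title = excluded.title,
                     path = excluded.path,
                     sonarrSeriesId = excluded.sonarrSeriesId,
                     sortTitle = excluded.sortTitle''',
              (show["tvdbId"], show["title"], show["path"], show["id"], show['sortTitle']))

The bare except around the INSERT is the more portable choice for older SQLite builds, which is presumably why it is kept here.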
@ -1,381 +0,0 @@
|
|||
Subliminal
|
||||
==========
|
||||
Subtitles, faster than your thoughts.
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/subliminal.svg
|
||||
:target: https://pypi.python.org/pypi/subliminal
|
||||
:alt: Latest Version
|
||||
|
||||
.. image:: https://travis-ci.org/Diaoul/subliminal.svg?branch=develop
|
||||
:target: https://travis-ci.org/Diaoul/subliminal
|
||||
:alt: Travis CI build status
|
||||
|
||||
.. image:: https://readthedocs.org/projects/subliminal/badge/?version=latest
|
||||
:target: https://subliminal.readthedocs.org/
|
||||
:alt: Documentation Status
|
||||
|
||||
.. image:: https://coveralls.io/repos/Diaoul/subliminal/badge.svg?branch=develop&service=github
|
||||
:target: https://coveralls.io/github/Diaoul/subliminal?branch=develop
|
||||
:alt: Code coverage
|
||||
|
||||
.. image:: https://img.shields.io/github/license/Diaoul/subliminal.svg
|
||||
:target: https://github.com/Diaoul/subliminal/blob/master/LICENSE
|
||||
:alt: License
|
||||
|
||||
.. image:: https://img.shields.io/badge/gitter-join%20chat-1dce73.svg
|
||||
:alt: Join the chat at https://gitter.im/Diaoul/subliminal
|
||||
:target: https://gitter.im/Diaoul/subliminal
|
||||
|
||||
|
||||
:Project page: https://github.com/Diaoul/subliminal
|
||||
:Documentation: https://subliminal.readthedocs.org/
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
CLI
|
||||
^^^
|
||||
Download English subtitles::
|
||||
|
||||
$ subliminal download -l en The.Big.Bang.Theory.S05E18.HDTV.x264-LOL.mp4
|
||||
Collecting videos [####################################] 100%
|
||||
1 video collected / 0 video ignored / 0 error
|
||||
Downloading subtitles [####################################] 100%
|
||||
Downloaded 1 subtitle
|
||||
|
||||
Library
|
||||
^^^^^^^
|
||||
Download best subtitles in French and English for videos less than two weeks old in a video folder:
|
||||
|
||||
.. code:: python
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from babelfish import Language
|
||||
from subliminal import download_best_subtitles, region, save_subtitles, scan_videos
|
||||
|
||||
# configure the cache
|
||||
region.configure('dogpile.cache.dbm', arguments={'filename': 'cachefile.dbm'})
|
||||
|
||||
# scan for videos newer than 2 weeks and their existing subtitles in a folder
|
||||
videos = scan_videos('/video/folder', age=timedelta(weeks=2))
|
||||
|
||||
# download best subtitles
|
||||
subtitles = download_best_subtitles(videos, {Language('eng'), Language('fra')})
|
||||
|
||||
# save them to disk, next to the video
|
||||
for v in videos:
|
||||
save_subtitles(v, subtitles[v])
|
||||
|
||||
Docker
|
||||
^^^^^^
|
||||
Run subliminal in a docker container::
|
||||
|
||||
$ docker run --rm --name subliminal -v subliminal_cache:/usr/src/cache -v /tvshows:/tvshows -it diaoulael/subliminal download -l en /tvshows/The.Big.Bang.Theory.S05E18.HDTV.x264-LOL.mp4
|
||||
|
||||
Installation
|
||||
------------
|
||||
Subliminal can be installed as a regular python module by running::
|
||||
|
||||
$ [sudo] pip install subliminal
|
||||
|
||||
For a better isolation with your system you should use a dedicated virtualenv or install for your user only using
|
||||
the ``--user`` flag.
|
||||
|
||||
Nautilus/Nemo integration
|
||||
-------------------------
|
||||
See the dedicated `project page <https://github.com/Diaoul/nautilus-subliminal>`_ for more information.
|
||||
|
||||
|
||||
Changelog
|
||||
---------
|
||||
|
||||
2.0.5
|
||||
^^^^^
|
||||
**release date:** 2016-09-03
|
||||
|
||||
* Fix addic7ed provider for some series name
|
||||
* Fix existing subtitles detection
|
||||
* Improve scoring
|
||||
* Add Docker container
|
||||
* Add .ogv video extension
|
||||
|
||||
|
||||
2.0.4
|
||||
^^^^^
|
||||
**release date:** 2016-09-03
|
||||
|
||||
* Fix subscenter
|
||||
|
||||
|
||||
2.0.3
|
||||
^^^^^
|
||||
**release date:** 2016-06-10
|
||||
|
||||
* Fix clearing cache in CLI
|
||||
|
||||
|
||||
2.0.2
|
||||
^^^^^
|
||||
**release date:** 2016-06-06
|
||||
|
||||
* Fix for dogpile.cache>=0.6.0
|
||||
* Fix missing sphinx_rtd_theme dependency
|
||||
|
||||
|
||||
2.0.1
|
||||
^^^^^
|
||||
**release date:** 2016-06-06
|
||||
|
||||
* Fix beautifulsoup4 minimal requirement
|
||||
|
||||
|
||||
2.0.0
|
||||
^^^^^
|
||||
**release date:** 2016-06-04
|
||||
|
||||
* Add refiners to enrich videos with information from metadata, tvdb and omdb
|
||||
* Add asynchronous provider search for faster searches
|
||||
* Add registrable managers so subliminal can run without install
|
||||
* Add archive support
|
||||
* Add the ability to customize scoring logic
|
||||
* Add an age argument to scan_videos for faster scanning
|
||||
* Add legendas.tv provider
|
||||
* Add shooter.cn provider
|
||||
* Improve matching and scoring
|
||||
* Improve documentation
|
||||
* Split nautilus integration into its own project
|
||||
|
||||
|
||||
1.1.1
|
||||
^^^^^
|
||||
**release date:** 2016-01-03
|
||||
|
||||
* Fix scanning videos on bad MKV files
|
||||
|
||||
|
||||
1.1
|
||||
^^^
|
||||
**release date:** 2015-12-29
|
||||
|
||||
* Fix library usage example in README
|
||||
* Fix for series name with special characters in addic7ed provider
|
||||
* Fix id property in thesubdb provider
|
||||
* Improve matching on titles
|
||||
* Add support for nautilus context menu with translations
|
||||
* Add support for searching subtitles in a separate directory
|
||||
* Add subscenter provider
|
||||
* Add support for python 3.5
|
||||
|
||||
|
||||
1.0.1
|
||||
^^^^^
|
||||
**release date:** 2015-07-23
|
||||
|
||||
* Fix unicode issues in CLI (python 2 only)
|
||||
* Fix score scaling in CLI (python 2 only)
|
||||
* Improve error handling in CLI
|
||||
* Color collect report in CLI
|
||||
|
||||
|
||||
1.0
|
||||
^^^
|
||||
**release date:** 2015-07-22
|
||||
|
||||
* Many changes and fixes
|
||||
* New test suite
|
||||
* New documentation
|
||||
* New CLI
|
||||
* Added support for SubsCenter
|
||||
|
||||
|
||||
0.7.5
|
||||
^^^^^
|
||||
**release date:** 2015-03-04
|
||||
|
||||
* Update requirements
|
||||
* Remove BierDopje provider
|
||||
* Add pre-guessed video optional argument in scan_video
|
||||
* Improve hearing impaired support
|
||||
* Fix TVSubtitles and Podnapisi providers
|
||||
|
||||
|
||||
0.7.4
|
||||
^^^^^
|
||||
**release date:** 2014-01-27
|
||||
|
||||
* Fix requirements for guessit and babelfish
|
||||
|
||||
|
||||
0.7.3
|
||||
^^^^^
|
||||
**release date:** 2013-11-22
|
||||
|
||||
* Fix windows compatibility
|
||||
* Improve subtitle validation
|
||||
* Improve embedded subtitle languages detection
|
||||
* Improve unittests
|
||||
|
||||
|
||||
0.7.2
|
||||
^^^^^
|
||||
**release date:** 2013-11-10
|
||||
|
||||
* Fix TVSubtitles for ambiguous series
|
||||
* Add a CACHE_VERSION to force cache reloading on version change
|
||||
* Set CLI default cache expiration time to 30 days
|
||||
* Add podnapisi provider
|
||||
* Support script for languages e.g. Latn, Cyrl
|
||||
* Improve logging levels
|
||||
* Fix subtitle validation in some rare cases
|
||||
|
||||
|
||||
0.7.1
|
||||
^^^^^
|
||||
**release date:** 2013-11-06
|
||||
|
||||
* Improve CLI
|
||||
* Add login support for Addic7ed
|
||||
* Remove lxml dependency
|
||||
* Many fixes
|
||||
|
||||
|
||||
0.7.0
|
||||
^^^^^
|
||||
**release date:** 2013-10-29
|
||||
|
||||
**WARNING:** Complete rewrite of subliminal with backward incompatible changes
|
||||
|
||||
* Use enzyme to parse metadata of videos
|
||||
* Use babelfish to handle languages
|
||||
* Use dogpile.cache for caching
|
||||
* Use charade to detect subtitle encoding
|
||||
* Use pysrt for subtitle validation
|
||||
* Use entry points for subtitle providers
|
||||
* New subtitle score computation
|
||||
* Hearing impaired subtitles support
|
||||
* Drop async support
|
||||
* Drop a few providers
|
||||
* And much more...
|
||||
|
||||
|
||||
0.6.4
|
||||
^^^^^
|
||||
**release date:** 2013-05-19
|
||||
|
||||
* Fix requirements due to enzyme 0.3
|
||||
|
||||
|
||||
0.6.3
|
||||
^^^^^
|
||||
**release date:** 2013-01-17
|
||||
|
||||
* Fix requirements due to requests 1.0
|
||||
|
||||
|
||||
0.6.2
|
||||
^^^^^
|
||||
**release date:** 2012-09-15
|
||||
|
||||
* Fix BierDopje
|
||||
* Fix Addic7ed
|
||||
* Fix SubsWiki
|
||||
* Fix missing enzyme import
|
||||
* Add Catalan and Galician languages to Addic7ed
|
||||
* Add possible services in help message of the CLI
|
||||
* Allow existing filenames to be passed without the ./ prefix
|
||||
|
||||
|
||||
0.6.1
|
||||
^^^^^
|
||||
**release date:** 2012-06-24
|
||||
|
||||
* Fix subtitle release name in BierDopje
|
||||
* Fix subtitles being downloaded multiple times
|
||||
* Add Chinese support to TvSubtitles
|
||||
* Fix encoding issues
|
||||
* Fix single download subtitles without the force option
|
||||
* Add Spanish (Latin America) exception to Addic7ed
|
||||
* Fix group_by_video when a list entry has None as subtitles
|
||||
* Add support for Galician language in Subtitulos
|
||||
* Add an integrity check after subtitles download for Addic7ed
|
||||
* Add error handling for if not strict in Language
|
||||
* Fix TheSubDB hash method to return None if the file is too small
|
||||
* Fix guessit.Language in Video.scan
|
||||
* Fix language detection of subtitles
|
||||
|
||||
|
||||
0.6.0
|
||||
^^^^^
|
||||
**release date:** 2012-06-16
|
||||
|
||||
**WARNING:** Backward incompatible changes
|
||||
|
||||
* Fix --workers option in CLI
|
||||
* Use a dedicated module for languages
|
||||
* Use beautifulsoup4
|
||||
* Improve return types
|
||||
* Add scan_filter option
|
||||
* Add --age option in CLI
|
||||
* Add TvSubtitles service
|
||||
* Add Addic7ed service
|
||||
|
||||
|
||||
0.5.1
|
||||
^^^^^
|
||||
**release date:** 2012-03-25
|
||||
|
||||
* Improve error handling of enzyme parsing
|
||||
|
||||
|
||||
0.5
|
||||
^^^
|
||||
**release date:** 2012-03-25
|
||||
**WARNING:** Backward incompatible changes
|
||||
|
||||
* Use more unicode
|
||||
* New list_subtitles and download_subtitles methods
|
||||
* New Pool object for asynchronous work
|
||||
* Improve sort algorithm
|
||||
* Better error handling
|
||||
* Make sorting customizable
|
||||
* Remove class Subliminal
|
||||
* Remove permissions handling
|
||||
|
||||
|
||||
0.4
|
||||
^^^
|
||||
**release date:** 2011-11-11
|
||||
|
||||
* Many fixes
|
||||
* Better error handling
|
||||
|
||||
|
||||
0.3
|
||||
^^^
|
||||
**release date:** 2011-08-18
|
||||
|
||||
* Fix a bug when series is not guessed by guessit
|
||||
* Fix dependencies failure when installing package
|
||||
* Fix encoding issues with logging
|
||||
* Add a script to ease subtitles download
|
||||
* Add possibility to choose mode of created files
|
||||
* Add more checks before adjusting permissions
|
||||
|
||||
|
||||
0.2
|
||||
^^^
|
||||
**release date:** 2011-07-11
|
||||
|
||||
* Fix plugin configuration
|
||||
* Fix some encoding issues
|
||||
* Remove extra logging
|
||||
|
||||
|
||||
0.1
|
||||
^^^
|
||||
**release date:** *private release*
|
||||
|
||||
* Initial release
|
||||
|
||||
|
|
@@ -1 +0,0 @@
pip
@ -1,434 +0,0 @@
|
|||
Metadata-Version: 2.0
|
||||
Name: subliminal
|
||||
Version: 2.0.5
|
||||
Summary: Subtitles, faster than your thoughts
|
||||
Home-page: https://github.com/Diaoul/subliminal
|
||||
Author: Antoine Bertin
|
||||
Author-email: diaoulael@gmail.com
|
||||
License: MIT
|
||||
Keywords: subtitle subtitles video movie episode tv show series
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Topic :: Multimedia :: Video
|
||||
Requires-Dist: appdirs (>=1.3)
|
||||
Requires-Dist: babelfish (>=0.5.2)
|
||||
Requires-Dist: beautifulsoup4 (>=4.4.0)
|
||||
Requires-Dist: chardet (>=2.3.0)
|
||||
Requires-Dist: click (>=4.0)
|
||||
Requires-Dist: dogpile.cache (>=0.6.0)
|
||||
Requires-Dist: enzyme (>=0.4.1)
|
||||
Requires-Dist: futures (>=3.0)
|
||||
Requires-Dist: guessit (>=2.0.1)
|
||||
Requires-Dist: pysrt (>=1.0.1)
|
||||
Requires-Dist: pytz (>=2012c)
|
||||
Requires-Dist: rarfile (>=2.7)
|
||||
Requires-Dist: requests (>=2.0)
|
||||
Requires-Dist: six (>=1.9.0)
|
||||
Requires-Dist: stevedore (>=1.0.0)
|
||||
Provides-Extra: dev
|
||||
Requires-Dist: sphinx; extra == 'dev'
|
||||
Requires-Dist: sphinx-rtd-theme; extra == 'dev'
|
||||
Requires-Dist: sphinxcontrib-programoutput; extra == 'dev'
|
||||
Requires-Dist: tox; extra == 'dev'
|
||||
Requires-Dist: wheel; extra == 'dev'
|
||||
Provides-Extra: test
|
||||
Requires-Dist: mock; extra == 'test'
|
||||
Requires-Dist: pytest; extra == 'test'
|
||||
Requires-Dist: pytest-cov; extra == 'test'
|
||||
Requires-Dist: pytest-flakes; extra == 'test'
|
||||
Requires-Dist: pytest-pep8; extra == 'test'
|
||||
Requires-Dist: sympy; extra == 'test'
|
||||
Requires-Dist: vcrpy (>=1.6.1); extra == 'test'
@ -1,72 +0,0 @@
|
|||
subliminal/__init__.py,sha256=970OWbvZFXuhq7qLuitHMgjiB9QVDY1NBxMWSl3NiSk,818
|
||||
subliminal/cache.py,sha256=sooQb7P8wPRk4Y4aBnfTORFH4Dzclb3qL5HOqksdBBY,428
|
||||
subliminal/cli.py,sha256=AblFu00eGrA7z-ACdYGGKEZZViEpH0hZFHHT7DlM6nA,19797
|
||||
subliminal/core.py,sha256=7I_3JTKXIw7DseNf219MVqIrPo8UD6dGDDmZdoUps80,26967
|
||||
subliminal/exceptions.py,sha256=5F8AcSgrN1YhfAkok1d3mTkCVz2WvIfFOl5I2vgWBJE,674
|
||||
subliminal/extensions.py,sha256=oZqJ4chQpLVk0l8q8fe0KqjVxVV9gmOzieqkhJlTSs4,3992
|
||||
subliminal/score.py,sha256=_9AqR7E39PbjqNs34jNwhYyZAFwva_ksg34zXDagIXA,8638
|
||||
subliminal/subtitle.py,sha256=vX1h4PM4Ys6QdjDNF4eP1Gw6nuCauvNXAIJabzMavBs,8283
|
||||
subliminal/utils.py,sha256=Ceb7z913BLTMUb-RiKdFj3xxmdkW9XbXezDQ3t1-Erk,4070
|
||||
subliminal/video.py,sha256=VRSUJuo9myFqq5vEhixjYoaSIqQ21XB2-tgJ6zy3NGw,7852
|
||||
subliminal/converters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
subliminal/converters/addic7ed.py,sha256=OilfGDwDOxf1-s0DVUT84b2AGEVuNWVWAq_deSZ5JU0,1734
|
||||
subliminal/converters/legendastv.py,sha256=Kt8Yc1HGTFDtWXjWvyTtEP1RtqDMo83mbEWkp1eRth0,1267
|
||||
subliminal/converters/shooter.py,sha256=OYmxbCV-ZjiSg6nvzTe0FM_UtQkYUV1ionAlOKRSYdE,842
|
||||
subliminal/converters/thesubdb.py,sha256=gt9HNT_2Pyat_NNjiEernZkK5c0Py3NN82XFkkkuX-I,1123
|
||||
subliminal/converters/tvsubtitles.py,sha256=TaEQtAhr_AEgvKDK3YCBAhlSj3AC55EHZHFsWkLxdVw,1100
|
||||
subliminal/providers/__init__.py,sha256=Q-_CPdRRxhTavbNE4C1jwyTz1MV-8gZlpQz5FK2PJ6E,5180
|
||||
subliminal/providers/addic7ed.py,sha256=SM5zb5vZ7psnx1pj-bynIGkXdBG8X_hAr258XmKZyCo,11049
|
||||
subliminal/providers/legendastv.py,sha256=aDFZ6fSctdrbY7NZLNhfmrkmYNeXsdbkubL6AojikIE,16220
|
||||
subliminal/providers/napiprojekt.py,sha256=yrHoUAk3oQIYNiG7D-gXJyhGj2lrVTKRaN7coZO7Efw,2759
|
||||
subliminal/providers/opensubtitles.py,sha256=H-zMUaXs9aVNBWcYEzG3CN-ThG5l1BItz_IjUzhHtes,11452
|
||||
subliminal/providers/podnapisi.py,sha256=sboZmyZ3-IP_Sl0HNtbDqmjPrO55J_eEcJeyUatdK5Y,6971
|
||||
subliminal/providers/shooter.py,sha256=E8pzH3ldnPtCIlM3Z12hEnMrHR5rTLVPRAd57PzVBP0,2392
|
||||
subliminal/providers/subscenter.py,sha256=U_xR3ZmU2Wsr2oNTkJiUgiJDCEqeOMZej3VJOUqpFKo,9252
|
||||
subliminal/providers/thesubdb.py,sha256=HwweKdJJ2eATAPlgW7RYFw9P1TQtdgjM4WEm55a9JT0,2650
|
||||
subliminal/providers/tvsubtitles.py,sha256=JZUVCKm8cPgaTsJeKVo5YsniYTERBhr9zr85bf-peE8,8046
|
||||
subliminal/refiners/__init__.py,sha256=pIBa68WL_wsNB__5GJ17khTDJe8qGKXo5tT9tF6e-Kk,320
|
||||
subliminal/refiners/metadata.py,sha256=vjFhDWerJ5Bvxbf8s32YR5IRsqOclZfjEJ1PcyUf--4,4007
|
||||
subliminal/refiners/omdb.py,sha256=mGRDAuGyr4B80l6hDa_SC9vRT8ZdePfWAXT5SMJYnrI,5736
|
||||
subliminal/refiners/tvdb.py,sha256=C1m3JORdUy0UIzd6-7sPQqeSJfXRIPZupbmgOszZjgM,11390
|
||||
subliminal/subtitles/__init__.py,sha256=_Rw6d9wjF5saaB3KSyTetjpDToeEJNo8goALXzAXy9w,2407
|
||||
subliminal/subtitles/subrip.py,sha256=kjqJgAoQo306tTM-Q5rgP-9tg0fUdXNRmzyXYwvz3R8,1688
|
||||
subliminal-2.0.5.dist-info/DESCRIPTION.rst,sha256=-7k37t7F46A9SmN8vRI0ykLnWNMkBhfNohikeYX_4GM,8667
|
||||
subliminal-2.0.5.dist-info/METADATA,sha256=SGdtIOXHDcDfVY8hLbfUwkiBgqpnf8Gw8XkoEc5lLvc,10663
|
||||
subliminal-2.0.5.dist-info/RECORD,,
|
||||
subliminal-2.0.5.dist-info/WHEEL,sha256=bee59qcPjkyXfMaxNWjl2CGotqfumWx9pC1hlVLr2mM,92
|
||||
subliminal-2.0.5.dist-info/entry_points.txt,sha256=siGYKkN_wGhk6FKcqIBXKtVP8lKDP9hwBlpJgu75Dt0,1009
|
||||
subliminal-2.0.5.dist-info/metadata.json,sha256=uY5p0Bv2nHzIFZLea_QhCeW9qvjvnXs4SI0Ij4PTPJM,2982
|
||||
subliminal-2.0.5.dist-info/top_level.txt,sha256=fE0cw-mP4bz_11i0g35ee1xQs32_VDgojpW8w-_i0_M,11
|
||||
../../Scripts/subliminal.exe,sha256=XqlFoLqjfAHoOGujj2JC-XX36GFLhbOFdc27kmooQoI,89444
|
||||
subliminal-2.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
subliminal/converters/thesubdb.pyc,,
|
||||
subliminal/providers/napiprojekt.pyc,,
|
||||
subliminal/refiners/__init__.pyc,,
|
||||
subliminal/providers/opensubtitles.pyc,,
|
||||
subliminal/converters/legendastv.pyc,,
|
||||
subliminal/converters/__init__.pyc,,
|
||||
subliminal/exceptions.pyc,,
|
||||
subliminal/providers/thesubdb.pyc,,
|
||||
subliminal/utils.pyc,,
|
||||
subliminal/refiners/metadata.pyc,,
|
||||
subliminal/providers/subscenter.pyc,,
|
||||
subliminal/converters/shooter.pyc,,
|
||||
subliminal/providers/addic7ed.pyc,,
|
||||
subliminal/refiners/omdb.pyc,,
|
||||
subliminal/providers/podnapisi.pyc,,
|
||||
subliminal/cache.pyc,,
|
||||
subliminal/refiners/tvdb.pyc,,
|
||||
subliminal/providers/tvsubtitles.pyc,,
|
||||
subliminal/extensions.pyc,,
|
||||
subliminal/video.pyc,,
|
||||
subliminal/converters/addic7ed.pyc,,
|
||||
subliminal/__init__.pyc,,
|
||||
subliminal/converters/tvsubtitles.pyc,,
|
||||
subliminal/cli.pyc,,
|
||||
subliminal/providers/__init__.pyc,,
|
||||
subliminal/score.pyc,,
|
||||
subliminal/providers/legendastv.pyc,,
|
||||
subliminal/core.pyc,,
|
||||
subliminal/subtitle.pyc,,
|
||||
subliminal/subtitles/__init__.pyc,,
|
||||
subliminal/providers/shooter.pyc,,
|
|
@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.29.0)
Root-Is-Purelib: true
Tag: py2-none-any
@@ -1,24 +0,0 @@
[babelfish.language_converters]
addic7ed = subliminal.converters.addic7ed:Addic7edConverter
shooter = subliminal.converters.shooter:ShooterConverter
thesubdb = subliminal.converters.thesubdb:TheSubDBConverter
tvsubtitles = subliminal.converters.tvsubtitles:TVsubtitlesConverter

[console_scripts]
subliminal = subliminal.cli:subliminal

[subliminal.providers]
addic7ed = subliminal.providers.addic7ed:Addic7edProvider
legendastv = subliminal.providers.legendastv:LegendasTVProvider
opensubtitles = subliminal.providers.opensubtitles:OpenSubtitlesProvider
podnapisi = subliminal.providers.podnapisi:PodnapisiProvider
shooter = subliminal.providers.shooter:ShooterProvider
subscenter = subliminal.providers.subscenter:SubsCenterProvider
thesubdb = subliminal.providers.thesubdb:TheSubDBProvider
tvsubtitles = subliminal.providers.tvsubtitles:TVsubtitlesProvider

[subliminal.refiners]
metadata = subliminal.refiners.metadata:refine
omdb = subliminal.refiners.omdb:refine
tvdb = subliminal.refiners.tvdb:refine
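These entry points are how subliminal and babelfish discover converters, providers and refiners at runtime. A quick illustrative way to list one of the groups with pkg_resources (subliminal itself reads them through stevedore, as its extensions module further down shows):

    import pkg_resources

    for ep in pkg_resources.iter_entry_points('subliminal.providers'):
        print(ep.name, '->', ep.module_name)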
@@ -1 +0,0 @@
{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Multimedia :: Video"], "extensions": {"python.commands": {"wrap_console": {"subliminal": "subliminal.cli:subliminal"}}, "python.details": {"contacts": [{"email": "diaoulael@gmail.com", "name": "Antoine Bertin", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/Diaoul/subliminal"}}, "python.exports": {"babelfish.language_converters": {"addic7ed": "subliminal.converters.addic7ed:Addic7edConverter", "shooter": "subliminal.converters.shooter:ShooterConverter", "thesubdb": "subliminal.converters.thesubdb:TheSubDBConverter", "tvsubtitles": "subliminal.converters.tvsubtitles:TVsubtitlesConverter"}, "console_scripts": {"subliminal": "subliminal.cli:subliminal"}, "subliminal.providers": {"addic7ed": "subliminal.providers.addic7ed:Addic7edProvider", "legendastv": "subliminal.providers.legendastv:LegendasTVProvider", "opensubtitles": "subliminal.providers.opensubtitles:OpenSubtitlesProvider", "podnapisi": "subliminal.providers.podnapisi:PodnapisiProvider", "shooter": "subliminal.providers.shooter:ShooterProvider", "subscenter": "subliminal.providers.subscenter:SubsCenterProvider", "thesubdb": "subliminal.providers.thesubdb:TheSubDBProvider", "tvsubtitles": "subliminal.providers.tvsubtitles:TVsubtitlesProvider"}, "subliminal.refiners": {"metadata": "subliminal.refiners.metadata:refine", "omdb": "subliminal.refiners.omdb:refine", "tvdb": "subliminal.refiners.tvdb:refine"}}}, "extras": ["dev", "test"], "generator": "bdist_wheel (0.29.0)", "keywords": ["subtitle", "subtitles", "video", "movie", "episode", "tv", "show", "series"], "license": "MIT", "metadata_version": "2.0", "name": "subliminal", "run_requires": [{"requires": ["appdirs (>=1.3)", "babelfish (>=0.5.2)", "beautifulsoup4 (>=4.4.0)", "chardet (>=2.3.0)", "click (>=4.0)", "dogpile.cache (>=0.6.0)", "enzyme (>=0.4.1)", "futures (>=3.0)", "guessit (>=2.0.1)", "pysrt (>=1.0.1)", "pytz (>=2012c)", "rarfile (>=2.7)", "requests (>=2.0)", "six (>=1.9.0)", "stevedore (>=1.0.0)"]}, {"extra": "test", "requires": ["mock", "pytest-cov", "pytest-flakes", "pytest-pep8", "pytest", "sympy", "vcrpy (>=1.6.1)"]}, {"extra": "dev", "requires": ["sphinx-rtd-theme", "sphinx", "sphinxcontrib-programoutput", "tox", "wheel"]}], "summary": "Subtitles, faster than your thoughts", "test_requires": [{"requires": ["mock", "pytest", "pytest-cov", "pytest-flakes", "pytest-pep8", "sympy", "vcrpy (>=1.6.1)"]}], "version": "2.0.5"}
@@ -1 +0,0 @@
subliminal
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '2.0.5'
__version__ = '2.1.0.dev'
__short_version__ = '.'.join(__version__.split('.')[:2])
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
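The version bump also changes what __short_version__ evaluates to, since the expression keeps only the first two dotted components; a quick check:

    >>> '.'.join('2.1.0.dev'.split('.')[:2])
    '2.1'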
@@ -219,13 +219,12 @@ config_file = 'config.ini'
@click.option('--legendastv', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD', help='LegendasTV configuration.')
@click.option('--opensubtitles', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD',
              help='OpenSubtitles configuration.')
@click.option('--subscenter', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD', help='SubsCenter configuration.')
@click.option('--cache-dir', type=click.Path(writable=True, file_okay=False), default=dirs.user_cache_dir,
              show_default=True, expose_value=True, help='Path to the cache directory.')
@click.option('--debug', is_flag=True, help='Print useful information for debugging subliminal and for reporting bugs.')
@click.version_option(__version__)
@click.pass_context
def subliminal(ctx, addic7ed, legendastv, opensubtitles, subscenter, cache_dir, debug):
def subliminal(ctx, addic7ed, legendastv, opensubtitles, cache_dir, debug):
    """Subtitles, faster than your thoughts."""
    # create cache directory
    try:

@@ -253,8 +252,6 @@ def subliminal(ctx, addic7ed, legendastv, opensubtitles, subscenter, cache_dir,
        ctx.obj['provider_configs']['legendastv'] = {'username': legendastv[0], 'password': legendastv[1]}
    if opensubtitles:
        ctx.obj['provider_configs']['opensubtitles'] = {'username': opensubtitles[0], 'password': opensubtitles[1]}
    if subscenter:
        ctx.obj['provider_configs']['subscenter'] = {'username': subscenter[0], 'password': subscenter[1]}


@subliminal.command()
@@ -6,14 +6,18 @@ import io
import itertools
import logging
import operator
import os.path
import os
import socket

from babelfish import Language, LanguageReverseError
from guessit import guessit
from rarfile import NotRarFile, RarCannotExec, RarFile
from six.moves.xmlrpc_client import ProtocolError
from rarfile import BadRarFile, NotRarFile, RarCannotExec, RarFile
from zipfile import BadZipfile
from ssl import SSLError
import requests

from .exceptions import ServiceUnavailable
from .extensions import provider_manager, refiner_manager
from .score import compute_score as default_compute_score
from .subtitle import SUBTITLE_EXTENSIONS, get_subtitle_path
@@ -79,6 +83,18 @@ class ProviderPool(object):
            self.initialized_providers[name].terminate()
        except (requests.Timeout, socket.timeout):
            logger.error('Provider %r timed out, improperly terminated', name)
        except (ServiceUnavailable, ProtocolError):  # OpenSubtitles raises xmlrpclib.ProtocolError when unavailable
            logger.error('Provider %r unavailable, improperly terminated', name)
        except requests.exceptions.HTTPError as e:
            if e.response.status_code in range(500, 600):
                logger.error('Provider %r unavailable, improperly terminated', name)
            else:
                logger.exception('Provider %r http error %r, improperly terminated', name, e.response.status_code)
        except SSLError as e:
            if e.args[0] == 'The read operation timed out':
                logger.error('Provider %r unavailable, improperly terminated', name)
            else:
                logger.exception('Provider %r SSL error %r, improperly terminated', name, e.args[0])
        except:
            logger.exception('Provider %r terminated unexpectedly', name)
@@ -118,6 +134,18 @@ class ProviderPool(object):
            return self[provider].list_subtitles(video, provider_languages)
        except (requests.Timeout, socket.timeout):
            logger.error('Provider %r timed out', provider)
        except (ServiceUnavailable, ProtocolError):  # OpenSubtitles raises xmlrpclib.ProtocolError when unavailable
            logger.error('Provider %r unavailable', provider)
        except requests.exceptions.HTTPError as e:
            if e.response.status_code in range(500, 600):
                logger.error('Provider %r unavailable', provider)
            else:
                logger.exception('Provider %r http error %r', provider, e.response.status_code)
        except SSLError as e:
            if e.args[0] == 'The read operation timed out':
                logger.error('Provider %r unavailable', provider)
            else:
                logger.exception('Provider %r SSL error %r', provider, e.args[0])
        except:
            logger.exception('Unexpected error in provider %r', provider)
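This except ladder is nearly identical to the one added to terminate() above and to the download path below; only the log message changes. A compacted sketch of the shared classification, purely illustrative and not part of subliminal (the package's own ServiceUnavailable would join the first isinstance check in the real module):

    import socket
    from ssl import SSLError

    import requests
    from six.moves.xmlrpc_client import ProtocolError

    def is_temporary_provider_error(exc):
        # timeouts, XML-RPC protocol errors and 5xx responses mean "skip this provider for now"
        if isinstance(exc, (requests.Timeout, socket.timeout, ProtocolError)):
            return True
        if isinstance(exc, requests.exceptions.HTTPError):
            return 500 <= exc.response.status_code < 600
        if isinstance(exc, SSLError):
            return exc.args[0] == 'The read operation timed out'
        return False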
@@ -173,6 +201,28 @@ class ProviderPool(object):
            logger.error('Provider %r timed out, discarding it', subtitle.provider_name)
            self.discarded_providers.add(subtitle.provider_name)
            return False
        except (ServiceUnavailable, ProtocolError):  # OpenSubtitles raises xmlrpclib.ProtocolError when unavailable
            logger.error('Provider %r unavailable, discarding it', subtitle.provider_name)
            self.discarded_providers.add(subtitle.provider_name)
            return False
        except requests.exceptions.HTTPError as e:
            if e.response.status_code in range(500, 600):
                logger.error('Provider %r unavailable, discarding it', subtitle.provider_name)
            else:
                logger.exception('Provider %r http error %r, discarding it', subtitle.provider_name,
                                 e.response.status_code)
            self.discarded_providers.add(subtitle.provider_name)
            return False
        except SSLError as e:
            if e.args[0] == 'The read operation timed out':
                logger.error('Provider %r unavailable, discarding it', subtitle.provider_name)
            else:
                logger.exception('Provider %r SSL error %r, discarding it', subtitle.provider_name, e.args[0])
            self.discarded_providers.add(subtitle.provider_name)
            return False
        except (BadRarFile, BadZipfile):
            logger.error('Bad archive for %r', subtitle)
            return False
        except:
            logger.exception('Unexpected error in provider %r, discarding it', subtitle.provider_name)
            self.discarded_providers.add(subtitle.provider_name)
@@ -338,7 +388,7 @@ def search_external_subtitles(path, directory=None):
    subtitles = {}
    for p in os.listdir(directory or dirpath):
        # keep only valid subtitle filenames
        if not p.startswith(fileroot) or not p.endswith(SUBTITLE_EXTENSIONS):
        if not p.startswith(fileroot) or not p.lower().endswith(SUBTITLE_EXTENSIONS):
            continue

        # extract the potential language code
@@ -370,7 +420,7 @@ def scan_video(path):
        raise ValueError('Path does not exist')

    # check video extension
    if not path.endswith(VIDEO_EXTENSIONS):
    if not path.lower().endswith(VIDEO_EXTENSIONS):
        raise ValueError('%r is not a valid video extension' % os.path.splitext(path)[1])

    dirpath, filename = os.path.split(path)
@@ -418,7 +468,7 @@ def scan_archive(path):
    rar = RarFile(path)

    # filter on video extensions
    rar_filenames = [f for f in rar.namelist() if f.endswith(VIDEO_EXTENSIONS)]
    rar_filenames = [f for f in rar.namelist() if f.lower().endswith(VIDEO_EXTENSIONS)]

    # no video found
    if not rar_filenames:
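All of the .lower() changes in these hunks address the same problem: str.endswith is case sensitive, so files with upper-case extensions slipped past the filters. A quick check:

    >>> 'Show.S01E01.MKV'.endswith('.mkv')
    False
    >>> 'Show.S01E01.MKV'.lower().endswith('.mkv')
    True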
@@ -471,17 +521,26 @@ def scan_videos(path, age=None, archives=True):
            if dirname.startswith('.'):
                logger.debug('Skipping hidden dirname %r in %r', dirname, dirpath)
                dirnames.remove(dirname)
            # Skip Sample folder
            if dirname.lower() == 'sample':
                logger.debug('Skipping sample dirname %r in %r', dirname, dirpath)
                dirnames.remove(dirname)

        # scan for videos
        for filename in filenames:
            # filter on videos and archives
            if not (filename.endswith(VIDEO_EXTENSIONS) or archives and filename.endswith(ARCHIVE_EXTENSIONS)):
            if not (filename.lower().endswith(VIDEO_EXTENSIONS) or
                    archives and filename.lower().endswith(ARCHIVE_EXTENSIONS)):
                continue

            # skip hidden files
            if filename.startswith('.'):
                logger.debug('Skipping hidden filename %r in %r', filename, dirpath)
                continue
            # skip 'sample' media files
            if os.path.splitext(filename)[0].lower() == 'sample':
                logger.debug('Skipping sample filename %r in %r', filename, dirpath)
                continue

            # reconstruct the file path
            filepath = os.path.join(dirpath, filename)
@@ -492,18 +551,24 @@
                continue

            # skip old files
            if age and datetime.utcnow() - datetime.utcfromtimestamp(os.path.getmtime(filepath)) > age:
                logger.debug('Skipping old file %r in %r', filename, dirpath)
            try:
                file_age = datetime.utcfromtimestamp(os.path.getmtime(filepath))
            except ValueError:
                logger.warning('Could not get age of file %r in %r', filename, dirpath)
                continue
            else:
                if age and datetime.utcnow() - file_age > age:
                    logger.debug('Skipping old file %r in %r', filename, dirpath)
                    continue

            # scan
            if filename.endswith(VIDEO_EXTENSIONS):  # video
            if filename.lower().endswith(VIDEO_EXTENSIONS):  # video
                try:
                    video = scan_video(filepath)
                except ValueError:  # pragma: no cover
                    logger.exception('Error scanning video')
                    continue
            elif archives and filename.endswith(ARCHIVE_EXTENSIONS):  # archive
            elif archives and filename.lower().endswith(ARCHIVE_EXTENSIONS):  # archive
                try:
                    video = scan_archive(filepath)
                except (NotRarFile, RarCannotExec, ValueError):  # pragma: no cover
@@ -541,7 +606,8 @@ def refine(video, episode_refiners=None, movie_refiners=None, **kwargs):
        try:
            refiner_manager[refiner].plugin(video, **kwargs)
        except:
            logger.exception('Failed to refine video')
            logger.error('Failed to refine video %r', video.name)
            logger.debug('Refiner exception:', exc_info=True)


def list_subtitles(videos, languages, pool_class=ProviderPool, **kwargs):
|
|
@ -19,8 +19,8 @@ class AuthenticationError(ProviderError):
|
|||
pass
|
||||
|
||||
|
||||
class TooManyRequests(ProviderError):
|
||||
"""Exception raised by providers when too many requests are made."""
|
||||
class ServiceUnavailable(ProviderError):
|
||||
"""Exception raised when status is '503 Service Unavailable'."""
|
||||
pass
|
||||
|
||||
|
||||
|
|
|
@ -29,9 +29,9 @@ class RegistrableExtensionManager(ExtensionManager):
|
|||
|
||||
super(RegistrableExtensionManager, self).__init__(namespace, **kwargs)
|
||||
|
||||
def _find_entry_points(self, namespace):
|
||||
def list_entry_points(self):
|
||||
# copy of default extensions
|
||||
eps = list(super(RegistrableExtensionManager, self)._find_entry_points(namespace))
|
||||
eps = list(super(RegistrableExtensionManager, self).list_entry_points())
|
||||
|
||||
# internal extensions
|
||||
for iep in self.internal_extensions:
|
||||
|
@ -93,7 +93,6 @@ provider_manager = RegistrableExtensionManager('subliminal.providers', [
|
|||
'opensubtitles = subliminal.providers.opensubtitles:OpenSubtitlesProvider',
|
||||
'podnapisi = subliminal.providers.podnapisi:PodnapisiProvider',
|
||||
'shooter = subliminal.providers.shooter:ShooterProvider',
|
||||
'subscenter = subliminal.providers.subscenter:SubsCenterProvider',
|
||||
'thesubdb = subliminal.providers.thesubdb:TheSubDBProvider',
|
||||
'tvsubtitles = subliminal.providers.tvsubtitles:TVsubtitlesProvider'
|
||||
])
|
||||
|
|
|
@ -68,6 +68,9 @@ class Provider(object):
|
|||
#: Required hash, if any
|
||||
required_hash = None
|
||||
|
||||
#: Subtitle class to use
|
||||
subtitle_class = None
|
||||
|
||||
def __enter__(self):
|
||||
self.initialize()
|
||||
return self
|
||||
|
|
|
@ -9,7 +9,7 @@ from requests import Session
|
|||
from . import ParserBeautifulSoup, Provider
|
||||
from .. import __short_version__
|
||||
from ..cache import SHOW_EXPIRATION_TIME, region
|
||||
from ..exceptions import AuthenticationError, ConfigurationError, DownloadLimitExceeded, TooManyRequests
|
||||
from ..exceptions import AuthenticationError, ConfigurationError, DownloadLimitExceeded
|
||||
from ..score import get_equivalent_release_groups
|
||||
from ..subtitle import Subtitle, fix_line_ending, guess_matches
|
||||
from ..utils import sanitize, sanitize_release_group
|
||||
|
@ -19,8 +19,11 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
language_converters.register('addic7ed = subliminal.converters.addic7ed:Addic7edConverter')
|
||||
|
||||
# Series cell matching regex
|
||||
show_cells_re = re.compile(b'<td class="version">.*?</td>', re.DOTALL)
|
||||
|
||||
#: Series header parsing regex
|
||||
series_year_re = re.compile(r'^(?P<series>[ \w\'.:(),&!?-]+?)(?: \((?P<year>\d{4})\))?$')
|
||||
series_year_re = re.compile(r'^(?P<series>[ \w\'.:(),*&!?-]+?)(?: \((?P<year>\d{4})\))?$')
|
||||
|
||||
|
||||
class Addic7edSubtitle(Subtitle):
|
||||
|
@ -29,7 +32,7 @@ class Addic7edSubtitle(Subtitle):
|
|||
|
||||
def __init__(self, language, hearing_impaired, page_link, series, season, episode, title, year, version,
|
||||
download_link):
|
||||
super(Addic7edSubtitle, self).__init__(language, hearing_impaired, page_link)
|
||||
super(Addic7edSubtitle, self).__init__(language, hearing_impaired=hearing_impaired, page_link=page_link)
|
||||
self.series = series
|
||||
self.season = season
|
||||
self.episode = episode
|
||||
|
@ -45,8 +48,9 @@ class Addic7edSubtitle(Subtitle):
|
|||
def get_matches(self, video):
|
||||
matches = set()
|
||||
|
||||
# series
|
||||
if video.series and sanitize(self.series) == sanitize(video.series):
|
||||
# series name
|
||||
if video.series and sanitize(self.series) in (
|
||||
sanitize(name) for name in [video.series] + video.alternative_series):
|
||||
matches.add('series')
|
||||
# season
|
||||
if video.season and self.season == video.season:
|
||||
|
@ -54,7 +58,7 @@ class Addic7edSubtitle(Subtitle):
|
|||
# episode
|
||||
if video.episode and self.episode == video.episode:
|
||||
matches.add('episode')
|
||||
# title
|
||||
# title of the episode
|
||||
if video.title and sanitize(self.title) == sanitize(video.title):
|
||||
matches.add('title')
|
||||
# year
|
||||
|
@ -86,21 +90,23 @@ class Addic7edProvider(Provider):
|
|||
]}
|
||||
video_types = (Episode,)
|
||||
server_url = 'http://www.addic7ed.com/'
|
||||
subtitle_class = Addic7edSubtitle
|
||||
|
||||
def __init__(self, username=None, password=None):
|
||||
if username is not None and password is None or username is None and password is not None:
|
||||
if any((username, password)) and not all((username, password)):
|
||||
raise ConfigurationError('Username and password must be specified')
|
||||
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.logged_in = False
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
self.session.headers['User-Agent'] = 'Subliminal/%s' % __short_version__
|
||||
|
||||
# login
|
||||
if self.username is not None and self.password is not None:
|
||||
if self.username and self.password:
|
||||
logger.info('Logging in')
|
||||
data = {'username': self.username, 'password': self.password, 'Submit': 'Log in'}
|
||||
r = self.session.post(self.server_url + 'dologin.php', data, allow_redirects=False, timeout=10)
|
||||
|
@ -134,7 +140,16 @@ class Addic7edProvider(Provider):
|
|||
logger.info('Getting show ids')
|
||||
r = self.session.get(self.server_url + 'shows.php', timeout=10)
|
||||
r.raise_for_status()
|
||||
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
|
||||
|
||||
# LXML parser seems to fail when parsing Addic7ed.com HTML markup.
|
||||
# Last known version to work properly is 3.6.4 (next version, 3.7.0, fails)
|
||||
# Assuming the site's markup is bad, and stripping it down to only contain what's needed.
|
||||
show_cells = re.findall(show_cells_re, r.content)
|
||||
if show_cells:
|
||||
soup = ParserBeautifulSoup(b''.join(show_cells), ['lxml', 'html.parser'])
|
||||
else:
|
||||
# If RegEx fails, fall back to original r.content and use 'html.parser'
|
||||
soup = ParserBeautifulSoup(r.content, ['html.parser'])
|
||||
|
||||
# populate the show ids
|
||||
show_ids = {}
|
||||
|
@ -164,10 +179,8 @@ class Addic7edProvider(Provider):
|
|||
|
||||
# make the search
|
||||
logger.info('Searching show ids with %r', params)
|
||||
r = self.session.get(self.server_url + 'search.php', params=params, timeout=10)
|
||||
r = self.session.get(self.server_url + 'srch.php', params=params, timeout=10)
|
||||
r.raise_for_status()
|
||||
if r.status_code == 304:
|
||||
raise TooManyRequests()
|
||||
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
|
||||
|
||||
# get the suggestion
|
||||
|
@ -218,24 +231,23 @@ class Addic7edProvider(Provider):
|
|||
|
||||
# search as last resort
|
||||
if not show_id:
|
||||
logger.warning('Series not found in show ids')
|
||||
logger.warning('Series %s not found in show ids', series)
|
||||
show_id = self._search_show_id(series)
|
||||
|
||||
return show_id
|
||||
|
||||
def query(self, series, season, year=None, country=None):
|
||||
# get the show id
|
||||
show_id = self.get_show_id(series, year, country)
|
||||
if show_id is None:
|
||||
logger.error('No show id found for %r (%r)', series, {'year': year, 'country': country})
|
||||
return []
|
||||
|
||||
def query(self, show_id, series, season, year=None, country=None):
|
||||
# get the page of the season of the show
|
||||
logger.info('Getting the page of show id %d, season %d', show_id, season)
|
||||
r = self.session.get(self.server_url + 'show/%d' % show_id, params={'season': season}, timeout=10)
|
||||
r.raise_for_status()
|
||||
if r.status_code == 304:
|
||||
raise TooManyRequests()
|
||||
|
||||
if not r.content:
|
||||
# Provider returns a status of 304 Not Modified with an empty content
|
||||
# raise_for_status won't raise exception for that status code
|
||||
logger.debug('No data returned from provider')
|
||||
return []
|
||||
|
||||
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
|
||||
|
||||
# loop over subtitle rows
|
||||
|
@ -262,16 +274,32 @@ class Addic7edProvider(Provider):
|
|||
version = cells[4].text
|
||||
download_link = cells[9].a['href'][1:]
|
||||
|
||||
subtitle = Addic7edSubtitle(language, hearing_impaired, page_link, series, season, episode, title, year,
|
||||
version, download_link)
|
||||
subtitle = self.subtitle_class(language, hearing_impaired, page_link, series, season, episode, title, year,
|
||||
version, download_link)
|
||||
logger.debug('Found subtitle %r', subtitle)
|
||||
subtitles.append(subtitle)
|
||||
|
||||
        return subtitles

    def list_subtitles(self, video, languages):
        return [s for s in self.query(video.series, video.season, video.year)
                if s.language in languages and s.episode == video.episode]
        # lookup show_id
        titles = [video.series] + video.alternative_series
        show_id = None
        for title in titles:
            show_id = self.get_show_id(title, video.year)
            if show_id is not None:
                break

        # query for subtitles with the show_id
        if show_id is not None:
            subtitles = [s for s in self.query(show_id, title, video.season, video.year)
                         if s.language in languages and s.episode == video.episode]
            if subtitles:
                return subtitles
        else:
            logger.error('No show id found for %r (%r)', video.series, {'year': video.year})

        return []
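Several providers in this commit switch to the same lookup loop: try the primary series name, then each alternative name, and only run the subtitle query once an id has been found. A condensed, provider-agnostic sketch of that control flow (lookup and query stand in for methods such as get_show_id and query):

def find_episode_subtitles(video, languages, lookup, query):
    # Candidate names: primary series name first, then refined aliases.
    titles = [video.series] + video.alternative_series

    show_id = None
    for title in titles:
        show_id = lookup(title, video.year)
        if show_id is not None:
            break

    if show_id is None:
        # No candidate name resolved to an id: nothing to query.
        return []

    # Query with the id of the name that matched, filtering on language and episode.
    return [s for s in query(show_id, title, video.season, video.year)
            if s.language in languages and s.episode == video.episode]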
|
||||
def download_subtitle(self, subtitle):
|
||||
# download the subtitle
|
||||
|
@ -280,6 +308,12 @@ class Addic7edProvider(Provider):
|
|||
timeout=10)
|
||||
r.raise_for_status()
|
||||
|
||||
if not r.content:
|
||||
# Provider returns a status of 304 Not Modified with an empty content
|
||||
# raise_for_status won't raise exception for that status code
|
||||
logger.debug('Unable to download subtitle. No data returned from provider')
|
||||
return
|
||||
|
||||
# detect download limit exceeded
|
||||
if r.headers['Content-Type'] == 'text/html':
|
||||
raise DownloadLimitExceeded
|
||||
|
|
|
@ -18,7 +18,7 @@ from zipfile import ZipFile, is_zipfile
|
|||
from . import ParserBeautifulSoup, Provider
|
||||
from .. import __short_version__
|
||||
from ..cache import SHOW_EXPIRATION_TIME, region
|
||||
from ..exceptions import AuthenticationError, ConfigurationError, ProviderError
|
||||
from ..exceptions import AuthenticationError, ConfigurationError, ProviderError, ServiceUnavailable
|
||||
from ..subtitle import SUBTITLE_EXTENSIONS, Subtitle, fix_line_ending, guess_matches, sanitize
|
||||
from ..video import Episode, Movie
|
||||
|
||||
|
@ -44,8 +44,11 @@ rating_re = re.compile(r'nota (?P<rating>\d+)')
|
|||
#: Timestamp parsing regex
|
||||
timestamp_re = re.compile(r'(?P<day>\d+)/(?P<month>\d+)/(?P<year>\d+) - (?P<hour>\d+):(?P<minute>\d+)')
|
||||
|
||||
#: Title with year/country regex
|
||||
title_re = re.compile(r'^(?P<series>.*?)(?: \((?:(?P<year>\d{4})|(?P<country>[A-Z]{2}))\))?$')
|
||||
|
||||
#: Cache key for releases
|
||||
releases_key = __name__ + ':releases|{archive_id}'
|
||||
releases_key = __name__ + ':releases|{archive_id}|{archive_name}'
|
||||
|
||||
|
||||
class LegendasTVArchive(object):
|
||||
|
@ -60,8 +63,8 @@ class LegendasTVArchive(object):
|
|||
:param int rating: rating (0-10).
|
||||
:param timestamp: timestamp.
|
||||
:type timestamp: datetime.datetime
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, id, name, pack, featured, link, downloads=0, rating=0, timestamp=None):
|
||||
#: Identifier
|
||||
self.id = id
|
||||
|
@ -96,10 +99,11 @@ class LegendasTVArchive(object):
|
|||
|
||||
class LegendasTVSubtitle(Subtitle):
|
||||
"""LegendasTV Subtitle."""
|
||||
|
||||
provider_name = 'legendastv'
|
||||
|
||||
def __init__(self, language, type, title, year, imdb_id, season, archive, name):
|
||||
super(LegendasTVSubtitle, self).__init__(language, archive.link)
|
||||
super(LegendasTVSubtitle, self).__init__(language, page_link=archive.link)
|
||||
self.type = type
|
||||
self.title = title
|
||||
self.year = year
|
||||
|
@ -118,11 +122,12 @@ class LegendasTVSubtitle(Subtitle):
|
|||
# episode
|
||||
if isinstance(video, Episode) and self.type == 'episode':
|
||||
# series
|
||||
if video.series and sanitize(self.title) == sanitize(video.series):
|
||||
if video.series and (sanitize(self.title) in (
|
||||
sanitize(name) for name in [video.series] + video.alternative_series)):
|
||||
matches.add('series')
|
||||
|
||||
# year (year is based on season air date hence the adjustment)
|
||||
if video.original_series and self.year is None or video.year and video.year == self.year - self.season + 1:
|
||||
# year
|
||||
if video.original_series and self.year is None or video.year and video.year == self.year:
|
||||
matches.add('year')
|
||||
|
||||
# imdb_id
|
||||
|
@ -132,7 +137,8 @@ class LegendasTVSubtitle(Subtitle):
|
|||
# movie
|
||||
elif isinstance(video, Movie) and self.type == 'movie':
|
||||
# title
|
||||
if video.title and sanitize(self.title) == sanitize(video.title):
|
||||
if video.title and (sanitize(self.title) in (
|
||||
sanitize(name) for name in [video.title] + video.alternative_titles)):
|
||||
matches.add('title')
|
||||
|
||||
# year
|
||||
|
@ -143,9 +149,6 @@ class LegendasTVSubtitle(Subtitle):
|
|||
if video.imdb_id and self.imdb_id == video.imdb_id:
|
||||
matches.add('imdb_id')
|
||||
|
||||
# archive name
|
||||
matches |= guess_matches(video, guessit(self.archive.name, {'type': self.type}))
|
||||
|
||||
# name
|
||||
matches |= guess_matches(video, guessit(self.name, {'type': self.type}))
|
||||
|
||||
|
@ -157,29 +160,38 @@ class LegendasTVProvider(Provider):
|
|||
|
||||
:param str username: username.
|
||||
:param str password: password.
|
||||
|
||||
"""
|
||||
|
||||
languages = {Language.fromlegendastv(l) for l in language_converters['legendastv'].codes}
|
||||
server_url = 'http://legendas.tv/'
|
||||
subtitle_class = LegendasTVSubtitle
|
||||
|
||||
def __init__(self, username=None, password=None):
|
||||
if username and not password or not username and password:
|
||||
|
||||
# Provider needs UNRAR installed. If not available raise ConfigurationError
|
||||
try:
|
||||
rarfile.custom_check([rarfile.UNRAR_TOOL], True)
|
||||
except rarfile.RarExecError:
|
||||
raise ConfigurationError('UNRAR tool not available')
|
||||
|
||||
if any((username, password)) and not all((username, password)):
|
||||
raise ConfigurationError('Username and password must be specified')
|
||||
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.logged_in = False
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
self.session.headers['User-Agent'] = 'Subliminal/%s' % __short_version__
|
||||
|
||||
# login
|
||||
if self.username is not None and self.password is not None:
|
||||
if self.username and self.password:
|
||||
logger.info('Logging in')
|
||||
data = {'_method': 'POST', 'data[User][username]': self.username, 'data[User][password]': self.password}
|
||||
r = self.session.post(self.server_url + 'login', data, allow_redirects=False, timeout=10)
|
||||
r.raise_for_status()
|
||||
raise_for_status(r)
|
||||
|
||||
soup = ParserBeautifulSoup(r.content, ['html.parser'])
|
||||
if soup.find('div', {'class': 'alert-error'}, string=re.compile(u'Usuário ou senha inválidos')):
|
||||
|
@ -193,94 +205,174 @@ class LegendasTVProvider(Provider):
|
|||
if self.logged_in:
|
||||
logger.info('Logging out')
|
||||
r = self.session.get(self.server_url + 'users/logout', allow_redirects=False, timeout=10)
|
||||
r.raise_for_status()
|
||||
raise_for_status(r)
|
||||
logger.debug('Logged out')
|
||||
self.logged_in = False
|
||||
|
||||
self.session.close()
|
||||
|
||||
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME)
|
||||
def search_titles(self, title):
|
||||
@staticmethod
|
||||
def is_valid_title(title, title_id, sanitized_title, season, year):
|
||||
"""Check if is a valid title."""
|
||||
sanitized_result = sanitize(title['title'])
|
||||
if sanitized_result != sanitized_title:
|
||||
logger.debug("Mismatched title, discarding title %d (%s)",
|
||||
title_id, sanitized_result)
|
||||
return
|
||||
|
||||
# episode type
|
||||
if season:
|
||||
# discard mismatches on type
|
||||
if title['type'] != 'episode':
|
||||
logger.debug("Mismatched 'episode' type, discarding title %d (%s)", title_id, sanitized_result)
|
||||
return
|
||||
|
||||
# discard mismatches on season
|
||||
if 'season' not in title or title['season'] != season:
|
||||
logger.debug('Mismatched season %s, discarding title %d (%s)',
|
||||
title.get('season'), title_id, sanitized_result)
|
||||
return
|
||||
# movie type
|
||||
else:
|
||||
# discard mismatches on type
|
||||
if title['type'] != 'movie':
|
||||
logger.debug("Mismatched 'movie' type, discarding title %d (%s)", title_id, sanitized_result)
|
||||
return
|
||||
|
||||
# discard mismatches on year
|
||||
if year is not None and 'year' in title and title['year'] != year:
|
||||
logger.debug("Mismatched movie year, discarding title %d (%s)", title_id, sanitized_result)
|
||||
return
|
||||
return True
|
||||
|
||||
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value)
|
||||
def search_titles(self, title, season, title_year):
|
||||
"""Search for titles matching the `title`.
|
||||
|
||||
For episodes, each season has it own title
|
||||
:param str title: the title to search for.
|
||||
:param int season: season of the title
|
||||
:param int title_year: year of the title
|
||||
:return: found titles.
|
||||
:rtype: dict
|
||||
|
||||
"""
|
||||
# make the query
|
||||
logger.info('Searching title %r', title)
|
||||
r = self.session.get(self.server_url + 'legenda/sugestao/{}'.format(title), timeout=10)
|
||||
r.raise_for_status()
|
||||
results = json.loads(r.text)
|
||||
|
||||
# loop over results
|
||||
titles = {}
|
||||
for result in results:
|
||||
source = result['_source']
|
||||
sanitized_titles = [sanitize(title)]
|
||||
ignore_characters = {'\'', '.'}
|
||||
if any(c in title for c in ignore_characters):
|
||||
sanitized_titles.append(sanitize(title, ignore_characters=ignore_characters))
|
||||
|
||||
# extract id
|
||||
title_id = int(source['id_filme'])
|
||||
for sanitized_title in sanitized_titles:
|
||||
# make the query
|
||||
if season:
|
||||
logger.info('Searching episode title %r for season %r', sanitized_title, season)
|
||||
else:
|
||||
logger.info('Searching movie title %r', sanitized_title)
|
||||
|
||||
# extract type and title
|
||||
title = {'type': type_map[source['tipo']], 'title': source['dsc_nome']}
|
||||
r = self.session.get(self.server_url + 'legenda/sugestao/{}'.format(sanitized_title), timeout=10)
|
||||
raise_for_status(r)
|
||||
results = json.loads(r.text)
|
||||
|
||||
# extract year
|
||||
if source['dsc_data_lancamento'] and source['dsc_data_lancamento'].isdigit():
|
||||
title['year'] = int(source['dsc_data_lancamento'])
|
||||
# loop over results
|
||||
for result in results:
|
||||
source = result['_source']
|
||||
|
||||
# extract imdb_id
|
||||
if source['id_imdb'] != '0':
|
||||
if not source['id_imdb'].startswith('tt'):
|
||||
title['imdb_id'] = 'tt' + source['id_imdb'].zfill(7)
|
||||
else:
|
||||
title['imdb_id'] = source['id_imdb']
|
||||
# extract id
|
||||
title_id = int(source['id_filme'])
|
||||
|
||||
# extract season
|
||||
if title['type'] == 'episode':
|
||||
if source['temporada'] and source['temporada'].isdigit():
|
||||
title['season'] = int(source['temporada'])
|
||||
else:
|
||||
match = season_re.search(source['dsc_nome_br'])
|
||||
if match:
|
||||
title['season'] = int(match.group('season'))
|
||||
# extract type
|
||||
title = {'type': type_map[source['tipo']]}
|
||||
|
||||
# extract title, year and country
|
||||
name, year, country = title_re.match(source['dsc_nome']).groups()
|
||||
title['title'] = name
|
||||
|
||||
# extract imdb_id
|
||||
if source['id_imdb'] != '0':
|
||||
if not source['id_imdb'].startswith('tt'):
|
||||
title['imdb_id'] = 'tt' + source['id_imdb'].zfill(7)
|
||||
else:
|
||||
logger.warning('No season detected for title %d', title_id)
|
||||
title['imdb_id'] = source['id_imdb']
|
||||
|
||||
# add title
|
||||
titles[title_id] = title
|
||||
# extract season
|
||||
if title['type'] == 'episode':
|
||||
if source['temporada'] and source['temporada'].isdigit():
|
||||
title['season'] = int(source['temporada'])
|
||||
else:
|
||||
match = season_re.search(source['dsc_nome_br'])
|
||||
if match:
|
||||
title['season'] = int(match.group('season'))
|
||||
else:
|
||||
logger.debug('No season detected for title %d (%s)', title_id, name)
|
||||
|
||||
logger.debug('Found %d titles', len(titles))
|
||||
# extract year
|
||||
if year:
|
||||
title['year'] = int(year)
|
||||
elif source['dsc_data_lancamento'] and source['dsc_data_lancamento'].isdigit():
|
||||
# year is based on season air date hence the adjustment
|
||||
title['year'] = int(source['dsc_data_lancamento']) - title.get('season', 1) + 1
|
||||
|
||||
# add title only if is valid
|
||||
# Check against title without ignored chars
|
||||
if self.is_valid_title(title, title_id, sanitized_titles[0], season, title_year):
|
||||
titles[title_id] = title
|
||||
|
||||
logger.debug('Found %d titles', len(titles))
|
||||
|
||||
return titles
|
||||
|
||||
@region.cache_on_arguments(expiration_time=timedelta(minutes=15).total_seconds())
|
||||
def get_archives(self, title_id, language_code):
|
||||
"""Get the archive list from a given `title_id` and `language_code`.
|
||||
def get_archives(self, title_id, language_code, title_type, season, episode):
|
||||
"""Get the archive list from a given `title_id`, `language_code`, `title_type`, `season` and `episode`.
|
||||
|
||||
:param int title_id: title id.
|
||||
:param int language_code: language code.
|
||||
:param str title_type: episode or movie
|
||||
:param int season: season
|
||||
:param int episode: episode
|
||||
:return: the archives.
|
||||
:rtype: list of :class:`LegendasTVArchive`
|
||||
|
||||
"""
|
||||
logger.info('Getting archives for title %d and language %d', title_id, language_code)
|
||||
archives = []
|
||||
page = 1
|
||||
page = 0
|
||||
while True:
|
||||
# get the archive page
|
||||
url = self.server_url + 'util/carrega_legendas_busca_filme/{title}/{language}/-/{page}'.format(
|
||||
title=title_id, language=language_code, page=page)
|
||||
url = self.server_url + 'legenda/busca/-/{language}/-/{page}/{title}'.format(
|
||||
language=language_code, page=page, title=title_id)
|
||||
r = self.session.get(url)
|
||||
r.raise_for_status()
|
||||
raise_for_status(r)
|
||||
|
||||
# parse the results
|
||||
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
|
||||
for archive_soup in soup.select('div.list_element > article > div'):
|
||||
for archive_soup in soup.select('div.list_element > article > div > div.f_left'):
|
||||
# create archive
|
||||
archive = LegendasTVArchive(archive_soup.a['href'].split('/')[2], archive_soup.a.text,
|
||||
'pack' in archive_soup['class'], 'destaque' in archive_soup['class'],
|
||||
archive = LegendasTVArchive(archive_soup.a['href'].split('/')[2],
|
||||
archive_soup.a.text,
|
||||
'pack' in archive_soup.parent['class'],
|
||||
'destaque' in archive_soup.parent['class'],
|
||||
self.server_url + archive_soup.a['href'][1:])
|
||||
# clean name of path separators and pack flags
|
||||
clean_name = archive.name.replace('/', '-')
|
||||
if archive.pack and clean_name.startswith('(p)'):
|
||||
clean_name = clean_name[3:]
|
||||
|
||||
# guess from name
|
||||
guess = guessit(clean_name, {'type': title_type})
|
||||
|
||||
                # episode
                if season and episode:
                    # discard mismatches on episode in non-pack archives

                    # Guessit may return int for single episode or list for multi-episode
                    # Check if archive name has multiple episodes releases on it
                    if not archive.pack and 'episode' in guess:
                        wanted_episode = set(episode) if isinstance(episode, list) else {episode}
                        archive_episode = guess['episode'] if isinstance(guess['episode'], list) else {guess['episode']}

                        if not wanted_episode.intersection(archive_episode):
                            logger.debug('Mismatched episode %s, discarding archive: %s', guess['episode'], clean_name)
                            continue

                # extract text containing downloads, rating and timestamp
                data_text = archive_soup.find('p', class_='data').text
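Within get_archives, pack archives are always kept, while a single archive is dropped when the episodes guessed from its name do not intersect the wanted ones. A small sketch of that filter with both sides normalized to sets, since guessit returns an int for one episode and a list for a multi-episode release:

def episode_matches(wanted, guessed, is_pack):
    # Packs cover several episodes of a season, so never discard them here.
    if is_pack or guessed is None:
        return True
    wanted_set = set(wanted) if isinstance(wanted, list) else {wanted}
    guessed_set = set(guessed) if isinstance(guessed, list) else {guessed}
    # Keep the archive as soon as one wanted episode appears in its name.
    return bool(wanted_set & guessed_set)


# episode_matches(5, [5, 6], is_pack=False)  -> True
# episode_matches(7, [5, 6], is_pack=False)  -> False
# episode_matches(7, [5, 6], is_pack=True)   -> True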
|
@ -300,6 +392,8 @@ class LegendasTVProvider(Provider):
|
|||
raise ProviderError('Archive timestamp is in the future')
|
||||
|
||||
# add archive
|
||||
logger.info('Found archive for title %d and language %d at page %s: %s',
|
||||
title_id, language_code, page, archive)
|
||||
archives.append(archive)
|
||||
|
||||
# stop on last page
|
||||
|
@ -322,7 +416,7 @@ class LegendasTVProvider(Provider):
|
|||
"""
|
||||
logger.info('Downloading archive %s', archive.id)
|
||||
r = self.session.get(self.server_url + 'downloadarquivo/{}'.format(archive.id))
|
||||
r.raise_for_status()
|
||||
raise_for_status(r)
|
||||
|
||||
# open the archive
|
||||
archive_stream = io.BytesIO(r.content)
|
||||
|
@ -337,60 +431,26 @@ class LegendasTVProvider(Provider):
|
|||
|
||||
def query(self, language, title, season=None, episode=None, year=None):
|
||||
# search for titles
|
||||
titles = self.search_titles(sanitize(title))
|
||||
|
||||
# search for titles with the quote or dot character
|
||||
ignore_characters = {'\'', '.'}
|
||||
if any(c in title for c in ignore_characters):
|
||||
titles.update(self.search_titles(sanitize(title, ignore_characters=ignore_characters)))
|
||||
titles = self.search_titles(title, season, year)
|
||||
|
||||
subtitles = []
|
||||
# iterate over titles
|
||||
for title_id, t in titles.items():
|
||||
# discard mismatches on title
|
||||
if sanitize(t['title']) != sanitize(title):
|
||||
continue
|
||||
|
||||
# episode
|
||||
if season and episode:
|
||||
# discard mismatches on type
|
||||
if t['type'] != 'episode':
|
||||
continue
|
||||
|
||||
# discard mismatches on season
|
||||
if 'season' not in t or t['season'] != season:
|
||||
continue
|
||||
# movie
|
||||
else:
|
||||
# discard mismatches on type
|
||||
if t['type'] != 'movie':
|
||||
continue
|
||||
|
||||
# discard mismatches on year
|
||||
if year is not None and 'year' in t and t['year'] != year:
|
||||
continue
|
||||
logger.info('Getting archives for title %d and language %d', title_id, language.legendastv)
|
||||
archives = self.get_archives(title_id, language.legendastv, t['type'], season, episode)
|
||||
if not archives:
|
||||
logger.info('No archives found for title %d and language %d', title_id, language.legendastv)
|
||||
|
||||
# iterate over title's archives
|
||||
for a in self.get_archives(title_id, language.legendastv):
|
||||
# clean name of path separators and pack flags
|
||||
clean_name = a.name.replace('/', '-')
|
||||
if a.pack and clean_name.startswith('(p)'):
|
||||
clean_name = clean_name[3:]
|
||||
|
||||
# guess from name
|
||||
guess = guessit(clean_name, {'type': t['type']})
|
||||
|
||||
# episode
|
||||
if season and episode:
|
||||
# discard mismatches on episode in non-pack archives
|
||||
if not a.pack and 'episode' in guess and guess['episode'] != episode:
|
||||
continue
|
||||
for a in archives:
|
||||
|
||||
# compute an expiration time based on the archive timestamp
|
||||
expiration_time = (datetime.utcnow().replace(tzinfo=pytz.utc) - a.timestamp).total_seconds()
|
||||
|
||||
# attempt to get the releases from the cache
|
||||
releases = region.get(releases_key.format(archive_id=a.id), expiration_time=expiration_time)
|
||||
cache_key = releases_key.format(archive_id=a.id, archive_name=a.name)
|
||||
releases = region.get(cache_key, expiration_time=expiration_time)
|
||||
|
||||
# the releases are not in cache or cache is expired
|
||||
if releases == NO_VALUE:
|
||||
|
@ -417,12 +477,12 @@ class LegendasTVProvider(Provider):
|
|||
releases.append(name)
|
||||
|
||||
# cache the releases
|
||||
region.set(releases_key.format(archive_id=a.id), releases)
|
||||
region.set(cache_key, releases)
|
||||
|
||||
# iterate over releases
|
||||
for r in releases:
|
||||
subtitle = LegendasTVSubtitle(language, t['type'], t['title'], t.get('year'), t.get('imdb_id'),
|
||||
t.get('season'), a, r)
|
||||
subtitle = self.subtitle_class(language, t['type'], t['title'], t.get('year'), t.get('imdb_id'),
|
||||
t.get('season'), a, r)
|
||||
logger.debug('Found subtitle %r', subtitle)
|
||||
subtitles.append(subtitle)
|
||||
|
||||
|
@ -431,13 +491,19 @@ class LegendasTVProvider(Provider):
|
|||
def list_subtitles(self, video, languages):
|
||||
season = episode = None
|
||||
if isinstance(video, Episode):
|
||||
title = video.series
|
||||
titles = [video.series] + video.alternative_series
|
||||
season = video.season
|
||||
episode = video.episode
|
||||
else:
|
||||
title = video.title
|
||||
titles = [video.title] + video.alternative_titles
|
||||
|
||||
return [s for l in languages for s in self.query(l, title, season=season, episode=episode, year=video.year)]
|
||||
for title in titles:
|
||||
subtitles = [s for l in languages for s in
|
||||
self.query(l, title, season=season, episode=episode, year=video.year)]
|
||||
if subtitles:
|
||||
return subtitles
|
||||
|
||||
return []
|
||||
|
||||
def download_subtitle(self, subtitle):
|
||||
# download archive in case we previously hit the releases cache and didn't download it
|
||||
|
@@ -446,3 +512,11 @@ class LegendasTVProvider(Provider):

        # extract subtitle's content
        subtitle.content = fix_line_ending(subtitle.archive.content.read(subtitle.name))


def raise_for_status(r):
    # When the site is under maintenance it still answers with HTTP status code 200.
    if 'Em breve estaremos de volta' in r.text:
        raise ServiceUnavailable
    else:
        r.raise_for_status()
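Because Legendas.TV serves its maintenance page with HTTP 200, a plain raise_for_status() would let it through; the helper above inspects the body first. A hypothetical wrapper showing how such a check sits in front of the usual requests error handling (ServiceUnavailable imported from subliminal.exceptions, as the new legendastv imports do):

import requests

from subliminal.exceptions import ServiceUnavailable


def checked_get(session, url, **kwargs):
    r = session.get(url, **kwargs)
    # The maintenance page comes back as 200 with this text in the body.
    if 'Em breve estaremos de volta' in r.text:
        raise ServiceUnavailable
    r.raise_for_status()
    return r


# session = requests.Session()
# r = checked_get(session, 'http://legendas.tv/', timeout=10)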
|
||||
|
|
|
@ -42,6 +42,7 @@ class NapiProjektSubtitle(Subtitle):
|
|||
def __init__(self, language, hash):
|
||||
super(NapiProjektSubtitle, self).__init__(language)
|
||||
self.hash = hash
|
||||
self.content = None
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
|
@ -62,6 +63,10 @@ class NapiProjektProvider(Provider):
|
|||
languages = {Language.fromalpha2(l) for l in ['pl']}
|
||||
required_hash = 'napiprojekt'
|
||||
server_url = 'http://napiprojekt.pl/unit_napisy/dl.php'
|
||||
subtitle_class = NapiProjektSubtitle
|
||||
|
||||
def __init__(self):
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
|
@ -81,16 +86,16 @@ class NapiProjektProvider(Provider):
|
|||
'f': hash,
|
||||
't': get_subhash(hash)}
|
||||
logger.info('Searching subtitle %r', params)
|
||||
response = self.session.get(self.server_url, params=params, timeout=10)
|
||||
response.raise_for_status()
|
||||
r = self.session.get(self.server_url, params=params, timeout=10)
|
||||
r.raise_for_status()
|
||||
|
||||
# handle subtitles not found and errors
|
||||
if response.content[:4] == b'NPc0':
|
||||
if r.content[:4] == b'NPc0':
|
||||
logger.debug('No subtitles found')
|
||||
return None
|
||||
|
||||
subtitle = NapiProjektSubtitle(language, hash)
|
||||
subtitle.content = response.content
|
||||
subtitle = self.subtitle_class(language, hash)
|
||||
subtitle.content = r.content
|
||||
logger.debug('Found subtitle %r', subtitle)
|
||||
|
||||
return subtitle
|
||||
|
|
|
@ -11,7 +11,8 @@ from six.moves.xmlrpc_client import ServerProxy
|
|||
|
||||
from . import Provider, TimeoutSafeTransport
|
||||
from .. import __short_version__
|
||||
from ..exceptions import AuthenticationError, ConfigurationError, DownloadLimitExceeded, ProviderError
|
||||
from ..exceptions import (AuthenticationError, ConfigurationError, DownloadLimitExceeded, ProviderError,
|
||||
ServiceUnavailable)
|
||||
from ..subtitle import Subtitle, fix_line_ending, guess_matches
|
||||
from ..utils import sanitize
|
||||
from ..video import Episode, Movie
|
||||
|
@ -26,7 +27,8 @@ class OpenSubtitlesSubtitle(Subtitle):
|
|||
|
||||
def __init__(self, language, hearing_impaired, page_link, subtitle_id, matched_by, movie_kind, hash, movie_name,
|
||||
movie_release_name, movie_year, movie_imdb_id, series_season, series_episode, filename, encoding):
|
||||
super(OpenSubtitlesSubtitle, self).__init__(language, hearing_impaired, page_link, encoding)
|
||||
super(OpenSubtitlesSubtitle, self).__init__(language, hearing_impaired=hearing_impaired,
|
||||
page_link=page_link, encoding=encoding)
|
||||
self.subtitle_id = subtitle_id
|
||||
self.matched_by = matched_by
|
||||
self.movie_kind = movie_kind
|
||||
|
@ -58,7 +60,8 @@ class OpenSubtitlesSubtitle(Subtitle):
|
|||
if isinstance(video, Episode) and self.movie_kind == 'episode':
|
||||
# tag match, assume series, year, season and episode matches
|
||||
if self.matched_by == 'tag':
|
||||
matches |= {'series', 'year', 'season', 'episode'}
|
||||
if not video.imdb_id or self.movie_imdb_id == video.imdb_id:
|
||||
matches |= {'series', 'year', 'season', 'episode'}
|
||||
# series
|
||||
if video.series and sanitize(self.series_name) == sanitize(video.series):
|
||||
matches.add('series')
|
||||
|
@ -87,7 +90,8 @@ class OpenSubtitlesSubtitle(Subtitle):
|
|||
elif isinstance(video, Movie) and self.movie_kind == 'movie':
|
||||
# tag match, assume title and year matches
|
||||
if self.matched_by == 'tag':
|
||||
matches |= {'title', 'year'}
|
||||
if not video.imdb_id or self.movie_imdb_id == video.imdb_id:
|
||||
matches |= {'title', 'year'}
|
||||
# title
|
||||
if video.title and sanitize(self.movie_name) == sanitize(video.title):
|
||||
matches.add('title')
|
||||
|
@ -122,10 +126,11 @@ class OpenSubtitlesProvider(Provider):
|
|||
|
||||
"""
|
||||
languages = {Language.fromopensubtitles(l) for l in language_converters['opensubtitles'].codes}
|
||||
subtitle_class = OpenSubtitlesSubtitle
|
||||
|
||||
def __init__(self, username=None, password=None):
|
||||
self.server = ServerProxy('https://api.opensubtitles.org/xml-rpc', TimeoutSafeTransport(10))
|
||||
if username and not password or not username and password:
|
||||
if any((username, password)) and not all((username, password)):
|
||||
raise ConfigurationError('Username and password must be specified')
|
||||
# None values not allowed for logging in, so replace it by ''
|
||||
self.username = username or ''
|
||||
|
@@ -156,7 +161,10 @@ class OpenSubtitlesProvider(Provider):
        if hash and size:
            criteria.append({'moviehash': hash, 'moviebytesize': str(size)})
        if imdb_id:
            criteria.append({'imdbid': imdb_id[2:]})
            if season and episode:
                criteria.append({'imdbid': imdb_id[2:], 'season': season, 'episode': episode})
            else:
                criteria.append({'imdbid': imdb_id[2:]})
        if tag:
            criteria.append({'tag': tag})
        if query and season and episode:
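When both an IMDB id and an episode number are known, the search criteria now pin the lookup to that episode instead of pulling everything under the id. A rough sketch of how the criteria list is assembled (the trailing query-based criterion is assumed, as its body is outside this hunk):

def build_criteria(hash=None, size=None, imdb_id=None, season=None, episode=None, tag=None, query=None):
    criteria = []
    if hash and size:
        criteria.append({'moviehash': hash, 'moviebytesize': str(size)})
    if imdb_id:
        if season and episode:
            # Scope the IMDB lookup to a single episode of the series.
            criteria.append({'imdbid': imdb_id[2:], 'season': season, 'episode': episode})
        else:
            criteria.append({'imdbid': imdb_id[2:]})
    if tag:
        criteria.append({'tag': tag})
    if query and season and episode:
        # Assumed shape; the continuation of this branch is not shown in the hunk.
        criteria.append({'query': query, 'season': season, 'episode': episode})
    return criteria


# build_criteria(imdb_id='tt0944947', season=3, episode=9)
# -> [{'imdbid': '0944947', 'season': 3, 'episode': 9}]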
@ -199,9 +207,9 @@ class OpenSubtitlesProvider(Provider):
|
|||
filename = subtitle_item['SubFileName']
|
||||
encoding = subtitle_item.get('SubEncoding') or None
|
||||
|
||||
subtitle = OpenSubtitlesSubtitle(language, hearing_impaired, page_link, subtitle_id, matched_by, movie_kind,
|
||||
hash, movie_name, movie_release_name, movie_year, movie_imdb_id,
|
||||
series_season, series_episode, filename, encoding)
|
||||
subtitle = self.subtitle_class(language, hearing_impaired, page_link, subtitle_id, matched_by, movie_kind,
|
||||
hash, movie_name, movie_release_name, movie_year, movie_imdb_id,
|
||||
series_season, series_episode, filename, encoding)
|
||||
logger.debug('Found subtitle %r by %s', subtitle, matched_by)
|
||||
subtitles.append(subtitle)
|
||||
|
||||
|
@ -260,11 +268,6 @@ class DisabledUserAgent(OpenSubtitlesError, AuthenticationError):
|
|||
pass
|
||||
|
||||
|
||||
class ServiceUnavailable(OpenSubtitlesError):
|
||||
"""Exception raised when status is '503 Service Unavailable'."""
|
||||
pass
|
||||
|
||||
|
||||
def checked(response):
|
||||
"""Check a response status before returning it.
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ class PodnapisiSubtitle(Subtitle):
|
|||
|
||||
def __init__(self, language, hearing_impaired, page_link, pid, releases, title, season=None, episode=None,
|
||||
year=None):
|
||||
super(PodnapisiSubtitle, self).__init__(language, hearing_impaired, page_link)
|
||||
super(PodnapisiSubtitle, self).__init__(language, hearing_impaired=hearing_impaired, page_link=page_link)
|
||||
self.pid = pid
|
||||
self.releases = releases
|
||||
self.title = title
|
||||
|
@ -49,7 +49,8 @@ class PodnapisiSubtitle(Subtitle):
|
|||
# episode
|
||||
if isinstance(video, Episode):
|
||||
# series
|
||||
if video.series and sanitize(self.title) == sanitize(video.series):
|
||||
if video.series and (sanitize(self.title) in (
|
||||
sanitize(name) for name in [video.series] + video.alternative_series)):
|
||||
matches.add('series')
|
||||
# year
|
||||
if video.original_series and self.year is None or video.year and video.year == self.year:
|
||||
|
@ -66,7 +67,8 @@ class PodnapisiSubtitle(Subtitle):
|
|||
# movie
|
||||
elif isinstance(video, Movie):
|
||||
# title
|
||||
if video.title and sanitize(self.title) == sanitize(video.title):
|
||||
if video.title and (sanitize(self.title) in (
|
||||
sanitize(name) for name in [video.title] + video.alternative_titles)):
|
||||
matches.add('title')
|
||||
# year
|
||||
if video.year and self.year == video.year:
|
||||
|
@ -82,7 +84,11 @@ class PodnapisiProvider(Provider):
|
|||
"""Podnapisi Provider."""
|
||||
languages = ({Language('por', 'BR'), Language('srp', script='Latn')} |
|
||||
{Language.fromalpha2(l) for l in language_converters['alpha2'].codes})
|
||||
server_url = 'http://podnapisi.net/subtitles/'
|
||||
server_url = 'https://www.podnapisi.net/subtitles/'
|
||||
subtitle_class = PodnapisiSubtitle
|
||||
|
||||
def __init__(self):
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
|
@ -108,7 +114,9 @@ class PodnapisiProvider(Provider):
|
|||
pids = set()
|
||||
while True:
|
||||
# query the server
|
||||
xml = etree.fromstring(self.session.get(self.server_url + 'search/old', params=params, timeout=10).content)
|
||||
r = self.session.get(self.server_url + 'search/old', params=params, timeout=10)
|
||||
r.raise_for_status()
|
||||
xml = etree.fromstring(r.content)
|
||||
|
||||
# exit if no results
|
||||
if not int(xml.find('pagination/results').text):
|
||||
|
@ -118,10 +126,14 @@ class PodnapisiProvider(Provider):
|
|||
# loop over subtitles
|
||||
for subtitle_xml in xml.findall('subtitle'):
|
||||
# read xml elements
|
||||
pid = subtitle_xml.find('pid').text
|
||||
# ignore duplicates, see http://www.podnapisi.net/forum/viewtopic.php?f=62&t=26164&start=10#p213321
|
||||
if pid in pids:
|
||||
continue
|
||||
|
||||
language = Language.fromietf(subtitle_xml.find('language').text)
|
||||
hearing_impaired = 'n' in (subtitle_xml.find('flags').text or '')
|
||||
page_link = subtitle_xml.find('url').text
|
||||
pid = subtitle_xml.find('pid').text
|
||||
releases = []
|
||||
if subtitle_xml.find('release').text:
|
||||
for release in subtitle_xml.find('release').text.split():
|
||||
|
@ -134,15 +146,11 @@ class PodnapisiProvider(Provider):
|
|||
year = int(subtitle_xml.find('year').text)
|
||||
|
||||
if is_episode:
|
||||
subtitle = PodnapisiSubtitle(language, hearing_impaired, page_link, pid, releases, title,
|
||||
season=season, episode=episode, year=year)
|
||||
subtitle = self.subtitle_class(language, hearing_impaired, page_link, pid, releases, title,
|
||||
season=season, episode=episode, year=year)
|
||||
else:
|
||||
subtitle = PodnapisiSubtitle(language, hearing_impaired, page_link, pid, releases, title,
|
||||
year=year)
|
||||
|
||||
# ignore duplicates, see http://www.podnapisi.net/forum/viewtopic.php?f=62&t=26164&start=10#p213321
|
||||
if pid in pids:
|
||||
continue
|
||||
subtitle = self.subtitle_class(language, hearing_impaired, page_link, pid, releases, title,
|
||||
year=year)
|
||||
|
||||
logger.debug('Found subtitle %r', subtitle)
|
||||
subtitles.append(subtitle)
|
||||
|
@ -159,11 +167,21 @@ class PodnapisiProvider(Provider):
|
|||
return subtitles
|
||||
|
||||
def list_subtitles(self, video, languages):
|
||||
season = episode = None
|
||||
if isinstance(video, Episode):
|
||||
return [s for l in languages for s in self.query(l, video.series, season=video.season,
|
||||
episode=video.episode, year=video.year)]
|
||||
elif isinstance(video, Movie):
|
||||
return [s for l in languages for s in self.query(l, video.title, year=video.year)]
|
||||
titles = [video.series] + video.alternative_series
|
||||
season = video.season
|
||||
episode = video.episode
|
||||
else:
|
||||
titles = [video.title] + video.alternative_titles
|
||||
|
||||
for title in titles:
|
||||
subtitles = [s for l in languages for s in
|
||||
self.query(l, title, season=season, episode=episode, year=video.year)]
|
||||
if subtitles:
|
||||
return subtitles
|
||||
|
||||
return []
|
||||
|
||||
def download_subtitle(self, subtitle):
|
||||
# download as a zip
|
||||
|
|
|
@ -42,6 +42,10 @@ class ShooterProvider(Provider):
|
|||
"""Shooter Provider."""
|
||||
languages = {Language(l) for l in ['eng', 'zho']}
|
||||
server_url = 'https://www.shooter.cn/api/subapi.php'
|
||||
subtitle_class = ShooterSubtitle
|
||||
|
||||
def __init__(self):
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
|
@ -64,7 +68,7 @@ class ShooterProvider(Provider):
|
|||
|
||||
# parse the subtitles
|
||||
results = json.loads(r.text)
|
||||
subtitles = [ShooterSubtitle(language, hash, t['Link']) for s in results for t in s['Files']]
|
||||
subtitles = [self.subtitle_class(language, hash, t['Link']) for s in results for t in s['Files']]
|
||||
|
||||
return subtitles
|
||||
|
||||
|
|
|
@ -1,235 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import bisect
|
||||
from collections import defaultdict
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import zipfile
|
||||
|
||||
from babelfish import Language
|
||||
from guessit import guessit
|
||||
from requests import Session
|
||||
|
||||
from . import ParserBeautifulSoup, Provider
|
||||
from .. import __short_version__
|
||||
from ..cache import SHOW_EXPIRATION_TIME, region
|
||||
from ..exceptions import AuthenticationError, ConfigurationError, ProviderError
|
||||
from ..subtitle import Subtitle, fix_line_ending, guess_matches
|
||||
from ..utils import sanitize
|
||||
from ..video import Episode, Movie
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SubsCenterSubtitle(Subtitle):
|
||||
"""SubsCenter Subtitle."""
|
||||
provider_name = 'subscenter'
|
||||
|
||||
def __init__(self, language, hearing_impaired, page_link, series, season, episode, title, subtitle_id, subtitle_key,
|
||||
downloaded, releases):
|
||||
super(SubsCenterSubtitle, self).__init__(language, hearing_impaired, page_link)
|
||||
self.series = series
|
||||
self.season = season
|
||||
self.episode = episode
|
||||
self.title = title
|
||||
self.subtitle_id = subtitle_id
|
||||
self.subtitle_key = subtitle_key
|
||||
self.downloaded = downloaded
|
||||
self.releases = releases
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
return str(self.subtitle_id)
|
||||
|
||||
def get_matches(self, video):
|
||||
matches = set()
|
||||
|
||||
# episode
|
||||
if isinstance(video, Episode):
|
||||
# series
|
||||
if video.series and sanitize(self.series) == sanitize(video.series):
|
||||
matches.add('series')
|
||||
# season
|
||||
if video.season and self.season == video.season:
|
||||
matches.add('season')
|
||||
# episode
|
||||
if video.episode and self.episode == video.episode:
|
||||
matches.add('episode')
|
||||
# guess
|
||||
for release in self.releases:
|
||||
matches |= guess_matches(video, guessit(release, {'type': 'episode'}))
|
||||
# movie
|
||||
elif isinstance(video, Movie):
|
||||
# guess
|
||||
for release in self.releases:
|
||||
matches |= guess_matches(video, guessit(release, {'type': 'movie'}))
|
||||
|
||||
# title
|
||||
if video.title and sanitize(self.title) == sanitize(video.title):
|
||||
matches.add('title')
|
||||
|
||||
return matches
|
||||
|
||||
|
||||
class SubsCenterProvider(Provider):
|
||||
"""SubsCenter Provider."""
|
||||
languages = {Language.fromalpha2(l) for l in ['he']}
|
||||
server_url = 'http://www.subscenter.co/he/'
|
||||
|
||||
def __init__(self, username=None, password=None):
|
||||
if username is not None and password is None or username is None and password is not None:
|
||||
raise ConfigurationError('Username and password must be specified')
|
||||
|
||||
self.session = None
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.logged_in = False
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
self.session.headers['User-Agent'] = 'Subliminal/{}'.format(__short_version__)
|
||||
|
||||
# login
|
||||
if self.username is not None and self.password is not None:
|
||||
logger.debug('Logging in')
|
||||
url = self.server_url + 'subscenter/accounts/login/'
|
||||
|
||||
# retrieve CSRF token
|
||||
self.session.get(url)
|
||||
csrf_token = self.session.cookies['csrftoken']
|
||||
|
||||
# actual login
|
||||
data = {'username': self.username, 'password': self.password, 'csrfmiddlewaretoken': csrf_token}
|
||||
r = self.session.post(url, data, allow_redirects=False, timeout=10)
|
||||
|
||||
if r.status_code != 302:
|
||||
raise AuthenticationError(self.username)
|
||||
|
||||
logger.info('Logged in')
|
||||
self.logged_in = True
|
||||
|
||||
def terminate(self):
|
||||
# logout
|
||||
if self.logged_in:
|
||||
logger.info('Logging out')
|
||||
r = self.session.get(self.server_url + 'subscenter/accounts/logout/', timeout=10)
|
||||
r.raise_for_status()
|
||||
logger.info('Logged out')
|
||||
self.logged_in = False
|
||||
|
||||
self.session.close()
|
||||
|
||||
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME)
|
||||
def _search_url_titles(self, title):
|
||||
"""Search the URL titles by kind for the given `title`.
|
||||
|
||||
:param str title: title to search for.
|
||||
:return: the URL titles by kind.
|
||||
:rtype: collections.defaultdict
|
||||
|
||||
"""
|
||||
# make the search
|
||||
logger.info('Searching title name for %r', title)
|
||||
r = self.session.get(self.server_url + 'subtitle/search/', params={'q': title}, timeout=10)
|
||||
r.raise_for_status()
|
||||
|
||||
# check for redirections
|
||||
if r.history and all([h.status_code == 302 for h in r.history]):
|
||||
logger.debug('Redirected to the subtitles page')
|
||||
links = [r.url]
|
||||
else:
|
||||
# get the suggestions (if needed)
|
||||
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
|
||||
links = [link.attrs['href'] for link in soup.select('#processes div.generalWindowTop a')]
|
||||
logger.debug('Found %d suggestions', len(links))
|
||||
|
||||
url_titles = defaultdict(list)
|
||||
for link in links:
|
||||
parts = link.split('/')
|
||||
url_titles[parts[-3]].append(parts[-2])
|
||||
|
||||
return url_titles
|
||||
|
||||
def query(self, title, season=None, episode=None):
|
||||
# search for the url title
|
||||
url_titles = self._search_url_titles(title)
|
||||
|
||||
# episode
|
||||
if season and episode:
|
||||
if 'series' not in url_titles:
|
||||
logger.error('No URL title found for series %r', title)
|
||||
return []
|
||||
url_title = url_titles['series'][0]
|
||||
logger.debug('Using series title %r', url_title)
|
||||
url = self.server_url + 'cst/data/series/sb/{}/{}/{}/'.format(url_title, season, episode)
|
||||
page_link = self.server_url + 'subtitle/series/{}/{}/{}/'.format(url_title, season, episode)
|
||||
else:
|
||||
if 'movie' not in url_titles:
|
||||
logger.error('No URL title found for movie %r', title)
|
||||
return []
|
||||
url_title = url_titles['movie'][0]
|
||||
logger.debug('Using movie title %r', url_title)
|
||||
url = self.server_url + 'cst/data/movie/sb/{}/'.format(url_title)
|
||||
page_link = self.server_url + 'subtitle/movie/{}/'.format(url_title)
|
||||
|
||||
# get the list of subtitles
|
||||
logger.debug('Getting the list of subtitles')
|
||||
r = self.session.get(url)
|
||||
r.raise_for_status()
|
||||
results = json.loads(r.text)
|
||||
|
||||
# loop over results
|
||||
subtitles = {}
|
||||
for language_code, language_data in results.items():
|
||||
for quality_data in language_data.values():
|
||||
for quality, subtitles_data in quality_data.items():
|
||||
for subtitle_item in subtitles_data.values():
|
||||
# read the item
|
||||
language = Language.fromalpha2(language_code)
|
||||
hearing_impaired = bool(subtitle_item['hearing_impaired'])
|
||||
subtitle_id = subtitle_item['id']
|
||||
subtitle_key = subtitle_item['key']
|
||||
downloaded = subtitle_item['downloaded']
|
||||
release = subtitle_item['subtitle_version']
|
||||
|
||||
# add the release and increment downloaded count if we already have the subtitle
|
||||
if subtitle_id in subtitles:
|
||||
logger.debug('Found additional release %r for subtitle %d', release, subtitle_id)
|
||||
bisect.insort_left(subtitles[subtitle_id].releases, release) # deterministic order
|
||||
subtitles[subtitle_id].downloaded += downloaded
|
||||
continue
|
||||
|
||||
# otherwise create it
|
||||
subtitle = SubsCenterSubtitle(language, hearing_impaired, page_link, title, season, episode,
|
||||
title, subtitle_id, subtitle_key, downloaded, [release])
|
||||
logger.debug('Found subtitle %r', subtitle)
|
||||
subtitles[subtitle_id] = subtitle
|
||||
|
||||
return subtitles.values()
|
||||
|
||||
def list_subtitles(self, video, languages):
|
||||
season = episode = None
|
||||
title = video.title
|
||||
|
||||
if isinstance(video, Episode):
|
||||
title = video.series
|
||||
season = video.season
|
||||
episode = video.episode
|
||||
|
||||
return [s for s in self.query(title, season, episode) if s.language in languages]
|
||||
|
||||
def download_subtitle(self, subtitle):
|
||||
# download
|
||||
url = self.server_url + 'subtitle/download/{}/{}/'.format(subtitle.language.alpha2, subtitle.subtitle_id)
|
||||
params = {'v': subtitle.releases[0], 'key': subtitle.subtitle_key}
|
||||
r = self.session.get(url, params=params, headers={'Referer': subtitle.page_link}, timeout=10)
|
||||
r.raise_for_status()
|
||||
|
||||
# open the zip
|
||||
with zipfile.ZipFile(io.BytesIO(r.content)) as zf:
|
||||
# remove some filenames from the namelist
|
||||
namelist = [n for n in zf.namelist() if not n.endswith('.txt')]
|
||||
if len(namelist) > 1:
|
||||
raise ProviderError('More than one file to unzip')
|
||||
|
||||
subtitle.content = fix_line_ending(zf.read(namelist[0]))
|
|
@ -40,6 +40,10 @@ class TheSubDBProvider(Provider):
|
|||
languages = {Language.fromthesubdb(l) for l in language_converters['thesubdb'].codes}
|
||||
required_hash = 'thesubdb'
|
||||
server_url = 'http://api.thesubdb.com/'
|
||||
subtitle_class = TheSubDBSubtitle
|
||||
|
||||
def __init__(self):
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
|
@ -66,7 +70,7 @@ class TheSubDBProvider(Provider):
|
|||
for language_code in r.text.split(','):
|
||||
language = Language.fromthesubdb(language_code)
|
||||
|
||||
subtitle = TheSubDBSubtitle(language, hash)
|
||||
subtitle = self.subtitle_class(language, hash)
|
||||
logger.debug('Found subtitle %r', subtitle)
|
||||
subtitles.append(subtitle)
|
||||
|
||||
|
|
|
@ -47,7 +47,8 @@ class TVsubtitlesSubtitle(Subtitle):
|
|||
matches = set()
|
||||
|
||||
# series
|
||||
if video.series and sanitize(self.series) == sanitize(video.series):
|
||||
if video.series and (sanitize(self.series) in (
|
||||
sanitize(name) for name in [video.series] + video.alternative_series)):
|
||||
matches.add('series')
|
||||
# season
|
||||
if video.season and self.season == video.season:
|
||||
|
@ -80,6 +81,10 @@ class TVsubtitlesProvider(Provider):
|
|||
]}
|
||||
video_types = (Episode,)
|
||||
server_url = 'http://www.tvsubtitles.net/'
|
||||
subtitle_class = TVsubtitlesSubtitle
|
||||
|
||||
def __init__(self):
|
||||
self.session = None
|
||||
|
||||
def initialize(self):
|
||||
self.session = Session()
|
||||
|
@ -158,13 +163,7 @@ class TVsubtitlesProvider(Provider):
|
|||
|
||||
return episode_ids
|
||||
|
||||
def query(self, series, season, episode, year=None):
|
||||
# search the show id
|
||||
show_id = self.search_show_id(series, year)
|
||||
if show_id is None:
|
||||
logger.error('No show id found for %r (%r)', series, {'year': year})
|
||||
return []
|
||||
|
||||
def query(self, show_id, series, season, episode, year=None):
|
||||
# get the episode ids
|
||||
episode_ids = self.get_episode_ids(show_id, season)
|
||||
if episode not in episode_ids:
|
||||
|
@ -184,9 +183,9 @@ class TVsubtitlesProvider(Provider):
|
|||
subtitle_id = int(row.parent['href'][10:-5])
|
||||
page_link = self.server_url + 'subtitle-%d.html' % subtitle_id
|
||||
rip = row.find('p', title='rip').text.strip() or None
|
||||
release = row.find('p', title='release').text.strip() or None
|
||||
release = row.find('h5').text.strip() or None
|
||||
|
||||
subtitle = TVsubtitlesSubtitle(language, page_link, subtitle_id, series, season, episode, year, rip,
|
||||
subtitle = self.subtitle_class(language, page_link, subtitle_id, series, season, episode, year, rip,
|
||||
release)
|
||||
logger.debug('Found subtitle %s', subtitle)
|
||||
subtitles.append(subtitle)
|
||||
|
@ -194,7 +193,24 @@ class TVsubtitlesProvider(Provider):
|
|||
return subtitles
|
||||
|
||||
def list_subtitles(self, video, languages):
|
||||
return [s for s in self.query(video.series, video.season, video.episode, video.year) if s.language in languages]
|
||||
# lookup show_id
|
||||
titles = [video.series] + video.alternative_series
|
||||
show_id = None
|
||||
for title in titles:
|
||||
show_id = self.search_show_id(title, video.year)
|
||||
if show_id is not None:
|
||||
break
|
||||
|
||||
# query for subtitles with the show_id
|
||||
if show_id is not None:
|
||||
subtitles = [s for s in self.query(show_id, title, video.season, video.episode, video.year)
|
||||
if s.language in languages and s.episode == video.episode]
|
||||
if subtitles:
|
||||
return subtitles
|
||||
else:
|
||||
logger.error('No show id found for %r (%r)', video.series, {'year': video.year})
|
||||
|
||||
return []
|
||||
|
||||
def download_subtitle(self, subtitle):
|
||||
# download as a zip
|
||||
|
|
|
@ -331,6 +331,7 @@ def refine(video, **kwargs):
|
|||
# add series information
|
||||
logger.debug('Found series %r', series)
|
||||
video.series = matching_result['match']['series']
|
||||
video.alternative_series.extend(series['aliases'])
|
||||
video.year = matching_result['match']['year']
|
||||
video.original_series = matching_result['match']['original_series']
|
||||
video.series_tvdb_id = series['id']
|
||||
|
|
|
@ -44,7 +44,7 @@ movie_scores = {'hash': 119, 'title': 60, 'year': 30, 'release_group': 15,
|
|||
'format': 7, 'audio_codec': 3, 'resolution': 2, 'video_codec': 2, 'hearing_impaired': 1}
|
||||
|
||||
#: Equivalent release groups
|
||||
equivalent_release_groups = ({'LOL', 'DIMENSION'}, {'ASAP', 'IMMERSE', 'FLEET'})
|
||||
equivalent_release_groups = ({'LOL', 'DIMENSION'}, {'ASAP', 'IMMERSE', 'FLEET'}, {'AVS', 'SVA'})
|
||||
|
||||
|
||||
def get_equivalent_release_groups(release_group):
|
||||
|
|
|
@ -208,8 +208,14 @@ def guess_matches(video, guess, partial=False):
        if video.season and 'season' in guess and guess['season'] == video.season:
            matches.add('season')
        # episode
        if video.episode and 'episode' in guess and guess['episode'] == video.episode:
            matches.add('episode')
        # Currently we only have single-ep support (guessit returns a multi-ep as a list with int values)
        # Most providers only support single-ep, so make sure it contains only 1 episode
        # In case of multi-ep, take the lowest episode (subtitles will normally be available on lowest episode number)
        if video.episode and 'episode' in guess:
            episode_guess = guess['episode']
            episode = min(episode_guess) if episode_guess and isinstance(episode_guess, list) else episode_guess
            if episode == video.episode:
                matches.add('episode')
        # year
        if video.year and 'year' in guess and guess['year'] == video.year:
            matches.add('year')
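guessit reports a single episode as an int but a multi-episode release as a list of ints; the new code normalizes to the lowest number before comparing against the video. The normalization step in isolation:

def normalize_episode(episode_guess):
    # guessit gives e.g. 5 for "S01E05" but [5, 6] for "S01E05E06".
    # Take the lowest episode: that is where a multi-episode subtitle is normally published.
    if isinstance(episode_guess, list) and episode_guess:
        return min(episode_guess)
    return episode_guess


# normalize_episode(5)       -> 5
# normalize_episode([5, 6])  -> 5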
|
|
@ -13,9 +13,9 @@ VIDEO_EXTENSIONS = ('.3g2', '.3gp', '.3gp2', '.3gpp', '.60d', '.ajp', '.asf', '.
|
|||
'.bix', '.box', '.cam', '.dat', '.divx', '.dmf', '.dv', '.dvr-ms', '.evo', '.flc', '.fli',
|
||||
'.flic', '.flv', '.flx', '.gvi', '.gvp', '.h264', '.m1v', '.m2p', '.m2ts', '.m2v', '.m4e',
|
||||
'.m4v', '.mjp', '.mjpeg', '.mjpg', '.mkv', '.moov', '.mov', '.movhd', '.movie', '.movx', '.mp4',
|
||||
'.mpe', '.mpeg', '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm' '.ogv', '.omf',
|
||||
'.mpe', '.mpeg', '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm', '.ogv', '.omf',
|
||||
'.ps', '.qt', '.ram', '.rm', '.rmvb', '.swf', '.ts', '.vfw', '.vid', '.video', '.viv', '.vivo',
|
||||
'.vob', '.vro', '.wm', '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid')
|
||||
'.vob', '.vro', '.webm', '.wm', '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid')
|
||||
|
||||
|
||||
class Video(object):
|
||||
|
@ -123,11 +123,12 @@ class Episode(Video):
|
|||
:param int year: year of the series.
|
||||
:param bool original_series: whether the series is the first with this name.
|
||||
:param int tvdb_id: TVDB id of the episode.
|
||||
:param list alternative_series: alternative names of the series
|
||||
:param \*\*kwargs: additional parameters for the :class:`Video` constructor.
|
||||
|
||||
"""
|
||||
def __init__(self, name, series, season, episode, title=None, year=None, original_series=True, tvdb_id=None,
|
||||
series_tvdb_id=None, series_imdb_id=None, **kwargs):
|
||||
series_tvdb_id=None, series_imdb_id=None, alternative_series=None, **kwargs):
|
||||
super(Episode, self).__init__(name, **kwargs)
|
||||
|
||||
#: Series of the episode
|
||||
|
@ -157,6 +158,9 @@ class Episode(Video):
|
|||
#: IMDb id of the series
|
||||
self.series_imdb_id = series_imdb_id
|
||||
|
||||
#: Alternative names of the series
|
||||
self.alternative_series = alternative_series or []
|
||||
|
||||
@classmethod
|
||||
def fromguess(cls, name, guess):
|
||||
if guess['type'] != 'episode':
|
||||
|
@ -165,7 +169,13 @@ class Episode(Video):
|
|||
if 'title' not in guess or 'episode' not in guess:
|
||||
raise ValueError('Insufficient data to process the guess')
|
||||
|
||||
return cls(name, guess['title'], guess.get('season', 1), guess['episode'], title=guess.get('episode_title'),
|
||||
# Currently we only have single-ep support (guessit returns a multi-ep as a list with int values)
|
||||
# Most providers only support single-ep, so make sure it contains only 1 episode
|
||||
# In case of multi-ep, take the lowest episode (subtitles will normally be available on lowest episode number)
|
||||
episode_guess = guess.get('episode')
|
||||
episode = min(episode_guess) if episode_guess and isinstance(episode_guess, list) else episode_guess
|
||||
|
||||
return cls(name, guess['title'], guess.get('season', 1), episode, title=guess.get('episode_title'),
|
||||
year=guess.get('year'), format=guess.get('format'), original_series='year' not in guess,
|
||||
release_group=guess.get('release_group'), resolution=guess.get('screen_size'),
|
||||
video_codec=guess.get('video_codec'), audio_codec=guess.get('audio_codec'))
|
||||
|
@ -186,10 +196,11 @@ class Movie(Video):
|
|||
|
||||
:param str title: title of the movie.
|
||||
:param int year: year of the movie.
|
||||
:param list alternative_titles: alternative titles of the movie
|
||||
:param \*\*kwargs: additional parameters for the :class:`Video` constructor.
|
||||
|
||||
"""
|
||||
def __init__(self, name, title, year=None, **kwargs):
|
||||
def __init__(self, name, title, year=None, alternative_titles=None, **kwargs):
|
||||
super(Movie, self).__init__(name, **kwargs)
|
||||
|
||||
#: Title of the movie
|
||||
|
@ -198,6 +209,9 @@ class Movie(Video):
|
|||
#: Year of the movie
|
||||
self.year = year
|
||||
|
||||
#: Alternative titles of the movie
|
||||
self.alternative_titles = alternative_titles or []
|
||||
|
||||
@classmethod
|
||||
def fromguess(cls, name, guess):
|
||||
if guess['type'] != 'movie':
|
||||
|
|
update_db.py
|
@ -83,15 +83,7 @@ if os.path.exists(os.path.join(os.path.dirname(__file__), 'data/db/bazarr.db'))
|
|||
c.execute('UPDATE table_settings_sonarr SET full_update="Daily"')
|
||||
|
||||
try:
|
||||
c.execute('CREATE TABLE "table_settings_radarr" ( `ip` TEXT NOT NULL, `port` INTEGER NOT NULL, `base_url` TEXT, `ssl` INTEGER, `apikey` TEXT , "full_update" "text")')
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
db.commit()
|
||||
c.execute('INSERT INTO `table_settings_radarr` (ip,port,base_url,ssl,apikey,full_update) VALUES ("127.0.0.1",7878,"/","False",Null,"Daily")')
|
||||
|
||||
try:
|
||||
c.execute('CREATE TABLE "table_movies" ( `tmdbId` TEXT NOT NULL UNIQUE, `title` TEXT NOT NULL, `path` TEXT NOT NULL UNIQUE, `languages` TEXT, `subtitles` TEXT, `missing_subtitles` TEXT, `hearing_impaired` TEXT, `radarrId` INTEGER NOT NULL UNIQUE, `overview` TEXT, `poster` TEXT, `fanart` TEXT, "audio_language" "text", `sceceName` TEXT, PRIMARY KEY(`tmdbId`) )')
|
||||
c.execute('alter table table_shows add column "sortTitle" "text"')
|
||||
except:
|
||||
pass
|
||||
|
||||
|
|
|
@ -122,7 +122,9 @@
|
|||
%else:
|
||||
%for season in seasons:
|
||||
<div id="fondblanc" class="ui container">
|
||||
<h1 class="ui header">Season {{season[0][2]}}</h1>
|
||||
%missing_subs = len([i for i in season if i[6] != "[]"])
|
||||
%total_subs = len(season)
|
||||
<h1 class="ui header">Season {{season[0][2]}}<div class="ui tiny {{!'green' if missing_subs == 0 else 'yellow'}} circular label">{{!total_subs - missing_subs}} / {{total_subs}}</div></h1>
|
||||
<div class="ui accordion">
|
||||
<div class="title">
|
||||
<div class="ui one column stackable center aligned page grid">
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
<!DOCTYPE html>
|
||||
<script src="{{base_url}}static/jquery/jquery-latest.min.js"></script>
|
||||
<script src="{{base_url}}static/semantic/semantic.min.js"></script>
|
||||
<script src="{{base_url}}static/jquery/tablesort.js"></script>
|
||||
<link rel="stylesheet" href="{{base_url}}static/semantic/semantic.min.css">
|
||||
|
||||
<link rel="apple-touch-icon" sizes="120x120" href="{{base_url}}static/apple-touch-icon.png">
|
||||
|
@ -40,6 +39,7 @@
|
|||
}
|
||||
.fast.backward, .backward, .forward, .fast.forward { pointer-events: auto; }
|
||||
.fast.backward.disabled, .backward.disabled, .forward.disabled, .fast.forward.disabled { pointer-events: none; }
|
||||
.ui.progress:last-child {margin: 0 0 0em !important;}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
@ -52,15 +52,16 @@
|
|||
<div class="ui basic buttons">
|
||||
<button id="serieseditor" class="ui button"><i class="configure icon"></i>Series Editor</button>
|
||||
</div>
|
||||
<table id="tableseries" class="ui very basic selectable sortable table">
|
||||
<table id="tableseries" class="ui very basic selectable table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="sorted ascending">Name</th>
|
||||
<th>Name</th>
|
||||
<th>Path</th>
|
||||
<th>Audio language</th>
|
||||
<th>Subtitles languages</th>
|
||||
<th>Hearing-impaired</th>
|
||||
<th class="no-sort"></th>
|
||||
<th class="two wide">Subtitles</th>
|
||||
<th></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
@ -87,6 +88,24 @@
|
|||
%end
|
||||
</td>
|
||||
<td>{{!"" if row[4] == None else row[4]}}</td>
|
||||
<td>
|
||||
%for total_subtitles in total_subtitles_list:
|
||||
% if total_subtitles[0] == row[5]:
|
||||
% total_subs = total_subtitles[1]
|
||||
% end
|
||||
%end
|
||||
%missing_subs = 0
|
||||
%for missing_subtitles in missing_subtitles_list:
|
||||
% if missing_subtitles[0] == row[5]:
|
||||
% missing_subs = missing_subtitles[1]
|
||||
% end
|
||||
%end
|
||||
<div class="ui progress" data-value="{{total_subs - missing_subs}}" data-total="{{total_subs}}">
|
||||
<div class="bar">
|
||||
<div class="progress"></div>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
<td {{!"style='background-color: #e8e8e8;'" if row[4] == None else ""}}>
|
||||
<%
|
||||
subs_languages_list = []
|
||||
|
@ -202,8 +221,6 @@
|
|||
sessionStorage.clear();
|
||||
}
|
||||
|
||||
$('table').tablesort();
|
||||
|
||||
$('a, button:not(.cancel)').click(function(){
|
||||
$('#loader').addClass('active');
|
||||
})
|
||||
|
@ -255,4 +272,22 @@
|
|||
})
|
||||
|
||||
$('#series_languages').dropdown();
|
||||
|
||||
$('.progress').progress({
|
||||
label: 'ratio',
|
||||
text: {
|
||||
ratio: '{value} / {total}'
|
||||
},
|
||||
showActivity: false
|
||||
});
|
||||
|
||||
$( ".progress" ).each(function() {
|
||||
if ($(this).progress('is complete') != true) {
|
||||
$(this).progress('set warning');
|
||||
}
|
||||
if ($(this).progress('get total') == 0) {
|
||||
$(this).progress('set success');
|
||||
$(this).progress('set bar label', '0 / 0');
|
||||
}
|
||||
});
|
||||
</script>
|