mirror of
https://github.com/morpheus65535/bazarr.git
synced 2025-04-24 06:37:16 -04:00
WIP
This commit is contained in:
parent
43d33dc247
commit
f389c38a9c
47 changed files with 1856 additions and 2021 deletions
|
@ -1,5 +1,4 @@
|
|||
# coding=utf-8
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import datetime
|
||||
import logging
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
# coding=utf-8
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@ from bs4 import UnicodeDammit
|
|||
from itertools import islice
|
||||
from database import TableShows, TableEpisodes, TableMovies
|
||||
from peewee import fn, JOIN
|
||||
from functools import reduce
|
||||
|
||||
from get_args import args
|
||||
from get_languages import alpha2_from_alpha3, get_language_set
|
||||
|
@ -224,9 +225,9 @@ def store_subtitles_movie(file):
|
|||
|
||||
|
||||
def list_missing_subtitles(no=None):
|
||||
episodes_subtitles_clause = {TableShows.sonarr_series_id.is_null(False)}
|
||||
episodes_subtitles_clause = "TableShows.sonarr_series_id.is_null(False)"
|
||||
if no is not None:
|
||||
episodes_subtitles_clause = {TableShows.sonarr_series_id ** no}
|
||||
episodes_subtitles_clause = "TableShows.sonarr_series_id ** no"
|
||||
|
||||
episodes_subtitles = TableEpisodes.select(
|
||||
TableEpisodes.sonarr_episode_id,
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
# coding=utf-8
|
||||
|
||||
bazarr_version = '0.8.2'
|
||||
|
||||
import six
|
||||
from six.moves import zip
|
||||
from functools import reduce
|
||||
bazarr_version = '0.8.2'
|
||||
|
||||
import gc
|
||||
import sys
|
||||
|
@ -60,8 +61,6 @@ from config import settings, url_sonarr, url_radarr, url_radarr_short, url_sonar
|
|||
from subliminal_patch.extensions import provider_registry as provider_manager
|
||||
from subliminal_patch.core import SUBTITLE_EXTENSIONS
|
||||
|
||||
reload(sys)
|
||||
sys.setdefaultencoding('utf8')
|
||||
gc.enable()
|
||||
|
||||
os.environ["SZ_USER_AGENT"] = "Bazarr/1"
|
||||
|
@ -2016,10 +2015,12 @@ def system():
|
|||
def get_logs():
|
||||
authorize()
|
||||
logs = []
|
||||
for line in reversed(open(os.path.join(args.config_dir, 'log', 'bazarr.log')).readlines()):
|
||||
lin = []
|
||||
lin = line.split('|')
|
||||
logs.append(lin)
|
||||
with open(os.path.join(args.config_dir, 'log', 'bazarr.log')) as file:
|
||||
for line in file.readlines():
|
||||
lin = []
|
||||
lin = line.split('|')
|
||||
logs.append(lin)
|
||||
logs.reverse()
|
||||
|
||||
return dict(data=logs)
|
||||
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
from .core import where
|
||||
|
||||
__version__ = "2019.03.09"
|
||||
__version__ = "2019.09.11"
|
||||
|
|
|
@ -771,36 +771,6 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
|
|||
+OkuE6N36B9K
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Class 2 Primary CA O=Certplus
|
||||
# Subject: CN=Class 2 Primary CA O=Certplus
|
||||
# Label: "Certplus Class 2 Primary CA"
|
||||
# Serial: 177770208045934040241468760488327595043
|
||||
# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
|
||||
# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
|
||||
# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
|
||||
PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
|
||||
cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
|
||||
MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
|
||||
IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
|
||||
ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
|
||||
VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
|
||||
kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
|
||||
EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
|
||||
H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
|
||||
HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
|
||||
DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
|
||||
QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
|
||||
Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
|
||||
AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
|
||||
yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
|
||||
FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
|
||||
ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
|
||||
kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
|
||||
l7+ijrRU
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
|
||||
# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
|
||||
# Label: "DST Root CA X3"
|
||||
|
@ -1219,36 +1189,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
|
|||
WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
|
||||
# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
|
||||
# Label: "Deutsche Telekom Root CA 2"
|
||||
# Serial: 38
|
||||
# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
|
||||
# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
|
||||
# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
|
||||
MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
|
||||
IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
|
||||
IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
|
||||
RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
|
||||
U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
|
||||
IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
|
||||
ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
|
||||
QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
|
||||
rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
|
||||
NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
|
||||
QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
|
||||
txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
|
||||
BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
|
||||
AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
|
||||
tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
|
||||
IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
|
||||
6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
|
||||
xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
|
||||
Cm26OWMohpLzGITY+9HPBVZkVw==
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
|
||||
# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
|
||||
# Label: "Cybertrust Global Root"
|
||||
|
@ -3453,46 +3393,6 @@ AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
|
|||
5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
|
||||
# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
|
||||
# Label: "Certinomis - Root CA"
|
||||
# Serial: 1
|
||||
# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
|
||||
# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
|
||||
# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
|
||||
MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
|
||||
BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
|
||||
MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
|
||||
FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
|
||||
Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
|
||||
fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
|
||||
LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
|
||||
WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
|
||||
TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
|
||||
5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
|
||||
CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
|
||||
wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
|
||||
wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
|
||||
m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
|
||||
F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
|
||||
WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
|
||||
BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
|
||||
2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
|
||||
AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
|
||||
0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
|
||||
F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
|
||||
g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
|
||||
qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
|
||||
h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
|
||||
ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
|
||||
btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
|
||||
Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
|
||||
8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
|
||||
gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
|
||||
# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
|
||||
# Label: "OISTE WISeKey Global Root GB CA"
|
||||
|
|
|
@ -267,10 +267,7 @@ def alabel(label):
|
|||
|
||||
try:
|
||||
label = label.encode('ascii')
|
||||
try:
|
||||
ulabel(label)
|
||||
except IDNAError:
|
||||
raise IDNAError('The label {0} is not a valid A-label'.format(label))
|
||||
ulabel(label)
|
||||
if not valid_label_length(label):
|
||||
raise IDNAError('Label too long')
|
||||
return label
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# This file is automatically generated by tools/idna-data
|
||||
|
||||
__version__ = "10.0.0"
|
||||
__version__ = "11.0.0"
|
||||
scripts = {
|
||||
'Greek': (
|
||||
0x37000000374,
|
||||
|
@ -49,7 +49,7 @@ scripts = {
|
|||
0x30210000302a,
|
||||
0x30380000303c,
|
||||
0x340000004db6,
|
||||
0x4e0000009feb,
|
||||
0x4e0000009ff0,
|
||||
0xf9000000fa6e,
|
||||
0xfa700000fada,
|
||||
0x200000002a6d7,
|
||||
|
@ -62,7 +62,7 @@ scripts = {
|
|||
'Hebrew': (
|
||||
0x591000005c8,
|
||||
0x5d0000005eb,
|
||||
0x5f0000005f5,
|
||||
0x5ef000005f5,
|
||||
0xfb1d0000fb37,
|
||||
0xfb380000fb3d,
|
||||
0xfb3e0000fb3f,
|
||||
|
@ -248,6 +248,7 @@ joining_types = {
|
|||
0x6fb: 68,
|
||||
0x6fc: 68,
|
||||
0x6ff: 68,
|
||||
0x70f: 84,
|
||||
0x710: 82,
|
||||
0x712: 68,
|
||||
0x713: 68,
|
||||
|
@ -522,6 +523,7 @@ joining_types = {
|
|||
0x1875: 68,
|
||||
0x1876: 68,
|
||||
0x1877: 68,
|
||||
0x1878: 68,
|
||||
0x1880: 85,
|
||||
0x1881: 85,
|
||||
0x1882: 85,
|
||||
|
@ -690,6 +692,70 @@ joining_types = {
|
|||
0x10bad: 68,
|
||||
0x10bae: 68,
|
||||
0x10baf: 85,
|
||||
0x10d00: 76,
|
||||
0x10d01: 68,
|
||||
0x10d02: 68,
|
||||
0x10d03: 68,
|
||||
0x10d04: 68,
|
||||
0x10d05: 68,
|
||||
0x10d06: 68,
|
||||
0x10d07: 68,
|
||||
0x10d08: 68,
|
||||
0x10d09: 68,
|
||||
0x10d0a: 68,
|
||||
0x10d0b: 68,
|
||||
0x10d0c: 68,
|
||||
0x10d0d: 68,
|
||||
0x10d0e: 68,
|
||||
0x10d0f: 68,
|
||||
0x10d10: 68,
|
||||
0x10d11: 68,
|
||||
0x10d12: 68,
|
||||
0x10d13: 68,
|
||||
0x10d14: 68,
|
||||
0x10d15: 68,
|
||||
0x10d16: 68,
|
||||
0x10d17: 68,
|
||||
0x10d18: 68,
|
||||
0x10d19: 68,
|
||||
0x10d1a: 68,
|
||||
0x10d1b: 68,
|
||||
0x10d1c: 68,
|
||||
0x10d1d: 68,
|
||||
0x10d1e: 68,
|
||||
0x10d1f: 68,
|
||||
0x10d20: 68,
|
||||
0x10d21: 68,
|
||||
0x10d22: 82,
|
||||
0x10d23: 68,
|
||||
0x10f30: 68,
|
||||
0x10f31: 68,
|
||||
0x10f32: 68,
|
||||
0x10f33: 82,
|
||||
0x10f34: 68,
|
||||
0x10f35: 68,
|
||||
0x10f36: 68,
|
||||
0x10f37: 68,
|
||||
0x10f38: 68,
|
||||
0x10f39: 68,
|
||||
0x10f3a: 68,
|
||||
0x10f3b: 68,
|
||||
0x10f3c: 68,
|
||||
0x10f3d: 68,
|
||||
0x10f3e: 68,
|
||||
0x10f3f: 68,
|
||||
0x10f40: 68,
|
||||
0x10f41: 68,
|
||||
0x10f42: 68,
|
||||
0x10f43: 68,
|
||||
0x10f44: 68,
|
||||
0x10f45: 85,
|
||||
0x10f51: 68,
|
||||
0x10f52: 68,
|
||||
0x10f53: 68,
|
||||
0x10f54: 82,
|
||||
0x110bd: 85,
|
||||
0x110cd: 85,
|
||||
0x1e900: 68,
|
||||
0x1e901: 68,
|
||||
0x1e902: 68,
|
||||
|
@ -1034,14 +1100,15 @@ codepoint_classes = {
|
|||
0x52d0000052e,
|
||||
0x52f00000530,
|
||||
0x5590000055a,
|
||||
0x56100000587,
|
||||
0x56000000587,
|
||||
0x58800000589,
|
||||
0x591000005be,
|
||||
0x5bf000005c0,
|
||||
0x5c1000005c3,
|
||||
0x5c4000005c6,
|
||||
0x5c7000005c8,
|
||||
0x5d0000005eb,
|
||||
0x5f0000005f3,
|
||||
0x5ef000005f3,
|
||||
0x6100000061b,
|
||||
0x62000000640,
|
||||
0x64100000660,
|
||||
|
@ -1054,12 +1121,13 @@ codepoint_classes = {
|
|||
0x7100000074b,
|
||||
0x74d000007b2,
|
||||
0x7c0000007f6,
|
||||
0x7fd000007fe,
|
||||
0x8000000082e,
|
||||
0x8400000085c,
|
||||
0x8600000086b,
|
||||
0x8a0000008b5,
|
||||
0x8b6000008be,
|
||||
0x8d4000008e2,
|
||||
0x8d3000008e2,
|
||||
0x8e300000958,
|
||||
0x96000000964,
|
||||
0x96600000970,
|
||||
|
@ -1077,6 +1145,7 @@ codepoint_classes = {
|
|||
0x9e0000009e4,
|
||||
0x9e6000009f2,
|
||||
0x9fc000009fd,
|
||||
0x9fe000009ff,
|
||||
0xa0100000a04,
|
||||
0xa0500000a0b,
|
||||
0xa0f00000a11,
|
||||
|
@ -1136,8 +1205,7 @@ codepoint_classes = {
|
|||
0xbd000000bd1,
|
||||
0xbd700000bd8,
|
||||
0xbe600000bf0,
|
||||
0xc0000000c04,
|
||||
0xc0500000c0d,
|
||||
0xc0000000c0d,
|
||||
0xc0e00000c11,
|
||||
0xc1200000c29,
|
||||
0xc2a00000c3a,
|
||||
|
@ -1276,7 +1344,7 @@ codepoint_classes = {
|
|||
0x17dc000017de,
|
||||
0x17e0000017ea,
|
||||
0x18100000181a,
|
||||
0x182000001878,
|
||||
0x182000001879,
|
||||
0x1880000018ab,
|
||||
0x18b0000018f6,
|
||||
0x19000000191f,
|
||||
|
@ -1544,11 +1612,11 @@ codepoint_classes = {
|
|||
0x309d0000309f,
|
||||
0x30a1000030fb,
|
||||
0x30fc000030ff,
|
||||
0x31050000312f,
|
||||
0x310500003130,
|
||||
0x31a0000031bb,
|
||||
0x31f000003200,
|
||||
0x340000004db6,
|
||||
0x4e0000009feb,
|
||||
0x4e0000009ff0,
|
||||
0xa0000000a48d,
|
||||
0xa4d00000a4fe,
|
||||
0xa5000000a60d,
|
||||
|
@ -1655,8 +1723,10 @@ codepoint_classes = {
|
|||
0xa7a50000a7a6,
|
||||
0xa7a70000a7a8,
|
||||
0xa7a90000a7aa,
|
||||
0xa7af0000a7b0,
|
||||
0xa7b50000a7b6,
|
||||
0xa7b70000a7b8,
|
||||
0xa7b90000a7ba,
|
||||
0xa7f70000a7f8,
|
||||
0xa7fa0000a828,
|
||||
0xa8400000a874,
|
||||
|
@ -1664,8 +1734,7 @@ codepoint_classes = {
|
|||
0xa8d00000a8da,
|
||||
0xa8e00000a8f8,
|
||||
0xa8fb0000a8fc,
|
||||
0xa8fd0000a8fe,
|
||||
0xa9000000a92e,
|
||||
0xa8fd0000a92e,
|
||||
0xa9300000a954,
|
||||
0xa9800000a9c1,
|
||||
0xa9cf0000a9da,
|
||||
|
@ -1743,7 +1812,7 @@ codepoint_classes = {
|
|||
0x10a0500010a07,
|
||||
0x10a0c00010a14,
|
||||
0x10a1500010a18,
|
||||
0x10a1900010a34,
|
||||
0x10a1900010a36,
|
||||
0x10a3800010a3b,
|
||||
0x10a3f00010a40,
|
||||
0x10a6000010a7d,
|
||||
|
@ -1756,6 +1825,11 @@ codepoint_classes = {
|
|||
0x10b8000010b92,
|
||||
0x10c0000010c49,
|
||||
0x10cc000010cf3,
|
||||
0x10d0000010d28,
|
||||
0x10d3000010d3a,
|
||||
0x10f0000010f1d,
|
||||
0x10f2700010f28,
|
||||
0x10f3000010f51,
|
||||
0x1100000011047,
|
||||
0x1106600011070,
|
||||
0x1107f000110bb,
|
||||
|
@ -1763,10 +1837,11 @@ codepoint_classes = {
|
|||
0x110f0000110fa,
|
||||
0x1110000011135,
|
||||
0x1113600011140,
|
||||
0x1114400011147,
|
||||
0x1115000011174,
|
||||
0x1117600011177,
|
||||
0x11180000111c5,
|
||||
0x111ca000111cd,
|
||||
0x111c9000111cd,
|
||||
0x111d0000111db,
|
||||
0x111dc000111dd,
|
||||
0x1120000011212,
|
||||
|
@ -1786,7 +1861,7 @@ codepoint_classes = {
|
|||
0x1132a00011331,
|
||||
0x1133200011334,
|
||||
0x113350001133a,
|
||||
0x1133c00011345,
|
||||
0x1133b00011345,
|
||||
0x1134700011349,
|
||||
0x1134b0001134e,
|
||||
0x1135000011351,
|
||||
|
@ -1796,6 +1871,7 @@ codepoint_classes = {
|
|||
0x1137000011375,
|
||||
0x114000001144b,
|
||||
0x114500001145a,
|
||||
0x1145e0001145f,
|
||||
0x11480000114c6,
|
||||
0x114c7000114c8,
|
||||
0x114d0000114da,
|
||||
|
@ -1807,15 +1883,17 @@ codepoint_classes = {
|
|||
0x116500001165a,
|
||||
0x11680000116b8,
|
||||
0x116c0000116ca,
|
||||
0x117000001171a,
|
||||
0x117000001171b,
|
||||
0x1171d0001172c,
|
||||
0x117300001173a,
|
||||
0x118000001183b,
|
||||
0x118c0000118ea,
|
||||
0x118ff00011900,
|
||||
0x11a0000011a3f,
|
||||
0x11a4700011a48,
|
||||
0x11a5000011a84,
|
||||
0x11a8600011a9a,
|
||||
0x11a9d00011a9e,
|
||||
0x11ac000011af9,
|
||||
0x11c0000011c09,
|
||||
0x11c0a00011c37,
|
||||
|
@ -1831,6 +1909,13 @@ codepoint_classes = {
|
|||
0x11d3c00011d3e,
|
||||
0x11d3f00011d48,
|
||||
0x11d5000011d5a,
|
||||
0x11d6000011d66,
|
||||
0x11d6700011d69,
|
||||
0x11d6a00011d8f,
|
||||
0x11d9000011d92,
|
||||
0x11d9300011d99,
|
||||
0x11da000011daa,
|
||||
0x11ee000011ef7,
|
||||
0x120000001239a,
|
||||
0x1248000012544,
|
||||
0x130000001342f,
|
||||
|
@ -1845,11 +1930,12 @@ codepoint_classes = {
|
|||
0x16b5000016b5a,
|
||||
0x16b6300016b78,
|
||||
0x16b7d00016b90,
|
||||
0x16e6000016e80,
|
||||
0x16f0000016f45,
|
||||
0x16f5000016f7f,
|
||||
0x16f8f00016fa0,
|
||||
0x16fe000016fe2,
|
||||
0x17000000187ed,
|
||||
0x17000000187f2,
|
||||
0x1880000018af3,
|
||||
0x1b0000001b11f,
|
||||
0x1b1700001b2fc,
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
__version__ = '2.7'
|
||||
__version__ = '2.8'
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load diff
1022
libs/ipaddress.py
1022
libs/ipaddress.py
File diff suppressed because it is too large
Load diff
|
@ -1,5 +1,6 @@
|
|||
# coding=utf-8
|
||||
|
||||
from main import get_filebot_attrs
|
||||
from __future__ import absolute_import
|
||||
from .main import get_filebot_attrs
|
||||
|
||||
__all__ = ["get_filebot_attrs"]
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
# coding=utf-8
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
# coding=utf-8
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
|
@ -10,7 +12,7 @@ import types
|
|||
import os
|
||||
|
||||
from pipes import quote
|
||||
from lib import find_executable
|
||||
from .lib import find_executable
|
||||
|
||||
mswindows = False
|
||||
if sys.platform == "win32":
|
||||
|
@ -87,7 +89,7 @@ def get_filebot_attrs(fn):
|
|||
args_func, match_func = XATTR_MAP.get(sys.platform, XATTR_MAP["default"])
|
||||
|
||||
args = args_func(fn)
|
||||
if isinstance(args, types.ListType):
|
||||
if isinstance(args, list):
|
||||
try:
|
||||
env = dict(os.environ)
|
||||
if not mswindows:
|
||||
|
@ -132,4 +134,4 @@ def get_filebot_attrs(fn):
|
|||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print get_filebot_attrs(sys.argv[1])
|
||||
print(get_filebot_attrs(sys.argv[1]))
|
||||
|
|
|
@ -1,2 +1,4 @@
|
|||
|
||||
from pyprobe import VideoFileParser
|
||||
from pyprobe.pyprobe import VideoFileParser
|
||||
from pyprobe.helpers import timeToTuple, sizeStr
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
from os import path
|
||||
|
||||
from baseparser import BaseParser
|
||||
from pyprobe.baseparser import BaseParser
|
||||
|
||||
|
||||
class StreamParser(BaseParser):
|
||||
|
@ -44,7 +44,7 @@ class VideoStreamParser(BaseParser):
|
|||
"""Returns a tuple (width, height)"""
|
||||
width = data.get("width", None)
|
||||
height = data.get("height", None)
|
||||
if width is None and height is None:
|
||||
if width == None and height == None:
|
||||
return None, (0, 0)
|
||||
try:
|
||||
return (width, height), (int(float(width)), int(float(height)))
|
||||
|
@ -67,7 +67,7 @@ class VideoStreamParser(BaseParser):
|
|||
input_str = data.get("avg_frame_rate", None)
|
||||
try:
|
||||
num, den = input_str.split("/")
|
||||
return input_str, round(float(num) / float(den), 3)
|
||||
return input_str, float(num) / float(den)
|
||||
except (ValueError, ZeroDivisionError, AttributeError):
|
||||
info = cls.average_framerate(data)
|
||||
return input_str, info
|
||||
|
@ -125,15 +125,6 @@ class SubtitleStreamParser(BaseParser):
|
|||
return info, (info or "null")
|
||||
return None, "null"
|
||||
|
||||
@staticmethod
|
||||
def value_forced(data):
|
||||
"""Returns a bool """
|
||||
disposition = data.get("disposition", None)
|
||||
if disposition:
|
||||
info = disposition.get("forced", None)
|
||||
return bool(info), (bool(info) or False)
|
||||
return None, "null"
|
||||
|
||||
|
||||
class ChapterParser(BaseParser):
|
||||
@staticmethod
|
||||
|
@ -191,7 +182,7 @@ class RootParser(BaseParser):
|
|||
def value_size(data):
|
||||
"""Returns an int"""
|
||||
info = data.get("size", None)
|
||||
if info is None:
|
||||
if info == None:
|
||||
file_path = data.get("filename", "")
|
||||
if path.isfile(file_path):
|
||||
info = str(path.getsize(file_path))
|
||||
|
@ -204,7 +195,7 @@ class RootParser(BaseParser):
|
|||
def value_bit_rate(cls, data):
|
||||
"""Returns an int"""
|
||||
info = data.get("bit_rate", None)
|
||||
if info is None:
|
||||
if info == None:
|
||||
_, size = cls.value_size(data)
|
||||
_, duration = cls.value_duration(data)
|
||||
if size and duration:
|
||||
|
|
|
@ -1,25 +1,51 @@
|
|||
import json
|
||||
import subprocess
|
||||
import xml.etree
|
||||
import xml.etree.ElementTree
|
||||
from io import StringIO
|
||||
from os import path
|
||||
from sys import getfilesystemencoding
|
||||
import re
|
||||
|
||||
import ffprobeparsers
|
||||
from pyprobe import ffprobeparsers, mediainfoparsers
|
||||
|
||||
|
||||
class VideoFileParser:
|
||||
def __init__(
|
||||
self,
|
||||
ffprobe="ffprobe",
|
||||
mediainfo="mediainfo",
|
||||
includeMissing=True,
|
||||
rawMode=False,
|
||||
):
|
||||
self._ffprobe = ffprobe
|
||||
self._mediainfo = mediainfo
|
||||
self._includeMissing = includeMissing
|
||||
self._rawMode = rawMode
|
||||
|
||||
########################################
|
||||
# Main Method
|
||||
|
||||
def parseMediainfo(self, inputFile):
|
||||
"""Takes an input file and returns the parsed data using mediainfo.
|
||||
|
||||
Args:
|
||||
inputFile (str): Video file path
|
||||
|
||||
Returns:
|
||||
dict<str, dict<str, var>>: Parsed video info
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: The input video file or input executable was not found
|
||||
IOError: Execution failed
|
||||
|
||||
"""
|
||||
if not path.isfile(inputFile):
|
||||
raise FileNotFoundError(inputFile + " not found")
|
||||
self._checkExecutable(self._mediainfo)
|
||||
self._checkMediainfoVersion(self._mediainfo)
|
||||
xmlData = self._executeMediainfo(inputFile)
|
||||
return self._parseMediainfo(xmlData, inputFile)
|
||||
|
||||
def parseFfprobe(self, inputFile):
|
||||
"""Takes an input file and returns the parsed data using ffprobe.
|
||||
|
||||
|
@ -40,6 +66,122 @@ class VideoFileParser:
|
|||
fdict = self._executeFfprobe(inputFile)
|
||||
return self._parseFfprobe(fdict, inputFile)
|
||||
|
||||
########################################
|
||||
# Mediainfo Parsing
|
||||
|
||||
def _executeMediainfo(self, inputFile):
|
||||
"""Executes mediainfo program on input file to get raw info
|
||||
|
||||
Args:
|
||||
inputFile (str): Video file path
|
||||
|
||||
Returns:
|
||||
xml.ElementTree.etree: Mediainfo output
|
||||
|
||||
Raises:
|
||||
IOError: Mediainfo output could not be parsed as XML data
|
||||
|
||||
"""
|
||||
commandArgs = ["-f", "--Language=raw", "--Output=XML"]
|
||||
outputXml = self._executeParser(self._mediainfo, commandArgs, inputFile)
|
||||
try:
|
||||
xmlRoot = self._decodeMediainfoOutput(outputXml)
|
||||
except xml.etree.ElementTree.ParseError:
|
||||
raise IOError("Could not decode mediainfo output for file " + inputFile)
|
||||
return xmlRoot
|
||||
|
||||
def _parseMediainfo(self, xmlRoot, inputFile):
|
||||
"""Parse mediainfo output into an organized data structure
|
||||
|
||||
Args:
|
||||
xmlRoot (xml.ElementTree.etree): Mediainfo output
|
||||
inputFile (str): Video file path
|
||||
|
||||
Returns:
|
||||
dict<str, dict<str, var>>: Parsed video data
|
||||
|
||||
"""
|
||||
videoInfo = {}
|
||||
videoInfo["path"] = path.abspath(inputFile)
|
||||
videoInfo.update(
|
||||
mediainfoparsers.RootParser.parse(
|
||||
xmlRoot.find(".//track[@type='General']"),
|
||||
self._rawMode,
|
||||
self._includeMissing,
|
||||
)
|
||||
)
|
||||
videoInfo.update(self._parseMediainfoStreams(xmlRoot))
|
||||
videoInfo.update(self._parseMediainfoChapters(xmlRoot, videoInfo["duration"]))
|
||||
return videoInfo
|
||||
|
||||
@staticmethod
|
||||
def _decodeMediainfoOutput(xmlData):
|
||||
# Strip namespaces from xml string
|
||||
# Code used from https://stackoverflow.com/a/25920989
|
||||
it = xml.etree.ElementTree.iterparse(StringIO(xmlData))
|
||||
for _, el in it:
|
||||
if "}" in el.tag:
|
||||
el.tag = el.tag.split("}", 1)[1]
|
||||
return it.root
|
||||
|
||||
def _parseMediainfoStreams(self, xmlData):
|
||||
"""Parses video, audio, and subtitle streams
|
||||
|
||||
Args:
|
||||
xmlData (dict): Stream data from mediainfo
|
||||
|
||||
Returns:
|
||||
dict<str, dict<str, var>>: Parsed streams - video, audio, and subtitle
|
||||
|
||||
"""
|
||||
parsedInfo = {"videos": [], "audios": [], "subtitles": []}
|
||||
for stream in xmlData.findall(".//track"):
|
||||
streamType = stream.attrib["type"]
|
||||
if streamType == "Video":
|
||||
parsedInfo["videos"].append(
|
||||
mediainfoparsers.VideoStreamParser.parse(
|
||||
stream, self._rawMode, self._includeMissing
|
||||
)
|
||||
)
|
||||
elif streamType == "Audio":
|
||||
parsedInfo["audios"].append(
|
||||
mediainfoparsers.AudioStreamParser.parse(
|
||||
stream, self._rawMode, self._includeMissing
|
||||
)
|
||||
)
|
||||
elif streamType == "Text":
|
||||
parsedInfo["subtitles"].append(
|
||||
mediainfoparsers.SubtitleStreamParser.parse(
|
||||
stream, self._rawMode, self._includeMissing
|
||||
)
|
||||
)
|
||||
return parsedInfo
|
||||
|
||||
def _parseMediainfoChapters(self, xmlData, duration):
|
||||
"""Since mediainfo does not give end times for each chapter,
|
||||
start times for the following chapter are added to the previous chapter.
|
||||
|
||||
Args:
|
||||
xmlData (dict): Stream data from mediainfo
|
||||
duration (int): Video duration
|
||||
|
||||
Returns:
|
||||
dict<str, dict<str, var>>: Parsed chapters
|
||||
|
||||
"""
|
||||
parsedInfo = {"chapters": []}
|
||||
for extra in xmlData.find(".//track[@type='Menu']/extra"):
|
||||
match = re.fullmatch(r"_\d*_\d\d_\d\d_\d\d\d", extra.tag)
|
||||
if match:
|
||||
parsedInfo["chapters"].append(
|
||||
mediainfoparsers.ChapterParser.parse(
|
||||
extra, self._rawMode, self._includeMissing
|
||||
)
|
||||
)
|
||||
if not self._rawMode:
|
||||
mediainfoparsers.ChapterParser.addEndTimes(parsedInfo["chapters"], duration)
|
||||
return parsedInfo
|
||||
|
||||
########################################
|
||||
# ffprobe Parsing
|
||||
|
||||
|
@ -56,8 +198,6 @@ class VideoFileParser:
|
|||
|
||||
"""
|
||||
commandArgs = [
|
||||
"-v",
|
||||
"quiet",
|
||||
"-hide_banner",
|
||||
"-show_error",
|
||||
"-show_format",
|
||||
|
@ -148,7 +288,7 @@ class VideoFileParser:
|
|||
|
||||
"""
|
||||
parsedInfo = {"chapters": []}
|
||||
if fOutput["chapters"] is None:
|
||||
if fOutput["chapters"] == None:
|
||||
return parsedInfo
|
||||
for chapter in fOutput["chapters"]:
|
||||
parsedInfo["chapters"].append(
|
||||
|
@ -174,16 +314,15 @@ class VideoFileParser:
|
|||
IOError: ffprobe execution failed
|
||||
|
||||
"""
|
||||
command = [parser] + commandArgs + [inputFile.encode(getfilesystemencoding())]
|
||||
try:
|
||||
completedProcess = subprocess.check_output(
|
||||
command, stderr=subprocess.STDOUT
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
command = [parser] + commandArgs + [inputFile]
|
||||
completedProcess = subprocess.run(
|
||||
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8"
|
||||
)
|
||||
if completedProcess.returncode != 0:
|
||||
raise IOError(
|
||||
"Error occurred during execution - " + e.output
|
||||
"Error occurred during execution - " + completedProcess.stderr
|
||||
)
|
||||
return completedProcess
|
||||
return completedProcess.stdout
|
||||
|
||||
@staticmethod
|
||||
def _checkExecutable(executable):
|
||||
|
@ -197,17 +336,31 @@ class VideoFileParser:
|
|||
|
||||
"""
|
||||
try:
|
||||
subprocess.check_output(
|
||||
subprocess.run(
|
||||
[executable, "--help"],
|
||||
stderr=subprocess.STDOUT
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
except OSError:
|
||||
except FileNotFoundError:
|
||||
raise FileNotFoundError(executable + " not found")
|
||||
|
||||
|
||||
class FileNotFoundError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class IOError(Exception):
|
||||
pass
|
||||
@staticmethod
|
||||
def _checkMediainfoVersion(executable):
|
||||
"""Checks if the Mediainfo version is >=17.10
|
||||
In the version jump from 0.7.97 to 17.10 came lots of changes
|
||||
to the way Mediainfo outputs data. Therefore, this will
|
||||
only support versions >=17.10.
|
||||
|
||||
Some linux software repositories still distribute old
|
||||
versions of mediainfo, so the user must install
|
||||
using packages from mediainfo's website.
|
||||
|
||||
"""
|
||||
command = [executable, "--version"]
|
||||
completedProcess = subprocess.run(
|
||||
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8"
|
||||
)
|
||||
match = re.search(r"v\d*(\.\d*)*", completedProcess.stdout)
|
||||
version = match.group()[1:]
|
||||
if version.split(".")[0] == "0":
|
||||
raise IOError("Mediainfo version is <17.10 - (v" + version + ")")
|
||||
|
|
|
@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version):
|
|||
# Check urllib3 for compatibility.
|
||||
major, minor, patch = urllib3_version # noqa: F811
|
||||
major, minor, patch = int(major), int(minor), int(patch)
|
||||
# urllib3 >= 1.21.1, <= 1.24
|
||||
# urllib3 >= 1.21.1, <= 1.25
|
||||
assert major == 1
|
||||
assert minor >= 21
|
||||
assert minor <= 24
|
||||
assert minor <= 25
|
||||
|
||||
# Check chardet for compatibility.
|
||||
major, minor, patch = chardet_version.split('.')[:3]
|
||||
|
|
|
@ -5,10 +5,10 @@
|
|||
__title__ = 'requests'
|
||||
__description__ = 'Python HTTP for Humans.'
|
||||
__url__ = 'http://python-requests.org'
|
||||
__version__ = '2.21.0'
|
||||
__build__ = 0x022100
|
||||
__version__ = '2.22.0'
|
||||
__build__ = 0x022200
|
||||
__author__ = 'Kenneth Reitz'
|
||||
__author_email__ = 'me@kennethreitz.org'
|
||||
__license__ = 'Apache 2.0'
|
||||
__copyright__ = 'Copyright 2018 Kenneth Reitz'
|
||||
__copyright__ = 'Copyright 2019 Kenneth Reitz'
|
||||
__cake__ = u'\u2728 \U0001f370 \u2728'
|
||||
|
|
|
@ -19,7 +19,7 @@ def request(method, url, **kwargs):
|
|||
:param method: method for the new :class:`Request` object.
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param params: (optional) Dictionary, list of tuples or bytes to send
|
||||
in the body of the :class:`Request`.
|
||||
in the query string for the :class:`Request`.
|
||||
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
|
||||
object to send in the body of the :class:`Request`.
|
||||
:param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
|
||||
|
@ -65,7 +65,7 @@ def get(url, params=None, **kwargs):
|
|||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param params: (optional) Dictionary, list of tuples or bytes to send
|
||||
in the body of the :class:`Request`.
|
||||
in the query string for the :class:`Request`.
|
||||
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
||||
:return: :class:`Response <Response>` object
|
||||
:rtype: requests.Response
|
||||
|
|
|
@ -148,7 +148,7 @@ class CFSession(CloudScraper):
|
|||
cache_key = "cf_data3_%s" % domain
|
||||
|
||||
if not self.cookies.get("cf_clearance", "", domain=domain):
|
||||
cf_data = region.get(cache_key)
|
||||
cf_data = str(region.get(cache_key))
|
||||
if cf_data is not NO_VALUE:
|
||||
cf_cookies, hdrs = cf_data
|
||||
logger.debug("Trying to use old cf data for %s: %s", domain, cf_data)
|
||||
|
@ -165,9 +165,9 @@ class CFSession(CloudScraper):
|
|||
pass
|
||||
else:
|
||||
if cf_data and "cf_clearance" in cf_data[0] and cf_data[0]["cf_clearance"]:
|
||||
if cf_data != region.get(cache_key):
|
||||
if cf_data != str(region.get(cache_key)):
|
||||
logger.debug("Storing cf data for %s: %s", domain, cf_data)
|
||||
region.set(cache_key, cf_data)
|
||||
region.set(cache_key, bytes(cf_data)
|
||||
elif cf_data[0]["cf_clearance"]:
|
||||
logger.debug("CF Live tokens not updated")
|
||||
|
||||
|
|
|
@ -243,7 +243,7 @@ class DBCPitcher(DBCProxyLessPitcher):
|
|||
|
||||
|
||||
def load_verification(site_name, session, callback=lambda x: None):
|
||||
ccks = region.get("%s_data" % site_name, expiration_time=15552000) # 6m
|
||||
ccks = str(region.get("%s_data" % site_name, expiration_time=15552000)) # 6m
|
||||
if ccks != NO_VALUE:
|
||||
cookies, user_agent = ccks
|
||||
logger.debug("%s: Re-using previous user agent: %s", site_name.capitalize(), user_agent)
|
||||
|
@ -257,4 +257,4 @@ def load_verification(site_name, session, callback=lambda x: None):
|
|||
|
||||
|
||||
def store_verification(site_name, session):
|
||||
region.set("%s_data" % site_name, (session.cookies._cookies, session.headers["User-Agent"]))
|
||||
region.set("%s_data" % site_name, bytes(session.cookies._cookies, session.headers["User-Agent"]))
|
||||
|
|
|
@ -22,7 +22,7 @@ class Provider(_Provider):
|
|||
|
||||
# register providers
|
||||
# fixme: this is bad
|
||||
for name in os.listdir(os.path.dirname(six.text_type(__file__, get_viable_encoding()))):
|
||||
for name in os.listdir(os.path.dirname(__file__)):
|
||||
if name in ("__init__.py", "mixins.py", "utils.py") or not name.endswith(".py"):
|
||||
continue
|
||||
|
||||
|
|
|
@ -140,7 +140,7 @@ class AssrtProvider(Provider):
|
|||
logger.debug('No subtitle found')
|
||||
|
||||
# parse the subtitles
|
||||
pattern = re.compile(ur'lang(?P<code>\w+)')
|
||||
pattern = re.compile(r'lang(?P<code>\w+)')
|
||||
subtitles = []
|
||||
for sub in result['sub']['subs']:
|
||||
if 'lang' not in sub:
|
||||
|
|
|
@ -199,7 +199,7 @@ class LegendasTVProvider(_LegendasTVProvider):
|
|||
|
||||
# attempt to get the releases from the cache
|
||||
cache_key = releases_key.format(archive_id=a.id, archive_name=a.name)
|
||||
releases = region.get(cache_key, expiration_time=expiration_time)
|
||||
releases = str(region.get(cache_key, expiration_time=expiration_time))
|
||||
|
||||
# the releases are not in cache or cache is expired
|
||||
if releases == NO_VALUE:
|
||||
|
@ -226,7 +226,7 @@ class LegendasTVProvider(_LegendasTVProvider):
|
|||
releases.append(name)
|
||||
|
||||
# cache the releases
|
||||
region.set(cache_key, releases)
|
||||
region.set(cache_key, bytes(releases)
|
||||
|
||||
# iterate over releases
|
||||
for r in releases:
|
||||
|
|
|
@ -154,7 +154,7 @@ class OpenSubtitlesProvider(ProviderRetryMixin, _OpenSubtitlesProvider):
|
|||
self.token = response['token']
|
||||
logger.debug('Logged in with token %r', self.token[:10]+"X"*(len(self.token)-10))
|
||||
|
||||
region.set("os_token", self.token)
|
||||
region.set("os_token", bytes(self.token))
|
||||
|
||||
def use_token_or_login(self, func):
|
||||
if not self.token:
|
||||
|
@ -175,7 +175,7 @@ class OpenSubtitlesProvider(ProviderRetryMixin, _OpenSubtitlesProvider):
|
|||
|
||||
logger.info('Logging in')
|
||||
|
||||
token = region.get("os_token")
|
||||
token = str(region.get("os_token"))
|
||||
if token is not NO_VALUE:
|
||||
try:
|
||||
logger.debug('Trying previous token: %r', token[:10]+"X"*(len(token)-10))
|
||||
|
|
|
@ -141,7 +141,7 @@ class SubsceneProvider(Provider, ProviderSubtitleArchiveMixin):
|
|||
logger.info("Creating session")
|
||||
self.session = RetryingCFSession()
|
||||
|
||||
prev_cookies = region.get("subscene_cookies2")
|
||||
prev_cookies = str(region.get("subscene_cookies2"))
|
||||
if prev_cookies != NO_VALUE:
|
||||
logger.debug("Re-using old subscene cookies: %r", prev_cookies)
|
||||
self.session.cookies.update(prev_cookies)
|
||||
|
@ -194,7 +194,7 @@ class SubsceneProvider(Provider, ProviderSubtitleArchiveMixin):
|
|||
del cj[cn]
|
||||
|
||||
logger.debug("Storing cookies: %r", cj)
|
||||
region.set("subscene_cookies2", cj)
|
||||
region.set("subscene_cookies2", bytes(cj)
|
||||
return
|
||||
raise ProviderError("Something went wrong when trying to log in #1")
|
||||
|
||||
|
@ -219,9 +219,9 @@ class SubsceneProvider(Provider, ProviderSubtitleArchiveMixin):
|
|||
acc_filters["SelectedIds"] = selected_ids
|
||||
self.filters["LanguageFilter"] = ",".join(acc_filters["SelectedIds"])
|
||||
|
||||
last_filters = region.get("subscene_filters")
|
||||
last_filters = str(region.get("subscene_filters"))
|
||||
if last_filters != acc_filters:
|
||||
region.set("subscene_filters", acc_filters)
|
||||
region.set("subscene_filters", bytes(acc_filters)
|
||||
logger.debug("Setting account filters to %r", acc_filters)
|
||||
self.session.post("https://u.subscene.com/filter", acc_filters, allow_redirects=False)
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ if is_PY2:
|
|||
from urllib2 import Request, urlopen
|
||||
else:
|
||||
from contextlib import suppress
|
||||
from urllib2.request import Request, urlopen
|
||||
from urllib.request import Request, urlopen
|
||||
|
||||
from dogpile.cache.api import NO_VALUE
|
||||
from subliminal.cache import region
|
||||
|
@ -56,7 +56,7 @@ DEFAULT_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWeb"\
|
|||
"Kit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36"
|
||||
|
||||
|
||||
ENDPOINT_RE = re.compile(ur'(?uis)<form.+?action="/subtitles/(.+)">.*?<input type="text"')
|
||||
ENDPOINT_RE = re.compile(r'(?uis)<form.+?action="/subtitles/(.+)">.*?<input type="text"')
|
||||
|
||||
|
||||
class NewEndpoint(Exception):
|
||||
|
|
|
@ -82,6 +82,9 @@ class Language(Language_):
|
|||
def __setstate__(self, state):
|
||||
self.alpha3, self.country, self.script, self.forced = state
|
||||
|
||||
def __hash__(self):
|
||||
return hash(str(self))
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, basestr):
|
||||
return str(self) == other
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
"""
|
||||
urllib3 - Thread-safe connection pooling and re-using.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
import warnings
|
||||
|
||||
|
@ -27,7 +26,7 @@ from logging import NullHandler
|
|||
|
||||
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
|
||||
__license__ = 'MIT'
|
||||
__version__ = '1.24.2'
|
||||
__version__ = '1.25.3'
|
||||
|
||||
__all__ = (
|
||||
'HTTPConnectionPool',
|
||||
|
|
|
@ -19,10 +19,11 @@ except (ImportError, AttributeError): # Platform-specific: No SSL.
|
|||
pass
|
||||
|
||||
|
||||
try: # Python 3:
|
||||
# Not a no-op, we're adding this to the namespace so it can be imported.
|
||||
try:
|
||||
# Python 3: not a no-op, we're adding this to the namespace so it can be imported.
|
||||
ConnectionError = ConnectionError
|
||||
except NameError: # Python 2:
|
||||
except NameError:
|
||||
# Python 2
|
||||
class ConnectionError(Exception):
|
||||
pass
|
||||
|
||||
|
@ -101,7 +102,7 @@ class HTTPConnection(_HTTPConnection, object):
|
|||
is_verified = False
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
if six.PY3: # Python 3
|
||||
if six.PY3:
|
||||
kw.pop('strict', None)
|
||||
|
||||
# Pre-set source_address.
|
||||
|
@ -158,7 +159,7 @@ class HTTPConnection(_HTTPConnection, object):
|
|||
conn = connection.create_connection(
|
||||
(self._dns_host, self.port), self.timeout, **extra_kw)
|
||||
|
||||
except SocketTimeout as e:
|
||||
except SocketTimeout:
|
||||
raise ConnectTimeoutError(
|
||||
self, "Connection to %s timed out. (connect timeout=%s)" %
|
||||
(self.host, self.timeout))
|
||||
|
@ -171,7 +172,8 @@ class HTTPConnection(_HTTPConnection, object):
|
|||
|
||||
def _prepare_conn(self, conn):
|
||||
self.sock = conn
|
||||
if self._tunnel_host:
|
||||
# Google App Engine's httplib does not define _tunnel_host
|
||||
if getattr(self, '_tunnel_host', None):
|
||||
# TODO: Fix tunnel so it doesn't depend on self.sock state.
|
||||
self._tunnel()
|
||||
# Mark this connection as not reusable
|
||||
|
@ -226,7 +228,8 @@ class HTTPSConnection(HTTPConnection):
|
|||
ssl_version = None
|
||||
|
||||
def __init__(self, host, port=None, key_file=None, cert_file=None,
|
||||
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
|
||||
key_password=None, strict=None,
|
||||
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
|
||||
ssl_context=None, server_hostname=None, **kw):
|
||||
|
||||
HTTPConnection.__init__(self, host, port, strict=strict,
|
||||
|
@ -234,6 +237,7 @@ class HTTPSConnection(HTTPConnection):
|
|||
|
||||
self.key_file = key_file
|
||||
self.cert_file = cert_file
|
||||
self.key_password = key_password
|
||||
self.ssl_context = ssl_context
|
||||
self.server_hostname = server_hostname
|
||||
|
||||
|
@ -245,16 +249,28 @@ class HTTPSConnection(HTTPConnection):
|
|||
conn = self._new_conn()
|
||||
self._prepare_conn(conn)
|
||||
|
||||
# Wrap socket using verification with the root certs in
|
||||
# trusted_root_certs
|
||||
default_ssl_context = False
|
||||
if self.ssl_context is None:
|
||||
default_ssl_context = True
|
||||
self.ssl_context = create_urllib3_context(
|
||||
ssl_version=resolve_ssl_version(None),
|
||||
cert_reqs=resolve_cert_reqs(None),
|
||||
ssl_version=resolve_ssl_version(self.ssl_version),
|
||||
cert_reqs=resolve_cert_reqs(self.cert_reqs),
|
||||
)
|
||||
|
||||
# Try to load OS default certs if none are given.
|
||||
# Works well on Windows (requires Python3.4+)
|
||||
context = self.ssl_context
|
||||
if (not self.ca_certs and not self.ca_cert_dir and default_ssl_context
|
||||
and hasattr(context, 'load_default_certs')):
|
||||
context.load_default_certs()
|
||||
|
||||
self.sock = ssl_wrap_socket(
|
||||
sock=conn,
|
||||
keyfile=self.key_file,
|
||||
certfile=self.cert_file,
|
||||
key_password=self.key_password,
|
||||
ssl_context=self.ssl_context,
|
||||
server_hostname=self.server_hostname
|
||||
)
|
||||
|
@ -272,25 +288,24 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
|||
assert_fingerprint = None
|
||||
|
||||
def set_cert(self, key_file=None, cert_file=None,
|
||||
cert_reqs=None, ca_certs=None,
|
||||
cert_reqs=None, key_password=None, ca_certs=None,
|
||||
assert_hostname=None, assert_fingerprint=None,
|
||||
ca_cert_dir=None):
|
||||
"""
|
||||
This method should only be called once, before the connection is used.
|
||||
"""
|
||||
# If cert_reqs is not provided, we can try to guess. If the user gave
|
||||
# us a cert database, we assume they want to use it: otherwise, if
|
||||
# they gave us an SSL Context object we should use whatever is set for
|
||||
# it.
|
||||
# If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
|
||||
# have an SSLContext object in which case we'll use its verify_mode.
|
||||
if cert_reqs is None:
|
||||
if ca_certs or ca_cert_dir:
|
||||
cert_reqs = 'CERT_REQUIRED'
|
||||
elif self.ssl_context is not None:
|
||||
if self.ssl_context is not None:
|
||||
cert_reqs = self.ssl_context.verify_mode
|
||||
else:
|
||||
cert_reqs = resolve_cert_reqs(None)
|
||||
|
||||
self.key_file = key_file
|
||||
self.cert_file = cert_file
|
||||
self.cert_reqs = cert_reqs
|
||||
self.key_password = key_password
|
||||
self.assert_hostname = assert_hostname
|
||||
self.assert_fingerprint = assert_fingerprint
|
||||
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
|
||||
|
@ -301,7 +316,8 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
|||
conn = self._new_conn()
|
||||
hostname = self.host
|
||||
|
||||
if self._tunnel_host:
|
||||
# Google App Engine's httplib does not define _tunnel_host
|
||||
if getattr(self, '_tunnel_host', None):
|
||||
self.sock = conn
|
||||
# Calls self._set_hostport(), so self.host is
|
||||
# self._tunnel_host below.
|
||||
|
@ -326,7 +342,9 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
|||
|
||||
# Wrap socket using verification with the root certs in
|
||||
# trusted_root_certs
|
||||
default_ssl_context = False
|
||||
if self.ssl_context is None:
|
||||
default_ssl_context = True
|
||||
self.ssl_context = create_urllib3_context(
|
||||
ssl_version=resolve_ssl_version(self.ssl_version),
|
||||
cert_reqs=resolve_cert_reqs(self.cert_reqs),
|
||||
|
@ -334,10 +352,18 @@ class VerifiedHTTPSConnection(HTTPSConnection):
|
|||
|
||||
context = self.ssl_context
|
||||
context.verify_mode = resolve_cert_reqs(self.cert_reqs)
|
||||
|
||||
# Try to load OS default certs if none are given.
|
||||
# Works well on Windows (requires Python3.4+)
|
||||
if (not self.ca_certs and not self.ca_cert_dir and default_ssl_context
|
||||
and hasattr(context, 'load_default_certs')):
|
||||
context.load_default_certs()
|
||||
|
||||
self.sock = ssl_wrap_socket(
|
||||
sock=conn,
|
||||
keyfile=self.key_file,
|
||||
certfile=self.cert_file,
|
||||
key_password=self.key_password,
|
||||
ca_certs=self.ca_certs,
|
||||
ca_cert_dir=self.ca_cert_dir,
|
||||
server_hostname=server_hostname,
|
||||
|
|
|
@ -26,6 +26,7 @@ from .exceptions import (
|
|||
from .packages.ssl_match_hostname import CertificateError
|
||||
from .packages import six
|
||||
from .packages.six.moves import queue
|
||||
from .packages.rfc3986.normalizers import normalize_host
|
||||
from .connection import (
|
||||
port_by_scheme,
|
||||
DummyConnection,
|
||||
|
@ -65,7 +66,7 @@ class ConnectionPool(object):
|
|||
if not host:
|
||||
raise LocationValueError("No host specified.")
|
||||
|
||||
self.host = _ipv6_host(host, self.scheme)
|
||||
self.host = _normalize_host(host, scheme=self.scheme)
|
||||
self._proxy_host = host.lower()
|
||||
self.port = port
|
||||
|
||||
|
@ -373,9 +374,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
|||
|
||||
# Receive the response from the server
|
||||
try:
|
||||
try: # Python 2.7, use buffering of HTTP responses
|
||||
try:
|
||||
# Python 2.7, use buffering of HTTP responses
|
||||
httplib_response = conn.getresponse(buffering=True)
|
||||
except TypeError: # Python 3
|
||||
except TypeError:
|
||||
# Python 3
|
||||
try:
|
||||
httplib_response = conn.getresponse()
|
||||
except Exception as e:
|
||||
|
@ -432,8 +435,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
|||
|
||||
# TODO: Add optional support for socket.gethostbyname checking.
|
||||
scheme, host, port = get_host(url)
|
||||
|
||||
host = _ipv6_host(host, self.scheme)
|
||||
if host is not None:
|
||||
host = _normalize_host(host, scheme=scheme)
|
||||
|
||||
# Use explicit default port for comparison when none is given
|
||||
if self.port and not port:
|
||||
|
@ -672,7 +675,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
|||
# released back to the pool once the entire response is read
|
||||
response.read()
|
||||
except (TimeoutError, HTTPException, SocketError, ProtocolError,
|
||||
BaseSSLError, SSLError) as e:
|
||||
BaseSSLError, SSLError):
|
||||
pass
|
||||
|
||||
# Handle redirect?
|
||||
|
@ -746,8 +749,8 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
|||
If ``assert_hostname`` is False, no verification is done.
|
||||
|
||||
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
|
||||
``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
|
||||
available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
|
||||
``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
|
||||
is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
|
||||
the connection socket into an SSL socket.
|
||||
"""
|
||||
|
||||
|
@ -759,7 +762,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
|||
block=False, headers=None, retries=None,
|
||||
_proxy=None, _proxy_headers=None,
|
||||
key_file=None, cert_file=None, cert_reqs=None,
|
||||
ca_certs=None, ssl_version=None,
|
||||
key_password=None, ca_certs=None, ssl_version=None,
|
||||
assert_hostname=None, assert_fingerprint=None,
|
||||
ca_cert_dir=None, **conn_kw):
|
||||
|
||||
|
@ -767,12 +770,10 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
|||
block, headers, retries, _proxy, _proxy_headers,
|
||||
**conn_kw)
|
||||
|
||||
if ca_certs and cert_reqs is None:
|
||||
cert_reqs = 'CERT_REQUIRED'
|
||||
|
||||
self.key_file = key_file
|
||||
self.cert_file = cert_file
|
||||
self.cert_reqs = cert_reqs
|
||||
self.key_password = key_password
|
||||
self.ca_certs = ca_certs
|
||||
self.ca_cert_dir = ca_cert_dir
|
||||
self.ssl_version = ssl_version
|
||||
|
@ -787,6 +788,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
|||
|
||||
if isinstance(conn, VerifiedHTTPSConnection):
|
||||
conn.set_cert(key_file=self.key_file,
|
||||
key_password=self.key_password,
|
||||
cert_file=self.cert_file,
|
||||
cert_reqs=self.cert_reqs,
|
||||
ca_certs=self.ca_certs,
|
||||
|
@ -824,7 +826,9 @@ class HTTPSConnectionPool(HTTPConnectionPool):
|
|||
|
||||
conn = self.ConnectionCls(host=actual_host, port=actual_port,
|
||||
timeout=self.timeout.connect_timeout,
|
||||
strict=self.strict, **self.conn_kw)
|
||||
strict=self.strict, cert_file=self.cert_file,
|
||||
key_file=self.key_file, key_password=self.key_password,
|
||||
**self.conn_kw)
|
||||
|
||||
return self._prepare_conn(conn)
|
||||
|
||||
|
@ -875,9 +879,9 @@ def connection_from_url(url, **kw):
|
|||
return HTTPConnectionPool(host, port=port, **kw)
|
||||
|
||||
|
||||
def _ipv6_host(host, scheme):
|
||||
def _normalize_host(host, scheme):
|
||||
"""
|
||||
Process IPv6 address literals
|
||||
Normalize hosts for comparisons and use with sockets.
|
||||
"""
|
||||
|
||||
# httplib doesn't like it when we include brackets in IPv6 addresses
|
||||
|
@ -886,11 +890,8 @@ def _ipv6_host(host, scheme):
|
|||
# Instead, we need to make sure we never pass ``None`` as the port.
|
||||
# However, for backward compatibility reasons we can't actually
|
||||
# *assert* that. See http://bugs.python.org/issue28539
|
||||
#
|
||||
# Also if an IPv6 address literal has a zone identifier, the
|
||||
# percent sign might be URIencoded, convert it back into ASCII
|
||||
if host.startswith('[') and host.endswith(']'):
|
||||
host = host.replace('%25', '%').strip('[]')
|
||||
host = host.strip('[]')
|
||||
if scheme in NORMALIZABLE_SCHEMES:
|
||||
host = host.lower()
|
||||
host = normalize_host(host)
|
||||
return host
|
||||
|
|
|
@ -516,6 +516,8 @@ class SecurityConst(object):
|
|||
kTLSProtocol1 = 4
|
||||
kTLSProtocol11 = 7
|
||||
kTLSProtocol12 = 8
|
||||
kTLSProtocol13 = 10
|
||||
kTLSProtocolMaxSupported = 999
|
||||
|
||||
kSSLClientSide = 1
|
||||
kSSLStreamType = 0
|
||||
|
@ -558,30 +560,27 @@ class SecurityConst(object):
|
|||
errSecInvalidTrustSettings = -25262
|
||||
|
||||
# Cipher suites. We only pick the ones our default cipher string allows.
|
||||
# Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
|
||||
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
|
||||
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
|
||||
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
|
||||
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
|
||||
TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3
|
||||
TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
|
||||
TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
|
||||
TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
|
||||
TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2
|
||||
TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
|
||||
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
|
||||
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
|
||||
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
|
||||
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
|
||||
TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
|
||||
TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A
|
||||
TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
|
||||
TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038
|
||||
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
|
||||
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
|
||||
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
|
||||
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
|
||||
TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
|
||||
TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040
|
||||
TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
|
||||
TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032
|
||||
TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
|
||||
TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
|
||||
TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
|
||||
|
@ -590,4 +589,5 @@ class SecurityConst(object):
|
|||
TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
|
||||
TLS_AES_128_GCM_SHA256 = 0x1301
|
||||
TLS_AES_256_GCM_SHA384 = 0x1302
|
||||
TLS_CHACHA20_POLY1305_SHA256 = 0x1303
|
||||
TLS_AES_128_CCM_8_SHA256 = 0x1305
|
||||
TLS_AES_128_CCM_SHA256 = 0x1304
|
||||
|
|
|
@ -70,6 +70,7 @@ import sys
|
|||
|
||||
from .. import util
|
||||
|
||||
|
||||
__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
|
||||
|
||||
# SNI always works.
|
||||
|
@ -77,20 +78,19 @@ HAS_SNI = True
|
|||
|
||||
# Map from urllib3 to PyOpenSSL compatible parameter-values.
|
||||
_openssl_versions = {
|
||||
ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
|
||||
util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
|
||||
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
|
||||
}
|
||||
|
||||
if hasattr(ssl, 'PROTOCOL_SSLv3') and hasattr(OpenSSL.SSL, 'SSLv3_METHOD'):
|
||||
_openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
|
||||
|
||||
if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
|
||||
_openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
|
||||
|
||||
if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
|
||||
_openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
|
||||
|
||||
try:
|
||||
_openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
_stdlib_to_openssl_verify = {
|
||||
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
|
||||
|
@ -117,6 +117,7 @@ def inject_into_urllib3():
|
|||
|
||||
_validate_dependencies_met()
|
||||
|
||||
util.SSLContext = PyOpenSSLContext
|
||||
util.ssl_.SSLContext = PyOpenSSLContext
|
||||
util.HAS_SNI = HAS_SNI
|
||||
util.ssl_.HAS_SNI = HAS_SNI
|
||||
|
@ -127,6 +128,7 @@ def inject_into_urllib3():
|
|||
def extract_from_urllib3():
|
||||
'Undo monkey-patching by :func:`inject_into_urllib3`.'
|
||||
|
||||
util.SSLContext = orig_util_SSLContext
|
||||
util.ssl_.SSLContext = orig_util_SSLContext
|
||||
util.HAS_SNI = orig_util_HAS_SNI
|
||||
util.ssl_.HAS_SNI = orig_util_HAS_SNI
|
||||
|
@ -184,6 +186,7 @@ def _dnsname_to_stdlib(name):
|
|||
except idna.core.IDNAError:
|
||||
return None
|
||||
|
||||
# Don't send IPv6 addresses through the IDNA encoder.
|
||||
if ':' in name:
|
||||
return name
|
||||
|
||||
|
@ -279,7 +282,7 @@ class WrappedSocket(object):
|
|||
return b''
|
||||
else:
|
||||
raise SocketError(str(e))
|
||||
except OpenSSL.SSL.ZeroReturnError as e:
|
||||
except OpenSSL.SSL.ZeroReturnError:
|
||||
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
|
||||
return b''
|
||||
else:
|
||||
|
@ -289,6 +292,10 @@ class WrappedSocket(object):
|
|||
raise timeout('The read operation timed out')
|
||||
else:
|
||||
return self.recv(*args, **kwargs)
|
||||
|
||||
# TLS 1.3 post-handshake authentication
|
||||
except OpenSSL.SSL.Error as e:
|
||||
raise ssl.SSLError("read error: %r" % e)
|
||||
else:
|
||||
return data
|
||||
|
||||
|
@ -300,7 +307,7 @@ class WrappedSocket(object):
|
|||
return 0
|
||||
else:
|
||||
raise SocketError(str(e))
|
||||
except OpenSSL.SSL.ZeroReturnError as e:
|
||||
except OpenSSL.SSL.ZeroReturnError:
|
||||
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
|
||||
return 0
|
||||
else:
|
||||
|
@ -311,6 +318,10 @@ class WrappedSocket(object):
|
|||
else:
|
||||
return self.recv_into(*args, **kwargs)
|
||||
|
||||
# TLS 1.3 post-handshake authentication
|
||||
except OpenSSL.SSL.Error as e:
|
||||
raise ssl.SSLError("read error: %r" % e)
|
||||
|
||||
def settimeout(self, timeout):
|
||||
return self.socket.settimeout(timeout)
|
||||
|
||||
|
@ -363,6 +374,9 @@ class WrappedSocket(object):
|
|||
'subjectAltName': get_subj_alt_name(x509)
|
||||
}
|
||||
|
||||
def version(self):
|
||||
return self.connection.get_protocol_version_name()
|
||||
|
||||
def _reuse(self):
|
||||
self._makefile_refs += 1
|
||||
|
||||
|
@ -435,7 +449,9 @@ class PyOpenSSLContext(object):
|
|||
def load_cert_chain(self, certfile, keyfile=None, password=None):
|
||||
self._ctx.use_certificate_chain_file(certfile)
|
||||
if password is not None:
|
||||
self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password)
|
||||
if not isinstance(password, six.binary_type):
|
||||
password = password.encode('utf-8')
|
||||
self._ctx.set_passwd_cb(lambda *_: password)
|
||||
self._ctx.use_privatekey_file(keyfile or certfile)
|
||||
|
||||
def wrap_socket(self, sock, server_side=False,
|
||||
|
|
|
@ -23,6 +23,31 @@ To use this module, simply import and inject it::
|
|||
urllib3.contrib.securetransport.inject_into_urllib3()
|
||||
|
||||
Happy TLSing!
|
||||
|
||||
This code is a bastardised version of the code found in Will Bond's oscrypto
|
||||
library. An enormous debt is owed to him for blazing this trail for us. For
|
||||
that reason, this code should be considered to be covered both by urllib3's
|
||||
license and by oscrypto's:
|
||||
|
||||
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the "Software"),
|
||||
to deal in the Software without restriction, including without limitation
|
||||
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
and/or sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
|
@ -86,35 +111,32 @@ SSL_WRITE_BLOCKSIZE = 16384
|
|||
# individual cipher suites. We need to do this because this is how
|
||||
# SecureTransport wants them.
|
||||
CIPHER_SUITES = [
|
||||
SecurityConst.TLS_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_CHACHA20_POLY1305_SHA256,
|
||||
SecurityConst.TLS_AES_128_GCM_SHA256,
|
||||
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
|
||||
SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
|
||||
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
|
||||
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
|
||||
SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
|
||||
SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
|
||||
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
|
||||
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
|
||||
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
|
||||
SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256,
|
||||
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
|
||||
SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
|
||||
SecurityConst.TLS_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_AES_128_GCM_SHA256,
|
||||
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
|
||||
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
|
||||
SecurityConst.TLS_AES_128_CCM_8_SHA256,
|
||||
SecurityConst.TLS_AES_128_CCM_SHA256,
|
||||
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
|
||||
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
|
||||
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
|
||||
|
@ -122,9 +144,10 @@ CIPHER_SUITES = [
|
|||
]
|
||||
|
||||
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
|
||||
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
|
||||
# TLSv1 and a high of TLSv1.3. For everything else, we pin to that version.
|
||||
# TLSv1 to 1.2 are supported on macOS 10.8+ and TLSv1.3 is macOS 10.13+
|
||||
_protocol_to_min_max = {
|
||||
ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
|
||||
util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocolMaxSupported),
|
||||
}
|
||||
|
||||
if hasattr(ssl, "PROTOCOL_SSLv2"):
|
||||
|
@ -147,14 +170,13 @@ if hasattr(ssl, "PROTOCOL_TLSv1_2"):
|
|||
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
|
||||
SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12
|
||||
)
|
||||
if hasattr(ssl, "PROTOCOL_TLS"):
|
||||
_protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23]
|
||||
|
||||
|
||||
def inject_into_urllib3():
|
||||
"""
|
||||
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
|
||||
"""
|
||||
util.SSLContext = SecureTransportContext
|
||||
util.ssl_.SSLContext = SecureTransportContext
|
||||
util.HAS_SNI = HAS_SNI
|
||||
util.ssl_.HAS_SNI = HAS_SNI
|
||||
|
@ -166,6 +188,7 @@ def extract_from_urllib3():
|
|||
"""
|
||||
Undo monkey-patching by :func:`inject_into_urllib3`.
|
||||
"""
|
||||
util.SSLContext = orig_util_SSLContext
|
||||
util.ssl_.SSLContext = orig_util_SSLContext
|
||||
util.HAS_SNI = orig_util_HAS_SNI
|
||||
util.ssl_.HAS_SNI = orig_util_HAS_SNI
|
||||
|
@ -458,7 +481,14 @@ class WrappedSocket(object):
|
|||
# Set the minimum and maximum TLS versions.
|
||||
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
|
||||
_assert_no_error(result)
|
||||
|
||||
# TLS 1.3 isn't necessarily enabled by the OS
|
||||
# so we have to detect when we error out and try
|
||||
# setting TLS 1.3 if it's allowed. kTLSProtocolMaxSupported
|
||||
# was added in macOS 10.13 along with kTLSProtocol13.
|
||||
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
|
||||
if result != 0 and max_version == SecurityConst.kTLSProtocolMaxSupported:
|
||||
result = Security.SSLSetProtocolVersionMax(self.context, SecurityConst.kTLSProtocol12)
|
||||
_assert_no_error(result)
|
||||
|
||||
# If there's a trust DB, we need to use it. We do that by telling
|
||||
|
@ -667,6 +697,25 @@ class WrappedSocket(object):
|
|||
|
||||
return der_bytes
|
||||
|
||||
def version(self):
|
||||
protocol = Security.SSLProtocol()
|
||||
result = Security.SSLGetNegotiatedProtocolVersion(self.context, ctypes.byref(protocol))
|
||||
_assert_no_error(result)
|
||||
if protocol.value == SecurityConst.kTLSProtocol13:
|
||||
return 'TLSv1.3'
|
||||
elif protocol.value == SecurityConst.kTLSProtocol12:
|
||||
return 'TLSv1.2'
|
||||
elif protocol.value == SecurityConst.kTLSProtocol11:
|
||||
return 'TLSv1.1'
|
||||
elif protocol.value == SecurityConst.kTLSProtocol1:
|
||||
return 'TLSv1'
|
||||
elif protocol.value == SecurityConst.kSSLProtocol3:
|
||||
return 'SSLv3'
|
||||
elif protocol.value == SecurityConst.kSSLProtocol2:
|
||||
return 'SSLv2'
|
||||
else:
|
||||
raise ssl.SSLError('Unknown TLS version: %r' % protocol)
|
||||
|
||||
def _reuse(self):
|
||||
self._makefile_refs += 1
|
||||
|
||||
|
|
|
@ -1,25 +1,38 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module contains provisional support for SOCKS proxies from within
|
||||
urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and
|
||||
urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
|
||||
SOCKS5. To enable its functionality, either install PySocks or install this
|
||||
module with the ``socks`` extra.
|
||||
|
||||
The SOCKS implementation supports the full range of urllib3 features. It also
|
||||
supports the following SOCKS features:
|
||||
|
||||
- SOCKS4
|
||||
- SOCKS4a
|
||||
- SOCKS5
|
||||
- SOCKS4A (``proxy_url='socks4a://...``)
|
||||
- SOCKS4 (``proxy_url='socks4://...``)
|
||||
- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
|
||||
- SOCKS5 with local DNS (``proxy_url='socks5://...``)
|
||||
- Usernames and passwords for the SOCKS proxy
|
||||
|
||||
Known Limitations:
|
||||
.. note::
|
||||
It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
|
||||
your ``proxy_url`` to ensure that DNS resolution is done from the remote
|
||||
server instead of client-side when connecting to a domain name.
|
||||
|
||||
SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
|
||||
supports IPv4, IPv6, and domain names.
|
||||
|
||||
When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
|
||||
will be sent as the ``userid`` section of the SOCKS request::
|
||||
|
||||
proxy_url="socks4a://<userid>@proxy-host"
|
||||
|
||||
When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
|
||||
of the ``proxy_url`` will be sent as the username/password to authenticate
|
||||
with the proxy::
|
||||
|
||||
proxy_url="socks5h://<username>:<password>@proxy-host"
|
||||
|
||||
- Currently PySocks does not support contacting remote websites via literal
|
||||
IPv6 addresses. Any such connection attempt will fail. You must use a domain
|
||||
name.
|
||||
- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any
|
||||
such connection attempt will fail.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
|
@ -88,7 +101,7 @@ class SOCKSConnection(HTTPConnection):
|
|||
**extra_kw
|
||||
)
|
||||
|
||||
except SocketTimeout as e:
|
||||
except SocketTimeout:
|
||||
raise ConnectTimeoutError(
|
||||
self, "Connection to %s timed out. (connect timeout=%s)" %
|
||||
(self.host, self.timeout))
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from __future__ import absolute_import
|
||||
import email.utils
|
||||
import mimetypes
|
||||
import re
|
||||
|
||||
from .packages import six
|
||||
|
||||
|
@ -19,57 +20,147 @@ def guess_content_type(filename, default='application/octet-stream'):
|
|||
return default
|
||||
|
||||
|
||||
def format_header_param(name, value):
|
||||
def format_header_param_rfc2231(name, value):
|
||||
"""
|
||||
Helper function to format and quote a single header parameter.
|
||||
Helper function to format and quote a single header parameter using the
|
||||
strategy defined in RFC 2231.
|
||||
|
||||
Particularly useful for header parameters which might contain
|
||||
non-ASCII values, like file names. This follows RFC 2231, as
|
||||
suggested by RFC 2388 Section 4.4.
|
||||
non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
|
||||
|
||||
:param name:
|
||||
The name of the parameter, a string expected to be ASCII only.
|
||||
:param value:
|
||||
The value of the parameter, provided as a unicode string.
|
||||
The value of the parameter, provided as ``bytes`` or `str``.
|
||||
:ret:
|
||||
An RFC-2231-formatted unicode string.
|
||||
"""
|
||||
if isinstance(value, six.binary_type):
|
||||
value = value.decode("utf-8")
|
||||
|
||||
if not any(ch in value for ch in '"\\\r\n'):
|
||||
result = '%s="%s"' % (name, value)
|
||||
result = u'%s="%s"' % (name, value)
|
||||
try:
|
||||
result.encode('ascii')
|
||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||
pass
|
||||
else:
|
||||
return result
|
||||
if not six.PY3 and isinstance(value, six.text_type): # Python 2:
|
||||
|
||||
if not six.PY3: # Python 2:
|
||||
value = value.encode('utf-8')
|
||||
|
||||
# encode_rfc2231 accepts an encoded string and returns an ascii-encoded
|
||||
# string in Python 2 but accepts and returns unicode strings in Python 3
|
||||
value = email.utils.encode_rfc2231(value, 'utf-8')
|
||||
value = '%s*=%s' % (name, value)
|
||||
|
||||
if not six.PY3: # Python 2:
|
||||
value = value.decode('utf-8')
|
||||
|
||||
return value
|
||||
|
||||
|
||||
_HTML5_REPLACEMENTS = {
|
||||
u"\u0022": u"%22",
|
||||
# Replace "\" with "\\".
|
||||
u"\u005C": u"\u005C\u005C",
|
||||
u"\u005C": u"\u005C\u005C",
|
||||
}
|
||||
|
||||
# All control characters from 0x00 to 0x1F *except* 0x1B.
|
||||
_HTML5_REPLACEMENTS.update({
|
||||
six.unichr(cc): u"%{:02X}".format(cc)
|
||||
for cc
|
||||
in range(0x00, 0x1F+1)
|
||||
if cc not in (0x1B,)
|
||||
})
|
||||
|
||||
|
||||
def _replace_multiple(value, needles_and_replacements):
|
||||
|
||||
def replacer(match):
|
||||
return needles_and_replacements[match.group(0)]
|
||||
|
||||
pattern = re.compile(
|
||||
r"|".join([
|
||||
re.escape(needle) for needle in needles_and_replacements.keys()
|
||||
])
|
||||
)
|
||||
|
||||
result = pattern.sub(replacer, value)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def format_header_param_html5(name, value):
|
||||
"""
|
||||
Helper function to format and quote a single header parameter using the
|
||||
HTML5 strategy.
|
||||
|
||||
Particularly useful for header parameters which might contain
|
||||
non-ASCII values, like file names. This follows the `HTML5 Working Draft
|
||||
Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
|
||||
|
||||
.. _HTML5 Working Draft Section 4.10.22.7:
|
||||
https://w3c.github.io/html/sec-forms.html#multipart-form-data
|
||||
|
||||
:param name:
|
||||
The name of the parameter, a string expected to be ASCII only.
|
||||
:param value:
|
||||
The value of the parameter, provided as ``bytes`` or `str``.
|
||||
:ret:
|
||||
A unicode string, stripped of troublesome characters.
|
||||
"""
|
||||
if isinstance(value, six.binary_type):
|
||||
value = value.decode("utf-8")
|
||||
|
||||
value = _replace_multiple(value, _HTML5_REPLACEMENTS)
|
||||
|
||||
return u'%s="%s"' % (name, value)
|
||||
|
||||
|
||||
# For backwards-compatibility.
|
||||
format_header_param = format_header_param_html5
|
||||
|
||||
|
||||
class RequestField(object):
|
||||
"""
|
||||
A data container for request body parameters.
|
||||
|
||||
:param name:
|
||||
The name of this request field.
|
||||
The name of this request field. Must be unicode.
|
||||
:param data:
|
||||
The data/value body.
|
||||
:param filename:
|
||||
An optional filename of the request field.
|
||||
An optional filename of the request field. Must be unicode.
|
||||
:param headers:
|
||||
An optional dict-like object of headers to initially use for the field.
|
||||
:param header_formatter:
|
||||
An optional callable that is used to encode and format the headers. By
|
||||
default, this is :func:`format_header_param_html5`.
|
||||
"""
|
||||
def __init__(self, name, data, filename=None, headers=None):
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
data,
|
||||
filename=None,
|
||||
headers=None,
|
||||
header_formatter=format_header_param_html5):
|
||||
self._name = name
|
||||
self._filename = filename
|
||||
self.data = data
|
||||
self.headers = {}
|
||||
if headers:
|
||||
self.headers = dict(headers)
|
||||
self.header_formatter = header_formatter
|
||||
|
||||
@classmethod
|
||||
def from_tuples(cls, fieldname, value):
|
||||
def from_tuples(
|
||||
cls,
|
||||
fieldname,
|
||||
value,
|
||||
header_formatter=format_header_param_html5):
|
||||
"""
|
||||
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
|
||||
|
||||
|
@ -97,21 +188,24 @@ class RequestField(object):
|
|||
content_type = None
|
||||
data = value
|
||||
|
||||
request_param = cls(fieldname, data, filename=filename)
|
||||
request_param = cls(
|
||||
fieldname, data, filename=filename, header_formatter=header_formatter)
|
||||
request_param.make_multipart(content_type=content_type)
|
||||
|
||||
return request_param
|
||||
|
||||
def _render_part(self, name, value):
|
||||
"""
|
||||
Overridable helper function to format a single header parameter.
|
||||
Overridable helper function to format a single header parameter. By
|
||||
default, this calls ``self.header_formatter``.
|
||||
|
||||
:param name:
|
||||
The name of the parameter, a string expected to be ASCII only.
|
||||
:param value:
|
||||
The value of the parameter, provided as a unicode string.
|
||||
"""
|
||||
return format_header_param(name, value)
|
||||
|
||||
return self.header_formatter(name, value)
|
||||
|
||||
def _render_parts(self, header_parts):
|
||||
"""
|
||||
|
@ -133,7 +227,7 @@ class RequestField(object):
|
|||
if value is not None:
|
||||
parts.append(self._render_part(name, value))
|
||||
|
||||
return '; '.join(parts)
|
||||
return u'; '.join(parts)
|
||||
|
||||
def render_headers(self):
|
||||
"""
|
||||
|
@ -144,15 +238,15 @@ class RequestField(object):
|
|||
sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
|
||||
for sort_key in sort_keys:
|
||||
if self.headers.get(sort_key, False):
|
||||
lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
|
||||
lines.append(u'%s: %s' % (sort_key, self.headers[sort_key]))
|
||||
|
||||
for header_name, header_value in self.headers.items():
|
||||
if header_name not in sort_keys:
|
||||
if header_value:
|
||||
lines.append('%s: %s' % (header_name, header_value))
|
||||
lines.append(u'%s: %s' % (header_name, header_value))
|
||||
|
||||
lines.append('\r\n')
|
||||
return '\r\n'.join(lines)
|
||||
lines.append(u'\r\n')
|
||||
return u'\r\n'.join(lines)
|
||||
|
||||
def make_multipart(self, content_disposition=None, content_type=None,
|
||||
content_location=None):
|
||||
|
@ -168,10 +262,10 @@ class RequestField(object):
|
|||
The 'Content-Location' of the request body.
|
||||
|
||||
"""
|
||||
self.headers['Content-Disposition'] = content_disposition or 'form-data'
|
||||
self.headers['Content-Disposition'] += '; '.join([
|
||||
'', self._render_parts(
|
||||
(('name', self._name), ('filename', self._filename))
|
||||
self.headers['Content-Disposition'] = content_disposition or u'form-data'
|
||||
self.headers['Content-Disposition'] += u'; '.join([
|
||||
u'', self._render_parts(
|
||||
((u'name', self._name), (u'filename', self._filename))
|
||||
)
|
||||
])
|
||||
self.headers['Content-Type'] = content_type
|
||||
|
|
|
@ -1,259 +0,0 @@
|
|||
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
|
||||
# Passes Python2.7's test suite and incorporates all the latest updates.
|
||||
# Copyright 2009 Raymond Hettinger, released under the MIT License.
|
||||
# http://code.activestate.com/recipes/576693/
|
||||
try:
|
||||
from thread import get_ident as _get_ident
|
||||
except ImportError:
|
||||
from dummy_thread import get_ident as _get_ident
|
||||
|
||||
try:
|
||||
from _abcoll import KeysView, ValuesView, ItemsView
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class OrderedDict(dict):
|
||||
'Dictionary that remembers insertion order'
|
||||
# An inherited dict maps keys to values.
|
||||
# The inherited dict provides __getitem__, __len__, __contains__, and get.
|
||||
# The remaining methods are order-aware.
|
||||
# Big-O running times for all methods are the same as for regular dictionaries.
|
||||
|
||||
# The internal self.__map dictionary maps keys to links in a doubly linked list.
|
||||
# The circular doubly linked list starts and ends with a sentinel element.
|
||||
# The sentinel element never gets deleted (this simplifies the algorithm).
|
||||
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
|
||||
|
||||
def __init__(self, *args, **kwds):
|
||||
'''Initialize an ordered dictionary. Signature is the same as for
|
||||
regular dictionaries, but keyword arguments are not recommended
|
||||
because their insertion order is arbitrary.
|
||||
|
||||
'''
|
||||
if len(args) > 1:
|
||||
raise TypeError('expected at most 1 arguments, got %d' % len(args))
|
||||
try:
|
||||
self.__root
|
||||
except AttributeError:
|
||||
self.__root = root = [] # sentinel node
|
||||
root[:] = [root, root, None]
|
||||
self.__map = {}
|
||||
self.__update(*args, **kwds)
|
||||
|
||||
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
|
||||
'od.__setitem__(i, y) <==> od[i]=y'
|
||||
# Setting a new item creates a new link which goes at the end of the linked
|
||||
# list, and the inherited dictionary is updated with the new key/value pair.
|
||||
if key not in self:
|
||||
root = self.__root
|
||||
last = root[0]
|
||||
last[1] = root[0] = self.__map[key] = [last, root, key]
|
||||
dict_setitem(self, key, value)
|
||||
|
||||
def __delitem__(self, key, dict_delitem=dict.__delitem__):
|
||||
'od.__delitem__(y) <==> del od[y]'
|
||||
# Deleting an existing item uses self.__map to find the link which is
|
||||
# then removed by updating the links in the predecessor and successor nodes.
|
||||
dict_delitem(self, key)
|
||||
link_prev, link_next, key = self.__map.pop(key)
|
||||
link_prev[1] = link_next
|
||||
link_next[0] = link_prev
|
||||
|
||||
def __iter__(self):
|
||||
'od.__iter__() <==> iter(od)'
|
||||
root = self.__root
|
||||
curr = root[1]
|
||||
while curr is not root:
|
||||
yield curr[2]
|
||||
curr = curr[1]
|
||||
|
||||
def __reversed__(self):
|
||||
'od.__reversed__() <==> reversed(od)'
|
||||
root = self.__root
|
||||
curr = root[0]
|
||||
while curr is not root:
|
||||
yield curr[2]
|
||||
curr = curr[0]
|
||||
|
||||
def clear(self):
|
||||
'od.clear() -> None. Remove all items from od.'
|
||||
try:
|
||||
for node in self.__map.itervalues():
|
||||
del node[:]
|
||||
root = self.__root
|
||||
root[:] = [root, root, None]
|
||||
self.__map.clear()
|
||||
except AttributeError:
|
||||
pass
|
||||
dict.clear(self)
|
||||
|
||||
def popitem(self, last=True):
|
||||
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
|
||||
Pairs are returned in LIFO order if last is true or FIFO order if false.
|
||||
|
||||
'''
|
||||
if not self:
|
||||
raise KeyError('dictionary is empty')
|
||||
root = self.__root
|
||||
if last:
|
||||
link = root[0]
|
||||
link_prev = link[0]
|
||||
link_prev[1] = root
|
||||
root[0] = link_prev
|
||||
else:
|
||||
link = root[1]
|
||||
link_next = link[1]
|
||||
root[1] = link_next
|
||||
link_next[0] = root
|
||||
key = link[2]
|
||||
del self.__map[key]
|
||||
value = dict.pop(self, key)
|
||||
return key, value
|
||||
|
||||
# -- the following methods do not depend on the internal structure --
|
||||
|
||||
def keys(self):
|
||||
'od.keys() -> list of keys in od'
|
||||
return list(self)
|
||||
|
||||
def values(self):
|
||||
'od.values() -> list of values in od'
|
||||
return [self[key] for key in self]
|
||||
|
||||
def items(self):
|
||||
'od.items() -> list of (key, value) pairs in od'
|
||||
return [(key, self[key]) for key in self]
|
||||
|
||||
def iterkeys(self):
|
||||
'od.iterkeys() -> an iterator over the keys in od'
|
||||
return iter(self)
|
||||
|
||||
def itervalues(self):
|
||||
'od.itervalues -> an iterator over the values in od'
|
||||
for k in self:
|
||||
yield self[k]
|
||||
|
||||
def iteritems(self):
|
||||
'od.iteritems -> an iterator over the (key, value) items in od'
|
||||
for k in self:
|
||||
yield (k, self[k])
|
||||
|
||||
def update(*args, **kwds):
|
||||
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
|
||||
|
||||
If E is a dict instance, does: for k in E: od[k] = E[k]
|
||||
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
|
||||
Or if E is an iterable of items, does: for k, v in E: od[k] = v
|
||||
In either case, this is followed by: for k, v in F.items(): od[k] = v
|
||||
|
||||
'''
|
||||
if len(args) > 2:
|
||||
raise TypeError('update() takes at most 2 positional '
|
||||
'arguments (%d given)' % (len(args),))
|
||||
elif not args:
|
||||
raise TypeError('update() takes at least 1 argument (0 given)')
|
||||
self = args[0]
|
||||
# Make progressively weaker assumptions about "other"
|
||||
other = ()
|
||||
if len(args) == 2:
|
||||
other = args[1]
|
||||
if isinstance(other, dict):
|
||||
for key in other:
|
||||
self[key] = other[key]
|
||||
elif hasattr(other, 'keys'):
|
||||
for key in other.keys():
|
||||
self[key] = other[key]
|
||||
else:
|
||||
for key, value in other:
|
||||
self[key] = value
|
||||
for key, value in kwds.items():
|
||||
self[key] = value
|
||||
|
||||
__update = update # let subclasses override update without breaking __init__
|
||||
|
||||
__marker = object()
|
||||
|
||||
def pop(self, key, default=__marker):
|
||||
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
|
||||
If key is not found, d is returned if given, otherwise KeyError is raised.
|
||||
|
||||
'''
|
||||
if key in self:
|
||||
result = self[key]
|
||||
del self[key]
|
||||
return result
|
||||
if default is self.__marker:
|
||||
raise KeyError(key)
|
||||
return default
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
|
||||
if key in self:
|
||||
return self[key]
|
||||
self[key] = default
|
||||
return default
|
||||
|
||||
def __repr__(self, _repr_running={}):
|
||||
'od.__repr__() <==> repr(od)'
|
||||
call_key = id(self), _get_ident()
|
||||
if call_key in _repr_running:
|
||||
return '...'
|
||||
_repr_running[call_key] = 1
|
||||
try:
|
||||
if not self:
|
||||
return '%s()' % (self.__class__.__name__,)
|
||||
return '%s(%r)' % (self.__class__.__name__, self.items())
|
||||
finally:
|
||||
del _repr_running[call_key]
|
||||
|
||||
def __reduce__(self):
|
||||
'Return state information for pickling'
|
||||
items = [[k, self[k]] for k in self]
|
||||
inst_dict = vars(self).copy()
|
||||
for k in vars(OrderedDict()):
|
||||
inst_dict.pop(k, None)
|
||||
if inst_dict:
|
||||
return (self.__class__, (items,), inst_dict)
|
||||
return self.__class__, (items,)
|
||||
|
||||
def copy(self):
|
||||
'od.copy() -> a shallow copy of od'
|
||||
return self.__class__(self)
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, value=None):
|
||||
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
|
||||
and values equal to v (which defaults to None).
|
||||
|
||||
'''
|
||||
d = cls()
|
||||
for key in iterable:
|
||||
d[key] = value
|
||||
return d
|
||||
|
||||
def __eq__(self, other):
|
||||
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
|
||||
while comparison to a regular mapping is order-insensitive.
|
||||
|
||||
'''
|
||||
if isinstance(other, OrderedDict):
|
||||
return len(self)==len(other) and self.items() == other.items()
|
||||
return dict.__eq__(self, other)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
# -- the following methods are only used in Python 2.7 --
|
||||
|
||||
def viewkeys(self):
|
||||
"od.viewkeys() -> a set-like object providing a view on od's keys"
|
||||
return KeysView(self)
|
||||
|
||||
def viewvalues(self):
|
||||
"od.viewvalues() -> an object providing a view on od's values"
|
||||
return ValuesView(self)
|
||||
|
||||
def viewitems(self):
|
||||
"od.viewitems() -> a set-like object providing a view on od's items"
|
||||
return ItemsView(self)
|
|
@ -20,7 +20,8 @@ __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
|
||||
'ssl_version', 'ca_cert_dir', 'ssl_context')
|
||||
'ssl_version', 'ca_cert_dir', 'ssl_context',
|
||||
'key_password')
|
||||
|
||||
# All known keyword arguments that could be provided to the pool manager, its
|
||||
# pools, or the underlying connections. This is used to construct a pool key.
|
||||
|
@ -34,6 +35,7 @@ _key_fields = (
|
|||
'key_block', # bool
|
||||
'key_source_address', # str
|
||||
'key_key_file', # str
|
||||
'key_key_password', # str
|
||||
'key_cert_file', # str
|
||||
'key_cert_reqs', # str
|
||||
'key_ca_certs', # str
|
||||
|
@ -48,7 +50,7 @@ _key_fields = (
|
|||
'key__socks_options', # dict
|
||||
'key_assert_hostname', # bool or string
|
||||
'key_assert_fingerprint', # str
|
||||
'key_server_hostname', #str
|
||||
'key_server_hostname', # str
|
||||
)
|
||||
|
||||
#: The namedtuple class used to construct keys for the connection pool.
|
||||
|
|
|
@ -6,6 +6,11 @@ import logging
|
|||
from socket import timeout as SocketTimeout
|
||||
from socket import error as SocketError
|
||||
|
||||
try:
|
||||
import brotli
|
||||
except ImportError:
|
||||
brotli = None
|
||||
|
||||
from ._collections import HTTPHeaderDict
|
||||
from .exceptions import (
|
||||
BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError,
|
||||
|
@ -90,6 +95,25 @@ class GzipDecoder(object):
|
|||
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
|
||||
|
||||
|
||||
if brotli is not None:
|
||||
class BrotliDecoder(object):
|
||||
# Supports both 'brotlipy' and 'Brotli' packages
|
||||
# since they share an import name. The top branches
|
||||
# are for 'brotlipy' and bottom branches for 'Brotli'
|
||||
def __init__(self):
|
||||
self._obj = brotli.Decompressor()
|
||||
|
||||
def decompress(self, data):
|
||||
if hasattr(self._obj, 'decompress'):
|
||||
return self._obj.decompress(data)
|
||||
return self._obj.process(data)
|
||||
|
||||
def flush(self):
|
||||
if hasattr(self._obj, 'flush'):
|
||||
return self._obj.flush()
|
||||
return b''
|
||||
|
||||
|
||||
class MultiDecoder(object):
|
||||
"""
|
||||
From RFC7231:
|
||||
|
@ -118,6 +142,9 @@ def _get_decoder(mode):
|
|||
if mode == 'gzip':
|
||||
return GzipDecoder()
|
||||
|
||||
if brotli is not None and mode == 'br':
|
||||
return BrotliDecoder()
|
||||
|
||||
return DeflateDecoder()
|
||||
|
||||
|
||||
|
@ -155,6 +182,8 @@ class HTTPResponse(io.IOBase):
|
|||
"""
|
||||
|
||||
CONTENT_DECODERS = ['gzip', 'deflate']
|
||||
if brotli is not None:
|
||||
CONTENT_DECODERS += ['br']
|
||||
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
|
||||
|
||||
def __init__(self, body='', headers=None, status=0, version=0, reason=None,
|
||||
|
@ -311,24 +340,32 @@ class HTTPResponse(io.IOBase):
|
|||
if content_encoding in self.CONTENT_DECODERS:
|
||||
self._decoder = _get_decoder(content_encoding)
|
||||
elif ',' in content_encoding:
|
||||
encodings = [e.strip() for e in content_encoding.split(',') if e.strip() in self.CONTENT_DECODERS]
|
||||
encodings = [
|
||||
e.strip() for e in content_encoding.split(',')
|
||||
if e.strip() in self.CONTENT_DECODERS]
|
||||
if len(encodings):
|
||||
self._decoder = _get_decoder(content_encoding)
|
||||
|
||||
DECODER_ERROR_CLASSES = (IOError, zlib.error)
|
||||
if brotli is not None:
|
||||
DECODER_ERROR_CLASSES += (brotli.error,)
|
||||
|
||||
def _decode(self, data, decode_content, flush_decoder):
|
||||
"""
|
||||
Decode the data passed in and potentially flush the decoder.
|
||||
"""
|
||||
if not decode_content:
|
||||
return data
|
||||
|
||||
try:
|
||||
if decode_content and self._decoder:
|
||||
if self._decoder:
|
||||
data = self._decoder.decompress(data)
|
||||
except (IOError, zlib.error) as e:
|
||||
except self.DECODER_ERROR_CLASSES as e:
|
||||
content_encoding = self.headers.get('content-encoding', '').lower()
|
||||
raise DecodeError(
|
||||
"Received response with content-encoding: %s, but "
|
||||
"failed to decode it." % content_encoding, e)
|
||||
|
||||
if flush_decoder and decode_content:
|
||||
if flush_decoder:
|
||||
data += self._flush_decoder()
|
||||
|
||||
return data
|
||||
|
@ -508,9 +545,10 @@ class HTTPResponse(io.IOBase):
|
|||
headers = r.msg
|
||||
|
||||
if not isinstance(headers, HTTPHeaderDict):
|
||||
if PY3: # Python 3
|
||||
if PY3:
|
||||
headers = HTTPHeaderDict(headers.items())
|
||||
else: # Python 2
|
||||
else:
|
||||
# Python 2.7
|
||||
headers = HTTPHeaderDict.from_httplib(headers)
|
||||
|
||||
# HTTPResponse objects in Python 3 don't have a .strict attribute
|
||||
|
@ -703,3 +741,20 @@ class HTTPResponse(io.IOBase):
|
|||
return self.retries.history[-1].redirect_location
|
||||
else:
|
||||
return self._request_url
|
||||
|
||||
def __iter__(self):
|
||||
buffer = [b""]
|
||||
for chunk in self.stream(decode_content=True):
|
||||
if b"\n" in chunk:
|
||||
chunk = chunk.split(b"\n")
|
||||
yield b"".join(buffer) + chunk[0] + b"\n"
|
||||
for x in chunk[1:-1]:
|
||||
yield x + b"\n"
|
||||
if chunk[-1]:
|
||||
buffer = [chunk[-1]]
|
||||
else:
|
||||
buffer = []
|
||||
else:
|
||||
buffer.append(chunk)
|
||||
if buffer:
|
||||
yield b"".join(buffer)
|
||||
|
|
|
@ -12,6 +12,7 @@ from .ssl_ import (
|
|||
resolve_cert_reqs,
|
||||
resolve_ssl_version,
|
||||
ssl_wrap_socket,
|
||||
PROTOCOL_TLS,
|
||||
)
|
||||
from .timeout import (
|
||||
current_time,
|
||||
|
@ -35,6 +36,7 @@ __all__ = (
|
|||
'IS_PYOPENSSL',
|
||||
'IS_SECURETRANSPORT',
|
||||
'SSLContext',
|
||||
'PROTOCOL_TLS',
|
||||
'Retry',
|
||||
'Timeout',
|
||||
'Url',
|
||||
|
|
|
@ -5,6 +5,13 @@ from ..packages.six import b, integer_types
|
|||
from ..exceptions import UnrewindableBodyError
|
||||
|
||||
ACCEPT_ENCODING = 'gzip,deflate'
|
||||
try:
|
||||
import brotli as _unused_module_brotli # noqa: F401
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
ACCEPT_ENCODING += ',br'
|
||||
|
||||
_FAILEDTELL = object()
|
||||
|
||||
|
||||
|
|
|
@ -1,581 +0,0 @@
|
|||
# Backport of selectors.py from Python 3.5+ to support Python < 3.4
|
||||
# Also has the behavior specified in PEP 475 which is to retry syscalls
|
||||
# in the case of an EINTR error. This module is required because selectors34
|
||||
# does not follow this behavior and instead returns that no dile descriptor
|
||||
# events have occurred rather than retry the syscall. The decision to drop
|
||||
# support for select.devpoll is made to maintain 100% test coverage.
|
||||
|
||||
import errno
|
||||
import math
|
||||
import select
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
from collections import namedtuple, Mapping
|
||||
|
||||
try:
|
||||
monotonic = time.monotonic
|
||||
except (AttributeError, ImportError): # Python 3.3<
|
||||
monotonic = time.time
|
||||
|
||||
EVENT_READ = (1 << 0)
|
||||
EVENT_WRITE = (1 << 1)
|
||||
|
||||
HAS_SELECT = True # Variable that shows whether the platform has a selector.
|
||||
_SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None.
|
||||
_DEFAULT_SELECTOR = None
|
||||
|
||||
|
||||
class SelectorError(Exception):
|
||||
def __init__(self, errcode):
|
||||
super(SelectorError, self).__init__()
|
||||
self.errno = errcode
|
||||
|
||||
def __repr__(self):
|
||||
return "<SelectorError errno={0}>".format(self.errno)
|
||||
|
||||
def __str__(self):
|
||||
return self.__repr__()
|
||||
|
||||
|
||||
def _fileobj_to_fd(fileobj):
|
||||
""" Return a file descriptor from a file object. If
|
||||
given an integer will simply return that integer back. """
|
||||
if isinstance(fileobj, int):
|
||||
fd = fileobj
|
||||
else:
|
||||
try:
|
||||
fd = int(fileobj.fileno())
|
||||
except (AttributeError, TypeError, ValueError):
|
||||
raise ValueError("Invalid file object: {0!r}".format(fileobj))
|
||||
if fd < 0:
|
||||
raise ValueError("Invalid file descriptor: {0}".format(fd))
|
||||
return fd
|
||||
|
||||
|
||||
# Determine which function to use to wrap system calls because Python 3.5+
|
||||
# already handles the case when system calls are interrupted.
|
||||
if sys.version_info >= (3, 5):
|
||||
def _syscall_wrapper(func, _, *args, **kwargs):
|
||||
""" This is the short-circuit version of the below logic
|
||||
because in Python 3.5+ all system calls automatically restart
|
||||
and recalculate their timeouts. """
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except (OSError, IOError, select.error) as e:
|
||||
errcode = None
|
||||
if hasattr(e, "errno"):
|
||||
errcode = e.errno
|
||||
raise SelectorError(errcode)
|
||||
else:
|
||||
def _syscall_wrapper(func, recalc_timeout, *args, **kwargs):
|
||||
""" Wrapper function for syscalls that could fail due to EINTR.
|
||||
All functions should be retried if there is time left in the timeout
|
||||
in accordance with PEP 475. """
|
||||
timeout = kwargs.get("timeout", None)
|
||||
if timeout is None:
|
||||
expires = None
|
||||
recalc_timeout = False
|
||||
else:
|
||||
timeout = float(timeout)
|
||||
if timeout < 0.0: # Timeout less than 0 treated as no timeout.
|
||||
expires = None
|
||||
else:
|
||||
expires = monotonic() + timeout
|
||||
|
||||
args = list(args)
|
||||
if recalc_timeout and "timeout" not in kwargs:
|
||||
raise ValueError(
|
||||
"Timeout must be in args or kwargs to be recalculated")
|
||||
|
||||
result = _SYSCALL_SENTINEL
|
||||
while result is _SYSCALL_SENTINEL:
|
||||
try:
|
||||
result = func(*args, **kwargs)
|
||||
# OSError is thrown by select.select
|
||||
# IOError is thrown by select.epoll.poll
|
||||
# select.error is thrown by select.poll.poll
|
||||
# Aren't we thankful for Python 3.x rework for exceptions?
|
||||
except (OSError, IOError, select.error) as e:
|
||||
# select.error wasn't a subclass of OSError in the past.
|
||||
errcode = None
|
||||
if hasattr(e, "errno"):
|
||||
errcode = e.errno
|
||||
elif hasattr(e, "args"):
|
||||
errcode = e.args[0]
|
||||
|
||||
# Also test for the Windows equivalent of EINTR.
|
||||
is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and
|
||||
errcode == errno.WSAEINTR))
|
||||
|
||||
if is_interrupt:
|
||||
if expires is not None:
|
||||
current_time = monotonic()
|
||||
if current_time > expires:
|
||||
raise OSError(errno=errno.ETIMEDOUT)
|
||||
if recalc_timeout:
|
||||
if "timeout" in kwargs:
|
||||
kwargs["timeout"] = expires - current_time
|
||||
continue
|
||||
if errcode:
|
||||
raise SelectorError(errcode)
|
||||
else:
|
||||
raise
|
||||
return result
|
||||
|
||||
|
||||
SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
|
||||
|
||||
|
||||
class _SelectorMapping(Mapping):
    """Read-only view of a selector's registered file objects.

    Wraps a selector and exposes its internal ``_fd_to_key`` table as a
    mapping from file object to :class:`SelectorKey`.
    """

    def __init__(self, selector):
        self._selector = selector

    def __len__(self):
        return len(self._selector._fd_to_key)

    def __iter__(self):
        return iter(self._selector._fd_to_key)

    def __getitem__(self, fileobj):
        try:
            descriptor = self._selector._fileobj_lookup(fileobj)
            return self._selector._fd_to_key[descriptor]
        except KeyError:
            raise KeyError("{0!r} is not registered.".format(fileobj))
|
||||
|
||||
|
||||
class BaseSelector(object):
    """ Abstract Selector class

    A selector supports registering file objects to be monitored
    for specific I/O events.

    A file object is a file descriptor or any object with a
    `fileno()` method. An arbitrary object can be attached to the
    file object which can be used for example to store context info,
    a callback, etc.

    A selector can use various implementations (select(), poll(), epoll(),
    and kqueue()) depending on the platform. The 'DefaultSelector' class uses
    the most efficient implementation for the current platform.
    """
    def __init__(self):
        # Maps file descriptors to keys.
        self._fd_to_key = {}

        # Read-only mapping returned by get_map()
        self._map = _SelectorMapping(self)

    def _fileobj_lookup(self, fileobj):
        """ Return a file descriptor from a file object.
        This wraps _fileobj_to_fd() to do an exhaustive
        search in case the object is invalid but we still
        have it in our map. Used by unregister() so we can
        unregister an object that was previously registered
        even if it is closed. It is also used by _SelectorMapping
        """
        try:
            return _fileobj_to_fd(fileobj)
        except ValueError:

            # Search through all our mapped keys.
            for key in self._fd_to_key.values():
                if key.fileobj is fileobj:
                    return key.fd

            # Raise ValueError after all.
            raise

    def register(self, fileobj, events, data=None):
        """ Register a file object for a set of events to monitor.

        :param fileobj: file object or file descriptor to watch.
        :param events: bitmask of EVENT_READ and/or EVENT_WRITE.
        :param data: opaque object attached to the returned key.
        :returns: the new SelectorKey.
        :raises ValueError: if ``events`` is empty or has bits outside
            EVENT_READ | EVENT_WRITE.
        :raises KeyError: if the file object is already registered.
        """
        if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
            raise ValueError("Invalid events: {0!r}".format(events))

        key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)

        if key.fd in self._fd_to_key:
            raise KeyError("{0!r} (FD {1}) is already registered"
                           .format(fileobj, key.fd))

        self._fd_to_key[key.fd] = key
        return key

    def unregister(self, fileobj):
        """ Unregister a file object from being monitored.

        :returns: the SelectorKey that was associated with ``fileobj``.
        :raises KeyError: if the file object was not registered.
        """
        try:
            key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))

        # Getting the fileno of a closed socket on Windows errors with EBADF.
        except socket.error as e:  # Platform-specific: Windows.
            if e.errno != errno.EBADF:
                raise
            else:
                # Fall back to an identity scan so a socket that was closed
                # after registration can still be unregistered.
                for key in self._fd_to_key.values():
                    if key.fileobj is fileobj:
                        self._fd_to_key.pop(key.fd)
                        break
                else:
                    raise KeyError("{0!r} is not registered".format(fileobj))
        return key

    def modify(self, fileobj, events, data=None):
        """ Change a registered file object monitored events and data.

        :returns: the (possibly new) SelectorKey for ``fileobj``.
        :raises KeyError: if the file object is not registered.
        """
        # NOTE: Some subclasses optimize this operation even further.
        try:
            key = self._fd_to_key[self._fileobj_lookup(fileobj)]
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))

        if events != key.events:
            # Changing the event mask requires a full re-registration.
            self.unregister(fileobj)
            key = self.register(fileobj, events, data)

        elif data != key.data:
            # Use a shortcut to update the data.
            key = key._replace(data=data)
            self._fd_to_key[key.fd] = key

        return key

    def select(self, timeout=None):
        """ Perform the actual selection until some monitored file objects
        are ready or the timeout expires. Implemented by subclasses. """
        raise NotImplementedError()

    def close(self):
        """ Close the selector. This must be called to ensure that all
        underlying resources are freed. """
        self._fd_to_key.clear()
        self._map = None

    def get_key(self, fileobj):
        """ Return the key associated with a registered file object.

        :raises RuntimeError: if the selector has been closed.
        :raises KeyError: if the file object is not registered.
        """
        mapping = self.get_map()
        if mapping is None:
            raise RuntimeError("Selector is closed")
        try:
            return mapping[fileobj]
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))

    def get_map(self):
        """ Return a mapping of file objects to selector keys """
        return self._map

    def _key_from_fd(self, fd):
        """ Return the key associated to a given file descriptor
        Return None if it is not found. """
        try:
            return self._fd_to_key[fd]
        except KeyError:
            return None

    def __enter__(self):
        # Context-manager support: `with selector: ...` closes it on exit.
        return self

    def __exit__(self, *args):
        self.close()
|
||||
|
||||
|
||||
# Almost all platforms have select.select()
|
||||
if hasattr(select, "select"):
    class SelectSelector(BaseSelector):
        """ Select-based selector. """
        def __init__(self):
            super(SelectSelector, self).__init__()
            self._readers = set()
            self._writers = set()

        def register(self, fileobj, events, data=None):
            """ Register and add the fd to the read and/or write set. """
            key = super(SelectSelector, self).register(fileobj, events, data)
            if events & EVENT_READ:
                self._readers.add(key.fd)
            if events & EVENT_WRITE:
                self._writers.add(key.fd)
            return key

        def unregister(self, fileobj):
            """ Unregister and drop the fd from both interest sets. """
            key = super(SelectSelector, self).unregister(fileobj)
            self._readers.discard(key.fd)
            self._writers.discard(key.fd)
            return key

        def _select(self, r, w, timeout=None):
            """ Wrapper for select.select because timeout is a positional arg """
            return select.select(r, w, [], timeout)

        def select(self, timeout=None):
            """ Wait until registered file objects become ready, or the
            timeout expires. Returns a list of (key, events) pairs. """
            # Selecting on empty lists on Windows errors out.
            if not len(self._readers) and not len(self._writers):
                return []

            timeout = None if timeout is None else max(timeout, 0.0)
            ready = []
            # BUG FIX: `timeout` must be passed as a *keyword* argument.
            # _syscall_wrapper() is called with recalc_timeout=True and
            # raises ValueError("Timeout must be in args or kwargs to be
            # recalculated") unless "timeout" is present in kwargs -- it
            # also needs it there to recompute the remaining time after
            # an EINTR retry.
            r, w, _ = _syscall_wrapper(self._select, True, self._readers,
                                       self._writers, timeout=timeout)
            r = set(r)
            w = set(w)
            for fd in r | w:
                events = 0
                if fd in r:
                    events |= EVENT_READ
                if fd in w:
                    events |= EVENT_WRITE

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))
            return ready
|
||||
|
||||
|
||||
if hasattr(select, "poll"):
    class PollSelector(BaseSelector):
        """ Poll-based selector """
        def __init__(self):
            super(PollSelector, self).__init__()
            self._poll = select.poll()

        def register(self, fileobj, events, data=None):
            # Translate EVENT_READ/EVENT_WRITE into the poll() bitmask.
            key = super(PollSelector, self).register(fileobj, events, data)
            event_mask = 0
            if events & EVENT_READ:
                event_mask |= select.POLLIN
            if events & EVENT_WRITE:
                event_mask |= select.POLLOUT
            self._poll.register(key.fd, event_mask)
            return key

        def unregister(self, fileobj):
            key = super(PollSelector, self).unregister(fileobj)
            self._poll.unregister(key.fd)
            return key

        def _wrap_poll(self, timeout=None):
            """ Wrapper function for select.poll.poll() so that
            _syscall_wrapper can work with only seconds. """
            if timeout is not None:
                if timeout <= 0:
                    timeout = 0
                else:
                    # select.poll.poll() has a resolution of 1 millisecond,
                    # round away from zero to wait *at least* timeout seconds.
                    timeout = math.ceil(timeout * 1e3)

            result = self._poll.poll(timeout)
            return result

        def select(self, timeout=None):
            """ Wait until registered file objects become ready, or the
            timeout expires. Returns a list of (key, events) pairs. """
            ready = []
            fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
            for fd, event_mask in fd_events:
                events = 0
                # Any bit other than POLLIN (POLLOUT, POLLERR, POLLHUP, ...)
                # marks the fd writable, and any bit other than POLLOUT marks
                # it readable; key.events filters out unwanted bits below.
                if event_mask & ~select.POLLIN:
                    events |= EVENT_WRITE
                if event_mask & ~select.POLLOUT:
                    events |= EVENT_READ

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))

            return ready
|
||||
|
||||
|
||||
if hasattr(select, "epoll"):
    class EpollSelector(BaseSelector):
        """ Epoll-based selector """
        def __init__(self):
            super(EpollSelector, self).__init__()
            self._epoll = select.epoll()

        def fileno(self):
            # File descriptor of the underlying epoll object.
            return self._epoll.fileno()

        def register(self, fileobj, events, data=None):
            # Translate EVENT_READ/EVENT_WRITE into the epoll bitmask.
            key = super(EpollSelector, self).register(fileobj, events, data)
            events_mask = 0
            if events & EVENT_READ:
                events_mask |= select.EPOLLIN
            if events & EVENT_WRITE:
                events_mask |= select.EPOLLOUT
            _syscall_wrapper(self._epoll.register, False, key.fd, events_mask)
            return key

        def unregister(self, fileobj):
            key = super(EpollSelector, self).unregister(fileobj)
            try:
                _syscall_wrapper(self._epoll.unregister, False, key.fd)
            except SelectorError:
                # This can occur when the fd was closed since registry.
                pass
            return key

        def select(self, timeout=None):
            """ Wait until registered file objects become ready, or the
            timeout expires. Returns a list of (key, events) pairs. """
            if timeout is not None:
                if timeout <= 0:
                    timeout = 0.0
                else:
                    # select.epoll.poll() has a resolution of 1 millisecond
                    # but luckily takes seconds so we don't need a wrapper
                    # like PollSelector. Just for better rounding.
                    timeout = math.ceil(timeout * 1e3) * 1e-3
                timeout = float(timeout)
            else:
                timeout = -1.0  # epoll.poll() must have a float.

            # We always want at least 1 to ensure that select can be called
            # with no file descriptors registered. Otherwise will fail.
            max_events = max(len(self._fd_to_key), 1)

            ready = []
            fd_events = _syscall_wrapper(self._epoll.poll, True,
                                         timeout=timeout,
                                         maxevents=max_events)
            for fd, event_mask in fd_events:
                events = 0
                # Any bit other than EPOLLIN (EPOLLOUT, EPOLLERR, EPOLLHUP,
                # ...) marks the fd writable, and any bit other than EPOLLOUT
                # marks it readable; key.events filters unwanted bits below.
                if event_mask & ~select.EPOLLIN:
                    events |= EVENT_WRITE
                if event_mask & ~select.EPOLLOUT:
                    events |= EVENT_READ

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))
            return ready

        def close(self):
            # Release the epoll fd before clearing the base bookkeeping.
            self._epoll.close()
            super(EpollSelector, self).close()
|
||||
|
||||
|
||||
if hasattr(select, "kqueue"):
    class KqueueSelector(BaseSelector):
        """ Kqueue / Kevent-based selector """
        def __init__(self):
            super(KqueueSelector, self).__init__()
            self._kqueue = select.kqueue()

        def fileno(self):
            """ File descriptor of the underlying kqueue object. """
            return self._kqueue.fileno()

        def register(self, fileobj, events, data=None):
            """ Register interest; kqueue tracks read and write interest
            as two separate kevents. """
            key = super(KqueueSelector, self).register(fileobj, events, data)
            if events & EVENT_READ:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_READ,
                                       select.KQ_EV_ADD)

                _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)

            if events & EVENT_WRITE:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_WRITE,
                                       select.KQ_EV_ADD)

                _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)

            return key

        def unregister(self, fileobj):
            """ Remove both the read and write kevents for the file object. """
            key = super(KqueueSelector, self).unregister(fileobj)
            if key.events & EVENT_READ:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_READ,
                                       select.KQ_EV_DELETE)
                try:
                    _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
                except SelectorError:
                    # Deletion can fail, e.g. if the fd was closed since
                    # registration; safe to ignore.
                    pass
            if key.events & EVENT_WRITE:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_WRITE,
                                       select.KQ_EV_DELETE)
                try:
                    _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
                except SelectorError:
                    pass

            return key

        def _wrap_control(self, changelist, max_events, timeout):
            """ Wrapper for kqueue.control() so that `timeout` can travel
            through _syscall_wrapper() as a keyword argument. """
            return self._kqueue.control(changelist, max_events, timeout)

        def select(self, timeout=None):
            """ Wait until registered file objects become ready, or the
            timeout expires. Returns a list of (key, events) pairs. """
            if timeout is not None:
                timeout = max(timeout, 0)

            # Each registered fd can fire at most two kevents (read + write).
            max_events = len(self._fd_to_key) * 2
            ready_fds = {}

            # BUG FIX: `timeout` must be passed as a *keyword* argument.
            # _syscall_wrapper() is called with recalc_timeout=True and
            # raises ValueError unless "timeout" is present in kwargs (it
            # also needs it there to recompute the remaining time after an
            # EINTR retry), so kqueue.control() is reached via the
            # _wrap_control() helper, which accepts timeout by keyword.
            kevent_list = _syscall_wrapper(self._wrap_control, True,
                                           None, max_events, timeout=timeout)

            for kevent in kevent_list:
                fd = kevent.ident
                event_mask = kevent.filter
                events = 0
                if event_mask == select.KQ_FILTER_READ:
                    events |= EVENT_READ
                if event_mask == select.KQ_FILTER_WRITE:
                    events |= EVENT_WRITE

                key = self._key_from_fd(fd)
                if key:
                    if key.fd not in ready_fds:
                        ready_fds[key.fd] = (key, events & key.events)
                    else:
                        # Second kevent for the same fd: merge event masks.
                        old_events = ready_fds[key.fd][1]
                        ready_fds[key.fd] = (key, (events | old_events) & key.events)

            return list(ready_fds.values())

        def close(self):
            # Release the kqueue fd before clearing the base bookkeeping.
            self._kqueue.close()
            super(KqueueSelector, self).close()
|
||||
|
||||
|
||||
if not hasattr(select, 'select'):  # Platform-specific: AppEngine
    # No select() primitive at all (e.g. sandboxed platforms): flag it so
    # callers can detect that no selector implementation is available.
    HAS_SELECT = False
|
||||
|
||||
|
||||
def _can_allocate(struct):
    """Return True if the named select primitive can really be allocated.

    The select module can advertise a primitive that the underlying
    operating system refuses to provide (e.g. on GAE), so probe it
    instead of trusting hasattr() alone.
    """
    try:
        if struct == 'poll':
            # poll() objects only fail once they are used, so poke one.
            probe = select.poll()
            probe.poll(0)
            return True
        # Everything else (epoll, kqueue, ...) fails at allocation time.
        getattr(select, struct)().close()
        return True
    except (OSError, AttributeError):
        return False
|
||||
|
||||
|
||||
# Choose the best implementation, roughly:
# kqueue == epoll > poll > select. Devpoll not supported. (See above)
# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
def DefaultSelector():
    """ This function serves as a first call for DefaultSelector to
    detect if the select module is being monkey-patched incorrectly
    by eventlet, greenlet, and preserve proper behavior.

    The chosen class is cached in the module-global _DEFAULT_SELECTOR,
    so the probing only happens on the first call.
    """
    global _DEFAULT_SELECTOR
    if _DEFAULT_SELECTOR is None:
        # Probe in preference order. A True result from _can_allocate()
        # implies the corresponding select primitive exists, so the
        # matching *Selector class was defined above.
        if _can_allocate('kqueue'):
            _DEFAULT_SELECTOR = KqueueSelector
        elif _can_allocate('epoll'):
            _DEFAULT_SELECTOR = EpollSelector
        elif _can_allocate('poll'):
            _DEFAULT_SELECTOR = PollSelector
        elif hasattr(select, 'select'):
            _DEFAULT_SELECTOR = SelectSelector
        else:  # Platform-specific: AppEngine
            raise ValueError('Platform does not have a selector')
    return _DEFAULT_SELECTOR()
|
|
@ -2,13 +2,14 @@ from __future__ import absolute_import
|
|||
import errno
|
||||
import warnings
|
||||
import hmac
|
||||
import socket
|
||||
import re
|
||||
|
||||
from binascii import hexlify, unhexlify
|
||||
from hashlib import md5, sha1, sha256
|
||||
|
||||
from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
|
||||
from ..packages import six
|
||||
from ..packages.rfc3986 import abnf_regexp
|
||||
|
||||
|
||||
SSLContext = None
|
||||
|
@ -40,14 +41,33 @@ def _const_compare_digest_backport(a, b):
|
|||
_const_compare_digest = getattr(hmac, 'compare_digest',
|
||||
_const_compare_digest_backport)
|
||||
|
||||
# Borrow rfc3986's regular expressions for IPv4
|
||||
# and IPv6 addresses for use in is_ipaddress()
|
||||
_IP_ADDRESS_REGEX = re.compile(
|
||||
r'^(?:%s|%s|%s)$' % (
|
||||
abnf_regexp.IPv4_RE,
|
||||
abnf_regexp.IPv6_RE,
|
||||
abnf_regexp.IPv6_ADDRZ_RFC4007_RE
|
||||
)
|
||||
)
|
||||
|
||||
try: # Test for SSL features
|
||||
import ssl
|
||||
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
|
||||
from ssl import wrap_socket, CERT_REQUIRED
|
||||
from ssl import HAS_SNI # Has SNI?
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try: # Platform-specific: Python 3.6
|
||||
from ssl import PROTOCOL_TLS
|
||||
PROTOCOL_SSLv23 = PROTOCOL_TLS
|
||||
except ImportError:
|
||||
try:
|
||||
from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
|
||||
PROTOCOL_SSLv23 = PROTOCOL_TLS
|
||||
except ImportError:
|
||||
PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
|
||||
|
||||
|
||||
try:
|
||||
from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
|
||||
|
@ -56,25 +76,6 @@ except ImportError:
|
|||
OP_NO_COMPRESSION = 0x20000
|
||||
|
||||
|
||||
# Python 2.7 doesn't have inet_pton on non-Linux so we fallback on inet_aton in
|
||||
# those cases. This means that we can only detect IPv4 addresses in this case.
|
||||
if hasattr(socket, 'inet_pton'):
|
||||
inet_pton = socket.inet_pton
|
||||
else:
|
||||
# Maybe we can use ipaddress if the user has urllib3[secure]?
|
||||
try:
|
||||
import ipaddress
|
||||
|
||||
def inet_pton(_, host):
|
||||
if isinstance(host, bytes):
|
||||
host = host.decode('ascii')
|
||||
return ipaddress.ip_address(host)
|
||||
|
||||
except ImportError: # Platform-specific: Non-Linux
|
||||
def inet_pton(_, host):
|
||||
return socket.inet_aton(host)
|
||||
|
||||
|
||||
# A secure default.
|
||||
# Sources for more information on TLS ciphers:
|
||||
#
|
||||
|
@ -83,37 +84,35 @@ else:
|
|||
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
|
||||
#
|
||||
# The general intent is:
|
||||
# - Prefer TLS 1.3 cipher suites
|
||||
# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
|
||||
# - prefer ECDHE over DHE for better performance,
|
||||
# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
|
||||
# security,
|
||||
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
|
||||
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
|
||||
# - disable NULL authentication, MD5 MACs, DSS, and other
|
||||
# insecure ciphers for security reasons.
|
||||
# - NOTE: TLS 1.3 cipher suites are managed through a different interface
|
||||
# not exposed by CPython (yet!) and are enabled by default if they're available.
|
||||
DEFAULT_CIPHERS = ':'.join([
|
||||
'TLS13-AES-256-GCM-SHA384',
|
||||
'TLS13-CHACHA20-POLY1305-SHA256',
|
||||
'TLS13-AES-128-GCM-SHA256',
|
||||
'ECDHE+AESGCM',
|
||||
'ECDHE+CHACHA20',
|
||||
'DHE+AESGCM',
|
||||
'DHE+CHACHA20',
|
||||
'ECDH+AESGCM',
|
||||
'ECDH+CHACHA20',
|
||||
'DH+AESGCM',
|
||||
'DH+CHACHA20',
|
||||
'ECDH+AES256',
|
||||
'DH+AES256',
|
||||
'ECDH+AES128',
|
||||
'ECDH+AES',
|
||||
'DH+AES',
|
||||
'RSA+AESGCM',
|
||||
'RSA+AES',
|
||||
'!aNULL',
|
||||
'!eNULL',
|
||||
'!MD5',
|
||||
'!DSS',
|
||||
])
|
||||
|
||||
try:
|
||||
from ssl import SSLContext # Modern SSL?
|
||||
except ImportError:
|
||||
import sys
|
||||
|
||||
class SSLContext(object): # Platform-specific: Python 2
|
||||
def __init__(self, protocol_version):
|
||||
self.protocol = protocol_version
|
||||
|
@ -199,7 +198,7 @@ def resolve_cert_reqs(candidate):
|
|||
constant which can directly be passed to wrap_socket.
|
||||
"""
|
||||
if candidate is None:
|
||||
return CERT_NONE
|
||||
return CERT_REQUIRED
|
||||
|
||||
if isinstance(candidate, str):
|
||||
res = getattr(ssl, candidate, None)
|
||||
|
@ -215,7 +214,7 @@ def resolve_ssl_version(candidate):
|
|||
like resolve_cert_reqs
|
||||
"""
|
||||
if candidate is None:
|
||||
return PROTOCOL_SSLv23
|
||||
return PROTOCOL_TLS
|
||||
|
||||
if isinstance(candidate, str):
|
||||
res = getattr(ssl, candidate, None)
|
||||
|
@ -261,7 +260,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None,
|
|||
Constructed SSLContext object with specified options
|
||||
:rtype: SSLContext
|
||||
"""
|
||||
context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
|
||||
context = SSLContext(ssl_version or PROTOCOL_TLS)
|
||||
|
||||
context.set_ciphers(ciphers or DEFAULT_CIPHERS)
|
||||
|
||||
|
@ -291,7 +290,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None,
|
|||
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
||||
ca_certs=None, server_hostname=None,
|
||||
ssl_version=None, ciphers=None, ssl_context=None,
|
||||
ca_cert_dir=None):
|
||||
ca_cert_dir=None, key_password=None):
|
||||
"""
|
||||
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
|
||||
the same meaning as they do when using :func:`ssl.wrap_socket`.
|
||||
|
@ -307,6 +306,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
|||
A directory containing CA certificates in multiple separate files, as
|
||||
supported by OpenSSL's -CApath flag or the capath argument to
|
||||
SSLContext.load_verify_locations().
|
||||
:param key_password:
|
||||
Optional password if the keyfile is encrypted.
|
||||
"""
|
||||
context = ssl_context
|
||||
if context is None:
|
||||
|
@ -328,14 +329,21 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
|||
raise SSLError(e)
|
||||
raise
|
||||
|
||||
# Don't load system certs unless there were no CA certs or
|
||||
# SSLContext object specified manually.
|
||||
elif ssl_context is None and hasattr(context, 'load_default_certs'):
|
||||
# try to load OS default certs; works well on Windows (require Python3.4+)
|
||||
context.load_default_certs()
|
||||
|
||||
# Attempt to detect if we get the goofy behavior of the
|
||||
# keyfile being encrypted and OpenSSL asking for the
|
||||
# passphrase via the terminal and instead error out.
|
||||
if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
|
||||
raise SSLError("Client private key is encrypted, password is required")
|
||||
|
||||
if certfile:
|
||||
context.load_cert_chain(certfile, keyfile)
|
||||
if key_password is None:
|
||||
context.load_cert_chain(certfile, keyfile)
|
||||
else:
|
||||
context.load_cert_chain(certfile, keyfile, key_password)
|
||||
|
||||
# If we detect server_hostname is an IP address then the SNI
|
||||
# extension should not be used according to RFC3546 Section 3.1
|
||||
|
@ -361,7 +369,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
|
|||
|
||||
|
||||
def is_ipaddress(hostname):
|
||||
"""Detects whether the hostname given is an IP address.
|
||||
"""Detects whether the hostname given is an IPv4 or IPv6 address.
|
||||
Also detects IPv6 addresses with Zone IDs.
|
||||
|
||||
:param str hostname: Hostname to examine.
|
||||
:return: True if the hostname is an IP address, False otherwise.
|
||||
|
@ -369,16 +378,15 @@ def is_ipaddress(hostname):
|
|||
if six.PY3 and isinstance(hostname, bytes):
|
||||
# IDN A-label bytes are ASCII compatible.
|
||||
hostname = hostname.decode('ascii')
|
||||
return _IP_ADDRESS_REGEX.match(hostname) is not None
|
||||
|
||||
families = [socket.AF_INET]
|
||||
if hasattr(socket, 'AF_INET6'):
|
||||
families.append(socket.AF_INET6)
|
||||
|
||||
for af in families:
|
||||
try:
|
||||
inet_pton(af, hostname)
|
||||
except (socket.error, ValueError, OSError):
|
||||
pass
|
||||
else:
|
||||
return True
|
||||
def _is_key_file_encrypted(key_file):
    """Return True if the PEM key file at *key_file* looks encrypted.

    Encrypted PEM private keys carry an 'ENCRYPTED' marker (e.g. the
    'Proc-Type: 4,ENCRYPTED' header), so a line scan is sufficient.
    """
    with open(key_file, 'r') as pem:
        return any('ENCRYPTED' in line for line in pem)
|
||||
|
|
|
@ -131,7 +131,8 @@ class Timeout(object):
|
|||
raise ValueError("Attempted to set %s timeout to %s, but the "
|
||||
"timeout cannot be set to a value less "
|
||||
"than or equal to 0." % (name, value))
|
||||
except TypeError: # Python 3
|
||||
except TypeError:
|
||||
# Python 3
|
||||
raise ValueError("Timeout value %s was %s, but it must be an "
|
||||
"int, float or None." % (name, value))
|
||||
|
||||
|
|
|
@ -1,7 +1,12 @@
|
|||
from __future__ import absolute_import
|
||||
import re
|
||||
from collections import namedtuple
|
||||
|
||||
from ..exceptions import LocationParseError
|
||||
from ..packages import six, rfc3986
|
||||
from ..packages.rfc3986.exceptions import RFC3986Exception, ValidationError
|
||||
from ..packages.rfc3986.validators import Validator
|
||||
from ..packages.rfc3986 import abnf_regexp, normalizers, compat, misc
|
||||
|
||||
|
||||
url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
|
||||
|
@ -10,10 +15,16 @@ url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
|
|||
# urllib3 infers URLs without a scheme (None) to be http.
|
||||
NORMALIZABLE_SCHEMES = ('http', 'https', None)
|
||||
|
||||
# Regex for detecting URLs with schemes. RFC 3986 Section 3.1
|
||||
SCHEME_REGEX = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+\-]*:|/)")
|
||||
|
||||
PATH_CHARS = abnf_regexp.UNRESERVED_CHARS_SET | abnf_regexp.SUB_DELIMITERS_SET | {':', '@', '/'}
|
||||
QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {'?'}
|
||||
|
||||
|
||||
class Url(namedtuple('Url', url_attrs)):
|
||||
"""
|
||||
Datastructure for representing an HTTP URL. Used as a return value for
|
||||
Data structure for representing an HTTP URL. Used as a return value for
|
||||
:func:`parse_url`. Both the scheme and host are normalized as they are
|
||||
both case-insensitive according to RFC 3986.
|
||||
"""
|
||||
|
@ -23,10 +34,8 @@ class Url(namedtuple('Url', url_attrs)):
|
|||
query=None, fragment=None):
|
||||
if path and not path.startswith('/'):
|
||||
path = '/' + path
|
||||
if scheme:
|
||||
if scheme is not None:
|
||||
scheme = scheme.lower()
|
||||
if host and scheme in NORMALIZABLE_SCHEMES:
|
||||
host = host.lower()
|
||||
return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
|
||||
query, fragment)
|
||||
|
||||
|
@ -72,23 +81,23 @@ class Url(namedtuple('Url', url_attrs)):
|
|||
'http://username:password@host.com:80/path?query#fragment'
|
||||
"""
|
||||
scheme, auth, host, port, path, query, fragment = self
|
||||
url = ''
|
||||
url = u''
|
||||
|
||||
# We use "is not None" we want things to happen with empty strings (or 0 port)
|
||||
if scheme is not None:
|
||||
url += scheme + '://'
|
||||
url += scheme + u'://'
|
||||
if auth is not None:
|
||||
url += auth + '@'
|
||||
url += auth + u'@'
|
||||
if host is not None:
|
||||
url += host
|
||||
if port is not None:
|
||||
url += ':' + str(port)
|
||||
url += u':' + str(port)
|
||||
if path is not None:
|
||||
url += path
|
||||
if query is not None:
|
||||
url += '?' + query
|
||||
url += u'?' + query
|
||||
if fragment is not None:
|
||||
url += '#' + fragment
|
||||
url += u'#' + fragment
|
||||
|
||||
return url
|
||||
|
||||
|
@ -98,6 +107,8 @@ class Url(namedtuple('Url', url_attrs)):
|
|||
|
||||
def split_first(s, delims):
|
||||
"""
|
||||
.. deprecated:: 1.25
|
||||
|
||||
Given a string and an iterable of delimiters, split on the first found
|
||||
delimiter. Return two split parts and the matched delimiter.
|
||||
|
||||
|
@ -129,10 +140,44 @@ def split_first(s, delims):
|
|||
return s[:min_idx], s[min_idx + 1:], min_delim
|
||||
|
||||
|
||||
def _encode_invalid_chars(component, allowed_chars, encoding='utf-8'):
    """Percent-encodes a URI component without reapplying
    onto an already percent-encoded component. Based on
    rfc3986.normalizers.encode_component()

    :param component: URI component (path, query or fragment) or None.
    :param allowed_chars: set of single ASCII characters left unencoded.
    :param encoding: codec used to decode the final byte sequence.
    :returns: the encoded component as text, or None if component is None.
    """
    if component is None:
        return component

    # Try to see if the component we're encoding is already percent-encoded
    # so we can skip all '%' characters but still encode all others.
    percent_encodings = len(normalizers.PERCENT_MATCHER.findall(
        compat.to_str(component, encoding)))

    # 'surrogatepass' lets lone surrogates round-trip through the encoder.
    uri_bytes = component.encode('utf-8', 'surrogatepass')
    # Fully percent-encoded iff every '%' byte belongs to a %XX escape.
    is_percent_encoded = percent_encodings == uri_bytes.count(b'%')

    encoded_component = bytearray()

    for i in range(0, len(uri_bytes)):
        # Will return a single character bytestring on both Python 2 & 3
        byte = uri_bytes[i:i+1]
        byte_ord = ord(byte)
        # Keep '%' bytes of already-encoded escapes and all allowed ASCII
        # characters as-is; everything else becomes an uppercase %XX escape.
        if ((is_percent_encoded and byte == b'%')
                or (byte_ord < 128 and byte.decode() in allowed_chars)):
            encoded_component.extend(byte)
            continue
        encoded_component.extend('%{0:02x}'.format(byte_ord).encode().upper())

    return encoded_component.decode(encoding)
|
||||
|
||||
|
||||
def parse_url(url):
|
||||
"""
|
||||
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
|
||||
performed to parse incomplete urls. Fields not provided will be None.
|
||||
This parser is RFC 3986 compliant.
|
||||
|
||||
:param str url: URL to parse into a :class:`.Url` namedtuple.
|
||||
|
||||
Partly backwards-compatible with :mod:`urlparse`.
|
||||
|
||||
|
@ -145,81 +190,95 @@ def parse_url(url):
|
|||
>>> parse_url('/foo?bar')
|
||||
Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
|
||||
"""
|
||||
|
||||
# While this code has overlap with stdlib's urlparse, it is much
|
||||
# simplified for our needs and less annoying.
|
||||
# Additionally, this implementations does silly things to be optimal
|
||||
# on CPython.
|
||||
|
||||
if not url:
|
||||
# Empty
|
||||
return Url()
|
||||
|
||||
scheme = None
|
||||
auth = None
|
||||
host = None
|
||||
port = None
|
||||
path = None
|
||||
fragment = None
|
||||
query = None
|
||||
is_string = not isinstance(url, six.binary_type)
|
||||
|
||||
# Scheme
|
||||
if '://' in url:
|
||||
scheme, url = url.split('://', 1)
|
||||
# RFC 3986 doesn't like URLs that have a host but don't start
|
||||
# with a scheme and we support URLs like that so we need to
|
||||
# detect that problem and add an empty scheme indication.
|
||||
# We don't get hurt on path-only URLs here as it's stripped
|
||||
# off and given an empty scheme anyways.
|
||||
if not SCHEME_REGEX.search(url):
|
||||
url = "//" + url
|
||||
|
||||
# Find the earliest Authority Terminator
|
||||
# (http://tools.ietf.org/html/rfc3986#section-3.2)
|
||||
url, path_, delim = split_first(url, ['/', '?', '#'])
|
||||
|
||||
if delim:
|
||||
# Reassemble the path
|
||||
path = delim + path_
|
||||
|
||||
# Auth
|
||||
if '@' in url:
|
||||
# Last '@' denotes end of auth part
|
||||
auth, url = url.rsplit('@', 1)
|
||||
|
||||
# IPv6
|
||||
if url and url[0] == '[':
|
||||
host, url = url.split(']', 1)
|
||||
host += ']'
|
||||
|
||||
# Port
|
||||
if ':' in url:
|
||||
_host, port = url.split(':', 1)
|
||||
|
||||
if not host:
|
||||
host = _host
|
||||
|
||||
if port:
|
||||
# If given, ports must be integers. No whitespace, no plus or
|
||||
# minus prefixes, no non-integer digits such as ^2 (superscript).
|
||||
if not port.isdigit():
|
||||
raise LocationParseError(url)
|
||||
def idna_encode(name):
|
||||
if name and any([ord(x) > 128 for x in name]):
|
||||
try:
|
||||
port = int(port)
|
||||
except ValueError:
|
||||
raise LocationParseError(url)
|
||||
else:
|
||||
# Blank ports are cool, too. (rfc3986#section-3.2.3)
|
||||
port = None
|
||||
import idna
|
||||
except ImportError:
|
||||
raise LocationParseError("Unable to parse URL without the 'idna' module")
|
||||
try:
|
||||
return idna.encode(name.lower(), strict=True, std3_rules=True)
|
||||
except idna.IDNAError:
|
||||
raise LocationParseError(u"Name '%s' is not a valid IDNA label" % name)
|
||||
return name
|
||||
|
||||
elif not host and url:
|
||||
host = url
|
||||
try:
|
||||
split_iri = misc.IRI_MATCHER.match(compat.to_str(url)).groupdict()
|
||||
iri_ref = rfc3986.IRIReference(
|
||||
split_iri['scheme'], split_iri['authority'],
|
||||
_encode_invalid_chars(split_iri['path'], PATH_CHARS),
|
||||
_encode_invalid_chars(split_iri['query'], QUERY_CHARS),
|
||||
_encode_invalid_chars(split_iri['fragment'], FRAGMENT_CHARS)
|
||||
)
|
||||
has_authority = iri_ref.authority is not None
|
||||
uri_ref = iri_ref.encode(idna_encoder=idna_encode)
|
||||
except (ValueError, RFC3986Exception):
|
||||
return six.raise_from(LocationParseError(url), None)
|
||||
|
||||
# rfc3986 strips the authority if it's invalid
|
||||
if has_authority and uri_ref.authority is None:
|
||||
raise LocationParseError(url)
|
||||
|
||||
# Only normalize schemes we understand to not break http+unix
|
||||
# or other schemes that don't follow RFC 3986.
|
||||
if uri_ref.scheme is None or uri_ref.scheme.lower() in NORMALIZABLE_SCHEMES:
|
||||
uri_ref = uri_ref.normalize()
|
||||
|
||||
# Validate all URIReference components and ensure that all
|
||||
# components that were set before are still set after
|
||||
# normalization has completed.
|
||||
validator = Validator()
|
||||
try:
|
||||
validator.check_validity_of(
|
||||
*validator.COMPONENT_NAMES
|
||||
).validate(uri_ref)
|
||||
except ValidationError:
|
||||
return six.raise_from(LocationParseError(url), None)
|
||||
|
||||
# For the sake of backwards compatibility we put empty
|
||||
# string values for path if there are any defined values
|
||||
# beyond the path in the URL.
|
||||
# TODO: Remove this when we break backwards compatibility.
|
||||
path = uri_ref.path
|
||||
if not path:
|
||||
return Url(scheme, auth, host, port, path, query, fragment)
|
||||
if (uri_ref.query is not None
|
||||
or uri_ref.fragment is not None):
|
||||
path = ""
|
||||
else:
|
||||
path = None
|
||||
|
||||
# Fragment
|
||||
if '#' in path:
|
||||
path, fragment = path.split('#', 1)
|
||||
# Ensure that each part of the URL is a `str` for
|
||||
# backwards compatibility.
|
||||
def to_input_type(x):
|
||||
if x is None:
|
||||
return None
|
||||
elif not is_string and not isinstance(x, six.binary_type):
|
||||
return x.encode('utf-8')
|
||||
return x
|
||||
|
||||
# Query
|
||||
if '?' in path:
|
||||
path, query = path.split('?', 1)
|
||||
|
||||
return Url(scheme, auth, host, port, path, query, fragment)
|
||||
return Url(
|
||||
scheme=to_input_type(uri_ref.scheme),
|
||||
auth=to_input_type(uri_ref.userinfo),
|
||||
host=to_input_type(uri_ref.host),
|
||||
port=int(uri_ref.port) if uri_ref.port is not None else None,
|
||||
path=to_input_type(path),
|
||||
query=to_input_type(uri_ref.query),
|
||||
fragment=to_input_type(uri_ref.fragment)
|
||||
)
|
||||
|
||||
|
||||
def get_host(url):
|
||||
|
|
|
@ -61,6 +61,7 @@
|
|||
% from database import TableEpisodes, TableMovies, System
|
||||
% import operator
|
||||
% from config import settings
|
||||
% from functools import reduce
|
||||
|
||||
%episodes_missing_subtitles_clause = [
|
||||
% (TableEpisodes.missing_subtitles != '[]')
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue