morpheus65535 2025-06-09 21:57:33 -04:00
parent cc5fa4200c
commit 9700e6be08
314 changed files with 55778 additions and 5415 deletions


@@ -56,42 +56,6 @@ class NoExceptionFormatter(logging.Formatter):
return ''
class UnwantedWaitressMessageFilter(logging.Filter):
def filter(self, record):
if settings.general.debug or "BAZARR" in record.msg:
# no filtering in debug mode or if originating from us
return True
if record.levelno < logging.ERROR:
return False
unwantedMessages = [
"Exception while serving /api/socket.io/",
['Session is disconnected', 'Session not found'],
"Exception while serving /api/socket.io/",
["'Session is disconnected'", "'Session not found'"],
"Exception while serving /api/socket.io/",
['"Session is disconnected"', '"Session not found"'],
"Exception when servicing %r",
[],
]
wanted = True
listLength = len(unwantedMessages)
for i in range(0, listLength, 2):
if record.msg == unwantedMessages[i]:
exceptionTuple = record.exc_info
if exceptionTuple is not None:
if len(unwantedMessages[i+1]) == 0 or str(exceptionTuple[1]) in unwantedMessages[i+1]:
wanted = False
break
return wanted
def configure_logging(debug=False):
warnings.simplefilter('ignore', category=ResourceWarning)
warnings.simplefilter('ignore', category=PytzUsageWarning)
@@ -166,8 +130,6 @@ def configure_logging(debug=False):
logging.getLogger("websocket").setLevel(logging.CRITICAL)
logging.getLogger("ga4mp.ga4mp").setLevel(logging.ERROR)
logging.getLogger("waitress").setLevel(logging.INFO)
logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
logging.getLogger("knowit").setLevel(logging.CRITICAL)
logging.getLogger("enzyme").setLevel(logging.CRITICAL)
logging.getLogger("guessit").setLevel(logging.WARNING)
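The removed `UnwantedWaitressMessageFilter` suppressed known-noisy waitress errors by matching `record.msg` and, when present, the exception string in `record.exc_info`; with waitress gone, both the class and its `addFilter()` registration are dropped. For reference, a minimal sketch of that filtering idea (the class name and message table below are illustrative, not Bazarr's actual code):

```python
import logging

class DropNoisyErrors(logging.Filter):
    """Drop known-harmless ERROR records from a chatty third-party logger."""

    # message -> exception strings that make the record safe to discard
    UNWANTED = {
        "Exception while serving /api/socket.io/":
            {"Session is disconnected", "Session not found"},
    }

    def filter(self, record):
        allowed = self.UNWANTED.get(record.msg)
        if allowed is None or record.exc_info is None:
            return True  # keep anything we don't explicitly recognize
        # drop only when the attached exception is one of the known ones
        return str(record.exc_info[1]) not in allowed

logging.getLogger("waitress").addFilter(DropNoisyErrors())
```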


@@ -3,11 +3,10 @@
import signal
import warnings
import logging
import errno
from literals import EXIT_INTERRUPT, EXIT_NORMAL, EXIT_PORT_ALREADY_IN_USE_ERROR
from utilities.central import restart_bazarr, stop_bazarr
import cherrypy
from waitress.server import create_server
from literals import EXIT_INTERRUPT, EXIT_NORMAL
from utilities.central import restart_bazarr, stop_bazarr
from time import sleep
from api import api_bp
@@ -32,6 +31,7 @@ class Server:
warnings.simplefilter("ignore", BrokenPipeError)
self.server = None
self.server6 = None
self.connected = False
self.address = str(settings.general.ip)
self.port = int(args.port) if args.port else int(settings.general.port)
@@ -42,60 +42,56 @@
self.configure_server()
def configure_server(self):
try:
self.server = create_server(app,
host=self.address,
port=self.port,
threads=100)
self.connected = True
except OSError as error:
if error.errno == errno.EADDRNOTAVAIL:
logging.exception("BAZARR cannot bind to specified IP, trying with 0.0.0.0")
self.address = '0.0.0.0'
self.connected = False
super(Server, self).__init__()
elif error.errno == errno.EADDRINUSE:
if self.port != '6767':
logging.exception("BAZARR cannot bind to specified TCP port, trying with default (6767)")
self.port = '6767'
self.connected = False
super(Server, self).__init__()
else:
logging.exception("BAZARR cannot bind to default TCP port (6767) because it's already in use, "
"exiting...")
self.shutdown(EXIT_PORT_ALREADY_IN_USE_ERROR)
elif error.errno in [errno.ENOLINK, errno.EAFNOSUPPORT]:
logging.exception("BAZARR cannot bind to IPv6 (*), trying with 0.0.0.0")
self.address = '0.0.0.0'
self.connected = False
super(Server, self).__init__()
else:
logging.exception("BAZARR cannot start because of unhandled exception.")
self.shutdown()
cherrypy.config.update({'log.screen': False, 'log.access_file': '', 'log.error_file': ''})
cherrypy.config.update({'server.shutdown_timeout': 1, 'server.thread_pool': 100})
cherrypy.log.error_log.setLevel(logging.CRITICAL)
self.server = cherrypy._cpserver.Server()
if self.address == '*':
# we listen on every available IPv4 address
self.server.socket_host = '0.0.0.0'
# we must create a distinct server to support both IPv4 and IPv6 at the same time
self.server6 = cherrypy._cpserver.Server()
self.server6.socket_host = '::'
self.server6.socket_port = self.port
else:
# we bind to only IPv4 or IPv6, not both at the same time
self.server.socket_host = self.address
self.server.socket_port = self.port
cherrypy.tree.graft(app, script_name='/')
self.connected = True
def interrupt_handler(self, signum, frame):
# print('Server signal interrupt handler called with signal', signum)
if not self.interrupted:
# ignore user hammering Ctrl-C; we heard you the first time!
self.interrupted = True
self.shutdown(EXIT_INTERRUPT)
def start(self):
self.server.print_listen("BAZARR is started and waiting for requests on: http://{}:{}")
signal.signal(signal.SIGINT, self.interrupt_handler)
try:
self.server.run()
self.server.start()
if self.server6:
self.server6.start()
except (KeyboardInterrupt, SystemExit):
self.shutdown()
except Exception:
pass
except Exception as e:
logging.critical(f"BAZARR cannot start because of: {e}")
self.shutdown()
else:
logging.info(f"BAZARR is started and waiting for requests on: {self.server.base()}")
def close_all(self):
print("Closing database...")
logging.info("Closing database...")
close_database()
logging.info("Please wait while we're closing webserver...")
# IPv6 only webserver must be stopped first if it's been started.
if self.server6:
self.server6.stop()
# then we stop the main webserver
if self.server:
print("Closing webserver...")
self.server.close()
self.server.stop()
def shutdown(self, status=EXIT_NORMAL):
self.close_all()
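Net effect of this hunk: the waitress `create_server()` call and its errno-specific retry logic give way to CherryPy's bundled HTTP server, with the WSGI app grafted onto `cherrypy.tree` and a second `_cpserver.Server` bound to `::` whenever the listen address is `*`. A self-contained sketch of that dual-stack pattern, assuming a throwaway WSGI app and port rather than Bazarr's settings:

```python
import cherrypy

def app(environ, start_response):
    # stand-in WSGI app; Bazarr grafts its Flask app here instead
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello\n']

cherrypy.config.update({'log.screen': False, 'server.shutdown_timeout': 1})
cherrypy.tree.graft(app, script_name='/')

server4 = cherrypy._cpserver.Server()      # IPv4 listener
server4.socket_host = '0.0.0.0'
server4.socket_port = 6767

server6 = cherrypy._cpserver.Server()      # separate IPv6 listener
server6.socket_host = '::'
server6.socket_port = 6767

server4.start()
server6.start()
# shutdown mirrors Server.close_all(): stop the IPv6 listener first, then IPv4
```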


@@ -1,243 +0,0 @@
from __future__ import absolute_import
import functools
from collections import namedtuple
from threading import RLock
_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"])
@functools.wraps(functools.update_wrapper)
def update_wrapper(
wrapper,
wrapped,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES,
):
"""
Patch two bugs in functools.update_wrapper.
"""
# workaround for http://bugs.python.org/issue3445
assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
# workaround for https://bugs.python.org/issue17482
wrapper.__wrapped__ = wrapped
return wrapper
class _HashedSeq(list):
"""This class guarantees that hash() will be called no more than once
per element. This is important because the lru_cache() will hash
the key multiple times on a cache miss.
"""
__slots__ = 'hashvalue'
def __init__(self, tup, hash=hash):
self[:] = tup
self.hashvalue = hash(tup)
def __hash__(self):
return self.hashvalue
def _make_key(
args,
kwds,
typed,
kwd_mark=(object(),),
fasttypes={int, str},
tuple=tuple,
type=type,
len=len,
):
"""Make a cache key from optionally typed positional and keyword arguments
The key is constructed in a way that is flat as possible rather than
as a nested structure that would take more memory.
If there is only a single argument and its data type is known to cache
its hash value, then that argument is returned without a wrapper. This
saves space and improves lookup speed.
"""
# All of code below relies on kwds preserving the order input by the user.
# Formerly, we sorted() the kwds before looping. The new way is *much*
# faster; however, it means that f(x=1, y=2) will now be treated as a
# distinct call from f(y=2, x=1) which will be cached separately.
key = args
if kwds:
key += kwd_mark
for item in kwds.items():
key += item
if typed:
key += tuple(type(v) for v in args)
if kwds:
key += tuple(type(v) for v in kwds.values())
elif len(key) == 1 and type(key[0]) in fasttypes:
return key[0]
return _HashedSeq(key)
def lru_cache(maxsize=128, typed=False):
"""Least-recently-used cache decorator.
If *maxsize* is set to None, the LRU features are disabled and the cache
can grow without bound.
If *typed* is True, arguments of different types will be cached separately.
For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as
distinct calls with distinct results. Some types such as str and int may
be cached separately even when typed is false.
Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, maxsize, currsize)
with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)
"""
# Users should only access the lru_cache through its public API:
# cache_info, cache_clear, and f.__wrapped__
# The internals of the lru_cache are encapsulated for thread safety and
# to allow the implementation to change (including a possible C version).
if isinstance(maxsize, int):
# Negative maxsize is treated as 0
if maxsize < 0:
maxsize = 0
elif callable(maxsize) and isinstance(typed, bool):
# The user_function was passed in directly via the maxsize argument
user_function, maxsize = maxsize, 128
wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
return update_wrapper(wrapper, user_function)
elif maxsize is not None:
raise TypeError('Expected first argument to be an integer, a callable, or None')
def decorating_function(user_function):
wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
return update_wrapper(wrapper, user_function)
return decorating_function
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
# Constants shared by all lru cache instances:
sentinel = object() # unique object used to signal cache misses
make_key = _make_key # build a key from the function arguments
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
cache = {}
hits = misses = 0
full = False
cache_get = cache.get # bound method to lookup a key or return None
cache_len = cache.__len__ # get cache size without calling len()
lock = RLock() # because linkedlist updates aren't threadsafe
root = [] # root of the circular doubly linked list
root[:] = [root, root, None, None] # initialize by pointing to self
if maxsize == 0:
def wrapper(*args, **kwds):
# No caching -- just a statistics update
nonlocal misses
misses += 1
result = user_function(*args, **kwds)
return result
elif maxsize is None:
def wrapper(*args, **kwds):
# Simple caching without ordering or size limit
nonlocal hits, misses
key = make_key(args, kwds, typed)
result = cache_get(key, sentinel)
if result is not sentinel:
hits += 1
return result
misses += 1
result = user_function(*args, **kwds)
cache[key] = result
return result
else:
def wrapper(*args, **kwds):
# Size limited caching that tracks accesses by recency
nonlocal root, hits, misses, full
key = make_key(args, kwds, typed)
with lock:
link = cache_get(key)
if link is not None:
# Move the link to the front of the circular queue
link_prev, link_next, _key, result = link
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
last = root[PREV]
last[NEXT] = root[PREV] = link
link[PREV] = last
link[NEXT] = root
hits += 1
return result
misses += 1
result = user_function(*args, **kwds)
with lock:
if key in cache:
# Getting here means that this same key was added to the
# cache while the lock was released. Since the link
# update is already done, we need only return the
# computed result and update the count of misses.
pass
elif full:
# Use the old root to store the new key and result.
oldroot = root
oldroot[KEY] = key
oldroot[RESULT] = result
# Empty the oldest link and make it the new root.
# Keep a reference to the old key and old result to
# prevent their ref counts from going to zero during the
# update. That will prevent potentially arbitrary object
# clean-up code (i.e. __del__) from running while we're
# still adjusting the links.
root = oldroot[NEXT]
oldkey = root[KEY]
root[KEY] = root[RESULT] = None
# Now update the cache dictionary.
del cache[oldkey]
# Save the potentially reentrant cache[key] assignment
# for last, after the root and links have been put in
# a consistent state.
cache[key] = oldroot
else:
# Put result in a new link at the front of the queue.
last = root[PREV]
link = [last, root, key, result]
last[NEXT] = root[PREV] = cache[key] = link
# Use the cache_len bound method instead of the len() function
# which could potentially be wrapped in an lru_cache itself.
full = cache_len() >= maxsize
return result
def cache_info():
"""Report cache statistics"""
with lock:
return _CacheInfo(hits, misses, maxsize, cache_len())
def cache_clear():
"""Clear the cache and cache statistics"""
nonlocal hits, misses, full
with lock:
cache.clear()
root[:] = [root, root, None, None]
hits = misses = 0
full = False
wrapper.cache_info = cache_info
wrapper.cache_clear = cache_clear
return wrapper
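The file deleted above is a vendored backport of `functools.lru_cache`; the standard-library decorator exposes the same public API its docstring describes (`cache_info()`, `cache_clear()`, `__wrapped__`). For reference:

```python
from functools import lru_cache

@lru_cache(maxsize=128)
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(30)
print(fib.cache_info())   # CacheInfo(hits=28, misses=31, maxsize=128, currsize=31)
fib.cache_clear()         # resets both the cache and its statistics
print(fib.__wrapped__)    # the undecorated function
```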

File diff suppressed because it is too large


@@ -0,0 +1,5 @@
from . import main
if __name__ == '__main__':
main()


@@ -0,0 +1,24 @@
import sys
if sys.version_info < (3, 9):
def removesuffix(self, suffix):
# suffix='' should not call self[:-0].
if suffix and self.endswith(suffix):
return self[: -len(suffix)]
else:
return self[:]
def removeprefix(self, prefix):
if self.startswith(prefix):
return self[len(prefix) :]
else:
return self[:]
else:
def removesuffix(self, suffix):
return self.removesuffix(suffix)
def removeprefix(self, prefix):
return self.removeprefix(prefix)
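These helpers backport `str.removeprefix()` / `str.removesuffix()` (PEP 616, new in Python 3.9) as module-level functions, so callers pass the string explicitly and the same code runs on 3.8. Expected behaviour, assuming the `backports.tarfile.compat.py38` module path listed in the RECORD below:

```python
from backports.tarfile.compat.py38 import removeprefix, removesuffix

assert removeprefix("backports.tarfile", "backports.") == "tarfile"
assert removesuffix("archive.tar.gz", ".gz") == "archive.tar"
assert removesuffix("archive.tar.gz", ".zip") == "archive.tar.gz"  # no match: unchanged
```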

Binary file not shown.


@@ -0,0 +1,17 @@
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.


@@ -0,0 +1,46 @@
Metadata-Version: 2.1
Name: backports.tarfile
Version: 1.2.0
Summary: Backport of CPython tarfile module
Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-File: LICENSE
Provides-Extra: docs
Requires-Dist: sphinx >=3.5 ; extra == 'docs'
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
Requires-Dist: furo ; extra == 'docs'
Requires-Dist: sphinx-lint ; extra == 'docs'
Provides-Extra: testing
Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
Requires-Dist: pytest-cov ; extra == 'testing'
Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
Requires-Dist: jaraco.test ; extra == 'testing'
Requires-Dist: pytest !=8.0.* ; extra == 'testing'
.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
:target: https://pypi.org/project/backports.tarfile
.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
:target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
:alt: tests
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
:target: https://github.com/astral-sh/ruff
:alt: Ruff
.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
.. :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
.. image:: https://img.shields.io/badge/skeleton-2024-informational
:target: https://blog.jaraco.com/skeleton


@@ -0,0 +1,12 @@
backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
backports/tarfile/__init__.py,sha256=Pwf2qUIfB0SolJPCKcx3vz3UEu_aids4g4sAfxy94qg,108491
backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
backports_tarfile-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
backports_tarfile-1.2.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
backports_tarfile-1.2.0.dist-info/METADATA,sha256=ghXFTq132dxaEIolxr3HK1mZqm9iyUmaRANZQSr6WlE,2020
backports_tarfile-1.2.0.dist-info/RECORD,,
backports_tarfile-1.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
backports_tarfile-1.2.0.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
backports_tarfile-1.2.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10


@@ -1,5 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.3.0)
Generator: setuptools (75.3.2)
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1 @@
backports


@@ -0,0 +1 @@
pip


@@ -0,0 +1,15 @@
Apache Software License 2.0
Copyright (c) 2020, Paul Ganssle (Google)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,113 @@
Metadata-Version: 2.1
Name: backports.zoneinfo
Version: 0.2.1
Summary: Backport of the standard library zoneinfo module
Home-page: https://github.com/pganssle/zoneinfo
Author: Python Software Foundation
Author-email: datetime-sig@python.org
License: Apache-2.0
Project-URL: Source, https://github.com/pganssle/zoneinfo
Project-URL: Documentation, https://zoneinfo.readthedocs.io/en/latest/
Project-URL: Bug Reports, https://github.com/pganssle/zoneinfo/issues
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Requires-Python: >=3.6
Description-Content-Type: text/markdown
License-File: LICENSE
License-File: licenses/LICENSE_APACHE
Requires-Dist: importlib-resources; python_version < "3.7"
Provides-Extra: tzdata
Requires-Dist: tzdata; extra == "tzdata"
# `backports.zoneinfo`: Backport of the standard library module `zoneinfo`
This package was originally the reference implementation for [PEP 615](https://www.python.org/dev/peps/pep-0615/), which proposes support for the IANA time zone database in the standard library, and now serves as a backport to Python 3.6+ (including PyPy).
This exposes the `backports.zoneinfo` module, which is a backport of the [`zoneinfo`](https://docs.python.org/3.9/library/zoneinfo.html#module-zoneinfo) module. The backport's documentation can be found [on readthedocs](https://zoneinfo.readthedocs.io/en/latest/).
The module uses the system time zone data if available, and falls back to the [`tzdata`](https://tzdata.readthedocs.io/en/latest/) package (available [on PyPI](https://pypi.org/project/tzdata/)) if installed.
## Installation and depending on this library
This module is called [`backports.zoneinfo`](https://pypi.org/project/backports.zoneinfo) on PyPI. To install it in your local environment, use:
```
pip install backports.zoneinfo
```
Or (particularly on Windows), you can also use the `tzdata` extra (which basically just declares a dependency on `tzdata`, so this doesn't actually save you any typing 😅):
```
pip install backports.zoneinfo[tzdata]
```
If you want to use this in your application, it is best to use [PEP 508 environment markers](https://www.python.org/dev/peps/pep-0508/#environment-markers) to declare a dependency *conditional on the Python version*:
```
backports.zoneinfo;python_version<"3.9"
```
Support for `backports.zoneinfo` in Python 3.9+ is currently minimal, since it is expected that you would use the standard library `zoneinfo` module instead.
## Use
The `backports.zoneinfo` module should be a drop-in replacement for the Python 3.9 standard library module `zoneinfo`. If you do not support anything earlier than Python 3.9, **you do not need this library**; if you are supporting Python 3.6+, you may want to use this idiom to "fall back" to ``backports.zoneinfo``:
```python
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
```
To get access to time zones with this module, construct a `ZoneInfo` object and attach it to your datetime:
```python
>>> from backports.zoneinfo import ZoneInfo
>>> from datetime import datetime, timedelta, timezone
>>> dt = datetime(1992, 3, 1, tzinfo=ZoneInfo("Europe/Minsk"))
>>> print(dt)
1992-03-01 00:00:00+02:00
>>> print(dt.utcoffset())
2:00:00
>>> print(dt.tzname())
EET
```
Arithmetic works as expected without the need for a "normalization" step:
```python
>>> dt += timedelta(days=90)
>>> print(dt)
1992-05-30 00:00:00+03:00
>>> dt.utcoffset()
datetime.timedelta(seconds=10800)
>>> dt.tzname()
'EEST'
```
Ambiguous and imaginary times are handled using the `fold` attribute added in [PEP 495](https://www.python.org/dev/peps/pep-0495/):
```python
>>> dt = datetime(2020, 11, 1, 1, tzinfo=ZoneInfo("America/Chicago"))
>>> print(dt)
2020-11-01 01:00:00-05:00
>>> print(dt.replace(fold=1))
2020-11-01 01:00:00-06:00
>>> UTC = timezone.utc
>>> print(dt.astimezone(UTC))
2020-11-01 06:00:00+00:00
>>> print(dt.replace(fold=1).astimezone(UTC))
2020-11-01 07:00:00+00:00
```
# Contributing
Currently we are not accepting contributions to this repository because we have not put the CLA in place and we would like to avoid complicating the process of adoption into the standard library. Contributions to [CPython](https://github.com/python/cpython) will eventually be backported to this repository — see [the Python developer's guide](https://devguide.python.org/) for more information on how to contribute to CPython.


@@ -0,0 +1,17 @@
backports/__init__.py,sha256=KNscjLyptBUeU07KtwwRFdTJqAVURMf4GjM9CqXnxMI,227
backports/zoneinfo/__init__.py,sha256=atCU_fMgkQIE-DCjSJOist9GAqaWQAGhrDA5bCXPQxU,1235
backports/zoneinfo/__init__.pyi,sha256=1_T7dB1-Fh1s7f2zNa1QrP9pO_aBHemeaIiJBPQz3Fs,1234
backports/zoneinfo/_common.py,sha256=gKuY_V-YKbaabT5VRw1MWYJxfuiDDLCg7UolYYI42rw,5469
backports/zoneinfo/_czoneinfo.cpython-38-darwin.so,sha256=Usw6eh4R-g2b5HNn3d1STdi11U9XSvB3zeHwRDvDyds,37488
backports/zoneinfo/_tzpath.py,sha256=yFkwr6qKvcC6wtuhrJRh487x57L39up8bpl9w-gOGX8,5860
backports/zoneinfo/_version.py,sha256=HfjVOrpTnmZ-xVFCYSVmX50EXaBQeJteUHG-PD6iQs8,22
backports/zoneinfo/_zoneinfo.py,sha256=B1W3KlPeUHjk1qpBybGHorPMvfcLwGF7dtYj-IFAxY4,24353
backports/zoneinfo/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
backports_zoneinfo-0.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
backports_zoneinfo-0.2.1.dist-info/LICENSE,sha256=M-jlAC01EtP8wigrmV5rrZ0zR4G5xawxhD9ASQDh87Q,592
backports_zoneinfo-0.2.1.dist-info/LICENSE_APACHE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
backports_zoneinfo-0.2.1.dist-info/METADATA,sha256=kTzmkvHH1uwhH6KqhiCqH2q1Ffb_y2-PkG92E_-Yu08,4759
backports_zoneinfo-0.2.1.dist-info/RECORD,,
backports_zoneinfo-0.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
backports_zoneinfo-0.2.1.dist-info/WHEEL,sha256=Kf098HLFXRXX1Jgxpjr9laFfTW8uHOHMzRpMFYNCUHo,108
backports_zoneinfo-0.2.1.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10


@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.3.2)
Root-Is-Purelib: false
Tag: cp38-cp38-macosx_12_0_x86_64


@@ -0,0 +1 @@
backports


@@ -0,0 +1 @@
pip


@@ -0,0 +1,30 @@
Copyright © 2004-2020, CherryPy Team (team@cherrypy.dev)
All rights reserved.
* * *
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of CherryPy nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


@@ -0,0 +1,155 @@
Metadata-Version: 2.1
Name: cheroot
Version: 10.0.1
Summary: Highly-optimized, pure-python HTTP server
Home-page: https://cheroot.cherrypy.dev
Author: CherryPy Team
Author-email: team@cherrypy.dev
Project-URL: Chat: Matrix, https://matrix.to/#/#cherrypy-space:matrix.org
Project-URL: CI: GitHub, https://github.com/cherrypy/cheroot/actions
Project-URL: Docs: RTD, https://cheroot.cherrypy.dev
Project-URL: GitHub: issues, https://github.com/cherrypy/cheroot/issues
Project-URL: GitHub: repo, https://github.com/cherrypy/cheroot
Project-URL: Tidelift: funding, https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=referral&utm_campaign=pypi
Keywords: http,server,ssl,wsgi
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Framework :: CherryPy
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: Jython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
Classifier: Typing :: Typed
Requires-Python: >=3.6
Description-Content-Type: text/x-rst
License-File: LICENSE.md
Requires-Dist: more-itertools >=2.6
Requires-Dist: jaraco.functools
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: docs
Requires-Dist: sphinx >=1.8.2 ; extra == 'docs'
Requires-Dist: jaraco.packaging >=3.2 ; extra == 'docs'
Requires-Dist: sphinx-tabs >=1.1.0 ; extra == 'docs'
Requires-Dist: furo ; extra == 'docs'
Requires-Dist: python-dateutil ; extra == 'docs'
Requires-Dist: sphinxcontrib-apidoc >=0.3.0 ; extra == 'docs'
.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
:target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
:alt: SWUbanner
.. image:: https://img.shields.io/pypi/v/cheroot.svg
:target: https://pypi.org/project/cheroot
.. image:: https://tidelift.com/badges/package/pypi/cheroot
:target: https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=readme
:alt: Cheroot is available as part of the Tidelift Subscription
.. image:: https://github.com/cherrypy/cheroot/actions/workflows/ci-cd.yml/badge.svg
:target: https://github.com/cherrypy/cheroot/actions/workflows/ci-cd.yml
:alt: GitHub Actions CI/CD Workflow
.. image:: https://img.shields.io/badge/license-BSD-blue.svg?maxAge=3600
:target: https://pypi.org/project/cheroot
.. image:: https://img.shields.io/pypi/pyversions/cheroot.svg
:target: https://pypi.org/project/cheroot
.. image:: https://codecov.io/gh/cherrypy/cheroot/branch/master/graph/badge.svg
:target: https://codecov.io/gh/cherrypy/cheroot
:alt: codecov
.. image:: https://readthedocs.org/projects/cheroot/badge/?version=latest
:target: https://cheroot.cherrypy.dev/en/latest/?badge=latest
.. image:: https://img.shields.io/badge/StackOverflow-Cheroot-blue.svg
:target: https://stackoverflow.com/questions/tagged/cheroot+or+cherrypy
.. image:: https://img.shields.io/matrix/octomachinery:matrix.org?label=Discuss%20on%20Matrix%20at%20%23cherrypy%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
:target: https://matrix.to/#/%23cherrypy:matrix.org
:alt: Matrix Room — #cherrypy:matrix.org
.. image:: https://img.shields.io/matrix/pyba:matrix.org?label=Discuss%20on%20Matrix%20at%20%23cherrypy-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
:target: https://matrix.to/#/%23cherrypy-space:matrix.org
:alt: Matrix Space — #cherrypy-space:matrix.org
.. image:: https://img.shields.io/gitter/room/cherrypy/cherrypy.svg
:target: https://gitter.im/cherrypy/cherrypy
.. image:: https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square
:target: http://makeapullrequest.com/
.. image:: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot.svg?type=shield
:target: https://app.fossa.io/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot?ref=badge_shield
:alt: FOSSA Status
Cheroot is the high-performance, pure-Python HTTP server used by CherryPy.
Status
======
The test suite currently relies on pytest. It's being run via GitHub
Actions CI/CD workflows.
For Enterprise
==============
.. list-table::
:widths: 10 100
* - |tideliftlogo|
- Professional support for Cheroot is available as part of the
`Tidelift Subscription`_. The CherryPy maintainers and the
maintainers of thousands of other packages are working with
Tidelift to deliver one enterprise subscription that covers all
of the open source you use.
Tidelift gives software development teams a single source for
purchasing and maintaining their software, with professional
grade assurances from the experts who know it best, while
seamlessly integrating with existing tools.
`Learn more <Tidelift Subscription_>`_.
.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=referral&utm_campaign=readme
.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png
:target: https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=readme
:width: 75
:alt: Tidelift
Contribute Cheroot
==================
**Want to add something to upstream?** Feel free to submit a PR or file an issue
if unsure. Please follow `CherryPy's common contribution guidelines
<https://github.com/cherrypy/cherrypy/blob/master/.github/CONTRIBUTING.rst>`_.
Note that PR is more likely to be accepted if it includes tests and detailed
description helping maintainers to understand it better 🎉
Oh, and be pythonic, please 🐍
**Don't know how?** Check out `How to Contribute to Open Source
<https://opensource.guide/how-to-contribute/>`_ article by GitHub 🚀
License
=======
.. image:: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot.svg?type=large
:target: https://app.fossa.io/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot?ref=badge_large
:alt: FOSSA Status


@@ -0,0 +1,54 @@
../../bin/cheroot,sha256=oF3agD9sq5k0QgYj4dDqsbfICXleYbRsfFtei42F5KI,233
cheroot-10.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cheroot-10.0.1.dist-info/LICENSE.md,sha256=4g_utJGn6YCE8VcZNJ6YV6rUHEUDxeR5-IFbBj2_dWQ,1511
cheroot-10.0.1.dist-info/METADATA,sha256=3TG5grb9JAh8xEIMi1fbbh_DjXGmjZZKTAlmQMd9H7c,7008
cheroot-10.0.1.dist-info/RECORD,,
cheroot-10.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cheroot-10.0.1.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
cheroot-10.0.1.dist-info/entry_points.txt,sha256=K_I24wVCr42Pygr3RAy4-dSVdNiQPCXDHcBvJcBvk9U,45
cheroot-10.0.1.dist-info/top_level.txt,sha256=P8VZfrem5gTRS34X6Thu7jyEoj_zSaPNI_3P0fShbAI,8
cheroot/__init__.py,sha256=ZZgcItIBeyj6d3xlBw_o4uFxtBwL86Y9MwzFSfmd1_c,284
cheroot/__init__.pyi,sha256=Y25n44pyE3vp92MiABKrcK3IWRyQ1JG1rZ4Ufqy2nC0,17
cheroot/__main__.py,sha256=jLOqwD221LYbg1ksPWhjOKYu4M4yV7y3mteM6CJi-Oc,109
cheroot/_compat.py,sha256=t1y0uqANmAv4SfkFrqHUk6NCgWOXkt2W8_1r0u7EQTA,2083
cheroot/_compat.pyi,sha256=jrBkeGVNS6B6TXzP0NyyBQyGvf-ucCFbLi5RitP1eJs,605
cheroot/cli.py,sha256=5qxJ0tK7nUPwTI2Q97VyjlPNdVwznoj-gTT7DJ_y_S8,6987
cheroot/cli.pyi,sha256=LIKNaRFyZVRRl2n3Jm_VTJjYaNsDSJQ7IcEr5qcjZR0,828
cheroot/connections.py,sha256=0kNsWFKZF3E9-glEiHu8XQMJ-PBeHX_V4kUjpeIRV40,15085
cheroot/connections.pyi,sha256=r-I9Mkn-PHcjqQvekmMmQNbh8a5-sJyfLp6d5sP7T48,714
cheroot/errors.py,sha256=vOHGdmaJwk9eUu-XqTyYjYxCdC6DgdTJHSm0bE3soIc,2753
cheroot/errors.pyi,sha256=WPJaht4vEcELxTStNPMYuEmztK52aLbOcYE8MwGTmSU,425
cheroot/makefile.py,sha256=XVLIM1ngS_XbLzZryUPgs5SSnV8CHyyRGzU3J4QpVPE,2306
cheroot/makefile.pyi,sha256=39bXRM-9N1jOAEgfudimuqoOkLFAShg-naldaQDKnGM,542
cheroot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cheroot/server.py,sha256=lkLN4zgMA4KzKRrh-3kegOyV5iVLAxg7ObjgVNv_W0M,77930
cheroot/server.pyi,sha256=_r4KsLNMGvh6UZHN-hYrz7VAhut4CfrTHmNoyWauYoE,5064
cheroot/ssl/__init__.py,sha256=U2aplZGnvXibKBSGN3Y9cje_zFS-mA_bJz5jO9h7JaY,1416
cheroot/ssl/__init__.pyi,sha256=liQgp4uYOywZfAGCqcaxVYEM6OBFCWOS-bCGtDI2q2w,581
cheroot/ssl/builtin.py,sha256=Q-EV29cvKecW1pL0N9THI_lO9aTEU1MOQqYxAXt5tVI,16445
cheroot/ssl/builtin.pyi,sha256=nriJS-4eEkyw04oOZW8dslIXLfr_Kzkb7izy6TavEIw,561
cheroot/ssl/pyopenssl.py,sha256=l4uHi9ysaee1WvuPo82wdXeds_VgatEZmZBwBPob7X8,13235
cheroot/ssl/pyopenssl.pyi,sha256=YlpXT8Rpj_mvBtOBiFuBep758p618JSegch9x2OCP38,1086
cheroot/test/__init__.py,sha256=_hgyWgeLHsLLScMhGwMOsevD3Wg3pssK535YfNJFoeU,26
cheroot/test/_pytest_plugin.py,sha256=u_13sGqGbIDP7z9ZC1_SRr_DWBoDQDR3O37IPqOxwyI,1768
cheroot/test/conftest.py,sha256=FWfmMk5SF_mgLVMM1WgkiJ3_LXH5vYJld7ODXBTd2I4,2715
cheroot/test/helper.py,sha256=A5HlkxxyswxYRT_0IjHja_TWvd2VxDY44Elzni0uke0,4755
cheroot/test/test__compat.py,sha256=uC4QuuYSDDMyErGij-vHfGV1fKTA_ylnZEmWXMLXUvc,1661
cheroot/test/test_cli.py,sha256=7bmQNozJ8cakCFw-ogUzAZq7yGUaSJYwzgXQx8tQfLg,2664
cheroot/test/test_conn.py,sha256=Zl4H2i8y7QfUqhlelmcD99qf3g8FnAfAfVeo5ggSxXs,53393
cheroot/test/test_core.py,sha256=xF0hR9-Zh-PfOLzP1ntxFQ11qQ4Fo93s8rOEF3N4D7w,14699
cheroot/test/test_dispatch.py,sha256=NJtth_qaHlKa7V83j-ekrf_8G2-7HI8EQiZdKTUcCx4,1210
cheroot/test/test_errors.py,sha256=i4-o3CTwluDN_70N9c_9QI4OY1imIMHSqOHpmyqML_8,924
cheroot/test/test_makefile.py,sha256=j3ZVMMDJI_JPTun-agdaJSKa8z3kH5HhXv_95Lg9T2I,1191
cheroot/test/test_server.py,sha256=vBZ35OJOGEgBcPmjpomwMIuCBoIlGSKc0OHS1EnhQqY,16697
cheroot/test/test_ssl.py,sha256=CkbF164i-yV5Infp8zE7sXSRZQOxlIILulu3eWRhM90,22268
cheroot/test/test_wsgi.py,sha256=vdyH1JN2K8qbfx4olnda-VHOxErtemaVVEUjVlNC5mo,2823
cheroot/test/webtest.py,sha256=3tjaCGiotSoEX9s1ULVKQojqLm82z3CDKZSrZQLzSTI,18398
cheroot/testing.py,sha256=BOfpxQ5NA1GMztF6CuugBLty3TDXA0Njr1Uh9qAuCx8,4857
cheroot/testing.pyi,sha256=HlbbwoROyn5DSAuCQxcUw58v2cCKu1MG7ySzwbH0ZXk,448
cheroot/workers/__init__.py,sha256=-ziyw7iPWHs2dN4R_Q7AQZ7r0dQPTes1nVCzAg2LOc8,25
cheroot/workers/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cheroot/workers/threadpool.py,sha256=3fcSuktidgcoxlTXXrmXEjahitl6FmRXKuMQBGWOX8g,15457
cheroot/workers/threadpool.pyi,sha256=9xF6s4LAwnURJbdG1K8f97zGk2YVb2kEULJMaxsUXiI,925
cheroot/wsgi.py,sha256=_eUOnbMUvc6wdzaSjEyQrUuu6Q6ye5yYNC7z9rovnCw,14239
cheroot/wsgi.pyi,sha256=4DVIwCgcYtIxIA7wd-ZHcBNKOSiaXwaAAwrsUcg0h0M,1516


@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.3.2)
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1,2 @@
[console_scripts]
cheroot = cheroot.cli:main


@@ -0,0 +1 @@
cheroot

libs/cheroot/__init__.py Normal file

@@ -0,0 +1,12 @@
"""High-performance, pure-Python HTTP server used by CherryPy."""
try:
from importlib import metadata
except ImportError:
import importlib_metadata as metadata # noqa: WPS440
try:
__version__ = metadata.version('cheroot')
except Exception:
__version__ = 'unknown'
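`__version__` is resolved at import time from the installed distribution metadata, falling back to `'unknown'` when that lookup fails:

```python
import cheroot

# '10.0.1' when the cheroot dist-info metadata is importable, otherwise 'unknown'
print(cheroot.__version__)
```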


@@ -0,0 +1 @@
__version__: str

libs/cheroot/__main__.py Normal file

@@ -0,0 +1,6 @@
"""Stub for accessing the Cheroot CLI tool."""
from .cli import main
if __name__ == '__main__':
main()

libs/cheroot/_compat.py Normal file

@@ -0,0 +1,84 @@
# pylint: disable=unused-import
"""Compatibility code for using Cheroot with various versions of Python."""
import os
import platform
try:
import ssl
IS_ABOVE_OPENSSL10 = ssl.OPENSSL_VERSION_INFO >= (1, 1)
del ssl
except ImportError:
IS_ABOVE_OPENSSL10 = None
IS_CI = bool(os.getenv('CI'))
IS_GITHUB_ACTIONS_WORKFLOW = bool(os.getenv('GITHUB_WORKFLOW'))
IS_PYPY = platform.python_implementation() == 'PyPy'
SYS_PLATFORM = platform.system()
IS_WINDOWS = SYS_PLATFORM == 'Windows'
IS_LINUX = SYS_PLATFORM == 'Linux'
IS_MACOS = SYS_PLATFORM == 'Darwin'
IS_SOLARIS = SYS_PLATFORM == 'SunOS'
PLATFORM_ARCH = platform.machine()
IS_PPC = PLATFORM_ARCH.startswith('ppc')
def ntob(n, encoding='ISO-8859-1'):
"""Return the native string as bytes in the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n.encode(encoding)
def ntou(n, encoding='ISO-8859-1'):
"""Return the native string as Unicode with the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n
def bton(b, encoding='ISO-8859-1'):
"""Return the byte string as native string in the given encoding."""
return b.decode(encoding)
def assert_native(n):
"""Check whether the input is of native :py:class:`str` type.
Raises:
TypeError: in case of failed check
"""
if not isinstance(n, str):
raise TypeError('n must be a native str (got %s)' % type(n).__name__)
def extract_bytes(mv):
r"""Retrieve bytes out of the given input buffer.
:param mv: input :py:func:`buffer`
:type mv: memoryview or bytes
:return: unwrapped bytes
:rtype: bytes
:raises ValueError: if the input is not one of \
:py:class:`memoryview`/:py:func:`buffer` \
or :py:class:`bytes`
"""
if isinstance(mv, memoryview):
return mv.tobytes()
if isinstance(mv, bytes):
return mv
raise ValueError(
'extract_bytes() only accepts bytes and memoryview/buffer',
)
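`ntob`/`bton` convert between the native `str` type and bytes (ISO-8859-1 by default) and `extract_bytes` unwraps a `memoryview` or passes `bytes` through. A short illustration of the expected behaviour:

```python
from cheroot._compat import ntob, bton, extract_bytes

raw = ntob('Content-Length')              # b'Content-Length'
assert bton(raw) == 'Content-Length'      # round-trips via ISO-8859-1
assert extract_bytes(memoryview(b'body')) == b'body'
assert extract_bytes(b'body') == b'body'  # bytes pass through unchanged
```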

libs/cheroot/_compat.pyi Normal file

@@ -0,0 +1,22 @@
from typing import Any, ContextManager, Optional, Type, Union
def suppress(*exceptions: Type[BaseException]) -> ContextManager[None]: ...
IS_ABOVE_OPENSSL10: Optional[bool]
IS_CI: bool
IS_GITHUB_ACTIONS_WORKFLOW: bool
IS_PYPY: bool
SYS_PLATFORM: str
IS_WINDOWS: bool
IS_LINUX: bool
IS_MACOS: bool
IS_SOLARIS: bool
PLATFORM_ARCH: str
IS_PPC: bool
def ntob(n: str, encoding: str = ...) -> bytes: ...
def ntou(n: str, encoding: str = ...) -> str: ...
def bton(b: bytes, encoding: str = ...) -> str: ...
def assert_native(n: str) -> None: ...
def extract_bytes(mv: Union[memoryview, bytes]) -> bytes: ...

libs/cheroot/cli.py Normal file

@@ -0,0 +1,243 @@
"""Command line tool for starting a Cheroot WSGI/HTTP server instance.
Basic usage:
.. code-block:: shell-session
$ # Start a server on 127.0.0.1:8000 with the default settings
$ # for the WSGI app myapp/wsgi.py:application()
$ cheroot myapp.wsgi
$ # Start a server on 0.0.0.0:9000 with 8 threads
$ # for the WSGI app myapp/wsgi.py:main_app()
$ cheroot myapp.wsgi:main_app --bind 0.0.0.0:9000 --threads 8
$ # Start a server for the cheroot.server.Gateway subclass
$ # myapp/gateway.py:HTTPGateway
$ cheroot myapp.gateway:HTTPGateway
$ # Start a server on the UNIX socket /var/spool/myapp.sock
$ cheroot myapp.wsgi --bind /var/spool/myapp.sock
$ # Start a server on the abstract UNIX socket CherootServer
$ cheroot myapp.wsgi --bind @CherootServer
.. spelling::
cli
"""
import argparse
import os
import sys
import urllib.parse # noqa: WPS301
from importlib import import_module
from contextlib import suppress
from . import server
from . import wsgi
class BindLocation:
"""A class for storing the bind location for a Cheroot instance."""
class TCPSocket(BindLocation):
"""TCPSocket."""
def __init__(self, address, port):
"""Initialize.
Args:
address (str): Host name or IP address
port (int): TCP port number
"""
self.bind_addr = address, port
class UnixSocket(BindLocation):
"""UnixSocket."""
def __init__(self, path):
"""Initialize."""
self.bind_addr = path
class AbstractSocket(BindLocation):
"""AbstractSocket."""
def __init__(self, abstract_socket):
"""Initialize."""
self.bind_addr = '\x00{sock_path}'.format(sock_path=abstract_socket)
class Application:
"""Application."""
@classmethod
def resolve(cls, full_path):
"""Read WSGI app/Gateway path string and import application module."""
mod_path, _, app_path = full_path.partition(':')
app = getattr(import_module(mod_path), app_path or 'application')
# suppress the `TypeError` exception, just in case `app` is not a class
with suppress(TypeError):
if issubclass(app, server.Gateway):
return GatewayYo(app)
return cls(app)
def __init__(self, wsgi_app):
"""Initialize."""
if not callable(wsgi_app):
raise TypeError(
'Application must be a callable object or '
'cheroot.server.Gateway subclass',
)
self.wsgi_app = wsgi_app
def server_args(self, parsed_args):
"""Return keyword args for Server class."""
args = {
arg: value
for arg, value in vars(parsed_args).items()
if not arg.startswith('_') and value is not None
}
args.update(vars(self))
return args
def server(self, parsed_args):
"""Server."""
return wsgi.Server(**self.server_args(parsed_args))
class GatewayYo:
"""Gateway."""
def __init__(self, gateway):
"""Init."""
self.gateway = gateway
def server(self, parsed_args):
"""Server."""
server_args = vars(self)
server_args['bind_addr'] = parsed_args['bind_addr']
if parsed_args.max is not None:
server_args['maxthreads'] = parsed_args.max
if parsed_args.numthreads is not None:
server_args['minthreads'] = parsed_args.numthreads
return server.HTTPServer(**server_args)
def parse_wsgi_bind_location(bind_addr_string):
"""Convert bind address string to a BindLocation."""
# if the string begins with an @ symbol, use an abstract socket,
# this is the first condition to verify, otherwise the urlparse
# validation would detect //@<value> as a valid url with a hostname
# with value: "<value>" and port: None
if bind_addr_string.startswith('@'):
return AbstractSocket(bind_addr_string[1:])
# try and match for an IP/hostname and port
match = urllib.parse.urlparse(
'//{addr}'.format(addr=bind_addr_string),
)
try:
addr = match.hostname
port = match.port
if addr is not None or port is not None:
return TCPSocket(addr, port)
except ValueError:
pass
# else, assume a UNIX socket path
return UnixSocket(path=bind_addr_string)
def parse_wsgi_bind_addr(bind_addr_string):
"""Convert bind address string to bind address parameter."""
return parse_wsgi_bind_location(bind_addr_string).bind_addr
_arg_spec = {
'_wsgi_app': {
'metavar': 'APP_MODULE',
'type': Application.resolve,
'help': 'WSGI application callable or cheroot.server.Gateway subclass',
},
'--bind': {
'metavar': 'ADDRESS',
'dest': 'bind_addr',
'type': parse_wsgi_bind_addr,
'default': '[::1]:8000',
'help': 'Network interface to listen on (default: [::1]:8000)',
},
'--chdir': {
'metavar': 'PATH',
'type': os.chdir,
'help': 'Set the working directory',
},
'--server-name': {
'dest': 'server_name',
'type': str,
'help': 'Web server name to be advertised via Server HTTP header',
},
'--threads': {
'metavar': 'INT',
'dest': 'numthreads',
'type': int,
'help': 'Minimum number of worker threads',
},
'--max-threads': {
'metavar': 'INT',
'dest': 'max',
'type': int,
'help': 'Maximum number of worker threads',
},
'--timeout': {
'metavar': 'INT',
'dest': 'timeout',
'type': int,
'help': 'Timeout in seconds for accepted connections',
},
'--shutdown-timeout': {
'metavar': 'INT',
'dest': 'shutdown_timeout',
'type': int,
'help': 'Time in seconds to wait for worker threads to cleanly exit',
},
'--request-queue-size': {
'metavar': 'INT',
'dest': 'request_queue_size',
'type': int,
'help': 'Maximum number of queued connections',
},
'--accepted-queue-size': {
'metavar': 'INT',
'dest': 'accepted_queue_size',
'type': int,
'help': 'Maximum number of active requests in queue',
},
'--accepted-queue-timeout': {
'metavar': 'INT',
'dest': 'accepted_queue_timeout',
'type': int,
'help': 'Timeout in seconds for putting requests into queue',
},
}
def main():
"""Create a new Cheroot instance with arguments from the command line."""
parser = argparse.ArgumentParser(
description='Start an instance of the Cheroot WSGI/HTTP server.',
)
for arg, spec in _arg_spec.items():
parser.add_argument(arg, **spec)
raw_args = parser.parse_args()
# ensure cwd in sys.path
'' in sys.path or sys.path.insert(0, '')
# create a server based on the arguments provided
raw_args._wsgi_app.server(raw_args).safe_start()
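# A hypothetical invocation sketch (the app path and port below are made up;
# the command name assumes this module is exposed as a console-script entry
# point):
#   cheroot myapp.wsgi:application --bind 0.0.0.0:8080 --threads 10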

32
libs/cheroot/cli.pyi Normal file
View file

@@ -0,0 +1,32 @@
from typing import Any
class BindLocation: ...
class TCPSocket(BindLocation):
bind_addr: Any
def __init__(self, address, port) -> None: ...
class UnixSocket(BindLocation):
bind_addr: Any
def __init__(self, path) -> None: ...
class AbstractSocket(BindLocation):
bind_addr: Any
def __init__(self, abstract_socket) -> None: ...
class Application:
@classmethod
def resolve(cls, full_path): ...
wsgi_app: Any
def __init__(self, wsgi_app) -> None: ...
def server_args(self, parsed_args): ...
def server(self, parsed_args): ...
class GatewayYo:
gateway: Any
def __init__(self, gateway) -> None: ...
def server(self, parsed_args): ...
def parse_wsgi_bind_location(bind_addr_string: str): ...
def parse_wsgi_bind_addr(bind_addr_string: str): ...
def main() -> None: ...

398
libs/cheroot/connections.py Normal file
View file

@@ -0,0 +1,398 @@
"""Utilities to manage open connections."""
import io
import os
import socket
import threading
import time
import selectors
from contextlib import suppress
from . import errors
from ._compat import IS_WINDOWS
from .makefile import MakeFile
try:
import fcntl
except ImportError:
try:
from ctypes import windll, WinError
import ctypes.wintypes
_SetHandleInformation = windll.kernel32.SetHandleInformation
_SetHandleInformation.argtypes = [
ctypes.wintypes.HANDLE,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
]
_SetHandleInformation.restype = ctypes.wintypes.BOOL
except ImportError:
def prevent_socket_inheritance(sock):
"""Stub inheritance prevention.
Dummy function, since neither fcntl nor ctypes are available.
"""
pass
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (Windows)."""
if not _SetHandleInformation(sock.fileno(), 1, 0):
raise WinError()
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (POSIX)."""
fd = sock.fileno()
old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
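# Note: exactly one prevent_socket_inheritance() variant above ends up defined,
# depending on what is importable: the fcntl-based one on POSIX, the
# SetHandleInformation-based one on Windows, or the no-op stub when neither
# fcntl nor ctypes is available.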
class _ThreadsafeSelector:
"""Thread-safe wrapper around a DefaultSelector.
There are 2 thread contexts in which it may be accessed:
* the selector thread
* one of the worker threads in workers/threadpool.py
The expected read/write patterns are:
* :py:func:`~iter`: selector thread
* :py:meth:`register`: selector thread and threadpool,
via :py:meth:`~cheroot.workers.threadpool.ThreadPool.put`
* :py:meth:`unregister`: selector thread only
Notably, this means :py:class:`_ThreadsafeSelector` never needs to worry
that connections will be removed behind its back.
The lock is held when iterating or modifying the selector but is not
required when :py:meth:`select()ing <selectors.BaseSelector.select>` on it.
"""
def __init__(self):
self._selector = selectors.DefaultSelector()
self._lock = threading.Lock()
def __len__(self):
with self._lock:
return len(self._selector.get_map() or {})
@property
def connections(self):
"""Retrieve connections registered with the selector."""
with self._lock:
mapping = self._selector.get_map() or {}
for _, (_, sock_fd, _, conn) in mapping.items():
yield (sock_fd, conn)
def register(self, fileobj, events, data=None):
"""Register ``fileobj`` with the selector."""
with self._lock:
return self._selector.register(fileobj, events, data)
def unregister(self, fileobj):
"""Unregister ``fileobj`` from the selector."""
with self._lock:
return self._selector.unregister(fileobj)
def select(self, timeout=None):
"""Return socket fd and data pairs from selectors.select call.
Returns entries ready to read in the form:
(socket_file_descriptor, connection)
"""
return (
(key.fd, key.data)
for key, _ in self._selector.select(timeout=timeout)
)
def close(self):
"""Close the selector."""
with self._lock:
self._selector.close()
class ConnectionManager:
"""Class which manages HTTPConnection objects.
This is for connections which are being kept-alive for follow-up requests.
"""
def __init__(self, server):
"""Initialize ConnectionManager object.
Args:
server (cheroot.server.HTTPServer): web server object
that uses this ConnectionManager instance.
"""
self._serving = False
self._stop_requested = False
self.server = server
self._selector = _ThreadsafeSelector()
self._selector.register(
server.socket.fileno(),
selectors.EVENT_READ, data=server,
)
def put(self, conn):
"""Put idle connection into the ConnectionManager to be managed.
:param conn: HTTP connection to be managed
:type conn: cheroot.server.HTTPConnection
"""
conn.last_used = time.time()
# if this conn doesn't have any more data waiting to be read,
# register it with the selector.
if conn.rfile.has_data():
self.server.process_conn(conn)
else:
self._selector.register(
conn.socket.fileno(), selectors.EVENT_READ, data=conn,
)
def _expire(self, threshold):
r"""Expire least recently used connections.
:param threshold: Connections that have not been used within this \
duration (in seconds), are considered expired and \
are closed and removed.
:type threshold: float
This should be called periodically.
"""
# find any connections still registered with the selector
# that have not been active recently enough.
timed_out_connections = [
(sock_fd, conn)
for (sock_fd, conn) in self._selector.connections
if conn != self.server and conn.last_used < threshold
]
for sock_fd, conn in timed_out_connections:
self._selector.unregister(sock_fd)
conn.close()
def stop(self):
"""Stop the selector loop in run() synchronously.
May take up to half a second.
"""
self._stop_requested = True
while self._serving:
time.sleep(0.01)
def run(self, expiration_interval):
"""Run the connections selector indefinitely.
Args:
expiration_interval (float): Interval, in seconds, at which
connections will be checked for expiration.
Connections that are ready to process are submitted via
self.server.process_conn()
Connections submitted for processing must be `put()`
back if they should be examined again for another request.
Can be shut down by calling `stop()`.
"""
self._serving = True
try:
self._run(expiration_interval)
finally:
self._serving = False
def _run(self, expiration_interval):
r"""Run connection handler loop until stop was requested.
:param expiration_interval: Interval, in seconds, at which \
connections will be checked for \
expiration.
:type expiration_interval: float
Use ``expiration_interval`` as ``select()`` timeout
to assure expired connections are closed in time.
On Windows cap the timeout to 0.05 seconds
as ``select()`` does not return when a socket is ready.
"""
last_expiration_check = time.time()
if IS_WINDOWS:
# 0.05 seconds are used as an empirically obtained balance between
# max connection delay and idle system load. Benchmarks show a
# mean processing time per connection of ~0.03 seconds on Linux
# and with 0.01 seconds timeout on Windows:
# https://github.com/cherrypy/cheroot/pull/352
# While this highly depends on system and hardware, 0.05 seconds
# max delay should hence usually not significantly increase the
# mean time/delay per connection, but significantly reduce idle
# system load by reducing socket loops to 1/5 with 0.01 seconds.
select_timeout = min(expiration_interval, 0.05)
else:
select_timeout = expiration_interval
while not self._stop_requested:
try:
active_list = self._selector.select(timeout=select_timeout)
except OSError:
self._remove_invalid_sockets()
continue
for (sock_fd, conn) in active_list:
if conn is self.server:
# New connection
new_conn = self._from_server_socket(self.server.socket)
if new_conn is not None:
self.server.process_conn(new_conn)
else:
# unregister connection from the selector until the server
# has read from it and returned it via put()
self._selector.unregister(sock_fd)
self.server.process_conn(conn)
now = time.time()
if (now - last_expiration_check) > expiration_interval:
self._expire(threshold=now - self.server.timeout)
last_expiration_check = now
def _remove_invalid_sockets(self):
"""Clean up the resources of any broken connections.
This method attempts to detect any connections in an invalid state,
unregisters them from the selector and closes the file descriptors of
the corresponding network sockets where possible.
"""
invalid_conns = []
for sock_fd, conn in self._selector.connections:
if conn is self.server:
continue
try:
os.fstat(sock_fd)
except OSError:
invalid_conns.append((sock_fd, conn))
for sock_fd, conn in invalid_conns:
self._selector.unregister(sock_fd)
# One of the reasons why a socket could cause an error
# is that the socket is already closed; ignore the
# socket error if we try to close it at this point.
with suppress(OSError):
conn.close()
def _from_server_socket(self, server_socket): # noqa: C901 # FIXME
try:
s, addr = server_socket.accept()
if self.server.stats['Enabled']:
self.server.stats['Accepts'] += 1
prevent_socket_inheritance(s)
if hasattr(s, 'settimeout'):
s.settimeout(self.server.timeout)
mf = MakeFile
ssl_env = {}
# if ssl cert and key are set, we try to be a secure HTTP server
if self.server.ssl_adapter is not None:
try:
s, ssl_env = self.server.ssl_adapter.wrap(s)
except errors.FatalSSLAlert as tls_connection_drop_error:
self.server.error_log(
f'Client {addr !s} lost — peer dropped the TLS '
'connection suddenly, during handshake: '
f'{tls_connection_drop_error !s}',
)
return
except errors.NoSSLError as http_over_https_err:
self.server.error_log(
f'Client {addr !s} attempted to speak plain HTTP into '
'a TCP connection configured for TLS-only traffic — '
'trying to send back a plain HTTP error response: '
f'{http_over_https_err !s}',
)
msg = (
'The client sent a plain HTTP request, but '
'this server only speaks HTTPS on this port.'
)
buf = [
'%s 400 Bad Request\r\n' % self.server.protocol,
'Content-Length: %s\r\n' % len(msg),
'Content-Type: text/plain\r\n\r\n',
msg,
]
wfile = mf(s, 'wb', io.DEFAULT_BUFFER_SIZE)
try:
wfile.write(''.join(buf).encode('ISO-8859-1'))
except OSError as ex:
if ex.args[0] not in errors.socket_errors_to_ignore:
raise
return
mf = self.server.ssl_adapter.makefile
# Re-apply our timeout since we may have a new socket object
if hasattr(s, 'settimeout'):
s.settimeout(self.server.timeout)
conn = self.server.ConnectionClass(self.server, s, mf)
if not isinstance(self.server.bind_addr, (str, bytes)):
# optional values
# Until we do DNS lookups, omit REMOTE_HOST
if addr is None: # sometimes this can happen
# figure out if AF_INET or AF_INET6.
if len(s.getsockname()) == 2:
# AF_INET
addr = ('0.0.0.0', 0)
else:
# AF_INET6
addr = ('::', 0)
conn.remote_addr = addr[0]
conn.remote_port = addr[1]
conn.ssl_env = ssl_env
return conn
except socket.timeout:
# The only reason for the timeout in start() is so we can
# notice keyboard interrupts on Win32, which don't interrupt
# accept() by default
return
except OSError as ex:
if self.server.stats['Enabled']:
self.server.stats['Socket Errors'] += 1
if ex.args[0] in errors.socket_error_eintr:
# I *think* this is right. EINTR should occur when a signal
# is received during the accept() call; all docs say retry
# the call, and I *think* I'm reading it right that Python
# will then go ahead and poll for and handle the signal
# elsewhere. See
# https://github.com/cherrypy/cherrypy/issues/707.
return
if ex.args[0] in errors.socket_errors_nonblocking:
# Just try again. See
# https://github.com/cherrypy/cherrypy/issues/479.
return
if ex.args[0] in errors.socket_errors_to_ignore:
# Our socket was closed.
# See https://github.com/cherrypy/cherrypy/issues/686.
return
raise
def close(self):
"""Close all monitored connections."""
for (_, conn) in self._selector.connections:
if conn is not self.server: # server closes its own socket
conn.close()
self._selector.close()
@property
def _num_connections(self):
"""Return the current number of connections.
Includes all connections registered with the selector,
minus one for the server socket, which is always registered
with the selector.
"""
return len(self._selector) - 1
@property
def can_add_keepalive_connection(self):
"""Flag whether it is allowed to add a new keep-alive connection."""
ka_limit = self.server.keep_alive_conn_limit
return ka_limit is None or self._num_connections < ka_limit

View file

@@ -0,0 +1,23 @@
from typing import Any
def prevent_socket_inheritance(sock) -> None: ...
class _ThreadsafeSelector:
def __init__(self) -> None: ...
def __len__(self): ...
@property
def connections(self) -> None: ...
def register(self, fileobj, events, data: Any | None = ...): ...
def unregister(self, fileobj): ...
def select(self, timeout: Any | None = ...): ...
def close(self) -> None: ...
class ConnectionManager:
server: Any
def __init__(self, server) -> None: ...
def put(self, conn) -> None: ...
def stop(self) -> None: ...
def run(self, expiration_interval) -> None: ...
def close(self) -> None: ...
@property
def can_add_keepalive_connection(self): ...

80
libs/cheroot/errors.py Normal file
View file

@@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-
"""Collection of exceptions raised and/or processed by Cheroot."""
import errno
import sys
class MaxSizeExceeded(Exception):
"""Exception raised when a client sends more data then allowed under limit.
Depends on ``request.body.maxbytes`` config option if used within CherryPy.
"""
class NoSSLError(Exception):
"""Exception raised when a client speaks HTTP to an HTTPS socket."""
class FatalSSLAlert(Exception):
"""Exception raised when the SSL implementation signals a fatal alert."""
def plat_specific_errors(*errnames):
"""Return error numbers for all errors in ``errnames`` on this platform.
The :py:mod:`errno` module contains different global constants
depending on the specific platform (OS). This function will return
the list of numeric values for a given list of potential names.
"""
missing_attr = {None}
unique_nums = {getattr(errno, k, None) for k in errnames}
return list(unique_nums - missing_attr)
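# Illustrative sketch (errno values are platform-dependent): on a typical
# Linux system only EINTR exists, so the call on the next line would yield
# something like [4]; on Windows both EINTR and WSAEINTR may contribute.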
socket_error_eintr = plat_specific_errors('EINTR', 'WSAEINTR')
socket_errors_to_ignore = plat_specific_errors(
'EPIPE',
'EBADF', 'WSAEBADF',
'ENOTSOCK', 'WSAENOTSOCK',
'ETIMEDOUT', 'WSAETIMEDOUT',
'ECONNREFUSED', 'WSAECONNREFUSED',
'ECONNRESET', 'WSAECONNRESET',
'ECONNABORTED', 'WSAECONNABORTED',
'ENETRESET', 'WSAENETRESET',
'EHOSTDOWN', 'EHOSTUNREACH',
)
socket_errors_to_ignore.append('timed out')
socket_errors_to_ignore.append('The read operation timed out')
socket_errors_nonblocking = plat_specific_errors(
'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK',
)
if sys.platform == 'darwin':
socket_errors_to_ignore.extend(plat_specific_errors('EPROTOTYPE'))
socket_errors_nonblocking.extend(plat_specific_errors('EPROTOTYPE'))
acceptable_sock_shutdown_error_codes = {
errno.ENOTCONN,
errno.EPIPE, errno.ESHUTDOWN, # corresponds to BrokenPipeError in Python 3
errno.ECONNRESET, # corresponds to ConnectionResetError in Python 3
}
"""Errors that may happen during the connection close sequence.
* ENOTCONN: the client is no longer connected
* EPIPE: write on a pipe while the other end has been closed
* ESHUTDOWN: write on a socket which has been shut down for writing
* ECONNRESET: the connection is reset by the peer, we received a TCP RST packet
Refs:
* https://github.com/cherrypy/cheroot/issues/341#issuecomment-735884889
* https://bugs.python.org/issue30319
* https://bugs.python.org/issue30329
* https://github.com/python/cpython/commit/83a2c28
* https://github.com/python/cpython/blob/c39b52f/Lib/poplib.py#L297-L302
* https://docs.microsoft.com/windows/win32/api/winsock/nf-winsock-shutdown
"""
acceptable_sock_shutdown_exceptions = (BrokenPipeError, ConnectionResetError)

13
libs/cheroot/errors.pyi Normal file
View file

@@ -0,0 +1,13 @@
from typing import List, Set, Tuple, Type
class MaxSizeExceeded(Exception): ...
class NoSSLError(Exception): ...
class FatalSSLAlert(Exception): ...
def plat_specific_errors(*errnames: str) -> List[int]: ...
socket_error_eintr: List[int]
socket_errors_to_ignore: List[int]
socket_errors_nonblocking: List[int]
acceptable_sock_shutdown_error_codes: Set[int]
acceptable_sock_shutdown_exceptions: Tuple[Type[Exception], ...]

76
libs/cheroot/makefile.py Normal file
View file

@@ -0,0 +1,76 @@
"""Socket file object."""
import socket
# prefer slower Python-based io module
import _pyio as io
# Write only 16K at a time to sockets
SOCK_WRITE_BLOCKSIZE = 16384
class BufferedWriter(io.BufferedWriter):
"""Faux file object attached to a socket object."""
def write(self, b):
"""Write bytes to buffer."""
self._checkClosed()
if isinstance(b, str):
raise TypeError("can't write str to binary stream")
with self._write_lock:
self._write_buf.extend(b)
self._flush_unlocked()
return len(b)
def _flush_unlocked(self):
self._checkClosed('flush of closed file')
while self._write_buf:
try:
# ssl sockets only accept 'bytes', not bytearrays
# so perhaps we should conditionally wrap this for perf?
n = self.raw.write(bytes(self._write_buf))
except io.BlockingIOError as e:
n = e.characters_written
del self._write_buf[:n]
class StreamReader(io.BufferedReader):
"""Socket stream reader."""
def __init__(self, sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
"""Initialize socket stream reader."""
super().__init__(socket.SocketIO(sock, mode), bufsize)
self.bytes_read = 0
def read(self, *args, **kwargs):
"""Capture bytes read."""
val = super().read(*args, **kwargs)
self.bytes_read += len(val)
return val
def has_data(self):
"""Return true if there is buffered data to read."""
return len(self._read_buf) > self._read_pos
class StreamWriter(BufferedWriter):
"""Socket stream writer."""
def __init__(self, sock, mode='w', bufsize=io.DEFAULT_BUFFER_SIZE):
"""Initialize socket stream writer."""
super().__init__(socket.SocketIO(sock, mode), bufsize)
self.bytes_written = 0
def write(self, val, *args, **kwargs):
"""Capture bytes written."""
res = super().write(val, *args, **kwargs)
self.bytes_written += len(val)
return res
def MakeFile(sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
"""File object attached to a socket object."""
cls = StreamReader if 'r' in mode else StreamWriter
return cls(sock, mode, bufsize)
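# Illustrative behaviour of the factory above (``sock`` is a hypothetical
# connected socket object):
#   MakeFile(sock, 'rb') -> StreamReader instance (counts bytes_read)
#   MakeFile(sock, 'wb') -> StreamWriter instance (counts bytes_written)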

19
libs/cheroot/makefile.pyi Normal file
View file

@@ -0,0 +1,19 @@
import io
SOCK_WRITE_BLOCKSIZE: int
class BufferedWriter(io.BufferedWriter):
def write(self, b): ...
class StreamReader(io.BufferedReader):
bytes_read: int
def __init__(self, sock, mode: str = ..., bufsize=...) -> None: ...
def read(self, *args, **kwargs): ...
def has_data(self): ...
class StreamWriter(BufferedWriter):
bytes_written: int
def __init__(self, sock, mode: str = ..., bufsize=...) -> None: ...
def write(self, val, *args, **kwargs): ...
def MakeFile(sock, mode: str = ..., bufsize=...): ...

0
libs/cheroot/py.typed Normal file
View file

2231
libs/cheroot/server.py Normal file

File diff suppressed because it is too large Load diff

175
libs/cheroot/server.pyi Normal file
View file

@@ -0,0 +1,175 @@
from typing import Any
class HeaderReader:
def __call__(self, rfile, hdict: Any | None = ...): ...
class DropUnderscoreHeaderReader(HeaderReader): ...
class SizeCheckWrapper:
rfile: Any
maxlen: Any
bytes_read: int
def __init__(self, rfile, maxlen) -> None: ...
def read(self, size: Any | None = ...): ...
def readline(self, size: Any | None = ...): ...
def readlines(self, sizehint: int = ...): ...
def close(self) -> None: ...
def __iter__(self): ...
def __next__(self): ...
next: Any
class KnownLengthRFile:
rfile: Any
remaining: Any
def __init__(self, rfile, content_length) -> None: ...
def read(self, size: Any | None = ...): ...
def readline(self, size: Any | None = ...): ...
def readlines(self, sizehint: int = ...): ...
def close(self) -> None: ...
def __iter__(self): ...
def __next__(self): ...
next: Any
class ChunkedRFile:
rfile: Any
maxlen: Any
bytes_read: int
buffer: Any
bufsize: Any
closed: bool
def __init__(self, rfile, maxlen, bufsize: int = ...) -> None: ...
def read(self, size: Any | None = ...): ...
def readline(self, size: Any | None = ...): ...
def readlines(self, sizehint: int = ...): ...
def read_trailer_lines(self) -> None: ...
def close(self) -> None: ...
class HTTPRequest:
server: Any
conn: Any
inheaders: Any
outheaders: Any
ready: bool
close_connection: bool
chunked_write: bool
header_reader: Any
started_request: bool
scheme: bytes
response_protocol: str
status: str
sent_headers: bool
chunked_read: bool
proxy_mode: Any
strict_mode: Any
def __init__(self, server, conn, proxy_mode: bool = ..., strict_mode: bool = ...) -> None: ...
rfile: Any
def parse_request(self) -> None: ...
uri: Any
method: Any
authority: Any
path: Any
qs: Any
request_protocol: Any
def read_request_line(self): ...
def read_request_headers(self): ...
def respond(self) -> None: ...
def simple_response(self, status, msg: str = ...) -> None: ...
def ensure_headers_sent(self) -> None: ...
def write(self, chunk) -> None: ...
def send_headers(self) -> None: ...
class HTTPConnection:
remote_addr: Any
remote_port: Any
ssl_env: Any
rbufsize: Any
wbufsize: Any
RequestHandlerClass: Any
peercreds_enabled: bool
peercreds_resolve_enabled: bool
last_used: Any
server: Any
socket: Any
rfile: Any
wfile: Any
requests_seen: int
def __init__(self, server, sock, makefile=...) -> None: ...
def communicate(self): ...
linger: bool
def close(self) -> None: ...
def get_peer_creds(self): ...
@property
def peer_pid(self): ...
@property
def peer_uid(self): ...
@property
def peer_gid(self): ...
def resolve_peer_creds(self): ...
@property
def peer_user(self): ...
@property
def peer_group(self): ...
class HTTPServer:
gateway: Any
minthreads: Any
maxthreads: Any
server_name: Any
protocol: str
request_queue_size: int
shutdown_timeout: int
timeout: int
expiration_interval: float
version: Any
software: Any
ready: bool
max_request_header_size: int
max_request_body_size: int
nodelay: bool
ConnectionClass: Any
ssl_adapter: Any
peercreds_enabled: bool
peercreds_resolve_enabled: bool
reuse_port: bool
keep_alive_conn_limit: int
requests: Any
def __init__(self, bind_addr, gateway, minthreads: int = ..., maxthreads: int = ..., server_name: Any | None = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ..., reuse_port: bool = ...) -> None: ...
stats: Any
def clear_stats(self): ...
def runtime(self): ...
@property
def bind_addr(self): ...
@bind_addr.setter
def bind_addr(self, value) -> None: ...
def safe_start(self) -> None: ...
socket: Any
def prepare(self) -> None: ...
def serve(self) -> None: ...
def start(self) -> None: ...
@property
def can_add_keepalive_connection(self): ...
def put_conn(self, conn) -> None: ...
def error_log(self, msg: str = ..., level: int = ..., traceback: bool = ...) -> None: ...
def bind(self, family, type, proto: int = ...): ...
def bind_unix_socket(self, bind_addr): ...
@staticmethod
def _make_socket_reusable(socket_, bind_addr) -> None: ...
@classmethod
def prepare_socket(cls, bind_addr, family, type, proto, nodelay, ssl_adapter, reuse_port: bool = ...): ...
@staticmethod
def bind_socket(socket_, bind_addr): ...
@staticmethod
def resolve_real_bind_addr(socket_): ...
def process_conn(self, conn) -> None: ...
@property
def interrupt(self): ...
@interrupt.setter
def interrupt(self, interrupt) -> None: ...
def stop(self) -> None: ...
class Gateway:
req: Any
def __init__(self, req) -> None: ...
def respond(self) -> None: ...
def get_ssl_adapter_class(name: str = ...): ...

View file

@@ -0,0 +1,46 @@
"""Implementation of the SSL adapter base interface."""
from abc import ABCMeta, abstractmethod
class Adapter(metaclass=ABCMeta):
"""Base class for SSL driver library adapters.
Required methods:
* ``wrap(sock) -> (wrapped socket, ssl environ dict)``
* ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) ->
socket file object``
"""
@abstractmethod
def __init__(
self, certificate, private_key, certificate_chain=None,
ciphers=None,
):
"""Set up certificates, private key ciphers and reset context."""
self.certificate = certificate
self.private_key = private_key
self.certificate_chain = certificate_chain
self.ciphers = ciphers
self.context = None
@abstractmethod
def bind(self, sock):
"""Wrap and return the given socket."""
return sock
@abstractmethod
def wrap(self, sock):
"""Wrap and return the given socket, plus WSGI environ entries."""
raise NotImplementedError # pragma: no cover
@abstractmethod
def get_environ(self):
"""Return WSGI environ entries to be merged into each request."""
raise NotImplementedError # pragma: no cover
@abstractmethod
def makefile(self, sock, mode='r', bufsize=-1):
"""Return socket file object."""
raise NotImplementedError # pragma: no cover
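# A minimal sketch of a concrete adapter (illustrative only; the names below
# are hypothetical, and the real implementations shipped with Cheroot are
# BuiltinSSLAdapter in cheroot/ssl/builtin.py and pyOpenSSLAdapter in
# cheroot/ssl/pyopenssl.py):
#
#     class MyAdapter(Adapter):
#         def bind(self, sock):
#             return sock
#         def wrap(self, sock):
#             wrapped = my_tls_lib.wrap(sock)  # hypothetical TLS library
#             return wrapped, self.get_environ()
#         def get_environ(self):
#             return {'wsgi.url_scheme': 'https', 'HTTPS': 'on'}
#         def makefile(self, sock, mode='r', bufsize=-1):
#             return sock.makefile(mode, bufsize)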

View file

@@ -0,0 +1,19 @@
from abc import abstractmethod, ABCMeta
from typing import Any
class Adapter(metaclass=ABCMeta):
certificate: Any
private_key: Any
certificate_chain: Any
ciphers: Any
context: Any
@abstractmethod
def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...): ...
@abstractmethod
def bind(self, sock): ...
@abstractmethod
def wrap(self, sock): ...
@abstractmethod
def get_environ(self): ...
@abstractmethod
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...

448
libs/cheroot/ssl/builtin.py Normal file
View file

@@ -0,0 +1,448 @@
"""
A library for integrating Python's builtin :py:mod:`ssl` library with Cheroot.
The :py:mod:`ssl` module must be importable for SSL functionality.
To use this module, set ``HTTPServer.ssl_adapter`` to an instance of
``BuiltinSSLAdapter``.
"""
import socket
import sys
import threading
from contextlib import suppress
try:
import ssl
except ImportError:
ssl = None
try:
from _pyio import DEFAULT_BUFFER_SIZE
except ImportError:
try:
from io import DEFAULT_BUFFER_SIZE
except ImportError:
DEFAULT_BUFFER_SIZE = -1
from . import Adapter
from .. import errors
from ..makefile import StreamReader, StreamWriter
from ..server import HTTPServer
def _assert_ssl_exc_contains(exc, *msgs):
"""Check whether SSL exception contains either of messages provided."""
if len(msgs) < 1:
raise TypeError(
'_assert_ssl_exc_contains() requires '
'at least one message to be passed.',
)
err_msg_lower = str(exc).lower()
return any(m.lower() in err_msg_lower for m in msgs)
def _loopback_for_cert_thread(context, server):
"""Wrap a socket in ssl and perform the server-side handshake."""
# As we only care about parsing the certificate, the failure of
# which will cause an exception in ``_loopback_for_cert``,
# we can safely ignore connection and ssl related exceptions. Ref:
# https://github.com/cherrypy/cheroot/issues/302#issuecomment-662592030
with suppress(ssl.SSLError, OSError):
with context.wrap_socket(
server, do_handshake_on_connect=True, server_side=True,
) as ssl_sock:
# in TLS 1.3 (Python 3.7+, OpenSSL 1.1.1+), the server
# sends the client session tickets that can be used to
# resume the TLS session on a new connection without
# performing the full handshake again. session tickets are
# sent as a post-handshake message at some _unspecified_
# time and thus a successful connection may be closed
# without the client having received the tickets.
# Unfortunately, on Windows (Python 3.8+), this is treated
# as an incomplete handshake on the server side and a
# ``ConnectionAbortedError`` is raised.
# TLS 1.3 support is still incomplete in Python 3.8;
# there is no way for the client to wait for tickets.
# While not necessary for retrieving the parsed certificate,
# we send a tiny bit of data over the connection in an
# attempt to give the server a chance to send the session
# tickets and close the connection cleanly.
# Note that, as this is essentially a race condition,
# the error may still occur occasionally.
ssl_sock.send(b'0000')
def _loopback_for_cert(certificate, private_key, certificate_chain):
"""Create a loopback connection to parse a cert with a private key."""
context = ssl.create_default_context(cafile=certificate_chain)
context.load_cert_chain(certificate, private_key)
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
# Python 3+ Unix, Python 3.5+ Windows
client, server = socket.socketpair()
try:
# `wrap_socket` will block until the ssl handshake is complete.
# it must be called on both ends at the same time -> thread
# openssl will cache the peer's cert during a successful handshake
# and return it via `getpeercert` even after the socket is closed.
# when `close` is called, the SSL shutdown notice will be sent
# and then python will wait to receive the corollary shutdown.
thread = threading.Thread(
target=_loopback_for_cert_thread, args=(context, server),
)
try:
thread.start()
with context.wrap_socket(
client, do_handshake_on_connect=True,
server_side=False,
) as ssl_sock:
ssl_sock.recv(4)
return ssl_sock.getpeercert()
finally:
thread.join()
finally:
client.close()
server.close()
def _parse_cert(certificate, private_key, certificate_chain):
"""Parse a certificate."""
# loopback_for_cert uses socket.socketpair which was only
# introduced in Python 3.0 for *nix and 3.5 for Windows
# and requires OS support (AttributeError, OSError)
# it also requires a private key either in its own file
# or combined with the cert (SSLError)
with suppress(AttributeError, ssl.SSLError, OSError):
return _loopback_for_cert(certificate, private_key, certificate_chain)
# KLUDGE: using an undocumented, private, test method to parse a cert
# unfortunately, it is the only built-in way without a connection
# as a private, undocumented method, it may change at any time
# so be tolerant of *any* possible errors it may raise
with suppress(Exception):
return ssl._ssl._test_decode_cert(certificate)
return {}
def _sni_callback(sock, sni, context):
"""Handle the SNI callback to tag the socket with the SNI."""
sock.sni = sni
# return None to allow the TLS negotiation to continue
class BuiltinSSLAdapter(Adapter):
"""Wrapper for integrating Python's builtin :py:mod:`ssl` with Cheroot."""
certificate = None
"""The file name of the server SSL certificate."""
private_key = None
"""The file name of the server's private key file."""
certificate_chain = None
"""The file name of the certificate chain file."""
ciphers = None
"""The ciphers list of SSL."""
# from mod_ssl/pkg.sslmod/ssl_engine_vars.c ssl_var_lookup_ssl_cert
CERT_KEY_TO_ENV = {
'version': 'M_VERSION',
'serialNumber': 'M_SERIAL',
'notBefore': 'V_START',
'notAfter': 'V_END',
'subject': 'S_DN',
'issuer': 'I_DN',
'subjectAltName': 'SAN',
# not parsed by the Python standard library
# - A_SIG
# - A_KEY
# not provided by mod_ssl
# - OCSP
# - caIssuers
# - crlDistributionPoints
}
# from mod_ssl/pkg.sslmod/ssl_engine_vars.c ssl_var_lookup_ssl_cert_dn_rec
CERT_KEY_TO_LDAP_CODE = {
'countryName': 'C',
'stateOrProvinceName': 'ST',
# NOTE: mod_ssl also provides 'stateOrProvinceName' as 'SP'
# for compatibility with SSLeay
'localityName': 'L',
'organizationName': 'O',
'organizationalUnitName': 'OU',
'commonName': 'CN',
'title': 'T',
'initials': 'I',
'givenName': 'G',
'surname': 'S',
'description': 'D',
'userid': 'UID',
'emailAddress': 'Email',
# not provided by mod_ssl
# - dnQualifier: DNQ
# - domainComponent: DC
# - postalCode: PC
# - streetAddress: STREET
# - serialNumber
# - generationQualifier
# - pseudonym
# - jurisdictionCountryName
# - jurisdictionLocalityName
# - jurisdictionStateOrProvince
# - businessCategory
}
def __init__(
self, certificate, private_key, certificate_chain=None,
ciphers=None,
):
"""Set up context in addition to base class properties if available."""
if ssl is None:
raise ImportError('You must install the ssl module to use HTTPS.')
super(BuiltinSSLAdapter, self).__init__(
certificate, private_key, certificate_chain, ciphers,
)
self.context = ssl.create_default_context(
purpose=ssl.Purpose.CLIENT_AUTH,
cafile=certificate_chain,
)
self.context.load_cert_chain(certificate, private_key)
if self.ciphers is not None:
self.context.set_ciphers(ciphers)
self._server_env = self._make_env_cert_dict(
'SSL_SERVER',
_parse_cert(certificate, private_key, self.certificate_chain),
)
if not self._server_env:
return
cert = None
with open(certificate, mode='rt') as f:
cert = f.read()
# strip off any keys by only taking the first certificate
cert_start = cert.find(ssl.PEM_HEADER)
if cert_start == -1:
return
cert_end = cert.find(ssl.PEM_FOOTER, cert_start)
if cert_end == -1:
return
cert_end += len(ssl.PEM_FOOTER)
self._server_env['SSL_SERVER_CERT'] = cert[cert_start:cert_end]
@property
def context(self):
""":py:class:`~ssl.SSLContext` that will be used to wrap sockets."""
return self._context
@context.setter
def context(self, context):
"""Set the ssl ``context`` to use."""
self._context = context
# Python 3.7+
# if a context is provided via `cherrypy.config.update` then
# `self.context` will be set after `__init__`
# use a property to intercept it to add an SNI callback
# but don't override the user's callback
# TODO: chain callbacks
with suppress(AttributeError):
if ssl.HAS_SNI and context.sni_callback is None:
context.sni_callback = _sni_callback
def bind(self, sock):
"""Wrap and return the given socket."""
return super(BuiltinSSLAdapter, self).bind(sock)
def wrap(self, sock):
"""Wrap and return the given socket, plus WSGI environ entries."""
try:
s = self.context.wrap_socket(
sock, do_handshake_on_connect=True, server_side=True,
)
except (
ssl.SSLEOFError,
ssl.SSLZeroReturnError,
) as tls_connection_drop_error:
raise errors.FatalSSLAlert(
*tls_connection_drop_error.args,
) from tls_connection_drop_error
except ssl.SSLError as generic_tls_error:
peer_speaks_plain_http_over_https = (
generic_tls_error.errno == ssl.SSL_ERROR_SSL and
_assert_ssl_exc_contains(generic_tls_error, 'http request')
)
if peer_speaks_plain_http_over_https:
reraised_connection_drop_exc_cls = errors.NoSSLError
else:
reraised_connection_drop_exc_cls = errors.FatalSSLAlert
raise reraised_connection_drop_exc_cls(
*generic_tls_error.args,
) from generic_tls_error
except OSError as tcp_connection_drop_error:
raise errors.FatalSSLAlert(
*tcp_connection_drop_error.args,
) from tcp_connection_drop_error
return s, self.get_environ(s)
def get_environ(self, sock):
"""Create WSGI environ entries to be merged into each request."""
cipher = sock.cipher()
ssl_environ = {
'wsgi.url_scheme': 'https',
'HTTPS': 'on',
'SSL_PROTOCOL': cipher[1],
'SSL_CIPHER': cipher[0],
'SSL_CIPHER_EXPORT': '',
'SSL_CIPHER_USEKEYSIZE': cipher[2],
'SSL_VERSION_INTERFACE': '%s Python/%s' % (
HTTPServer.version, sys.version,
),
'SSL_VERSION_LIBRARY': ssl.OPENSSL_VERSION,
'SSL_CLIENT_VERIFY': 'NONE',
# 'NONE' - client did not provide a cert (overridden below)
}
# Python 3.3+
with suppress(AttributeError):
compression = sock.compression()
if compression is not None:
ssl_environ['SSL_COMPRESS_METHOD'] = compression
# Python 3.6+
with suppress(AttributeError):
ssl_environ['SSL_SESSION_ID'] = sock.session.id.hex()
with suppress(AttributeError):
target_cipher = cipher[:2]
for cip in sock.context.get_ciphers():
if target_cipher == (cip['name'], cip['protocol']):
ssl_environ['SSL_CIPHER_ALGKEYSIZE'] = cip['alg_bits']
break
# Python 3.7+ sni_callback
with suppress(AttributeError):
ssl_environ['SSL_TLS_SNI'] = sock.sni
if self.context and self.context.verify_mode != ssl.CERT_NONE:
client_cert = sock.getpeercert()
if client_cert:
# builtin ssl **ALWAYS** validates client certificates
# and terminates the connection on failure
ssl_environ['SSL_CLIENT_VERIFY'] = 'SUCCESS'
ssl_environ.update(
self._make_env_cert_dict('SSL_CLIENT', client_cert),
)
ssl_environ['SSL_CLIENT_CERT'] = ssl.DER_cert_to_PEM_cert(
sock.getpeercert(binary_form=True),
).strip()
ssl_environ.update(self._server_env)
# not supplied by the Python standard library (as of 3.8)
# - SSL_SESSION_RESUMED
# - SSL_SECURE_RENEG
# - SSL_CLIENT_CERT_CHAIN_n
# - SRP_USER
# - SRP_USERINFO
return ssl_environ
def _make_env_cert_dict(self, env_prefix, parsed_cert):
"""Return a dict of WSGI environment variables for a certificate.
E.g. SSL_CLIENT_M_VERSION, SSL_CLIENT_M_SERIAL, etc.
See https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
"""
if not parsed_cert:
return {}
env = {}
for cert_key, env_var in self.CERT_KEY_TO_ENV.items():
key = '%s_%s' % (env_prefix, env_var)
value = parsed_cert.get(cert_key)
if env_var == 'SAN':
env.update(self._make_env_san_dict(key, value))
elif env_var.endswith('_DN'):
env.update(self._make_env_dn_dict(key, value))
else:
env[key] = str(value)
# mod_ssl 2.1+; Python 3.2+
# length of the certificate's validity period in days (exposed as *_V_REMAIN)
if 'notBefore' in parsed_cert:
remain = ssl.cert_time_to_seconds(parsed_cert['notAfter'])
remain -= ssl.cert_time_to_seconds(parsed_cert['notBefore'])
remain /= 60 * 60 * 24
env['%s_V_REMAIN' % (env_prefix,)] = str(int(remain))
return env
def _make_env_san_dict(self, env_prefix, cert_value):
"""Return a dict of WSGI environment variables for a certificate DN.
E.g. SSL_CLIENT_SAN_Email_0, SSL_CLIENT_SAN_DNS_0, etc.
See SSL_CLIENT_SAN_* at
https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
"""
if not cert_value:
return {}
env = {}
dns_count = 0
email_count = 0
for attr_name, val in cert_value:
if attr_name == 'DNS':
env['%s_DNS_%i' % (env_prefix, dns_count)] = val
dns_count += 1
elif attr_name == 'Email':
env['%s_Email_%i' % (env_prefix, email_count)] = val
email_count += 1
# other mod_ssl SAN vars:
# - SAN_OTHER_msUPN_n
return env
def _make_env_dn_dict(self, env_prefix, cert_value):
"""Return a dict of WSGI environment variables for a certificate DN.
E.g. SSL_CLIENT_S_DN_CN, SSL_CLIENT_S_DN_C, etc.
See SSL_CLIENT_S_DN_x509 at
https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
"""
if not cert_value:
return {}
dn = []
dn_attrs = {}
for rdn in cert_value:
for attr_name, val in rdn:
attr_code = self.CERT_KEY_TO_LDAP_CODE.get(attr_name)
dn.append('%s=%s' % (attr_code or attr_name, val))
if not attr_code:
continue
dn_attrs.setdefault(attr_code, [])
dn_attrs[attr_code].append(val)
env = {
env_prefix: ','.join(dn),
}
for attr_code, values in dn_attrs.items():
env['%s_%s' % (env_prefix, attr_code)] = ','.join(values)
if len(values) == 1:
continue
for i, val in enumerate(values):
env['%s_%s_%i' % (env_prefix, attr_code, i)] = val
return env
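    # A worked example of the transformation above (hypothetical input): for a
    # parsed subject ((('commonName', 'example.org'),),) and env_prefix
    # 'SSL_CLIENT_S_DN', this would yield
    # {'SSL_CLIENT_S_DN': 'CN=example.org', 'SSL_CLIENT_S_DN_CN': 'example.org'}.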
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
"""Return socket file object."""
cls = StreamReader if 'r' in mode else StreamWriter
return cls(sock, mode, bufsize)

View file

@@ -0,0 +1,17 @@
from typing import Any
from . import Adapter
DEFAULT_BUFFER_SIZE: int
class BuiltinSSLAdapter(Adapter):
CERT_KEY_TO_ENV: Any
CERT_KEY_TO_LDAP_CODE: Any
def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...) -> None: ...
@property
def context(self): ...
@context.setter
def context(self, context) -> None: ...
def bind(self, sock): ...
def wrap(self, sock): ...
def get_environ(self, sock): ...
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...

View file

@@ -0,0 +1,378 @@
"""
A library for integrating :doc:`pyOpenSSL <pyopenssl:index>` with Cheroot.
The :py:mod:`OpenSSL <pyopenssl:OpenSSL>` module must be importable
for SSL/TLS/HTTPS functionality.
You can obtain it from `here <https://github.com/pyca/pyopenssl>`_.
To use this module, set :py:attr:`HTTPServer.ssl_adapter
<cheroot.server.HTTPServer.ssl_adapter>` to an instance of
:py:class:`ssl.Adapter <cheroot.ssl.Adapter>`.
There are two ways to use :abbr:`TLS (Transport-Level Security)`:
Method One
----------
* :py:attr:`ssl_adapter.context
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.context>`: an instance of
:py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`.
If this is not None, it is assumed to be an :py:class:`SSL.Context
<pyopenssl:OpenSSL.SSL.Context>` instance, and will be passed to
:py:class:`SSL.Connection <pyopenssl:OpenSSL.SSL.Connection>` on bind().
The developer is responsible for forming a valid :py:class:`Context
<pyopenssl:OpenSSL.SSL.Context>` object. This
approach is to be preferred for more flexibility, e.g. if the cert and
key are streams instead of files, or need decryption, or
:py:data:`SSL.SSLv3_METHOD <pyopenssl:OpenSSL.SSL.SSLv3_METHOD>`
is desired instead of the default :py:data:`SSL.SSLv23_METHOD
<pyopenssl:OpenSSL.SSL.SSLv23_METHOD>`, etc. Consult
the :doc:`pyOpenSSL <pyopenssl:api/ssl>` documentation for
complete options.
Method Two (shortcut)
---------------------
* :py:attr:`ssl_adapter.certificate
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.certificate>`: the file name
of the server's TLS certificate.
* :py:attr:`ssl_adapter.private_key
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.private_key>`: the file name
of the server's private key file.
Both are :py:data:`None` by default. If :py:attr:`ssl_adapter.context
<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.context>` is :py:data:`None`,
but ``.private_key`` and ``.certificate`` are both given and valid, they
will be read, and the context will be automatically created from them.
.. spelling::
pyopenssl
"""
import socket
import sys
import threading
import time
try:
import OpenSSL.version
from OpenSSL import SSL
from OpenSSL import crypto
try:
ssl_conn_type = SSL.Connection
except AttributeError:
ssl_conn_type = SSL.ConnectionType
except ImportError:
SSL = None
from . import Adapter
from .. import errors, server as cheroot_server
from ..makefile import StreamReader, StreamWriter
class SSLFileobjectMixin:
"""Base mixin for a TLS socket stream."""
ssl_timeout = 3
ssl_retry = .01
# FIXME:
def _safe_call(self, is_reader, call, *args, **kwargs): # noqa: C901
"""Wrap the given call with TLS error-trapping.
is_reader: if False EOF errors will be raised. If True, EOF errors
will return "" (to emulate normal sockets).
"""
start = time.time()
while True:
try:
return call(*args, **kwargs)
except SSL.WantReadError:
# Sleep and try again. This is dangerous, because it means
# the rest of the stack has no way of differentiating
# between a "new handshake" error and "client dropped".
# Note this isn't an endless loop: there's a timeout below.
# Ref: https://stackoverflow.com/a/5133568/595220
time.sleep(self.ssl_retry)
except SSL.WantWriteError:
time.sleep(self.ssl_retry)
except SSL.SysCallError as e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return b''
errnum = e.args[0]
if is_reader and errnum in errors.socket_errors_to_ignore:
return b''
raise socket.error(errnum)
except SSL.Error as e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return b''
thirdarg = None
try:
thirdarg = e.args[0][0][2]
except IndexError:
pass
if thirdarg == 'http request':
# The client is talking HTTP to an HTTPS server.
raise errors.NoSSLError()
raise errors.FatalSSLAlert(*e.args)
if time.time() - start > self.ssl_timeout:
raise socket.timeout('timed out')
def recv(self, size):
"""Receive message of a size from the socket."""
return self._safe_call(
True,
super(SSLFileobjectMixin, self).recv,
size,
)
def readline(self, size=-1):
"""Receive message of a size from the socket.
Matches the following interface:
https://docs.python.org/3/library/io.html#io.IOBase.readline
"""
return self._safe_call(
True,
super(SSLFileobjectMixin, self).readline,
size,
)
def sendall(self, *args, **kwargs):
"""Send whole message to the socket."""
return self._safe_call(
False,
super(SSLFileobjectMixin, self).sendall,
*args, **kwargs,
)
def send(self, *args, **kwargs):
"""Send some part of message to the socket."""
return self._safe_call(
False,
super(SSLFileobjectMixin, self).send,
*args, **kwargs,
)
class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader):
"""SSL file object attached to a socket object."""
class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter):
"""SSL file object attached to a socket object."""
class SSLConnectionProxyMeta:
"""Metaclass for generating a bunch of proxy methods."""
def __new__(mcl, name, bases, nmspc):
"""Attach a list of proxy methods to a new class."""
proxy_methods = (
'get_context', 'pending', 'send', 'write', 'recv', 'read',
'renegotiate', 'bind', 'listen', 'connect', 'accept',
'setblocking', 'fileno', 'close', 'get_cipher_list',
'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
'makefile', 'get_app_data', 'set_app_data', 'state_string',
'sock_shutdown', 'get_peer_certificate', 'want_read',
'want_write', 'set_connect_state', 'set_accept_state',
'connect_ex', 'sendall', 'settimeout', 'gettimeout',
'shutdown',
)
proxy_methods_no_args = (
'shutdown',
)
proxy_props = (
'family',
)
def lock_decorator(method):
"""Create a proxy method for a new class."""
def proxy_wrapper(self, *args):
self._lock.acquire()
try:
new_args = (
args[:] if method not in proxy_methods_no_args else []
)
return getattr(self._ssl_conn, method)(*new_args)
finally:
self._lock.release()
return proxy_wrapper
for m in proxy_methods:
nmspc[m] = lock_decorator(m)
nmspc[m].__name__ = m
def make_property(property_):
"""Create a proxy method for a new class."""
def proxy_prop_wrapper(self):
return getattr(self._ssl_conn, property_)
proxy_prop_wrapper.__name__ = property_
return property(proxy_prop_wrapper)
for p in proxy_props:
nmspc[p] = make_property(p)
# Doesn't work via super() for some reason.
# Falling back to type() instead:
return type(name, bases, nmspc)
class SSLConnection(metaclass=SSLConnectionProxyMeta):
r"""A thread-safe wrapper for an ``SSL.Connection``.
:param tuple args: the arguments to create the wrapped \
:py:class:`SSL.Connection(*args) \
<pyopenssl:OpenSSL.SSL.Connection>`
"""
def __init__(self, *args):
"""Initialize SSLConnection instance."""
self._ssl_conn = SSL.Connection(*args)
self._lock = threading.RLock()
class pyOpenSSLAdapter(Adapter):
"""A wrapper for integrating pyOpenSSL with Cheroot."""
certificate = None
"""The file name of the server's TLS certificate."""
private_key = None
"""The file name of the server's private key file."""
certificate_chain = None
"""Optional. The file name of CA's intermediate certificate bundle.
This is needed for cheaper "chained root" TLS certificates,
and should be left as :py:data:`None` if not required."""
context = None
"""
An instance of :py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`.
"""
ciphers = None
"""The ciphers list of TLS."""
def __init__(
self, certificate, private_key, certificate_chain=None,
ciphers=None,
):
"""Initialize OpenSSL Adapter instance."""
if SSL is None:
raise ImportError('You must install pyOpenSSL to use HTTPS.')
super(pyOpenSSLAdapter, self).__init__(
certificate, private_key, certificate_chain, ciphers,
)
self._environ = None
def bind(self, sock):
"""Wrap and return the given socket."""
if self.context is None:
self.context = self.get_context()
conn = SSLConnection(self.context, sock)
self._environ = self.get_environ()
return conn
def wrap(self, sock):
"""Wrap and return the given socket, plus WSGI environ entries."""
# pyOpenSSL doesn't perform the handshake until the first read/write.
# Forcing the handshake to complete tends to result in the connection
# closing, so we can't reliably access the protocol/client cert for the env.
return sock, self._environ.copy()
def get_context(self):
"""Return an ``SSL.Context`` from self attributes.
Ref: :py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`
"""
# See https://code.activestate.com/recipes/442473/
c = SSL.Context(SSL.SSLv23_METHOD)
c.use_privatekey_file(self.private_key)
if self.certificate_chain:
c.load_verify_locations(self.certificate_chain)
c.use_certificate_file(self.certificate)
return c
def get_environ(self):
"""Return WSGI environ entries to be merged into each request."""
ssl_environ = {
'wsgi.url_scheme': 'https',
'HTTPS': 'on',
'SSL_VERSION_INTERFACE': '%s %s/%s Python/%s' % (
cheroot_server.HTTPServer.version,
OpenSSL.version.__title__, OpenSSL.version.__version__,
sys.version,
),
'SSL_VERSION_LIBRARY': SSL.SSLeay_version(
SSL.SSLEAY_VERSION,
).decode(),
}
if self.certificate:
# Server certificate attributes
with open(self.certificate, 'rb') as cert_file:
cert = crypto.load_certificate(
crypto.FILETYPE_PEM, cert_file.read(),
)
ssl_environ.update({
'SSL_SERVER_M_VERSION': cert.get_version(),
'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
# 'SSL_SERVER_V_START':
# Validity of server's certificate (start time),
# 'SSL_SERVER_V_END':
# Validity of server's certificate (end time),
})
for prefix, dn in [
('I', cert.get_issuer()),
('S', cert.get_subject()),
]:
# X509Name objects don't seem to have a way to get the
# complete DN string. Use str() and slice it instead,
# because str(dn) == "<X509Name object '/C=US/ST=...'>"
dnstr = str(dn)[18:-2]
wsgikey = 'SSL_SERVER_%s_DN' % prefix
ssl_environ[wsgikey] = dnstr
# The DN should be of the form: /k1=v1/k2=v2, but we must allow
# for any value to contain slashes itself (in a URL).
while dnstr:
pos = dnstr.rfind('=')
dnstr, value = dnstr[:pos], dnstr[pos + 1:]
pos = dnstr.rfind('/')
dnstr, key = dnstr[:pos], dnstr[pos + 1:]
if key and value:
wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
ssl_environ[wsgikey] = value
return ssl_environ
def makefile(self, sock, mode='r', bufsize=-1):
"""Return socket file object."""
cls = (
SSLFileobjectStreamReader
if 'r' in mode else
SSLFileobjectStreamWriter
)
if SSL and isinstance(sock, ssl_conn_type):
wrapped_socket = cls(sock, mode, bufsize)
wrapped_socket.ssl_timeout = sock.gettimeout()
return wrapped_socket
# This is left over from the past:
# TODO: figure out what it's meant for
else:
return cheroot_server.CP_fileobject(sock, mode, bufsize)

View file

@@ -0,0 +1,31 @@
from . import Adapter
from ..makefile import StreamReader, StreamWriter
from OpenSSL import SSL
from typing import Any, Type
ssl_conn_type: Type[SSL.Connection]
class SSLFileobjectMixin:
ssl_timeout: int
ssl_retry: float
def recv(self, size): ...
def readline(self, size: int = ...): ...
def sendall(self, *args, **kwargs): ...
def send(self, *args, **kwargs): ...
class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader): ... # type:ignore[misc]
class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter): ... # type:ignore[misc]
class SSLConnectionProxyMeta:
def __new__(mcl, name, bases, nmspc): ...
class SSLConnection:
def __init__(self, *args) -> None: ...
class pyOpenSSLAdapter(Adapter):
def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...) -> None: ...
def bind(self, sock): ...
def wrap(self, sock): ...
def get_environ(self): ...
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...
def get_context(self) -> SSL.Context: ...

View file

@@ -0,0 +1 @@
"""Cheroot test suite."""

View file

@@ -0,0 +1,43 @@
"""Local pytest plugin.
Contains hooks that are tightly bound to the Cheroot framework
itself and are of no use for end users' app testing.
"""
import pytest
pytest_version = tuple(map(int, pytest.__version__.split('.')))
def pytest_load_initial_conftests(early_config, parser, args):
"""Drop unfilterable warning ignores."""
if pytest_version < (6, 2, 0):
return
# pytest>=6.2.0 under Python 3.8:
# Refs:
# * https://docs.pytest.org/en/stable/usage.html#unraisable
# * https://github.com/pytest-dev/pytest/issues/5299
early_config._inicache['filterwarnings'].extend((
'ignore:Exception in thread CP Server Thread-:'
'pytest.PytestUnhandledThreadExceptionWarning:_pytest.threadexception',
'ignore:Exception in thread Thread-:'
'pytest.PytestUnhandledThreadExceptionWarning:_pytest.threadexception',
'ignore:Exception ignored in. '
'<socket.socket fd=-1, family=AddressFamily.AF_INET, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
'ignore:Exception ignored in. '
'<socket.socket fd=-1, family=AddressFamily.AF_INET6, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
'ignore:Exception ignored in. '
'<socket.socket fd=-1, family=AF_INET, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
'ignore:Exception ignored in. '
'<socket.socket fd=-1, family=AF_INET6, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
))

View file

@@ -0,0 +1,108 @@
"""Pytest configuration module.
Contains fixtures that are tightly bound to the Cheroot framework
itself and are of no use for end users' app testing.
"""
import threading
import time
import pytest
from .._compat import IS_MACOS, IS_WINDOWS # noqa: WPS436
from ..server import Gateway, HTTPServer
from ..testing import ( # noqa: F401 # pylint: disable=unused-import
native_server,
thread_and_wsgi_server,
thread_and_native_server,
wsgi_server,
)
from ..testing import get_server_client
@pytest.fixture
def http_request_timeout():
"""Return a common HTTP request timeout for tests with queries."""
computed_timeout = 0.1
if IS_MACOS:
computed_timeout *= 2
if IS_WINDOWS:
computed_timeout *= 10
return computed_timeout
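# For example, on Windows the effective timeout is 0.1 * 10 == 1.0 second,
# and on macOS it is 0.1 * 2 == 0.2 seconds.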
@pytest.fixture
# pylint: disable=redefined-outer-name
def wsgi_server_thread(thread_and_wsgi_server): # noqa: F811
"""Set up and tear down a Cheroot WSGI server instance.
This exposes the server thread.
"""
server_thread, _srv = thread_and_wsgi_server
return server_thread
@pytest.fixture
# pylint: disable=redefined-outer-name
def native_server_thread(thread_and_native_server): # noqa: F811
"""Set up and tear down a Cheroot HTTP server instance.
This exposes the server thread.
"""
server_thread, _srv = thread_and_native_server
return server_thread
@pytest.fixture
# pylint: disable=redefined-outer-name
def wsgi_server_client(wsgi_server): # noqa: F811
"""Create a test client out of given WSGI server."""
return get_server_client(wsgi_server)
@pytest.fixture
# pylint: disable=redefined-outer-name
def native_server_client(native_server): # noqa: F811
"""Create a test client out of given HTTP server."""
return get_server_client(native_server)
@pytest.fixture
def http_server():
"""Provision a server creator as a fixture."""
def start_srv():
bind_addr = yield
if bind_addr is None:
return
httpserver = make_http_server(bind_addr)
yield httpserver
yield httpserver
srv_creator = iter(start_srv())
next(srv_creator) # pylint: disable=stop-iteration-return
yield srv_creator
try:
while True:
httpserver = next(srv_creator)
if httpserver is not None:
httpserver.stop()
except StopIteration:
pass
def make_http_server(bind_addr):
"""Create and start an HTTP server bound to ``bind_addr``."""
httpserver = HTTPServer(
bind_addr=bind_addr,
gateway=Gateway,
)
threading.Thread(target=httpserver.safe_start).start()
while not httpserver.ready:
time.sleep(0.1)
return httpserver

168
libs/cheroot/test/helper.py Normal file
View file

@@ -0,0 +1,168 @@
"""A library of helper functions for the Cheroot test suite."""
import datetime
import logging
import os
import sys
import time
import threading
import types
import http.client
import cheroot.server
import cheroot.wsgi
from cheroot.test import webtest
log = logging.getLogger(__name__)
thisdir = os.path.abspath(os.path.dirname(__file__))
config = {
'bind_addr': ('127.0.0.1', 54583),
'server': 'wsgi',
'wsgi_app': None,
}
class CherootWebCase(webtest.WebCase):
"""Helper class for a web app test suite."""
script_name = ''
scheme = 'http'
available_servers = {
'wsgi': cheroot.wsgi.Server,
'native': cheroot.server.HTTPServer,
}
@classmethod
def setup_class(cls):
"""Create and run one HTTP server per class."""
conf = config.copy()
conf.update(getattr(cls, 'config', {}))
s_class = conf.pop('server', 'wsgi')
server_factory = cls.available_servers.get(s_class)
if server_factory is None:
raise RuntimeError('Unknown server in config: %s' % s_class)
cls.httpserver = server_factory(**conf)
cls.HOST, cls.PORT = cls.httpserver.bind_addr
if cls.httpserver.ssl_adapter is None:
ssl = ''
cls.scheme = 'http'
else:
ssl = ' (ssl)'
cls.HTTP_CONN = http.client.HTTPSConnection
cls.scheme = 'https'
v = sys.version.split()[0]
log.info('Python version used to run this test script: %s', v)
log.info('Cheroot version: %s', cheroot.__version__)
log.info('HTTP server version: %s%s', cls.httpserver.protocol, ssl)
log.info('PID: %s', os.getpid())
if hasattr(cls, 'setup_server'):
# Clear the wsgi server so that
# it can be updated with the new root
cls.setup_server()
cls.start()
@classmethod
def teardown_class(cls):
"""Cleanup HTTP server."""
if hasattr(cls, 'setup_server'):
cls.stop()
@classmethod
def start(cls):
"""Load and start the HTTP server."""
threading.Thread(target=cls.httpserver.safe_start).start()
while not cls.httpserver.ready:
time.sleep(0.1)
@classmethod
def stop(cls):
"""Terminate HTTP server."""
cls.httpserver.stop()
td = getattr(cls, 'teardown', None)
if td:
td()
date_tolerance = 2
def assertEqualDates(self, dt1, dt2, seconds=None):
"""Assert ``abs(dt1 - dt2)`` is within ``Y`` seconds."""
if seconds is None:
seconds = self.date_tolerance
if dt1 > dt2:
diff = dt1 - dt2
else:
diff = dt2 - dt1
if not diff < datetime.timedelta(seconds=seconds):
raise AssertionError(
'%r and %r are not within %r seconds.' %
(dt1, dt2, seconds),
)
class Request:
"""HTTP request container."""
def __init__(self, environ):
"""Initialize HTTP request."""
self.environ = environ
class Response:
"""HTTP response container."""
def __init__(self):
"""Initialize HTTP response."""
self.status = '200 OK'
self.headers = {'Content-Type': 'text/html'}
self.body = None
def output(self):
"""Generate iterable response body object."""
if self.body is None:
return []
elif isinstance(self.body, str):
return [self.body.encode('iso-8859-1')]
elif isinstance(self.body, bytes):
return [self.body]
else:
return [x.encode('iso-8859-1') for x in self.body]
class Controller:
"""WSGI app for tests."""
def __call__(self, environ, start_response):
"""WSGI request handler."""
req, resp = Request(environ), Response()
try:
# Python 3 supports unicode attribute names
# Python 2 encodes them
handler = self.handlers[environ['PATH_INFO']]
except KeyError:
resp.status = '404 Not Found'
else:
output = handler(req, resp)
if (
output is not None
and not any(
resp.status.startswith(status_code)
for status_code in ('204', '304')
)
):
resp.body = output
try:
resp.headers.setdefault('Content-Length', str(len(output)))
except TypeError:
if not isinstance(output, types.GeneratorType):
raise
start_response(resp.status, resp.headers.items())
return resp.output()
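# A minimal sketch of how test modules build on this helper (illustrative
# names only): handlers are plain functions taking ``(req, resp)`` and are
# looked up by ``PATH_INFO`` in the ``handlers`` mapping of the subclass.
class ExampleController(Controller):
    """Tiny WSGI app assembled from the Controller dispatch above."""

    def hello(req, resp):  # handlers take (req, resp), not self
        """Render a constant body."""
        return 'Hello world!'

    handlers = {'/hello': hello}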


@ -0,0 +1,61 @@
"""Test suite for cross-python compatibility helpers."""
import pytest
from cheroot._compat import extract_bytes, ntob, ntou, bton
@pytest.mark.parametrize(
('func', 'inp', 'out'),
(
(ntob, 'bar', b'bar'),
(ntou, 'bar', u'bar'),
(bton, b'bar', 'bar'),
),
)
def test_compat_functions_positive(func, inp, out):
"""Check that compatibility functions work with correct input."""
assert func(inp, encoding='utf-8') == out
@pytest.mark.parametrize(
'func',
(
ntob,
ntou,
),
)
def test_compat_functions_negative_nonnative(func):
"""Check that compatibility functions fail loudly for incorrect input."""
non_native_test_str = b'bar'
with pytest.raises(TypeError):
func(non_native_test_str, encoding='utf-8')
def test_ntou_escape():
"""Check that ``ntou`` supports escape-encoding under Python 2."""
expected = u'hišřії'
actual = ntou('hi\u0161\u0159\u0456\u0457', encoding='escape')
assert actual == expected
@pytest.mark.parametrize(
('input_argument', 'expected_result'),
(
(b'qwerty', b'qwerty'),
(memoryview(b'asdfgh'), b'asdfgh'),
),
)
def test_extract_bytes(input_argument, expected_result):
"""Check that legitimate inputs produce bytes."""
assert extract_bytes(input_argument) == expected_result
def test_extract_bytes_invalid():
"""Ensure that invalid input causes exception to be raised."""
with pytest.raises(
ValueError,
match=r'^extract_bytes\(\) only accepts bytes '
'and memoryview/buffer$',
):
extract_bytes(u'some юнікод їїї')


@ -0,0 +1,89 @@
"""Tests to verify the command line interface.
.. spelling::
cli
"""
import sys
import pytest
from cheroot.cli import (
Application,
parse_wsgi_bind_addr,
)
@pytest.mark.parametrize(
('raw_bind_addr', 'expected_bind_addr'),
(
# tcp/ip
('192.168.1.1:80', ('192.168.1.1', 80)),
# IPv6 IPs have to be enclosed in brackets when specified in URL form
('[::1]:8000', ('::1', 8000)),
('localhost:5000', ('localhost', 5000)),
# this is a valid input, but foo gets discarded
('foo@bar:5000', ('bar', 5000)),
('foo', ('foo', None)),
('123456789', ('123456789', None)),
# unix sockets
('/tmp/cheroot.sock', '/tmp/cheroot.sock'),
('/tmp/some-random-file-name', '/tmp/some-random-file-name'),
# abstract sockets
('@cheroot', '\x00cheroot'),
),
)
def test_parse_wsgi_bind_addr(raw_bind_addr, expected_bind_addr):
"""Check the parsing of the --bind option.
Verify some of the supported addresses and the expected return value.
"""
assert parse_wsgi_bind_addr(raw_bind_addr) == expected_bind_addr
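# For reference, the parametrized cases above correspond to direct calls such
# as the following (a sketch; expected values are taken from the table above):
def _example_parse_wsgi_bind_addr():
    assert parse_wsgi_bind_addr('[::1]:8000') == ('::1', 8000)
    assert parse_wsgi_bind_addr('/tmp/cheroot.sock') == '/tmp/cheroot.sock'
    assert parse_wsgi_bind_addr('@cheroot') == '\x00cheroot'  # abstract socket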
@pytest.fixture
def wsgi_app(monkeypatch):
"""Return a WSGI app stub."""
class WSGIAppMock:
"""Mock of a wsgi module."""
def application(self):
"""Empty application method.
Default method to be called when no specific callable
is defined in the wsgi application identifier.
It has an empty body because the test only needs to verify that
this very method object is returned, not the result of calling it.
"""
def main(self):
"""Empty custom method (callable) inside the mocked WSGI app.
It has an empty body because the test only needs to verify that
this very method object is returned, not the result of calling it.
"""
app = WSGIAppMock()
# patch sys.modules to include an instance of WSGIAppMock
# under a specific namespace
monkeypatch.setitem(sys.modules, 'mypkg.wsgi', app)
return app
@pytest.mark.parametrize(
('app_name', 'app_method'),
(
(None, 'application'),
('application', 'application'),
('main', 'main'),
),
)
# pylint: disable=invalid-name
def test_Application_resolve(app_name, app_method, wsgi_app):
"""Check the wsgi application name conversion."""
if app_name is None:
wsgi_app_spec = 'mypkg.wsgi'
else:
wsgi_app_spec = 'mypkg.wsgi:{app_name}'.format(**locals())
expected_app = getattr(wsgi_app, app_method)
assert Application.resolve(wsgi_app_spec).wsgi_app == expected_app

File diff suppressed because it is too large


@ -0,0 +1,455 @@
"""Tests for managing HTTP issues (malformed requests, etc)."""
import errno
import socket
import urllib.parse # noqa: WPS301
import pytest
from cheroot.test import helper
HTTP_BAD_REQUEST = 400
HTTP_LENGTH_REQUIRED = 411
HTTP_NOT_FOUND = 404
HTTP_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_OK = 200
HTTP_VERSION_NOT_SUPPORTED = 505
class HelloController(helper.Controller):
"""Controller for serving WSGI apps."""
def hello(req, resp):
"""Render Hello world."""
return 'Hello world!'
def body_required(req, resp):
"""Render Hello world or set 411."""
if req.environ.get('Content-Length', None) is None:
resp.status = '411 Length Required'
return
return 'Hello world!'
def query_string(req, resp):
"""Render QUERY_STRING value."""
return req.environ.get('QUERY_STRING', '')
def asterisk(req, resp):
"""Render request method value."""
# pylint: disable=possibly-unused-variable
method = req.environ.get('REQUEST_METHOD', 'NO METHOD FOUND')
tmpl = 'Got asterisk URI path with {method} method'
return tmpl.format(**locals())
def _munge(string):
"""Encode PATH_INFO correctly depending on Python version.
WSGI 1.0 is a mess around unicode. Create endpoints
that match the PATH_INFO that it produces.
"""
return string.encode('utf-8').decode('latin-1')
handlers = {
'/hello': hello,
'/no_body': hello,
'/body_required': body_required,
'/query_string': query_string,
# FIXME: Unignore the pylint rules in pylint >= 2.15.4.
# Refs:
# * https://github.com/PyCQA/pylint/issues/6592
# * https://github.com/PyCQA/pylint/pull/7395
# pylint: disable-next=too-many-function-args
_munge('/привіт'): hello,
# pylint: disable-next=too-many-function-args
_munge('/Юххууу'): hello,
'/\xa0Ðblah key 0 900 4 data': hello,
'/*': asterisk,
}
def _get_http_response(connection, method='GET'):
return connection.response_class(connection.sock, method=method)
@pytest.fixture
def testing_server(wsgi_server_client):
"""Attach a WSGI app to the given server and preconfigure it."""
wsgi_server = wsgi_server_client.server_instance
wsgi_server.wsgi_app = HelloController()
wsgi_server.max_request_body_size = 30000000
wsgi_server.server_client = wsgi_server_client
return wsgi_server
@pytest.fixture
def test_client(testing_server):
"""Get and return a test client out of the given server."""
return testing_server.server_client
@pytest.fixture
def testing_server_with_defaults(wsgi_server_client):
"""Attach a WSGI app to the given server and preconfigure it."""
wsgi_server = wsgi_server_client.server_instance
wsgi_server.wsgi_app = HelloController()
wsgi_server.server_client = wsgi_server_client
return wsgi_server
@pytest.fixture
def test_client_with_defaults(testing_server_with_defaults):
"""Get and return a test client out of the given server."""
return testing_server_with_defaults.server_client
def test_http_connect_request(test_client):
"""Check that CONNECT query results in Method Not Allowed status."""
status_line = test_client.connect('/anything')[0]
actual_status = int(status_line[:3])
assert actual_status == 405
def test_normal_request(test_client):
"""Check that normal GET query succeeds."""
status_line, _, actual_resp_body = test_client.get('/hello')
actual_status = int(status_line[:3])
assert actual_status == HTTP_OK
assert actual_resp_body == b'Hello world!'
def test_query_string_request(test_client):
"""Check that GET param is parsed well."""
status_line, _, actual_resp_body = test_client.get(
'/query_string?test=True',
)
actual_status = int(status_line[:3])
assert actual_status == HTTP_OK
assert actual_resp_body == b'test=True'
@pytest.mark.parametrize(
'uri',
(
'/hello', # plain
'/query_string?test=True', # query
'/{0}?{1}={2}'.format( # quoted unicode
*map(urllib.parse.quote, ('Юххууу', 'ї', 'йо')),
),
),
)
def test_parse_acceptable_uri(test_client, uri):
"""Check that server responds with OK to valid GET queries."""
status_line = test_client.get(uri)[0]
actual_status = int(status_line[:3])
assert actual_status == HTTP_OK
def test_parse_uri_unsafe_uri(test_client):
"""Test that malicious URI does not allow HTTP injection.
This effectively checks that sending GET request with URL
/%A0%D0blah%20key%200%20900%204%20data
is not converted into
GET /
blah key 0 900 4 data
HTTP/1.1
which would be a security issue otherwise.
"""
c = test_client.get_connection()
resource = '/\xa0Ðblah key 0 900 4 data'.encode('latin-1')
quoted = urllib.parse.quote(resource)
assert quoted == '/%A0%D0blah%20key%200%20900%204%20data'
request = 'GET {quoted} HTTP/1.1'.format(**locals())
c._output(request.encode('utf-8'))
c._send_output()
response = _get_http_response(c, method='GET')
response.begin()
assert response.status == HTTP_OK
assert response.read(12) == b'Hello world!'
c.close()
def test_parse_uri_invalid_uri(test_client):
"""Check that server responds with Bad Request to invalid GET queries.
Invalid request line test case: it should only contain US-ASCII.
"""
c = test_client.get_connection()
c._output(u'GET /йопта! HTTP/1.1'.encode('utf-8'))
c._send_output()
response = _get_http_response(c, method='GET')
response.begin()
assert response.status == HTTP_BAD_REQUEST
assert response.read(21) == b'Malformed Request-URI'
c.close()
@pytest.mark.parametrize(
'uri',
(
'hello', # ascii
'привіт', # non-ascii
),
)
def test_parse_no_leading_slash_invalid(test_client, uri):
"""Check that server responds with Bad Request to invalid GET queries.
Invalid request line test case: it should have leading slash (be absolute).
"""
status_line, _, actual_resp_body = test_client.get(
urllib.parse.quote(uri),
)
actual_status = int(status_line[:3])
assert actual_status == HTTP_BAD_REQUEST
assert b'starting with a slash' in actual_resp_body
def test_parse_uri_absolute_uri(test_client):
"""Check that server responds with Bad Request to Absolute URI.
Only proxy servers should allow this.
"""
status_line, _, actual_resp_body = test_client.get('http://google.com/')
actual_status = int(status_line[:3])
assert actual_status == HTTP_BAD_REQUEST
expected_body = b'Absolute URI not allowed if server is not a proxy.'
assert actual_resp_body == expected_body
def test_parse_uri_asterisk_uri(test_client):
"""Check that server responds with OK to OPTIONS with "*" Absolute URI."""
status_line, _, actual_resp_body = test_client.options('*')
actual_status = int(status_line[:3])
assert actual_status == HTTP_OK
expected_body = b'Got asterisk URI path with OPTIONS method'
assert actual_resp_body == expected_body
def test_parse_uri_fragment_uri(test_client):
"""Check that server responds with Bad Request to URI with fragment."""
status_line, _, actual_resp_body = test_client.get(
'/hello?test=something#fake',
)
actual_status = int(status_line[:3])
assert actual_status == HTTP_BAD_REQUEST
expected_body = b'Illegal #fragment in Request-URI.'
assert actual_resp_body == expected_body
def test_no_content_length(test_client):
"""Test POST query with an empty body being successful."""
# "The presence of a message-body in a request is signaled by the
# inclusion of a Content-Length or Transfer-Encoding header field in
# the request's message-headers."
#
# Send a message with neither header and no body.
c = test_client.get_connection()
c.request('POST', '/no_body')
response = c.getresponse()
actual_resp_body = response.read()
actual_status = response.status
assert actual_status == HTTP_OK
assert actual_resp_body == b'Hello world!'
c.close() # deal with the resource warning
def test_content_length_required(test_client):
"""Test POST query with body failing because of missing Content-Length."""
# Now send a message that has no Content-Length, but does send a body.
# Verify that CP times out the socket and responds
# with 411 Length Required.
c = test_client.get_connection()
c.request('POST', '/body_required')
response = c.getresponse()
response.read()
actual_status = response.status
assert actual_status == HTTP_LENGTH_REQUIRED
c.close() # deal with the resource warning
@pytest.mark.xfail(
reason='https://github.com/cherrypy/cheroot/issues/106',
strict=False, # sometimes it passes
)
def test_large_request(test_client_with_defaults):
"""Test GET query with maliciously large Content-Length."""
# If the server's max_request_body_size is not set (i.e. is set to 0)
# then this will result in an `OverflowError: Python int too large to
# convert to C ssize_t` in the server.
# We expect that this should instead return that the request is too
# large.
c = test_client_with_defaults.get_connection()
c.putrequest('GET', '/hello')
c.putheader('Content-Length', str(2**64))
c.endheaders()
response = c.getresponse()
actual_status = response.status
assert actual_status == HTTP_REQUEST_ENTITY_TOO_LARGE
@pytest.mark.parametrize(
('request_line', 'status_code', 'expected_body'),
(
(
b'GET /', # missing proto
HTTP_BAD_REQUEST, b'Malformed Request-Line',
),
(
b'GET / HTTPS/1.1', # invalid proto
HTTP_BAD_REQUEST, b'Malformed Request-Line: bad protocol',
),
(
b'GET / HTTP/1', # invalid version
HTTP_BAD_REQUEST, b'Malformed Request-Line: bad version',
),
(
b'GET / HTTP/2.15', # invalid ver
HTTP_VERSION_NOT_SUPPORTED, b'Cannot fulfill request',
),
),
)
def test_malformed_request_line(
test_client, request_line,
status_code, expected_body,
):
"""Test missing or invalid HTTP version in Request-Line."""
c = test_client.get_connection()
c._output(request_line)
c._send_output()
response = _get_http_response(c, method='GET')
response.begin()
assert response.status == status_code
assert response.read(len(expected_body)) == expected_body
c.close()
def test_malformed_http_method(test_client):
"""Test non-uppercase HTTP method."""
c = test_client.get_connection()
c.putrequest('GeT', '/malformed_method_case')
c.putheader('Content-Type', 'text/plain')
c.endheaders()
response = c.getresponse()
actual_status = response.status
assert actual_status == HTTP_BAD_REQUEST
actual_resp_body = response.read(21)
assert actual_resp_body == b'Malformed method name'
c.close() # deal with the resource warning
def test_malformed_header(test_client):
"""Check that broken HTTP header results in Bad Request."""
c = test_client.get_connection()
c.putrequest('GET', '/')
c.putheader('Content-Type', 'text/plain')
# See https://www.bitbucket.org/cherrypy/cherrypy/issue/941
c._output(b'Re, 1.2.3.4#015#012')
c.endheaders()
response = c.getresponse()
actual_status = response.status
assert actual_status == HTTP_BAD_REQUEST
actual_resp_body = response.read(20)
assert actual_resp_body == b'Illegal header line.'
c.close() # deal with the resource warning
def test_request_line_split_issue_1220(test_client):
"""Check that HTTP request line of exactly 256 chars length is OK."""
Request_URI = (
'/hello?'
'intervenant-entreprise-evenement_classaction='
'evenement-mailremerciements'
'&_path=intervenant-entreprise-evenement'
'&intervenant-entreprise-evenement_action-id=19404'
'&intervenant-entreprise-evenement_id=19404'
'&intervenant-entreprise_id=28092'
)
assert len('GET %s HTTP/1.1\r\n' % Request_URI) == 256
actual_resp_body = test_client.get(Request_URI)[2]
assert actual_resp_body == b'Hello world!'
def test_garbage_in(test_client):
"""Test that server sends an error for garbage received over TCP."""
# Connect without SSL regardless of server.scheme
c = test_client.get_connection()
c._output(b'gjkgjklsgjklsgjkljklsg')
c._send_output()
response = c.response_class(c.sock, method='GET')
try:
response.begin()
actual_status = response.status
assert actual_status == HTTP_BAD_REQUEST
actual_resp_body = response.read(22)
assert actual_resp_body == b'Malformed Request-Line'
c.close()
except socket.error as ex:
# "Connection reset by peer" is also acceptable.
if ex.errno != errno.ECONNRESET:
raise
class CloseController:
"""Controller for testing the close callback."""
def __call__(self, environ, start_response):
"""Get the req to know header sent status."""
self.req = start_response.__self__.req
resp = CloseResponse(self.close)
start_response(resp.status, resp.headers.items())
return resp
def close(self):
"""Close, writing hello."""
self.req.write(b'hello')
class CloseResponse:
"""Dummy empty response to trigger the no body status."""
def __init__(self, close):
"""Use some defaults to ensure we have a header."""
self.status = '200 OK'
self.headers = {'Content-Type': 'text/html'}
self.close = close
def __getitem__(self, index):
"""Ensure we don't have a body."""
raise IndexError()
def output(self):
"""Return self to hook the close method."""
return self
@pytest.fixture
def testing_server_close(wsgi_server_client):
"""Attach a WSGI app to the given server and preconfigure it."""
wsgi_server = wsgi_server_client.server_instance
wsgi_server.wsgi_app = CloseController()
wsgi_server.max_request_body_size = 30000000
wsgi_server.server_client = wsgi_server_client
return wsgi_server
def test_send_header_before_closing(testing_server_close):
"""Test we are actually sending the headers before calling 'close'."""
_, _, resp_body = testing_server_close.server_client.get('/')
assert resp_body == b'hello'


@ -0,0 +1,51 @@
"""Tests for the HTTP server."""
from cheroot.wsgi import PathInfoDispatcher
def wsgi_invoke(app, environ):
"""Serve 1 request from a WSGI application."""
response = {}
def start_response(status, headers):
response.update({
'status': status,
'headers': headers,
})
response['body'] = b''.join(
app(environ, start_response),
)
return response
def test_dispatch_no_script_name():
"""Dispatch despite lack of ``SCRIPT_NAME`` in environ."""
# Bare bones WSGI hello world app (from PEP 333).
def app(environ, start_response):
start_response(
'200 OK', [
('Content-Type', 'text/plain; charset=utf-8'),
],
)
return [u'Hello, world!'.encode('utf-8')]
# Build a dispatch table.
d = PathInfoDispatcher([
('/', app),
])
# Dispatch a request without `SCRIPT_NAME`.
response = wsgi_invoke(
d, {
'PATH_INFO': '/foo',
},
)
assert response == {
'status': '200 OK',
'headers': [
('Content-Type', 'text/plain; charset=utf-8'),
],
'body': b'Hello, world!',
}
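# A small sketch extending the test above (assumption: the dispatcher moves
# the matched mount prefix from ``PATH_INFO`` into ``SCRIPT_NAME``; the helper
# below is illustrative, not part of the suite):
def _example_dispatch_two_mounts():
    def echo_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain; charset=utf-8')])
        body = '{SCRIPT_NAME}|{PATH_INFO}'.format(**environ)
        return [body.encode('utf-8')]

    d = PathInfoDispatcher([('/api', echo_app), ('/', echo_app)])
    response = wsgi_invoke(d, {'PATH_INFO': '/api/users'})
    # Under the assumption above, response['body'] would be b'/api|/users'.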


@ -0,0 +1,31 @@
"""Test suite for ``cheroot.errors``."""
import pytest
from cheroot import errors
from .._compat import IS_LINUX, IS_MACOS, IS_SOLARIS, IS_WINDOWS # noqa: WPS130
@pytest.mark.parametrize(
('err_names', 'err_nums'),
(
(('', 'some-nonsense-name'), []),
(
(
'EPROTOTYPE', 'EAGAIN', 'EWOULDBLOCK',
'WSAEWOULDBLOCK', 'EPIPE',
),
(91, 11, 32) if IS_LINUX else
(32, 35, 41) if IS_MACOS else
(98, 11, 32) if IS_SOLARIS else
(32, 10041, 11, 10035) if IS_WINDOWS else
(),
),
),
)
def test_plat_specific_errors(err_names, err_nums):
"""Test that ``plat_specific_errors`` gets correct error numbers list."""
actual_err_nums = errors.plat_specific_errors(*err_names)
assert len(actual_err_nums) == len(err_nums)
assert sorted(actual_err_nums) == sorted(err_nums)


@ -0,0 +1,49 @@
"""Tests for :py:mod:`cheroot.makefile`."""
from cheroot import makefile
class MockSocket:
"""A mock socket."""
def __init__(self):
"""Initialize :py:class:`MockSocket`."""
self.messages = []
def recv_into(self, buf):
"""Simulate ``recv_into`` for Python 3."""
if not self.messages:
return 0
msg = self.messages.pop(0)
for index, byte in enumerate(msg):
buf[index] = byte
return len(msg)
def recv(self, size):
"""Simulate ``recv`` for Python 2."""
try:
return self.messages.pop(0)
except IndexError:
return ''
def send(self, val):
"""Simulate a send."""
return len(val)
def test_bytes_read():
"""Reader should capture bytes read."""
sock = MockSocket()
sock.messages.append(b'foo')
rfile = makefile.MakeFile(sock, 'r')
rfile.read()
assert rfile.bytes_read == 3
def test_bytes_written():
"""Writer should capture bytes written."""
sock = MockSocket()
sock.messages.append(b'foo')
wfile = makefile.MakeFile(sock, 'w')
wfile.write(b'bar')
assert wfile.bytes_written == 3


@ -0,0 +1,558 @@
"""Tests for the HTTP server."""
import os
import queue
import socket
import tempfile
import threading
import types
import uuid
import urllib.parse # noqa: WPS301
import pytest
import requests
import requests_unixsocket
from pypytools.gc.custom import DefaultGc
from .._compat import bton, ntob
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS, SYS_PLATFORM
from ..server import IS_UID_GID_RESOLVABLE, Gateway, HTTPServer
from ..workers.threadpool import ThreadPool
from ..testing import (
ANY_INTERFACE_IPV4,
ANY_INTERFACE_IPV6,
EPHEMERAL_PORT,
)
IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
unix_only_sock_test = pytest.mark.skipif(
not hasattr(socket, 'AF_UNIX'),
reason='UNIX domain sockets are only available under UNIX-based OS',
)
non_macos_sock_test = pytest.mark.skipif(
IS_MACOS,
reason='Peercreds lookup does not work under macOS/BSD currently.',
)
@pytest.fixture(params=('abstract', 'file'))
def unix_sock_file(request):
"""Check that bound UNIX socket address is stored in server."""
name = 'unix_{request.param}_sock'.format(**locals())
return request.getfixturevalue(name)
@pytest.fixture
def unix_abstract_sock():
"""Return an abstract UNIX socket address."""
if not IS_LINUX:
pytest.skip(
'{os} does not support an abstract '
'socket namespace'.format(os=SYS_PLATFORM),
)
return b''.join((
b'\x00cheroot-test-socket',
ntob(str(uuid.uuid4())),
)).decode()
@pytest.fixture
def unix_file_sock():
"""Yield a unix file socket."""
tmp_sock_fh, tmp_sock_fname = tempfile.mkstemp()
yield tmp_sock_fname
os.close(tmp_sock_fh)
os.unlink(tmp_sock_fname)
def test_prepare_makes_server_ready():
"""Check that prepare() makes the server ready, and stop() clears it."""
httpserver = HTTPServer(
bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
gateway=Gateway,
)
assert not httpserver.ready
assert not httpserver.requests._threads
httpserver.prepare()
assert httpserver.ready
assert httpserver.requests._threads
for thr in httpserver.requests._threads:
assert thr.ready
httpserver.stop()
assert not httpserver.requests._threads
assert not httpserver.ready
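# The lifecycle exercised by the surrounding tests, as a compact sketch
# (illustrative helper, reusing the imports at the top of this module):
def _example_server_lifecycle():
    httpserver = HTTPServer(
        bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
        gateway=Gateway,
    )
    httpserver.prepare()  # bind the socket and spin up worker threads
    serve_thread = threading.Thread(target=httpserver.serve)
    serve_thread.start()  # serve() loops until stop() or an interrupt
    httpserver.stop()  # tear workers down and unbind
    serve_thread.join()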
def test_stop_interrupts_serve():
"""Check that stop() interrupts running of serve()."""
httpserver = HTTPServer(
bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
gateway=Gateway,
)
httpserver.prepare()
serve_thread = threading.Thread(target=httpserver.serve)
serve_thread.start()
serve_thread.join(0.5)
assert serve_thread.is_alive()
httpserver.stop()
serve_thread.join(0.5)
assert not serve_thread.is_alive()
@pytest.mark.parametrize(
'exc_cls',
(
IOError,
KeyboardInterrupt,
OSError,
RuntimeError,
),
)
def test_server_interrupt(exc_cls):
"""Check that assigning interrupt stops the server."""
interrupt_msg = 'should catch {uuid!s}'.format(uuid=uuid.uuid4())
raise_marker_sentinel = object()
httpserver = HTTPServer(
bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
gateway=Gateway,
)
result_q = queue.Queue()
def serve_thread():
# ensure we catch the exception on the serve() thread
try:
httpserver.serve()
except exc_cls as e:
if str(e) == interrupt_msg:
result_q.put(raise_marker_sentinel)
httpserver.prepare()
serve_thread = threading.Thread(target=serve_thread)
serve_thread.start()
serve_thread.join(0.5)
assert serve_thread.is_alive()
# this exception is raised on the serve() thread,
# not in the calling context.
httpserver.interrupt = exc_cls(interrupt_msg)
serve_thread.join(0.5)
assert not serve_thread.is_alive()
assert result_q.get_nowait() is raise_marker_sentinel
def test_serving_is_false_and_stop_returns_after_ctrlc():
"""Check that stop() interrupts running of serve()."""
httpserver = HTTPServer(
bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
gateway=Gateway,
)
httpserver.prepare()
# Simulate a Ctrl-C on the first call to `run`.
def raise_keyboard_interrupt(*args, **kwargs):
raise KeyboardInterrupt()
httpserver._connections._selector.select = raise_keyboard_interrupt
serve_thread = threading.Thread(target=httpserver.serve)
serve_thread.start()
# The thread should exit right away due to the interrupt.
serve_thread.join(
httpserver.expiration_interval * (4 if IS_SLOW_ENV else 2),
)
assert not serve_thread.is_alive()
assert not httpserver._connections._serving
httpserver.stop()
@pytest.mark.parametrize(
'ip_addr',
(
ANY_INTERFACE_IPV4,
ANY_INTERFACE_IPV6,
),
)
def test_bind_addr_inet(http_server, ip_addr):
"""Check that bound IP address is stored in server."""
httpserver = http_server.send((ip_addr, EPHEMERAL_PORT))
assert httpserver.bind_addr[0] == ip_addr
assert httpserver.bind_addr[1] != EPHEMERAL_PORT
@unix_only_sock_test
def test_bind_addr_unix(http_server, unix_sock_file):
"""Check that bound UNIX socket address is stored in server."""
httpserver = http_server.send(unix_sock_file)
assert httpserver.bind_addr == unix_sock_file
@unix_only_sock_test
def test_bind_addr_unix_abstract(http_server, unix_abstract_sock):
"""Check that bound UNIX abstract socket address is stored in server."""
httpserver = http_server.send(unix_abstract_sock)
assert httpserver.bind_addr == unix_abstract_sock
PEERCRED_IDS_URI = '/peer_creds/ids'
PEERCRED_TEXTS_URI = '/peer_creds/texts'
class _TestGateway(Gateway):
def respond(self):
req = self.req
conn = req.conn
req_uri = bton(req.uri)
if req_uri == PEERCRED_IDS_URI:
peer_creds = conn.peer_pid, conn.peer_uid, conn.peer_gid
self.send_payload('|'.join(map(str, peer_creds)))
return
elif req_uri == PEERCRED_TEXTS_URI:
self.send_payload('!'.join((conn.peer_user, conn.peer_group)))
return
return super(_TestGateway, self).respond()
def send_payload(self, payload):
req = self.req
req.status = b'200 OK'
req.ensure_headers_sent()
req.write(ntob(payload))
@pytest.fixture
def peercreds_enabled_server(http_server, unix_sock_file):
"""Construct a test server with ``peercreds_enabled``."""
httpserver = http_server.send(unix_sock_file)
httpserver.gateway = _TestGateway
httpserver.peercreds_enabled = True
return httpserver
@unix_only_sock_test
@non_macos_sock_test
@pytest.mark.flaky(reruns=3, reruns_delay=2)
def test_peercreds_unix_sock(http_request_timeout, peercreds_enabled_server):
"""Check that ``PEERCRED`` lookup works when enabled."""
httpserver = peercreds_enabled_server
bind_addr = httpserver.bind_addr
if isinstance(bind_addr, bytes):
bind_addr = bind_addr.decode()
# pylint: disable=possibly-unused-variable
quoted = urllib.parse.quote(bind_addr, safe='')
unix_base_uri = 'http+unix://{quoted}'.format(**locals())
expected_peercreds = os.getpid(), os.getuid(), os.getgid()
expected_peercreds = '|'.join(map(str, expected_peercreds))
with requests_unixsocket.monkeypatch():
peercreds_resp = requests.get(
unix_base_uri + PEERCRED_IDS_URI,
timeout=http_request_timeout,
)
peercreds_resp.raise_for_status()
assert peercreds_resp.text == expected_peercreds
peercreds_text_resp = requests.get(
unix_base_uri + PEERCRED_TEXTS_URI,
timeout=http_request_timeout,
)
assert peercreds_text_resp.status_code == 500
@pytest.mark.skipif(
not IS_UID_GID_RESOLVABLE,
reason='Modules `grp` and `pwd` are not available '
'under the current platform',
)
@unix_only_sock_test
@non_macos_sock_test
def test_peercreds_unix_sock_with_lookup(
http_request_timeout,
peercreds_enabled_server,
):
"""Check that ``PEERCRED`` resolution works when enabled."""
httpserver = peercreds_enabled_server
httpserver.peercreds_resolve_enabled = True
bind_addr = httpserver.bind_addr
if isinstance(bind_addr, bytes):
bind_addr = bind_addr.decode()
# pylint: disable=possibly-unused-variable
quoted = urllib.parse.quote(bind_addr, safe='')
unix_base_uri = 'http+unix://{quoted}'.format(**locals())
import grp
import pwd
expected_textcreds = (
pwd.getpwuid(os.getuid()).pw_name,
grp.getgrgid(os.getgid()).gr_name,
)
expected_textcreds = '!'.join(map(str, expected_textcreds))
with requests_unixsocket.monkeypatch():
peercreds_text_resp = requests.get(
unix_base_uri + PEERCRED_TEXTS_URI,
timeout=http_request_timeout,
)
peercreds_text_resp.raise_for_status()
assert peercreds_text_resp.text == expected_textcreds
@pytest.mark.skipif(
IS_WINDOWS,
reason='This regression test is for a Linux bug, '
'and the resource module is not available on Windows',
)
@pytest.mark.parametrize(
'resource_limit',
(
1024,
2048,
),
indirect=('resource_limit',),
)
@pytest.mark.usefixtures('many_open_sockets')
def test_high_number_of_file_descriptors(native_server_client, resource_limit):
"""Test the server does not crash with a high file-descriptor value.
The server should not crash when asked to use a file descriptor
numbered higher than 1024.
The earlier implementation used to rely on ``select()`` syscall that
doesn't support file descriptors with numbers higher than 1024.
"""
# We want to force the server to use a file-descriptor with
# a number above resource_limit
# Patch the method that processes incoming connections
_old_process_conn = native_server_client.server_instance.process_conn
def native_process_conn(conn):
native_process_conn.filenos.add(conn.socket.fileno())
return _old_process_conn(conn)
native_process_conn.filenos = set()
native_server_client.server_instance.process_conn = native_process_conn
# Trigger a crash if select() is used in the implementation
native_server_client.connect('/')
# Ensure that at least one connection got accepted, otherwise the
# follow-up check wouldn't make sense
assert len(native_process_conn.filenos) > 0
# Check at least one of the sockets created are above the target number
assert any(fn >= resource_limit for fn in native_process_conn.filenos)
@pytest.mark.skipif(
not hasattr(socket, 'SO_REUSEPORT'),
reason='socket.SO_REUSEPORT is not supported on this platform',
)
@pytest.mark.parametrize(
'ip_addr',
(
ANY_INTERFACE_IPV4,
ANY_INTERFACE_IPV6,
),
)
def test_reuse_port(http_server, ip_addr, mocker):
"""Check that port initialized externally can be reused."""
family = socket.getaddrinfo(ip_addr, EPHEMERAL_PORT)[0][0]
s = socket.socket(family)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
s.bind((ip_addr, EPHEMERAL_PORT))
server = HTTPServer(
bind_addr=s.getsockname()[:2], gateway=Gateway, reuse_port=True,
)
spy = mocker.spy(server, 'prepare')
server.prepare()
server.stop()
s.close()
assert spy.spy_exception is None
ISSUE511 = IS_MACOS
if not IS_WINDOWS and not ISSUE511:
test_high_number_of_file_descriptors = pytest.mark.forked(
test_high_number_of_file_descriptors,
)
@pytest.fixture
def _garbage_bin():
"""Disable garbage collection when this fixture is in use."""
with DefaultGc().nogc():
yield
@pytest.fixture
def resource_limit(request):
"""Set the resource limit two times bigger then requested."""
resource = pytest.importorskip(
'resource',
reason='The "resource" module is Unix-specific',
)
# Get current resource limits to restore them later
soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
# We have to increase the nofile limit above 1024
# Otherwise we see a 'Too many files open' error, instead of
# an error due to the file descriptor number being too high
resource.setrlimit(
resource.RLIMIT_NOFILE,
(request.param * 2, hard_limit),
)
try: # noqa: WPS501
yield request.param
finally:
# Reset the resource limit back to the original soft limit
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
@pytest.fixture
def many_open_sockets(request, resource_limit):
"""Allocate a lot of file descriptors by opening dummy sockets."""
# NOTE: `@pytest.mark.usefixtures` doesn't work on fixtures, which
# NOTE: forces us to invoke this one dynamically to avoid having an
# NOTE: unused argument.
request.getfixturevalue('_garbage_bin')
# Hoard a lot of file descriptors by opening and storing a lot of sockets
test_sockets = []
# Open a lot of file descriptors, so the next one the server
# opens is a high number
try:
for _ in range(resource_limit):
sock = socket.socket()
test_sockets.append(sock)
# If we reach a high enough number, we don't need to open more
if sock.fileno() >= resource_limit:
break
# Check we opened enough descriptors to reach a high number
the_highest_fileno = test_sockets[-1].fileno()
assert the_highest_fileno >= resource_limit
yield the_highest_fileno
finally:
# Close our open resources
for test_socket in test_sockets:
test_socket.close()
@pytest.mark.parametrize(
('minthreads', 'maxthreads', 'inited_maxthreads'),
(
(
# NOTE: The docstring only mentions -1 to mean "no max", but other
# NOTE: negative numbers should also work.
1,
-2,
float('inf'),
),
(1, -1, float('inf')),
(1, 1, 1),
(1, 2, 2),
(1, float('inf'), float('inf')),
(2, -2, float('inf')),
(2, -1, float('inf')),
(2, 2, 2),
(2, float('inf'), float('inf')),
),
)
def test_threadpool_threadrange_set(minthreads, maxthreads, inited_maxthreads):
"""Test setting the number of threads in a ThreadPool.
The ThreadPool should properly set the min+max number of the threads to use
in the pool if those limits are valid.
"""
tp = ThreadPool(
server=None,
min=minthreads,
max=maxthreads,
)
assert tp.min == minthreads
assert tp.max == inited_maxthreads
@pytest.mark.parametrize(
('minthreads', 'maxthreads', 'error'),
(
(-1, -1, 'min=-1 must be > 0'),
(-1, 0, 'min=-1 must be > 0'),
(-1, 1, 'min=-1 must be > 0'),
(-1, 2, 'min=-1 must be > 0'),
(0, -1, 'min=0 must be > 0'),
(0, 0, 'min=0 must be > 0'),
(0, 1, 'min=0 must be > 0'),
(0, 2, 'min=0 must be > 0'),
(1, 0, 'Expected an integer or the infinity value for the `max` argument but got 0.'),
(1, 0.5, 'Expected an integer or the infinity value for the `max` argument but got 0.5.'),
(2, 0, 'Expected an integer or the infinity value for the `max` argument but got 0.'),
(2, '1', "Expected an integer or the infinity value for the `max` argument but got '1'."),
(2, 1, 'max=1 must be > min=2'),
),
)
def test_threadpool_invalid_threadrange(minthreads, maxthreads, error):
"""Test that a ThreadPool rejects invalid min/max values.
The ThreadPool should raise an error with the proper message when
initialized with an invalid min+max number of threads.
"""
with pytest.raises((ValueError, TypeError), match=error):
ThreadPool(
server=None,
min=minthreads,
max=maxthreads,
)
def test_threadpool_multistart_validation(monkeypatch):
"""Test for ThreadPool multi-start behavior.
Tests that when calling start() on a ThreadPool multiple times raises a
:exc:`RuntimeError`
"""
# replace _spawn_worker with a function that returns a placeholder to avoid
# actually starting any threads
monkeypatch.setattr(
ThreadPool,
'_spawn_worker',
lambda _: types.SimpleNamespace(ready=True),
)
tp = ThreadPool(server=None)
tp.start()
with pytest.raises(RuntimeError, match='Threadpools can only be started once.'):
tp.start()


@ -0,0 +1,702 @@
"""Tests for TLS support."""
import functools
import json
import os
import ssl
import subprocess
import sys
import threading
import time
import traceback
import http.client
import OpenSSL.SSL
import pytest
import requests
import trustme
from .._compat import bton, ntob, ntou
from .._compat import IS_ABOVE_OPENSSL10, IS_CI, IS_PYPY
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS
from ..server import HTTPServer, get_ssl_adapter_class
from ..testing import (
ANY_INTERFACE_IPV4,
ANY_INTERFACE_IPV6,
EPHEMERAL_PORT,
# get_server_client,
_get_conn_data,
_probe_ipv6_sock,
)
from ..wsgi import Gateway_10
IS_GITHUB_ACTIONS_WORKFLOW = bool(os.getenv('GITHUB_WORKFLOW'))
IS_WIN2016 = (
IS_WINDOWS
# pylint: disable=unsupported-membership-test
and b'Microsoft Windows Server 2016 Datacenter' in subprocess.check_output(
('systeminfo',),
)
)
IS_LIBRESSL_BACKEND = ssl.OPENSSL_VERSION.startswith('LibreSSL')
IS_PYOPENSSL_SSL_VERSION_1_0 = (
OpenSSL.SSL.SSLeay_version(OpenSSL.SSL.SSLEAY_VERSION).
startswith(b'OpenSSL 1.0.')
)
PY310_PLUS = sys.version_info[:2] >= (3, 10)
_stdlib_to_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
ssl.CERT_REQUIRED:
OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
missing_ipv6 = pytest.mark.skipif(
not _probe_ipv6_sock('::1'),
reason=''
'IPv6 is disabled '
'(for example, under Travis CI '
'which runs under GCE supporting only IPv4)',
)
class HelloWorldGateway(Gateway_10):
"""Gateway responding with Hello World to root URI."""
def respond(self):
"""Respond with dummy content via HTTP."""
req = self.req
req_uri = bton(req.uri)
if req_uri == '/':
req.status = b'200 OK'
req.ensure_headers_sent()
req.write(b'Hello world!')
return
if req_uri == '/env':
req.status = b'200 OK'
req.ensure_headers_sent()
env = self.get_environ()
# drop files so that it can be json dumped
env.pop('wsgi.errors')
env.pop('wsgi.input')
print(env)
req.write(json.dumps(env).encode('utf-8'))
return
return super(HelloWorldGateway, self).respond()
def make_tls_http_server(bind_addr, ssl_adapter, request):
"""Create and start an HTTP server bound to ``bind_addr``."""
httpserver = HTTPServer(
bind_addr=bind_addr,
gateway=HelloWorldGateway,
)
# httpserver.gateway = HelloWorldGateway
httpserver.ssl_adapter = ssl_adapter
threading.Thread(target=httpserver.safe_start).start()
while not httpserver.ready:
time.sleep(0.1)
request.addfinalizer(httpserver.stop)
return httpserver
@pytest.fixture
def tls_http_server(request):
"""Provision a server creator as a fixture."""
return functools.partial(make_tls_http_server, request=request)
@pytest.fixture
def ca():
"""Provide a certificate authority via fixture."""
return trustme.CA()
@pytest.fixture
def tls_ca_certificate_pem_path(ca):
"""Provide a certificate authority certificate file via fixture."""
with ca.cert_pem.tempfile() as ca_cert_pem:
yield ca_cert_pem
@pytest.fixture
def tls_certificate(ca):
"""Provide a leaf certificate via fixture."""
interface, _host, _port = _get_conn_data(ANY_INTERFACE_IPV4)
return ca.issue_cert(ntou(interface))
@pytest.fixture
def tls_certificate_chain_pem_path(tls_certificate):
"""Provide a certificate chain PEM file path via fixture."""
with tls_certificate.private_key_and_cert_chain_pem.tempfile() as cert_pem:
yield cert_pem
@pytest.fixture
def tls_certificate_private_key_pem_path(tls_certificate):
"""Provide a certificate private key PEM file path via fixture."""
with tls_certificate.private_key_pem.tempfile() as cert_key_pem:
yield cert_key_pem
def _thread_except_hook(exceptions, args):
"""Append uncaught exception ``args`` in threads to ``exceptions``."""
if issubclass(args.exc_type, SystemExit):
return
# cannot store the exception, it references the thread's stack
exceptions.append((
args.exc_type,
str(args.exc_value),
''.join(
traceback.format_exception(
args.exc_type, args.exc_value, args.exc_traceback,
),
),
))
@pytest.fixture
def thread_exceptions():
"""Provide a list of uncaught exceptions from threads via a fixture.
Only catches exceptions on Python 3.8+.
The list contains: ``(type, str(value), str(traceback))``
"""
exceptions = []
# Python 3.8+
orig_hook = getattr(threading, 'excepthook', None)
if orig_hook is not None:
threading.excepthook = functools.partial(
_thread_except_hook, exceptions,
)
try:
yield exceptions
finally:
if orig_hook is not None:
threading.excepthook = orig_hook
@pytest.mark.parametrize(
'adapter_type',
(
'builtin',
'pyopenssl',
),
)
def test_ssl_adapters(
http_request_timeout,
tls_http_server, adapter_type,
tls_certificate,
tls_certificate_chain_pem_path,
tls_certificate_private_key_pem_path,
tls_ca_certificate_pem_path,
):
"""Test ability to connect to server via HTTPS using adapters."""
interface, _host, port = _get_conn_data(ANY_INTERFACE_IPV4)
tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
tls_adapter = tls_adapter_cls(
tls_certificate_chain_pem_path, tls_certificate_private_key_pem_path,
)
if adapter_type == 'pyopenssl':
tls_adapter.context = tls_adapter.get_context()
tls_certificate.configure_cert(tls_adapter.context)
tlshttpserver = tls_http_server((interface, port), tls_adapter)
# testclient = get_server_client(tlshttpserver)
# testclient.get('/')
interface, _host, port = _get_conn_data(
tlshttpserver.bind_addr,
)
resp = requests.get(
'https://{host!s}:{port!s}/'.format(host=interface, port=port),
timeout=http_request_timeout,
verify=tls_ca_certificate_pem_path,
)
assert resp.status_code == 200
assert resp.text == 'Hello world!'
@pytest.mark.parametrize( # noqa: C901 # FIXME
'adapter_type',
(
'builtin',
'pyopenssl',
),
)
@pytest.mark.parametrize(
('is_trusted_cert', 'tls_client_identity'),
(
(True, 'localhost'), (True, '127.0.0.1'),
(True, '*.localhost'), (True, 'not_localhost'),
(False, 'localhost'),
),
)
@pytest.mark.parametrize(
'tls_verify_mode',
(
ssl.CERT_NONE, # server shouldn't validate client cert
ssl.CERT_OPTIONAL, # same as CERT_REQUIRED in client mode, don't use
ssl.CERT_REQUIRED, # server should validate if client cert CA is OK
),
)
@pytest.mark.xfail(
IS_PYPY and IS_CI,
reason='Fails under PyPy in CI for unknown reason',
strict=False,
)
def test_tls_client_auth( # noqa: C901, WPS213 # FIXME
# FIXME: remove twisted logic, separate tests
http_request_timeout,
mocker,
tls_http_server, adapter_type,
ca,
tls_certificate,
tls_certificate_chain_pem_path,
tls_certificate_private_key_pem_path,
tls_ca_certificate_pem_path,
is_trusted_cert, tls_client_identity,
tls_verify_mode,
):
"""Verify that client TLS certificate auth works correctly."""
test_cert_rejection = (
tls_verify_mode != ssl.CERT_NONE
and not is_trusted_cert
)
interface, _host, port = _get_conn_data(ANY_INTERFACE_IPV4)
client_cert_root_ca = ca if is_trusted_cert else trustme.CA()
with mocker.mock_module.patch(
'idna.core.ulabel',
return_value=ntob(tls_client_identity),
):
client_cert = client_cert_root_ca.issue_cert(
ntou(tls_client_identity),
)
del client_cert_root_ca
with client_cert.private_key_and_cert_chain_pem.tempfile() as cl_pem:
tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
tls_adapter = tls_adapter_cls(
tls_certificate_chain_pem_path,
tls_certificate_private_key_pem_path,
)
if adapter_type == 'pyopenssl':
tls_adapter.context = tls_adapter.get_context()
tls_adapter.context.set_verify(
_stdlib_to_openssl_verify[tls_verify_mode],
lambda conn, cert, errno, depth, preverify_ok: preverify_ok,
)
else:
tls_adapter.context.verify_mode = tls_verify_mode
ca.configure_trust(tls_adapter.context)
tls_certificate.configure_cert(tls_adapter.context)
tlshttpserver = tls_http_server((interface, port), tls_adapter)
interface, _host, port = _get_conn_data(tlshttpserver.bind_addr)
make_https_request = functools.partial(
requests.get,
'https://{host!s}:{port!s}/'.format(host=interface, port=port),
# Don't wait for the first byte forever:
timeout=http_request_timeout,
# Server TLS certificate verification:
verify=tls_ca_certificate_pem_path,
# Client TLS certificate verification:
cert=cl_pem,
)
if not test_cert_rejection:
resp = make_https_request()
is_req_successful = resp.status_code == 200
if (
not is_req_successful
and IS_PYOPENSSL_SSL_VERSION_1_0
and adapter_type == 'builtin'
and tls_verify_mode == ssl.CERT_REQUIRED
and tls_client_identity == 'localhost'
and is_trusted_cert
):
pytest.xfail(
'OpenSSL 1.0 has problems with verifying client certs',
)
assert is_req_successful
assert resp.text == 'Hello world!'
resp.close()
return
# xfail some flaky tests
# https://github.com/cherrypy/cheroot/issues/237
issue_237 = (
IS_MACOS
and adapter_type == 'builtin'
and tls_verify_mode != ssl.CERT_NONE
)
if issue_237:
pytest.xfail('Test sometimes fails')
expected_ssl_errors = requests.exceptions.SSLError,
if IS_WINDOWS or IS_GITHUB_ACTIONS_WORKFLOW:
expected_ssl_errors += requests.exceptions.ConnectionError,
with pytest.raises(expected_ssl_errors) as ssl_err:
make_https_request().close()
try:
err_text = ssl_err.value.args[0].reason.args[0].args[0]
except AttributeError:
if IS_WINDOWS or IS_GITHUB_ACTIONS_WORKFLOW:
err_text = str(ssl_err.value)
else:
raise
if isinstance(err_text, int):
err_text = str(ssl_err.value)
expected_substrings = (
'sslv3 alert bad certificate' if IS_LIBRESSL_BACKEND
else 'tlsv1 alert unknown ca',
)
if IS_MACOS and IS_PYPY and adapter_type == 'pyopenssl':
expected_substrings = ('tlsv1 alert unknown ca',)
if (
tls_verify_mode in (
ssl.CERT_REQUIRED,
ssl.CERT_OPTIONAL,
)
and not is_trusted_cert
and tls_client_identity == 'localhost'
):
expected_substrings += (
'bad handshake: '
"SysCallError(10054, 'WSAECONNRESET')",
"('Connection aborted.', "
'OSError("(10054, \'WSAECONNRESET\')"))',
"('Connection aborted.', "
'OSError("(10054, \'WSAECONNRESET\')",))',
"('Connection aborted.', "
'error("(10054, \'WSAECONNRESET\')",))',
"('Connection aborted.', "
'ConnectionResetError(10054, '
"'An existing connection was forcibly closed "
"by the remote host', None, 10054, None))",
"('Connection aborted.', "
'error(10054, '
"'An existing connection was forcibly closed "
"by the remote host'))",
) if IS_WINDOWS else (
"('Connection aborted.', "
'OSError("(104, \'ECONNRESET\')"))',
"('Connection aborted.', "
'OSError("(104, \'ECONNRESET\')",))',
"('Connection aborted.', "
'error("(104, \'ECONNRESET\')",))',
"('Connection aborted.', "
"ConnectionResetError(104, 'Connection reset by peer'))",
"('Connection aborted.', "
"error(104, 'Connection reset by peer'))",
) if (
IS_GITHUB_ACTIONS_WORKFLOW
and IS_LINUX
) else (
"('Connection aborted.', "
"BrokenPipeError(32, 'Broken pipe'))",
)
if PY310_PLUS:
# FIXME: Figure out what's happening and correct the problem
expected_substrings += (
'SSLError(SSLEOFError(8, '
"'EOF occurred in violation of protocol (_ssl.c:",
)
if IS_GITHUB_ACTIONS_WORKFLOW and IS_WINDOWS and PY310_PLUS:
expected_substrings += (
"('Connection aborted.', "
'RemoteDisconnected('
"'Remote end closed connection without response'))",
)
assert any(e in err_text for e in expected_substrings)
@pytest.mark.parametrize( # noqa: C901 # FIXME
'adapter_type',
(
pytest.param(
'builtin',
marks=pytest.mark.xfail(
IS_MACOS and PY310_PLUS,
reason='Unclosed TLS resource warnings happen on macOS '
'under Python 3.10 (#508)',
strict=False,
),
),
'pyopenssl',
),
)
@pytest.mark.parametrize(
('tls_verify_mode', 'use_client_cert'),
(
(ssl.CERT_NONE, False),
(ssl.CERT_NONE, True),
(ssl.CERT_OPTIONAL, False),
(ssl.CERT_OPTIONAL, True),
(ssl.CERT_REQUIRED, True),
),
)
def test_ssl_env( # noqa: C901 # FIXME
thread_exceptions,
recwarn,
mocker,
http_request_timeout,
tls_http_server, adapter_type,
ca, tls_verify_mode, tls_certificate,
tls_certificate_chain_pem_path,
tls_certificate_private_key_pem_path,
tls_ca_certificate_pem_path,
use_client_cert,
):
"""Test the SSL environment generated by the SSL adapters."""
interface, _host, port = _get_conn_data(ANY_INTERFACE_IPV4)
with mocker.mock_module.patch(
'idna.core.ulabel',
return_value=ntob('127.0.0.1'),
):
client_cert = ca.issue_cert(ntou('127.0.0.1'))
with client_cert.private_key_and_cert_chain_pem.tempfile() as cl_pem:
tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
tls_adapter = tls_adapter_cls(
tls_certificate_chain_pem_path,
tls_certificate_private_key_pem_path,
)
if adapter_type == 'pyopenssl':
tls_adapter.context = tls_adapter.get_context()
tls_adapter.context.set_verify(
_stdlib_to_openssl_verify[tls_verify_mode],
lambda conn, cert, errno, depth, preverify_ok: preverify_ok,
)
else:
tls_adapter.context.verify_mode = tls_verify_mode
ca.configure_trust(tls_adapter.context)
tls_certificate.configure_cert(tls_adapter.context)
tlswsgiserver = tls_http_server((interface, port), tls_adapter)
interface, _host, port = _get_conn_data(tlswsgiserver.bind_addr)
resp = requests.get(
'https://' + interface + ':' + str(port) + '/env',
timeout=http_request_timeout,
verify=tls_ca_certificate_pem_path,
cert=cl_pem if use_client_cert else None,
)
env = json.loads(resp.content.decode('utf-8'))
# hard coded env
assert env['wsgi.url_scheme'] == 'https'
assert env['HTTPS'] == 'on'
# ensure these are present
for key in {'SSL_VERSION_INTERFACE', 'SSL_VERSION_LIBRARY'}:
assert key in env
# pyOpenSSL generates the env before the handshake completes
if adapter_type == 'pyopenssl':
return
for key in {'SSL_PROTOCOL', 'SSL_CIPHER'}:
assert key in env
# client certificate env
if tls_verify_mode == ssl.CERT_NONE or not use_client_cert:
assert env['SSL_CLIENT_VERIFY'] == 'NONE'
else:
assert env['SSL_CLIENT_VERIFY'] == 'SUCCESS'
with open(cl_pem, 'rt') as f:
assert env['SSL_CLIENT_CERT'] in f.read()
for key in {
'SSL_CLIENT_M_VERSION', 'SSL_CLIENT_M_SERIAL',
'SSL_CLIENT_I_DN', 'SSL_CLIENT_S_DN',
}:
assert key in env
# builtin ssl environment generation may use a loopback socket
# ensure no ResourceWarning was raised during the test
if IS_PYPY:
# NOTE: PyPy doesn't have ResourceWarning
# Ref: https://doc.pypy.org/en/latest/cpython_differences.html
return
for warn in recwarn:
if not issubclass(warn.category, ResourceWarning):
continue
# the tests can sporadically generate resource warnings
# due to timing issues
# all of these sporadic warnings appear to be about socket.socket
# and have been observed to come from requests connection pool
msg = str(warn.message)
if 'socket.socket' in msg:
pytest.xfail(
'\n'.join((
'Sometimes this test fails due to '
'a socket.socket ResourceWarning:',
msg,
)),
)
pytest.fail(msg)
# to perform the ssl handshake over that loopback socket,
# the builtin ssl environment generation uses a thread
for _, _, trace in thread_exceptions:
print(trace, file=sys.stderr)
assert not thread_exceptions, ': '.join((
thread_exceptions[0][0].__name__,
thread_exceptions[0][1],
))
@pytest.mark.parametrize(
'ip_addr',
(
ANY_INTERFACE_IPV4,
ANY_INTERFACE_IPV6,
),
)
def test_https_over_http_error(http_server, ip_addr):
"""Ensure that connecting over HTTPS to HTTP port is handled."""
httpserver = http_server.send((ip_addr, EPHEMERAL_PORT))
interface, _host, port = _get_conn_data(httpserver.bind_addr)
with pytest.raises(ssl.SSLError) as ssl_err:
http.client.HTTPSConnection(
'{interface}:{port}'.format(
interface=interface,
port=port,
),
).request('GET', '/')
expected_substring = (
'wrong version number' if IS_ABOVE_OPENSSL10
else 'unknown protocol'
)
assert expected_substring in ssl_err.value.args[-1]
@pytest.mark.parametrize(
'adapter_type',
(
'builtin',
'pyopenssl',
),
)
@pytest.mark.parametrize(
'ip_addr',
(
ANY_INTERFACE_IPV4,
pytest.param(ANY_INTERFACE_IPV6, marks=missing_ipv6),
),
)
@pytest.mark.flaky(reruns=3, reruns_delay=2)
def test_http_over_https_error(
http_request_timeout,
tls_http_server, adapter_type,
ca, ip_addr,
tls_certificate,
tls_certificate_chain_pem_path,
tls_certificate_private_key_pem_path,
):
"""Ensure that connecting over HTTP to HTTPS port is handled."""
# disable some flaky tests
# https://github.com/cherrypy/cheroot/issues/225
issue_225 = (
IS_MACOS
and adapter_type == 'builtin'
)
if issue_225:
pytest.xfail('Test fails in Travis-CI')
tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
tls_adapter = tls_adapter_cls(
tls_certificate_chain_pem_path, tls_certificate_private_key_pem_path,
)
if adapter_type == 'pyopenssl':
tls_adapter.context = tls_adapter.get_context()
tls_certificate.configure_cert(tls_adapter.context)
interface, _host, port = _get_conn_data(ip_addr)
tlshttpserver = tls_http_server((interface, port), tls_adapter)
interface, _host, port = _get_conn_data(
tlshttpserver.bind_addr,
)
fqdn = interface
if ip_addr is ANY_INTERFACE_IPV6:
fqdn = '[{fqdn}]'.format(**locals())
expect_fallback_response_over_plain_http = (
(
adapter_type == 'pyopenssl'
)
)
if expect_fallback_response_over_plain_http:
resp = requests.get(
'http://{host!s}:{port!s}/'.format(host=fqdn, port=port),
timeout=http_request_timeout,
)
assert resp.status_code == 400
assert resp.text == (
'The client sent a plain HTTP request, '
'but this server only speaks HTTPS on this port.'
)
return
with pytest.raises(requests.exceptions.ConnectionError) as ssl_err:
requests.get( # FIXME: make stdlib ssl behave like PyOpenSSL
'http://{host!s}:{port!s}/'.format(host=fqdn, port=port),
timeout=http_request_timeout,
)
if IS_LINUX:
expected_error_code, expected_error_text = (
104, 'Connection reset by peer',
)
if IS_MACOS:
expected_error_code, expected_error_text = (
54, 'Connection reset by peer',
)
if IS_WINDOWS:
expected_error_code, expected_error_text = (
10054,
'An existing connection was forcibly closed by the remote host',
)
underlying_error = ssl_err.value.args[0].args[-1]
err_text = str(underlying_error)
assert underlying_error.errno == expected_error_code, (
'The underlying error is {underlying_error!r}'.
format(**locals())
)
assert expected_error_text in err_text


@ -0,0 +1,85 @@
"""Test wsgi."""
from concurrent.futures.thread import ThreadPoolExecutor
from traceback import print_tb
import pytest
import portend
import requests
from requests_toolbelt.sessions import BaseUrlSession as Session
from jaraco.context import ExceptionTrap
from cheroot import wsgi
from cheroot._compat import IS_MACOS, IS_WINDOWS
IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
@pytest.fixture
def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(_environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
host = '::'
addr = host, port
server = wsgi.Server(addr, app, timeout=600 if IS_SLOW_ENV else 20)
# pylint: disable=possibly-unused-variable
url = 'http://localhost:{port}/'.format(**locals())
# pylint: disable=possibly-unused-variable
with server._run_in_thread() as thread:
yield locals()
@pytest.mark.flaky(reruns=3, reruns_delay=2)
def test_connection_keepalive(simple_wsgi_server):
"""Test the connection keepalive works (duh)."""
session = Session(base_url=simple_wsgi_server['url'])
pooled = requests.adapters.HTTPAdapter(
pool_connections=1, pool_maxsize=1000,
)
session.mount('http://', pooled)
def do_request():
with ExceptionTrap(requests.exceptions.ConnectionError) as trap:
resp = session.get('info')
resp.raise_for_status()
print_tb(trap.tb)
return bool(trap)
with ThreadPoolExecutor(max_workers=10 if IS_SLOW_ENV else 50) as pool:
tasks = [
pool.submit(do_request)
for n in range(250 if IS_SLOW_ENV else 1000)
]
failures = sum(task.result() for task in tasks)
session.close()
assert not failures
def test_gateway_start_response_called_twice(monkeypatch):
"""Verify that repeat calls of ``Gateway.start_response()`` fail."""
monkeypatch.setattr(wsgi.Gateway, 'get_environ', lambda self: {})
wsgi_gateway = wsgi.Gateway(None)
wsgi_gateway.started_response = True
err_msg = '^WSGI start_response called a second time with no exc_info.$'
with pytest.raises(RuntimeError, match=err_msg):
wsgi_gateway.start_response('200', (), None)
def test_gateway_write_needs_start_response_called_before(monkeypatch):
"""Check that calling ``Gateway.write()`` needs started response."""
monkeypatch.setattr(wsgi.Gateway, 'get_environ', lambda self: {})
wsgi_gateway = wsgi.Gateway(None)
err_msg = '^WSGI write called before start_response.$'
with pytest.raises(RuntimeError, match=err_msg):
wsgi_gateway.write(None) # The actual arg value is unimportant


@ -0,0 +1,588 @@
"""Extensions to unittest for web frameworks.
Use the :py:meth:`WebCase.getPage` method to request a page
from your HTTP server.
Framework Integration
=====================
If you have control over your server process, you can handle errors
in the server-side of the HTTP conversation a bit better. You must run
both the client (your :py:class:`WebCase` tests) and the server in the
same process (but in separate threads, obviously).
When an error occurs in the framework, call server_error. It will print
the traceback to stdout, and keep any assertions you have from running
(the assumption is that, if the server errors, the page output will not
be of further significance to your tests).
"""
import pprint
import re
import socket
import sys
import time
import traceback
import os
import json
import unittest # pylint: disable=deprecated-module,preferred-module
import warnings
import functools
import http.client
import urllib.parse
from more_itertools.more import always_iterable
import jaraco.functools
def interface(host):
"""Return an IP address for a client connection given the server host.
If the server is listening on '0.0.0.0' (INADDR_ANY)
or '::' (IN6ADDR_ANY), this will return the proper localhost.
"""
if host == '0.0.0.0':
# INADDR_ANY, which should respond on localhost.
return '127.0.0.1'
if host == '::':
# IN6ADDR_ANY, which should respond on localhost.
return '::1'
return host
try:
# Jython support
if sys.platform[:4] == 'java':
def getchar():
"""Get a key press."""
# Hopefully this is enough
return sys.stdin.read(1)
else:
# On Windows, msvcrt.getch reads a single char without output.
import msvcrt
def getchar():
"""Get a key press."""
return msvcrt.getch()
except ImportError:
    # Unix getchar
import tty
import termios
def getchar():
"""Get a key press."""
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
# from jaraco.properties
class NonDataProperty:
"""Non-data property decorator."""
def __init__(self, fget):
"""Initialize a non-data property."""
assert fget is not None, 'fget cannot be none'
assert callable(fget), 'fget must be callable'
self.fget = fget
def __get__(self, obj, objtype=None):
"""Return a class property."""
if obj is None:
return self
return self.fget(obj)
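Because the descriptor above defines no ``__set__``, it is a *non-data* descriptor: an instance attribute of the same name shadows the computed value, which is what lets per-instance overrides of ``WebCase.interactive`` work below. A minimal sketch (the ``Example`` class is illustrative only, not part of the vendored file):
class Example:
    @NonDataProperty
    def answer(self):
        return 42  # recomputed on every attribute access

e = Example()
assert e.answer == 42  # falls through to fget
e.answer = 7           # a regular @property (no setter) would raise AttributeError here
assert e.answer == 7   # the instance attribute now shadows the descriptor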
class WebCase(unittest.TestCase):
"""Helper web test suite base."""
HOST = '127.0.0.1'
PORT = 8000
HTTP_CONN = http.client.HTTPConnection
PROTOCOL = 'HTTP/1.1'
scheme = 'http'
url = None
ssl_context = None
status = None
headers = None
body = None
encoding = 'utf-8'
time = None
@property
def _Conn(self):
"""Return HTTPConnection or HTTPSConnection based on self.scheme.
* from :py:mod:`python:http.client`.
"""
cls_name = '{scheme}Connection'.format(scheme=self.scheme.upper())
return getattr(http.client, cls_name)
def get_conn(self, auto_open=False):
"""Return a connection to our HTTP server."""
conn = self._Conn(self.interface(), self.PORT)
# Automatically re-connect?
conn.auto_open = auto_open
conn.connect()
return conn
def set_persistent(self, on=True, auto_open=False):
"""Make our HTTP_CONN persistent (or not).
If the 'on' argument is True (the default), then self.HTTP_CONN
will be set to an instance of HTTP(S)?Connection
to persist across requests.
As this class only allows for a single open connection, if
self already has an open connection, it will be closed.
"""
try:
self.HTTP_CONN.close()
except (TypeError, AttributeError):
pass
self.HTTP_CONN = (
self.get_conn(auto_open=auto_open)
if on
else self._Conn
)
@property
def persistent(self):
"""Presence of the persistent HTTP connection."""
        return hasattr(self.HTTP_CONN, 'host')
@persistent.setter
def persistent(self, on):
self.set_persistent(on)
def interface(self):
"""Return an IP address for a client connection.
If the server is listening on '0.0.0.0' (INADDR_ANY)
or '::' (IN6ADDR_ANY), this will return the proper localhost.
"""
return interface(self.HOST)
def getPage(
self, url, headers=None, method='GET', body=None,
protocol=None, raise_subcls=(),
):
"""Open the url with debugging support.
Return status, headers, body.
url should be the identifier passed to the server, typically a
server-absolute path and query string (sent between method and
protocol), and should only be an absolute URI if proxy support is
enabled in the server.
If the application under test generates absolute URIs, be sure
to wrap them first with :py:func:`strip_netloc`::
>>> class MyAppWebCase(WebCase):
... def getPage(url, *args, **kwargs):
... super(MyAppWebCase, self).getPage(
... cheroot.test.webtest.strip_netloc(url),
... *args, **kwargs
... )
``raise_subcls`` is passed through to :py:func:`openURL`.
"""
ServerError.on = False
if isinstance(url, str):
url = url.encode('utf-8')
if isinstance(body, str):
body = body.encode('utf-8')
        # for compatibility, treat raise_subcls=None as an empty tuple
raise_subcls = raise_subcls or ()
self.url = url
self.time = None
start = time.time()
result = openURL(
url, headers, method, body, self.HOST, self.PORT,
self.HTTP_CONN, protocol or self.PROTOCOL,
raise_subcls=raise_subcls,
ssl_context=self.ssl_context,
)
self.time = time.time() - start
self.status, self.headers, self.body = result
# Build a list of request cookies from the previous response cookies.
self.cookies = [
('Cookie', v) for k, v in self.headers
if k.lower() == 'set-cookie'
]
if ServerError.on:
raise ServerError()
return result
@NonDataProperty
def interactive(self):
"""Determine whether tests are run in interactive mode.
Load interactivity setting from environment, where
the value can be numeric or a string like true or
False or 1 or 0.
"""
env_str = os.environ.get('WEBTEST_INTERACTIVE', 'True')
is_interactive = bool(json.loads(env_str.lower()))
if is_interactive:
warnings.warn(
'Interactive test failure interceptor support via '
'WEBTEST_INTERACTIVE environment variable is deprecated.',
DeprecationWarning,
)
return is_interactive
console_height = 30
def _handlewebError(self, msg): # noqa: C901 # FIXME
print('')
print(' ERROR: %s' % msg)
if not self.interactive:
raise self.failureException(msg)
p = (
' Show: '
'[B]ody [H]eaders [S]tatus [U]RL; '
'[I]gnore, [R]aise, or sys.e[X]it >> '
)
sys.stdout.write(p)
sys.stdout.flush()
while True:
i = getchar().upper()
if not isinstance(i, type('')):
i = i.decode('ascii')
if i not in 'BHSUIRX':
continue
print(i.upper()) # Also prints new line
if i == 'B':
for x, line in enumerate(self.body.splitlines()):
if (x + 1) % self.console_height == 0:
                        # The trailing \r (no newline) lets later output overwrite this prompt
sys.stdout.write('<-- More -->\r')
m = getchar().lower()
# Erase our "More" prompt
sys.stdout.write(' \r')
if m == 'q':
break
print(line)
elif i == 'H':
pprint.pprint(self.headers)
elif i == 'S':
print(self.status)
elif i == 'U':
print(self.url)
elif i == 'I':
# return without raising the normal exception
return
elif i == 'R':
raise self.failureException(msg)
elif i == 'X':
sys.exit()
sys.stdout.write(p)
sys.stdout.flush()
@property
def status_code(self): # noqa: D401; irrelevant for properties
"""Integer HTTP status code."""
return int(self.status[:3])
def status_matches(self, expected):
"""Check whether actual status matches expected."""
actual = (
self.status_code
if isinstance(expected, int) else
self.status
)
return expected == actual
def assertStatus(self, status, msg=None):
"""Fail if self.status != status.
status may be integer code, exact string status, or
iterable of allowed possibilities.
"""
if any(map(self.status_matches, always_iterable(status))):
return
tmpl = 'Status {self.status} does not match {status}'
msg = msg or tmpl.format(**locals())
self._handlewebError(msg)
def assertHeader(self, key, value=None, msg=None):
"""Fail if (key, [value]) not in self.headers."""
lowkey = key.lower()
for k, v in self.headers:
if k.lower() == lowkey:
if value is None or str(value) == v:
return v
if msg is None:
if value is None:
msg = '%r not in headers' % key
else:
msg = '%r:%r not in headers' % (key, value)
self._handlewebError(msg)
def assertHeaderIn(self, key, values, msg=None):
"""Fail if header indicated by key doesn't have one of the values."""
lowkey = key.lower()
for k, v in self.headers:
if k.lower() == lowkey:
matches = [value for value in values if str(value) == v]
if matches:
return matches
if msg is None:
msg = '%(key)r not in %(values)r' % vars()
self._handlewebError(msg)
def assertHeaderItemValue(self, key, value, msg=None):
"""Fail if the header does not contain the specified value."""
actual_value = self.assertHeader(key, msg=msg)
        header_values = list(map(str.strip, actual_value.split(',')))
if value in header_values:
return value
if msg is None:
msg = '%r not in %r' % (value, header_values)
self._handlewebError(msg)
def assertNoHeader(self, key, msg=None):
"""Fail if key in self.headers."""
lowkey = key.lower()
matches = [k for k, v in self.headers if k.lower() == lowkey]
if matches:
if msg is None:
msg = '%r in headers' % key
self._handlewebError(msg)
def assertNoHeaderItemValue(self, key, value, msg=None):
"""Fail if the header contains the specified value."""
lowkey = key.lower()
hdrs = self.headers
matches = [k for k, v in hdrs if k.lower() == lowkey and v == value]
if matches:
if msg is None:
msg = '%r:%r in %r' % (key, value, hdrs)
self._handlewebError(msg)
def assertBody(self, value, msg=None):
"""Fail if value != self.body."""
if isinstance(value, str):
value = value.encode(self.encoding)
if value != self.body:
if msg is None:
msg = 'expected body:\n%r\n\nactual body:\n%r' % (
value, self.body,
)
self._handlewebError(msg)
def assertInBody(self, value, msg=None):
"""Fail if value not in self.body."""
if isinstance(value, str):
value = value.encode(self.encoding)
if value not in self.body:
if msg is None:
msg = '%r not in body: %s' % (value, self.body)
self._handlewebError(msg)
def assertNotInBody(self, value, msg=None):
"""Fail if value in self.body."""
if isinstance(value, str):
value = value.encode(self.encoding)
if value in self.body:
if msg is None:
msg = '%r found in body' % value
self._handlewebError(msg)
def assertMatchesBody(self, pattern, msg=None, flags=0):
"""Fail if value (a regex pattern) is not in self.body."""
if isinstance(pattern, str):
pattern = pattern.encode(self.encoding)
if re.search(pattern, self.body, flags) is None:
if msg is None:
msg = 'No match for %r in body' % pattern
self._handlewebError(msg)
methods_with_bodies = ('POST', 'PUT', 'PATCH')
def cleanHeaders(headers, method, body, host, port):
"""Return request headers, with required headers added (if missing)."""
if headers is None:
headers = []
# Add the required Host request header if not present.
# [This specifies the host:port of the server, not the client.]
found = False
for k, _v in headers:
if k.lower() == 'host':
found = True
break
if not found:
if port == 80:
headers.append(('Host', host))
else:
headers.append(('Host', '%s:%s' % (host, port)))
if method in methods_with_bodies:
# Stick in default type and length headers if not present
found = False
for k, v in headers:
if k.lower() == 'content-type':
found = True
break
if not found:
headers.append(
('Content-Type', 'application/x-www-form-urlencoded'),
)
headers.append(('Content-Length', str(len(body or ''))))
return headers
def shb(response):
"""Return status, headers, body the way we like from a response."""
resp_status_line = '%s %s' % (response.status, response.reason)
return resp_status_line, response.getheaders(), response.read()
def openURL(*args, raise_subcls=(), **kwargs):
"""
Open a URL, retrying when it fails.
Specify ``raise_subcls`` (class or tuple of classes) to exclude
those socket.error subclasses from being suppressed and retried.
"""
opener = functools.partial(_open_url_once, *args, **kwargs)
def on_exception():
exc = sys.exc_info()[1]
if isinstance(exc, raise_subcls):
raise exc
time.sleep(0.5)
# Try up to 10 times
return jaraco.functools.retry_call(
opener,
retries=9,
cleanup=on_exception,
trap=socket.error,
)
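In other words, transient socket errors are retried up to ten times with a 0.5 s pause, while any exception class listed in ``raise_subcls`` escapes immediately. A hedged one-off example (the host, port, and path are placeholders):
# Retry flaky connections, but fail fast if nothing is listening at all.
status, headers, body = openURL(
    '/health', host='127.0.0.1', port=8080,
    raise_subcls=ConnectionRefusedError,
)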
def _open_url_once(
url, headers=None, method='GET', body=None,
host='127.0.0.1', port=8000, http_conn=http.client.HTTPConnection,
protocol='HTTP/1.1', ssl_context=None,
):
"""Open the given HTTP resource and return status, headers, and body."""
headers = cleanHeaders(headers, method, body, host, port)
# Allow http_conn to be a class or an instance
if hasattr(http_conn, 'host'):
conn = http_conn
else:
kw = {}
if ssl_context:
kw['context'] = ssl_context
conn = http_conn(interface(host), port, **kw)
conn._http_vsn_str = protocol
conn._http_vsn = int(''.join([x for x in protocol if x.isdigit()]))
if isinstance(url, bytes):
url = url.decode()
conn.putrequest(
method.upper(), url, skip_host=True,
skip_accept_encoding=True,
)
for key, value in headers:
conn.putheader(key, value.encode('Latin-1'))
conn.endheaders()
if body is not None:
conn.send(body)
# Handle response
response = conn.getresponse()
s, h, b = shb(response)
if not hasattr(http_conn, 'host'):
# We made our own conn instance. Close it.
conn.close()
return s, h, b
def strip_netloc(url):
"""Return absolute-URI path from URL.
Strip the scheme and host from the URL, returning the
server-absolute portion.
Useful for wrapping an absolute-URI for which only the
path is expected (such as in calls to :py:meth:`WebCase.getPage`).
.. testsetup::
from cheroot.test.webtest import strip_netloc
>>> strip_netloc('https://google.com/foo/bar?bing#baz')
'/foo/bar?bing'
>>> strip_netloc('//google.com/foo/bar?bing#baz')
'/foo/bar?bing'
>>> strip_netloc('/foo/bar?bing#baz')
'/foo/bar?bing'
"""
parsed = urllib.parse.urlparse(url)
_scheme, _netloc, path, params, query, _fragment = parsed
stripped = '', '', path, params, query, ''
return urllib.parse.urlunparse(stripped)
# Add any exceptions which your web framework handles
# normally (that you don't want server_error to trap).
ignored_exceptions = []
# You'll want set this to True when you can't guarantee
# that each response will immediately follow each request;
# for example, when handling requests via multiple threads.
ignore_all = False
class ServerError(Exception):
"""Exception for signalling server error."""
on = False
def server_error(exc=None):
"""Server debug hook.
Return True if exception handled, False if ignored.
You probably want to wrap this, so you can still handle an error using
your framework when it's ignored.
"""
if exc is None:
exc = sys.exc_info()
if ignore_all or exc[0] in ignored_exceptions:
return False
else:
ServerError.on = True
print('')
print(''.join(traceback.format_exception(*exc)))
return True
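Taken together, a hedged sketch of how these helpers are typically combined in a downstream test (the ``SmokeTest`` name, port, and expected body are illustrative assumptions; a server must already be listening on ``HOST:PORT``):
from cheroot.test import webtest

class SmokeTest(webtest.WebCase):
    HOST = '127.0.0.1'
    PORT = 8080  # assumes something is already serving here

    def test_index(self):
        self.getPage('/')
        self.assertStatus(200)
        self.assertHeader('Content-Type')
        self.assertInBody('Hello world!')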

174
libs/cheroot/testing.py Normal file
View file

@ -0,0 +1,174 @@
"""Pytest fixtures and other helpers for doing testing by end-users."""
from contextlib import closing, contextmanager
import errno
import socket
import threading
import time
import http.client
import pytest
import cheroot.server
from cheroot.test import webtest
import cheroot.wsgi
EPHEMERAL_PORT = 0
NO_INTERFACE = None # Using this or '' will cause an exception
ANY_INTERFACE_IPV4 = '0.0.0.0'
ANY_INTERFACE_IPV6 = '::'
config = {
cheroot.wsgi.Server: {
'bind_addr': (NO_INTERFACE, EPHEMERAL_PORT),
'wsgi_app': None,
},
cheroot.server.HTTPServer: {
'bind_addr': (NO_INTERFACE, EPHEMERAL_PORT),
'gateway': cheroot.server.Gateway,
},
}
@contextmanager
def cheroot_server(server_factory): # noqa: WPS210
"""Set up and tear down a Cheroot server instance."""
conf = config[server_factory].copy()
bind_port = conf.pop('bind_addr')[-1]
for interface in ANY_INTERFACE_IPV6, ANY_INTERFACE_IPV4:
try:
actual_bind_addr = (interface, bind_port)
httpserver = server_factory( # create it
bind_addr=actual_bind_addr,
**conf,
)
except OSError:
pass
else:
break
httpserver.shutdown_timeout = 0 # Speed-up tests teardown
# FIXME: Expose this thread through a fixture so that it
# FIXME: could be awaited in tests.
server_thread = threading.Thread(target=httpserver.safe_start)
server_thread.start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
try:
yield server_thread, httpserver
finally:
httpserver.stop() # destroy it
        server_thread.join()  # wait for the thread to be torn down
@pytest.fixture
def thread_and_wsgi_server():
"""Set up and tear down a Cheroot WSGI server instance.
This emits a tuple of a thread and a server instance.
"""
with cheroot_server(cheroot.wsgi.Server) as (server_thread, srv):
yield server_thread, srv
@pytest.fixture
def thread_and_native_server():
"""Set up and tear down a Cheroot HTTP server instance.
This emits a tuple of a thread and a server instance.
"""
with cheroot_server(cheroot.server.HTTPServer) as (server_thread, srv):
yield server_thread, srv
@pytest.fixture
def wsgi_server(thread_and_wsgi_server): # noqa: WPS442
"""Set up and tear down a Cheroot WSGI server instance."""
_server_thread, srv = thread_and_wsgi_server
return srv
@pytest.fixture
def native_server(thread_and_native_server): # noqa: WPS442
"""Set up and tear down a Cheroot HTTP server instance."""
_server_thread, srv = thread_and_native_server
return srv
class _TestClient:
def __init__(self, server):
self._interface, self._host, self._port = _get_conn_data(
server.bind_addr,
)
self.server_instance = server
self._http_connection = self.get_connection()
def get_connection(self):
name = '{interface}:{port}'.format(
interface=self._interface,
port=self._port,
)
conn_cls = (
http.client.HTTPConnection
if self.server_instance.ssl_adapter is None else
http.client.HTTPSConnection
)
return conn_cls(name)
def request(
self, uri, method='GET', headers=None, http_conn=None,
protocol='HTTP/1.1',
):
return webtest.openURL(
uri, method=method,
headers=headers,
host=self._host, port=self._port,
http_conn=http_conn or self._http_connection,
protocol=protocol,
)
def __getattr__(self, attr_name):
def _wrapper(uri, **kwargs):
http_method = attr_name.upper()
return self.request(uri, method=http_method, **kwargs)
return _wrapper
def _probe_ipv6_sock(interface):
# Alternate way is to check IPs on interfaces using glibc, like:
# github.com/Gautier/minifail/blob/master/minifail/getifaddrs.py
try:
with closing(socket.socket(family=socket.AF_INET6)) as sock:
sock.bind((interface, 0))
except OSError as sock_err:
if sock_err.errno != errno.EADDRNOTAVAIL:
raise
else:
return True
return False
def _get_conn_data(bind_addr):
if isinstance(bind_addr, tuple):
host, port = bind_addr
else:
host, port = bind_addr, 0
interface = webtest.interface(host)
if ':' in interface and not _probe_ipv6_sock(interface):
interface = '127.0.0.1'
if ':' in host:
host = interface
return interface, host, port
def get_server_client(server):
"""Create and return a test client for the given server."""
return _TestClient(server)
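The fixtures above are meant to be paired with ``get_server_client()``; a hedged sketch of a downstream pytest module (``hello_app`` and ``test_hello`` are illustrative names, not part of this diff):
from cheroot.testing import get_server_client

def hello_app(environ, start_response):
    """Tiny WSGI app used only for this illustration."""
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

def test_hello(wsgi_server):  # `wsgi_server` is the fixture defined above
    wsgi_server.wsgi_app = hello_app  # the fixture starts with wsgi_app=None
    client = get_server_client(wsgi_server)
    status_line, headers, body = client.get('/')
    assert status_line == '200 OK'
    assert body == b'hello'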

17
libs/cheroot/testing.pyi Normal file
View file

@ -0,0 +1,17 @@
from typing import Any, Iterator, Optional, TypeVar
from .server import HTTPServer
from .wsgi import Server
T = TypeVar('T', bound=HTTPServer)
EPHEMERAL_PORT: int
NO_INTERFACE: Optional[str]
ANY_INTERFACE_IPV4: str
ANY_INTERFACE_IPV6: str
config: dict
def cheroot_server(server_factory: T) -> Iterator[T]: ...
def wsgi_server() -> Iterator[Server]: ...
def native_server() -> Iterator[HTTPServer]: ...
def get_server_client(server) -> Any: ...

View file

@ -0,0 +1 @@
"""HTTP workers pool."""

View file

View file

@ -0,0 +1,426 @@
"""A thread-based worker pool.
.. spelling::
joinable
"""
import collections
import logging
import threading
import time
import socket
import warnings
import queue
from jaraco.functools import pass_none
__all__ = ('WorkerThread', 'ThreadPool')
class TrueyZero:
"""Object which equals and does math like the integer 0 but evals True."""
def __add__(self, other):
return other
def __radd__(self, other):
return other
trueyzero = TrueyZero()
_SHUTDOWNREQUEST = object()
class WorkerThread(threading.Thread):
"""Thread which continuously polls a Queue for Connection objects.
Due to the timing issues of polling a Queue, a WorkerThread does not
check its own 'ready' flag after it has started. To stop the thread,
it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
(one for each running WorkerThread).
"""
conn = None
"""The current connection pulled off the Queue, or None."""
server = None
"""The HTTP Server which spawned this thread, and which owns the
Queue and is placing active connections into it."""
ready = False
"""A simple flag for the calling server to know when this thread
has begun polling the Queue."""
def __init__(self, server):
"""Initialize WorkerThread instance.
Args:
server (cheroot.server.HTTPServer): web server object
receiving this request
"""
self.ready = False
self.server = server
self.requests_seen = 0
self.bytes_read = 0
self.bytes_written = 0
self.start_time = None
self.work_time = 0
self.stats = {
'Requests': lambda s: self.requests_seen + (
self.start_time is None
and trueyzero
or self.conn.requests_seen
),
'Bytes Read': lambda s: self.bytes_read + (
self.start_time is None
and trueyzero
or self.conn.rfile.bytes_read
),
'Bytes Written': lambda s: self.bytes_written + (
self.start_time is None
and trueyzero
or self.conn.wfile.bytes_written
),
'Work Time': lambda s: self.work_time + (
self.start_time is None
and trueyzero
or time.time() - self.start_time
),
'Read Throughput': lambda s: s['Bytes Read'](s) / (
s['Work Time'](s) or 1e-6
),
'Write Throughput': lambda s: s['Bytes Written'](s) / (
s['Work Time'](s) or 1e-6
),
}
threading.Thread.__init__(self)
def run(self):
"""Set up incoming HTTP connection processing loop.
        This is the thread's entry-point. It performs top-layer
exception handling and interrupt processing.
:exc:`KeyboardInterrupt` and :exc:`SystemExit` bubbling up
from the inner-layer code constitute a global server interrupt
request. When they happen, the worker thread exits.
:raises BaseException: when an unexpected non-interrupt
exception leaks from the inner layers
# noqa: DAR401 KeyboardInterrupt SystemExit
"""
self.server.stats['Worker Threads'][self.name] = self.stats
self.ready = True
try:
self._process_connections_until_interrupted()
except (KeyboardInterrupt, SystemExit) as interrupt_exc:
interrupt_cause = interrupt_exc.__cause__ or interrupt_exc
self.server.error_log(
f'Setting the server interrupt flag to {interrupt_cause !r}',
level=logging.DEBUG,
)
self.server.interrupt = interrupt_cause
except BaseException as underlying_exc: # noqa: WPS424
# NOTE: This is the last resort logging with the last dying breath
# NOTE: of the worker. It is only reachable when exceptions happen
# NOTE: in the `finally` branch of the internal try/except block.
self.server.error_log(
'A fatal exception happened. Setting the server interrupt flag'
f' to {underlying_exc !r} and giving up.'
'\N{NEW LINE}\N{NEW LINE}'
'Please, report this on the Cheroot tracker at '
'<https://github.com/cherrypy/cheroot/issues/new/choose>, '
'providing a full reproducer with as much context and details as possible.',
level=logging.CRITICAL,
traceback=True,
)
self.server.interrupt = underlying_exc
raise
finally:
self.ready = False
def _process_connections_until_interrupted(self):
"""Process incoming HTTP connections in an infinite loop.
Retrieves incoming connections from thread pool, processing
them one by one.
:raises SystemExit: on the internal requests to stop the
server instance
"""
while True:
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
self.conn = conn
is_stats_enabled = self.server.stats['Enabled']
if is_stats_enabled:
self.start_time = time.time()
keep_conn_open = False
try:
keep_conn_open = conn.communicate()
except ConnectionError as connection_error:
keep_conn_open = False # Drop the connection cleanly
self.server.error_log(
'Got a connection error while handling a '
f'connection from {conn.remote_addr !s}:'
f'{conn.remote_port !s} ({connection_error !s})',
level=logging.INFO,
)
continue
except (KeyboardInterrupt, SystemExit) as shutdown_request:
# Shutdown request
keep_conn_open = False # Drop the connection cleanly
self.server.error_log(
'Got a server shutdown request while handling a '
f'connection from {conn.remote_addr !s}:'
f'{conn.remote_port !s} ({shutdown_request !s})',
level=logging.DEBUG,
)
raise SystemExit(
str(shutdown_request),
) from shutdown_request
except BaseException as unhandled_error: # noqa: WPS424
# NOTE: Only a shutdown request should bubble up to the
# NOTE: external cleanup code. Otherwise, this thread dies.
# NOTE: If this were to happen, the threadpool would still
# NOTE: list a dead thread without knowing its state. And
# NOTE: the calling code would fail to schedule processing
# NOTE: of new requests.
self.server.error_log(
'Unhandled error while processing an incoming '
f'connection {unhandled_error !r}',
level=logging.ERROR,
traceback=True,
)
continue # Prevent the thread from dying
finally:
# NOTE: Any exceptions coming from within `finally` may
# NOTE: kill the thread, causing the threadpool to only
# NOTE: contain references to dead threads rendering the
# NOTE: server defunct, effectively meaning a DoS.
# NOTE: Ideally, things called here should process
# NOTE: everything recoverable internally. Any unhandled
# NOTE: errors will bubble up into the outer try/except
# NOTE: block. They will be treated as fatal and turned
# NOTE: into server shutdown requests and then reraised
# NOTE: unconditionally.
if keep_conn_open:
self.server.put_conn(conn)
else:
conn.close()
if is_stats_enabled:
self.requests_seen += conn.requests_seen
self.bytes_read += conn.rfile.bytes_read
self.bytes_written += conn.wfile.bytes_written
self.work_time += time.time() - self.start_time
self.start_time = None
self.conn = None
class ThreadPool:
"""A Request Queue for an HTTPServer which pools threads.
ThreadPool objects must provide min, get(), put(obj), start()
and stop(timeout) attributes.
"""
def __init__(
self, server, min=10, max=-1, accepted_queue_size=-1,
accepted_queue_timeout=10,
):
"""Initialize HTTP requests queue instance.
Args:
server (cheroot.server.HTTPServer): web server object
receiving this request
min (int): minimum number of worker threads
max (int): maximum number of worker threads (-1/inf for no max)
accepted_queue_size (int): maximum number of active
requests in queue
accepted_queue_timeout (int): timeout for putting request
into queue
:raises ValueError: if the min/max values are invalid
:raises TypeError: if the max is not an integer or inf
"""
if min < 1:
raise ValueError(f'min={min!s} must be > 0')
if max == float('inf'):
pass
elif not isinstance(max, int) or max == 0:
raise TypeError(
'Expected an integer or the infinity value for the `max` '
f'argument but got {max!r}.',
)
elif max < 0:
max = float('inf')
if max < min:
raise ValueError(
f'max={max!s} must be > min={min!s} (or infinity for no max)',
)
self.server = server
self.min = min
self.max = max
self._threads = []
self._queue = queue.Queue(maxsize=accepted_queue_size)
self._queue_put_timeout = accepted_queue_timeout
self.get = self._queue.get
self._pending_shutdowns = collections.deque()
def start(self):
"""Start the pool of threads.
:raises RuntimeError: if the pool is already started
"""
if self._threads:
raise RuntimeError('Threadpools can only be started once.')
self.grow(self.min)
@property
def idle(self): # noqa: D401; irrelevant for properties
"""Number of worker threads which are idle. Read-only.""" # noqa: D401
idles = len([t for t in self._threads if t.conn is None])
return max(idles - len(self._pending_shutdowns), 0)
def put(self, obj):
"""Put request into queue.
Args:
obj (:py:class:`~cheroot.server.HTTPConnection`): HTTP connection
waiting to be processed
"""
self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
def _clear_dead_threads(self):
# Remove any dead threads from our list
for t in [t for t in self._threads if not t.is_alive()]:
self._threads.remove(t)
try:
self._pending_shutdowns.popleft()
except IndexError:
pass
def grow(self, amount):
"""Spawn new worker threads (not above self.max)."""
budget = max(self.max - len(self._threads), 0)
n_new = min(amount, budget)
workers = [self._spawn_worker() for i in range(n_new)]
for worker in workers:
while not worker.ready:
time.sleep(.1)
self._threads.extend(workers)
def _spawn_worker(self):
worker = WorkerThread(self.server)
worker.name = (
'CP Server {worker_name!s}'.
format(worker_name=worker.name)
)
worker.start()
return worker
def shrink(self, amount):
"""Kill off worker threads (not below self.min)."""
# Grow/shrink the pool if necessary.
# Remove any dead threads from our list
amount -= len(self._pending_shutdowns)
self._clear_dead_threads()
if amount <= 0:
return
# calculate the number of threads above the minimum
n_extra = max(len(self._threads) - self.min, 0)
# don't remove more than amount
n_to_remove = min(amount, n_extra)
# put shutdown requests on the queue equal to the number of threads
# to remove. As each request is processed by a worker, that worker
# will terminate and be culled from the list.
for _ in range(n_to_remove):
self._pending_shutdowns.append(None)
self._queue.put(_SHUTDOWNREQUEST)
def stop(self, timeout=5):
"""Terminate all worker threads.
Args:
timeout (int): time to wait for threads to stop gracefully
"""
        # for compatibility, negative timeouts are treated like None
        # TODO: treat negative timeouts like already expired timeouts
        if timeout is not None and timeout < 0:
            timeout = None
            warnings.warn(
'In the future, negative timeouts to Server.stop() '
'will be equivalent to a timeout of zero.',
stacklevel=2,
)
if timeout is not None:
endtime = time.time() + timeout
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
ignored_errors = (
# Raised when start_response called >1 time w/o exc_info or
# wsgi write is called before start_response. See cheroot#261
RuntimeError,
# Ignore repeated Ctrl-C. See cherrypy#691.
KeyboardInterrupt,
)
for worker in self._clear_threads():
remaining_time = timeout and endtime - time.time()
try:
worker.join(remaining_time)
if worker.is_alive():
# Timeout exhausted; forcibly shut down the socket.
self._force_close(worker.conn)
worker.join()
except ignored_errors:
pass
@staticmethod
@pass_none
def _force_close(conn):
if conn.rfile.closed:
return
try:
try:
conn.socket.shutdown(socket.SHUT_RD)
except TypeError:
# pyOpenSSL sockets don't take an arg
conn.socket.shutdown()
except OSError:
# shutdown sometimes fails (race with 'closed' check?)
# ref #238
pass
def _clear_threads(self):
"""Clear self._threads and yield all joinable threads."""
# threads = pop_all(self._threads)
threads, self._threads[:] = self._threads[:], []
return (
thread
for thread in threads
if thread is not threading.current_thread()
)
@property
def qsize(self):
"""Return the queue size."""
return self._queue.qsize()
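The pool is normally driven through ``cheroot.wsgi.Server`` rather than instantiated directly, so here is a hedged sketch of how the sizing rules above surface in practice (the ``app`` callable and address are illustrative only; nothing is bound or started at construction time):
from cheroot import wsgi

def app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']

# numthreads becomes ThreadPool.min, and max bounds grow().
server = wsgi.Server(('127.0.0.1', 0), app, numthreads=3, max=10)
pool = server.requests
assert pool.min == 3 and pool.max == 10
# pool.start() (invoked via server.start()) spawns the first `min` workers;
# pool.grow(100) would top out at `max`, and pool.shrink(50) never drops
# below `min` (it only queues _SHUTDOWNREQUEST for the excess workers).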

View file

@ -0,0 +1,37 @@
import threading
from typing import Any
class TrueyZero:
def __add__(self, other): ...
def __radd__(self, other): ...
trueyzero: TrueyZero
class WorkerThread(threading.Thread):
conn: Any
server: Any
ready: bool
requests_seen: int
bytes_read: int
bytes_written: int
start_time: Any
work_time: int
stats: Any
def __init__(self, server): ...
def run(self) -> None: ...
class ThreadPool:
server: Any
min: Any
max: Any
get: Any
def __init__(self, server, min: int = ..., max: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ...) -> None: ...
def start(self) -> None: ...
@property
def idle(self): ...
def put(self, obj) -> None: ...
def grow(self, amount) -> None: ...
def shrink(self, amount) -> None: ...
def stop(self, timeout: int = ...) -> None: ...
@property
def qsize(self) -> int: ...

412
libs/cheroot/wsgi.py Normal file
View file

@ -0,0 +1,412 @@
"""This class holds Cheroot WSGI server implementation.
Simplest example on how to use this server::
from cheroot import wsgi
def my_crazy_app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type','text/plain')]
start_response(status, response_headers)
return [b'Hello world!']
addr = '0.0.0.0', 8070
server = wsgi.Server(addr, my_crazy_app)
server.start()
The Cheroot WSGI server can serve as many WSGI applications
as you want in one instance by using a PathInfoDispatcher::
path_map = {
'/': my_crazy_app,
'/blog': my_blog_app,
}
d = wsgi.PathInfoDispatcher(path_map)
server = wsgi.Server(addr, d)
"""
import sys
from . import server
from .workers import threadpool
from ._compat import ntob, bton
class Server(server.HTTPServer):
"""A subclass of HTTPServer which calls a WSGI application."""
wsgi_version = (1, 0)
"""The version of WSGI to produce."""
def __init__(
self, bind_addr, wsgi_app, numthreads=10, server_name=None,
max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5,
accepted_queue_size=-1, accepted_queue_timeout=10,
peercreds_enabled=False, peercreds_resolve_enabled=False,
reuse_port=False,
):
"""Initialize WSGI Server instance.
Args:
bind_addr (tuple): network interface to listen to
wsgi_app (callable): WSGI application callable
numthreads (int): number of threads for WSGI thread pool
server_name (str): web server name to be advertised via
Server HTTP header
max (int): maximum number of worker threads
request_queue_size (int): the 'backlog' arg to
socket.listen(); max queued connections
timeout (int): the timeout in seconds for accepted connections
shutdown_timeout (int): the total time, in seconds, to
wait for worker threads to cleanly exit
accepted_queue_size (int): maximum number of active
requests in queue
accepted_queue_timeout (int): timeout for putting request
into queue
"""
super(Server, self).__init__(
bind_addr,
gateway=wsgi_gateways[self.wsgi_version],
server_name=server_name,
peercreds_enabled=peercreds_enabled,
peercreds_resolve_enabled=peercreds_resolve_enabled,
reuse_port=reuse_port,
)
self.wsgi_app = wsgi_app
self.request_queue_size = request_queue_size
self.timeout = timeout
self.shutdown_timeout = shutdown_timeout
self.requests = threadpool.ThreadPool(
self, min=numthreads or 1, max=max,
accepted_queue_size=accepted_queue_size,
accepted_queue_timeout=accepted_queue_timeout,
)
@property
def numthreads(self):
"""Set minimum number of threads."""
return self.requests.min
@numthreads.setter
def numthreads(self, value):
self.requests.min = value
class Gateway(server.Gateway):
"""A base class to interface HTTPServer with WSGI."""
def __init__(self, req):
"""Initialize WSGI Gateway instance with request.
Args:
req (HTTPRequest): current HTTP request
"""
super(Gateway, self).__init__(req)
self.started_response = False
self.env = self.get_environ()
self.remaining_bytes_out = None
@classmethod
def gateway_map(cls):
"""Create a mapping of gateways and their versions.
Returns:
dict[tuple[int,int],class]: map of gateway version and
corresponding class
"""
return {gw.version: gw for gw in cls.__subclasses__()}
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version."""
raise NotImplementedError # pragma: no cover
def respond(self):
"""Process the current request.
From :pep:`333`:
The start_response callable must not actually transmit
the response headers. Instead, it must store them for the
server or gateway to transmit only after the first
iteration of the application return value that yields
a NON-EMPTY string, or upon the application's first
invocation of the write() callable.
"""
response = self.req.server.wsgi_app(self.env, self.start_response)
try:
for chunk in filter(None, response):
if not isinstance(chunk, bytes):
raise ValueError('WSGI Applications must yield bytes')
self.write(chunk)
finally:
# Send headers if not already sent
self.req.ensure_headers_sent()
if hasattr(response, 'close'):
response.close()
def start_response(self, status, headers, exc_info=None): # noqa: WPS238
"""WSGI callable to begin the HTTP response."""
# "The application may call start_response more than once,
# if and only if the exc_info argument is provided."
if self.started_response and not exc_info:
raise RuntimeError(
'WSGI start_response called a second '
'time with no exc_info.',
)
self.started_response = True
# "if exc_info is provided, and the HTTP headers have already been
# sent, start_response must raise an error, and should raise the
# exc_info tuple."
if self.req.sent_headers:
value = exc_info[1]
raise value
self.req.status = self._encode_status(status)
for k, v in headers:
if not isinstance(k, str):
raise TypeError(
'WSGI response header key %r is not of type str.' % k,
)
if not isinstance(v, str):
raise TypeError(
'WSGI response header value %r is not of type str.' % v,
)
if k.lower() == 'content-length':
self.remaining_bytes_out = int(v)
out_header = ntob(k), ntob(v)
self.req.outheaders.append(out_header)
return self.write
@staticmethod
def _encode_status(status):
"""Cast status to bytes representation of current Python version.
According to :pep:`3333`, when using Python 3, the response status
and headers must be bytes masquerading as Unicode; that is, they
must be of type "str" but are restricted to code points in the
"Latin-1" set.
"""
if not isinstance(status, str):
raise TypeError('WSGI response status is not of type str.')
return status.encode('ISO-8859-1')
def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise RuntimeError('WSGI write called before start_response.')
chunklen = len(chunk)
rbo = self.remaining_bytes_out
if rbo is not None and chunklen > rbo:
if not self.req.sent_headers:
# Whew. We can send a 500 to the client.
self.req.simple_response(
'500 Internal Server Error',
'The requested resource returned more bytes than the '
'declared Content-Length.',
)
else:
# Dang. We have probably already sent data. Truncate the chunk
# to fit (so the client doesn't hang) and raise an error later.
chunk = chunk[:rbo]
self.req.ensure_headers_sent()
self.req.write(chunk)
if rbo is not None:
rbo -= chunklen
if rbo < 0:
raise ValueError(
'Response body exceeds the declared Content-Length.',
)
class Gateway_10(Gateway):
"""A Gateway class to interface HTTPServer with WSGI 1.0.x."""
version = 1, 0
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version."""
req = self.req
req_conn = req.conn
env = {
# set a non-standard environ entry so the WSGI app can know what
# the *real* server protocol is (and what features to support).
# See http://www.faqs.org/rfcs/rfc2145.html.
'ACTUAL_SERVER_PROTOCOL': req.server.protocol,
'PATH_INFO': bton(req.path),
'QUERY_STRING': bton(req.qs),
'REMOTE_ADDR': req_conn.remote_addr or '',
'REMOTE_PORT': str(req_conn.remote_port or ''),
'REQUEST_METHOD': bton(req.method),
'REQUEST_URI': bton(req.uri),
'SCRIPT_NAME': '',
'SERVER_NAME': req.server.server_name,
# Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
'SERVER_PROTOCOL': bton(req.request_protocol),
'SERVER_SOFTWARE': req.server.software,
'wsgi.errors': sys.stderr,
'wsgi.input': req.rfile,
'wsgi.input_terminated': bool(req.chunked_read),
'wsgi.multiprocess': False,
'wsgi.multithread': True,
'wsgi.run_once': False,
'wsgi.url_scheme': bton(req.scheme),
'wsgi.version': self.version,
}
if isinstance(req.server.bind_addr, str):
# AF_UNIX. This isn't really allowed by WSGI, which doesn't
# address unix domain sockets. But it's better than nothing.
env['SERVER_PORT'] = ''
try:
env['X_REMOTE_PID'] = str(req_conn.peer_pid)
env['X_REMOTE_UID'] = str(req_conn.peer_uid)
env['X_REMOTE_GID'] = str(req_conn.peer_gid)
env['X_REMOTE_USER'] = str(req_conn.peer_user)
env['X_REMOTE_GROUP'] = str(req_conn.peer_group)
env['REMOTE_USER'] = env['X_REMOTE_USER']
except RuntimeError:
"""Unable to retrieve peer creds data.
Unsupported by current kernel or socket error happened, or
unsupported socket type, or disabled.
"""
else:
env['SERVER_PORT'] = str(req.server.bind_addr[1])
# Request headers
env.update(
(
'HTTP_{header_name!s}'.
format(header_name=bton(k).upper().replace('-', '_')),
bton(v),
)
for k, v in req.inheaders.items()
)
# CONTENT_TYPE/CONTENT_LENGTH
ct = env.pop('HTTP_CONTENT_TYPE', None)
if ct is not None:
env['CONTENT_TYPE'] = ct
cl = env.pop('HTTP_CONTENT_LENGTH', None)
if cl is not None:
env['CONTENT_LENGTH'] = cl
if req.conn.ssl_env:
env.update(req.conn.ssl_env)
return env
class Gateway_u0(Gateway_10):
"""A Gateway class to interface HTTPServer with WSGI u.0.
WSGI u.0 is an experimental protocol, which uses Unicode for keys
and values in both Python 2 and Python 3.
"""
version = 'u', 0
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version."""
req = self.req
env_10 = super(Gateway_u0, self).get_environ()
env = dict(env_10.items())
# Request-URI
enc = env.setdefault('wsgi.url_encoding', 'utf-8')
try:
env['PATH_INFO'] = req.path.decode(enc)
env['QUERY_STRING'] = req.qs.decode(enc)
except UnicodeDecodeError:
# Fall back to latin 1 so apps can transcode if needed.
env['wsgi.url_encoding'] = 'ISO-8859-1'
env['PATH_INFO'] = env_10['PATH_INFO']
env['QUERY_STRING'] = env_10['QUERY_STRING']
env.update(env.items())
return env
wsgi_gateways = Gateway.gateway_map()
class PathInfoDispatcher:
"""A WSGI dispatcher for dispatch based on the PATH_INFO."""
def __init__(self, apps):
"""Initialize path info WSGI app dispatcher.
Args:
apps (dict[str,object]|list[tuple[str,object]]): URI prefix
and WSGI app pairs
"""
try:
apps = list(apps.items())
except AttributeError:
pass
# Sort the apps by len(path), descending
def by_path_len(app):
return len(app[0])
apps.sort(key=by_path_len, reverse=True)
# The path_prefix strings must start, but not end, with a slash.
# Use "" instead of "/".
self.apps = [(p.rstrip('/'), a) for p, a in apps]
def __call__(self, environ, start_response):
"""Process incoming WSGI request.
Ref: :pep:`3333`
Args:
environ (Mapping): a dict containing WSGI environment variables
start_response (callable): function, which sets response
status and headers
Returns:
list[bytes]: iterable containing bytes to be returned in
HTTP response body
"""
path = environ['PATH_INFO'] or '/'
for p, app in self.apps:
# The apps list should be sorted by length, descending.
if path.startswith('{path!s}/'.format(path=p)) or path == p:
environ = environ.copy()
environ['SCRIPT_NAME'] = environ.get('SCRIPT_NAME', '') + p
environ['PATH_INFO'] = path[len(p):]
return app(environ, start_response)
start_response(
'404 Not Found', [
('Content-Type', 'text/plain'),
('Content-Length', '0'),
],
)
return ['']
# compatibility aliases
globals().update(
WSGIServer=Server,
WSGIGateway=Gateway,
WSGIGateway_u0=Gateway_u0,
WSGIGateway_10=Gateway_10,
WSGIPathInfoDispatcher=PathInfoDispatcher,
)
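To make the dispatch rules above concrete, a hedged sketch (``root_app`` and ``blog_app`` are placeholders): the longest matching prefix wins, moves into ``SCRIPT_NAME``, and the mounted app sees a ``PATH_INFO`` relative to its mount point.
from cheroot import wsgi

def root_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'root']

def blog_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    marker = (environ['SCRIPT_NAME'] + '|' + environ['PATH_INFO']).encode()
    return [marker]

dispatch = wsgi.PathInfoDispatcher({'/': root_app, '/blog': blog_app})
# A request for /blog/2024/post reaches blog_app with SCRIPT_NAME='/blog'
# and PATH_INFO='/2024/post'; every other path falls through to root_app.
server = wsgi.Server(('0.0.0.0', 8070), dispatch)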

49
libs/cheroot/wsgi.pyi Normal file
View file

@ -0,0 +1,49 @@
from . import server
from typing import Any
class Server(server.HTTPServer):
wsgi_version: Any
wsgi_app: Any
request_queue_size: Any
timeout: Any
shutdown_timeout: Any
requests: Any
def __init__(self, bind_addr, wsgi_app, numthreads: int = ..., server_name: Any | None = ..., max: int = ..., request_queue_size: int = ..., timeout: int = ..., shutdown_timeout: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ..., reuse_port: bool = ...) -> None: ...
@property
def numthreads(self): ...
@numthreads.setter
def numthreads(self, value) -> None: ...
class Gateway(server.Gateway):
started_response: bool
env: Any
remaining_bytes_out: Any
def __init__(self, req) -> None: ...
@classmethod
def gateway_map(cls): ...
def get_environ(self) -> None: ...
def respond(self) -> None: ...
def start_response(self, status, headers, exc_info: Any | None = ...): ...
def write(self, chunk) -> None: ...
class Gateway_10(Gateway):
version: Any
def get_environ(self): ...
class Gateway_u0(Gateway_10):
version: Any
def get_environ(self): ...
wsgi_gateways: Any
class PathInfoDispatcher:
apps: Any
def __init__(self, apps): ...
def __call__(self, environ, start_response): ...
WSGIServer = Server
WSGIGateway = Gateway
WSGIGateway_u0 = Gateway_u0
WSGIGateway_10 = Gateway_10
WSGIPathInfoDispatcher = PathInfoDispatcher

View file

@ -0,0 +1 @@
pip

View file

@ -0,0 +1,30 @@
Copyright © 2004-2019, CherryPy Team (team@cherrypy.dev)
All rights reserved.
* * *
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of CherryPy nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -0,0 +1,199 @@
Metadata-Version: 2.1
Name: CherryPy
Version: 18.10.0
Summary: Object-Oriented HTTP framework
Home-page: https://www.cherrypy.dev
Author: CherryPy Team
Author-email: team@cherrypy.dev
Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/cherrypy/cherrypy
Project-URL: CI: Travis, https://travis-ci.org/cherrypy/cherrypy
Project-URL: CI: Circle, https://circleci.com/gh/cherrypy/cherrypy
Project-URL: CI: GitHub, https://github.com/cherrypy/cherrypy/actions
Project-URL: Docs: RTD, https://docs.cherrypy.dev
Project-URL: GitHub: issues, https://github.com/cherrypy/cherrypy/issues
Project-URL: GitHub: repo, https://github.com/cherrypy/cherrypy
Project-URL: Tidelift: funding, https://tidelift.com/subscription/pkg/pypi-cherrypy?utm_source=pypi-cherrypy&utm_medium=referral&utm_campaign=pypi
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: Freely Distributable
Classifier: Operating System :: OS Independent
Classifier: Framework :: CherryPy
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: Jython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Requires-Python: >=3.6
License-File: LICENSE.md
Requires-Dist: cheroot >=8.2.1
Requires-Dist: portend >=2.1.1
Requires-Dist: more-itertools
Requires-Dist: zc.lockfile
Requires-Dist: jaraco.collections
Requires-Dist: importlib-metadata ; python_version <= "3.7"
Requires-Dist: pywin32 >=227 ; sys_platform == "win32" and implementation_name == "cpython" and python_version < "3.10"
Provides-Extra: docs
Requires-Dist: sphinx ; extra == 'docs'
Requires-Dist: docutils ; extra == 'docs'
Requires-Dist: alabaster ; extra == 'docs'
Requires-Dist: sphinxcontrib-apidoc >=0.3.0 ; extra == 'docs'
Requires-Dist: rst.linker >=1.11 ; extra == 'docs'
Requires-Dist: jaraco.packaging >=3.2 ; extra == 'docs'
Provides-Extra: json
Requires-Dist: simplejson ; extra == 'json'
Provides-Extra: memcached_session
Requires-Dist: python-memcached >=1.58 ; extra == 'memcached_session'
Provides-Extra: routes_dispatcher
Requires-Dist: routes >=2.3.1 ; extra == 'routes_dispatcher'
Provides-Extra: ssl
Requires-Dist: pyOpenSSL ; extra == 'ssl'
Provides-Extra: testing
Requires-Dist: objgraph ; extra == 'testing'
Requires-Dist: pytest >=5.3.5 ; extra == 'testing'
Requires-Dist: pytest-cov ; extra == 'testing'
Requires-Dist: pytest-forked ; extra == 'testing'
Requires-Dist: pytest-sugar ; extra == 'testing'
Requires-Dist: path.py ; extra == 'testing'
Requires-Dist: requests-toolbelt ; extra == 'testing'
Requires-Dist: pytest-services >=2 ; extra == 'testing'
Requires-Dist: setuptools ; extra == 'testing'
Provides-Extra: xcgi
Requires-Dist: flup ; extra == 'xcgi'
.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
:target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
:alt: SWUbanner
.. image:: https://img.shields.io/pypi/v/cherrypy.svg
:target: https://pypi.org/project/cherrypy
.. image:: https://tidelift.com/badges/package/pypi/CherryPy
:target: https://tidelift.com/subscription/pkg/pypi-cherrypy?utm_source=pypi-cherrypy&utm_medium=readme
:alt: CherryPy is available as part of the Tidelift Subscription
.. image:: https://img.shields.io/badge/Python%203%20only-pip%20install%20%22%3E%3D18.0.0%22-%234da45e.svg
:target: https://python3statement.org/
.. image:: https://img.shields.io/badge/Python%203%20and%202-pip%20install%20%22%3C18.0.0%22-%2349a7e9.svg
:target: https://python3statement.org/#sections40-timeline
.. image:: https://readthedocs.org/projects/cherrypy/badge/?version=latest
:target: https://docs.cherrypy.dev/en/latest/?badge=latest
.. image:: https://img.shields.io/badge/StackOverflow-CherryPy-blue.svg
:target: https://stackoverflow.com/questions/tagged/cheroot+or+cherrypy
.. image:: https://img.shields.io/badge/Mailing%20list-cherrypy--users-orange.svg
:target: https://groups.google.com/group/cherrypy-users
.. image:: https://img.shields.io/gitter/room/cherrypy/cherrypy.svg
:target: https://gitter.im/cherrypy/cherrypy
.. image:: https://img.shields.io/travis/cherrypy/cherrypy/master.svg?label=Linux%20build%20%40%20Travis%20CI
:target: https://travis-ci.org/cherrypy/cherrypy
.. image:: https://circleci.com/gh/cherrypy/cherrypy/tree/master.svg?style=svg
:target: https://circleci.com/gh/cherrypy/cherrypy/tree/master
.. image:: https://img.shields.io/appveyor/ci/CherryPy/cherrypy/master.svg?label=Windows%20build%20%40%20Appveyor
:target: https://ci.appveyor.com/project/CherryPy/cherrypy/branch/master
.. image:: https://img.shields.io/badge/license-BSD-blue.svg?maxAge=3600
:target: https://pypi.org/project/cheroot
.. image:: https://img.shields.io/pypi/pyversions/cherrypy.svg
:target: https://pypi.org/project/cherrypy
.. image:: https://badges.github.io/stability-badges/dist/stable.svg
:target: https://github.com/badges/stability-badges
:alt: stable
.. image:: https://api.codacy.com/project/badge/Grade/48b11060b5d249dc86e52dac2be2c715
:target: https://www.codacy.com/app/webknjaz/cherrypy-upstream?utm_source=github.com&utm_medium=referral&utm_content=cherrypy/cherrypy&utm_campaign=Badge_Grade
.. image:: https://codecov.io/gh/cherrypy/cherrypy/branch/master/graph/badge.svg
:target: https://codecov.io/gh/cherrypy/cherrypy
:alt: codecov
Welcome to the GitHub repository of `CherryPy <https://cherrypy.dev>`_!
CherryPy is a pythonic, object-oriented HTTP framework.
1. It allows building web applications in much the same way one would
build any other object-oriented program.
2. This design results in more concise and readable code developed faster.
It's all just properties and methods.
3. It is now more than ten years old and has proven fast and very
stable.
4. It is being used in production by many sites, from the simplest to
the most demanding.
5. And perhaps most importantly, it is fun to work with :-)
Here's how easy it is to write "Hello World" in CherryPy:
.. code:: python
import cherrypy
class HelloWorld(object):
@cherrypy.expose
def index(self):
return "Hello World!"
cherrypy.quickstart(HelloWorld())
And it continues to work that intuitively when systems grow, allowing
for the Python object model to be dynamically presented as a website
and/or API.
While CherryPy is one of the easiest and most intuitive frameworks out
there, the prerequisite for understanding the `CherryPy
documentation <https://docs.cherrypy.dev>`_ is that you have
a general understanding of Python and web development.
Additionally:
- Tutorials are included in the repository:
https://github.com/cherrypy/cherrypy/tree/master/cherrypy/tutorial
- A general wiki at:
https://github.com/cherrypy/cherrypy/wiki
If the docs are insufficient to address your needs, the CherryPy
community has several `avenues for support
<https://docs.cherrypy.dev/en/latest/support.html>`_.
For Enterprise
--------------
CherryPy is available as part of the Tidelift Subscription.
The CherryPy maintainers and the maintainers of thousands of other packages
are working with Tidelift to deliver one enterprise subscription that covers
all of the open source you use.
`Learn more <https://tidelift.com/subscription/pkg/pypi-cherrypy?utm_source=pypi-cherrypy&utm_medium=referral&utm_campaign=github>`_.
Contributing
------------
Please follow the `contribution guidelines
<https://docs.cherrypy.dev/en/latest/contribute.html>`_.
And by all means, absorb the `Zen of
CherryPy <https://github.com/cherrypy/cherrypy/wiki/The-Zen-of-CherryPy>`_.

View file

@ -0,0 +1,131 @@
../../bin/cherryd,sha256=_aFtOHNpB9aQFFCd1Qx_iU7muHWaRqwEgmL8hrdamME,237
cherrypy-18.10.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cherrypy-18.10.0.dist-info/LICENSE.md,sha256=Ra3pM8KA7ON-PgHTqRr-7ZUFdGpFb3KtELCJfEnmUQQ,1511
cherrypy-18.10.0.dist-info/METADATA,sha256=Z3GL4Y53Km8-HAh7Tvg9iKxktyL-NVXfvzu0xZweAbE,8678
cherrypy-18.10.0.dist-info/RECORD,,
cherrypy-18.10.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cherrypy-18.10.0.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
cherrypy-18.10.0.dist-info/entry_points.txt,sha256=2iMzTbV4_iDQUVEBHZGAuXaFAbW9naJoL4O9Gn7eDks,50
cherrypy-18.10.0.dist-info/top_level.txt,sha256=mOBE-r7Ej1kFrXNKlOj3yY9QfGA6Xkz6vZK7VNJF3YE,9
cherrypy/__init__.py,sha256=tVHYOgxCjWl8QDIqQrOmzPhdaWUn0ERcYwdvF6_uJXk,11431
cherrypy/__main__.py,sha256=gb89ZhhQ1kjYvU61axGuZt5twA4q3kfBa3WKrPT6D2k,107
cherrypy/_cpchecker.py,sha256=-hNyza2gsjrSaeJUTgcX-DBspO2WUJTlrlbTDOEk5Ys,14584
cherrypy/_cpcompat.py,sha256=2gZtFj0h6RRGlkY-R6WoEl-S8BBSIwCeuj7F2Tf3mU0,1992
cherrypy/_cpconfig.py,sha256=9DcS8yPNgixsma9ZT7jxPdF_w6qF2ClUbX8jrQab5iE,9646
cherrypy/_cpdispatch.py,sha256=q3HsxRrSgTEcL8yZHzqVlghlzbn_yNnQh5jOuIv1US4,25194
cherrypy/_cperror.py,sha256=hfgDE1NHPC5aA57DpE-AiKDqkG6R1nXVadR1t9t7toA,23111
cherrypy/_cplogging.py,sha256=9cO0QOuq1wrZ2P9NAQFD9iTNgT8GPqhD_P7VJMjY49g,16482
cherrypy/_cpmodpy.py,sha256=A5ajUoGcu6tcEttGux6gtPqJaezoqDx9jeS8hAcmbY4,11097
cherrypy/_cpnative_server.py,sha256=QGHnIJ8FIo-a8wxWwufUTFMOy8Q00-AkjxbKjg2ELpY,6677
cherrypy/_cpreqbody.py,sha256=1Sp0KmrD1lgpxi25OOzNbTlSuxRkuxWKXhttfPRDMjo,36404
cherrypy/_cprequest.py,sha256=4z8wHnVjwWEbm4OFuMf2h4kRODKj9Ur_YaUzy6j4l3I,34559
cherrypy/_cpserver.py,sha256=ThmjByeASzbdyiNgroUQRH5oDAgCimm-eiPOkjuEdN4,8364
cherrypy/_cptools.py,sha256=6XWSuaji8rZEju_n4E657PDVdCDDq6pLO4yiX7w_QaU,18167
cherrypy/_cptree.py,sha256=I3mo_CDSNfn9yHauF9HVFRzFtnpSZ1HAMj2SbCt5TFw,11025
cherrypy/_cpwsgi.py,sha256=9DUBSUkCFmsXwZ5IHuGn3uD8ZH9vbfB3Iy0q9K5mPBk,16438
cherrypy/_cpwsgi_server.py,sha256=5P7LjwY__uIXJslBjBKcjDVlUkaNexwp8UckqXPZb2M,4190
cherrypy/_helper.py,sha256=rzjGx67JSsEbVTH0WM5dzPf48-yRBcBtpppMJB6vips,11656
cherrypy/_json.py,sha256=o2SqswLKm8rpVhEuhaJaXpNPs8Z7E3pqdTXu6MJYwMU,439
cherrypy/daemon.py,sha256=kgiqlnWFx-PkLEcPPRJmXmB6zbI7X0cN2cCRtosow4c,3950
cherrypy/favicon.ico,sha256=jrNK5SnKfbnSFgX_xQyLX3khmeImw8IbbHgJVIsGci0,1406
cherrypy/lib/__init__.py,sha256=vlNIw8LC-X9ks8rJN5i6qPCNawt7Mlcnqj9NH9mB2fM,2745
cherrypy/lib/auth_basic.py,sha256=7ejxNsgkmn5iy7kcAYWcdYylMVpkiNTAXy2r-PiCs-U,4420
cherrypy/lib/auth_digest.py,sha256=jCykJUoN10aYAD9KY_3RIhBs2KwY1a2dW1Z9ygqW58A,15355
cherrypy/lib/caching.py,sha256=1qhdv7C1yL0z8oW6wySmw767mQA9H1NnCma-OamkoXk,17523
cherrypy/lib/covercp.py,sha256=FL3cw5WLoQzaFmJp_bo59rhddLzpSFW9ehJnj-JtkEU,11602
cherrypy/lib/cpstats.py,sha256=-1wAqix75_y0yrtveHOAoUpSDa9khUAulOjmFsiRxJI,22851
cherrypy/lib/cptools.py,sha256=z90xKaqkNfqgXdvP7ESCZr0rSOFYEvEqzU94QFHW_1k,23584
cherrypy/lib/encoding.py,sha256=J2RAImsuvgiW8HW0Ke3q5okG8t2gGvINYkOfoaYDX50,17036
cherrypy/lib/gctools.py,sha256=N4fsC9ysEDkJpw2OLu6XgEctqZQESTBFna3Hw--eivk,7344
cherrypy/lib/headers.py,sha256=MGUIjtKmu8obyUGmDyv7nDKp-FlDDeGTc56bnpzylgI,1106
cherrypy/lib/httputil.py,sha256=9OTGG_OmLlrzzqpyLehz5gPnqD8AEMJglmvzvSN5ZTw,17997
cherrypy/lib/jsontools.py,sha256=YQQmGQN4XKnSgB4u0lRl2MB9YkVeNR3-6YWyU76jUFQ,3641
cherrypy/lib/locking.py,sha256=bGcVX_s8spsqxdYG_V7vvgi0tB3mbkCX5rn4uAkbWdg,1269
cherrypy/lib/profiler.py,sha256=9FkhlVb0t2kGRfg02AwMZ1j6AH3byfyIhGL_QsvPkSE,6555
cherrypy/lib/reprconf.py,sha256=A7PW2aFmOjUQ-d4UTQByIpGMuDiBQQc7_nX2NNBY8-4,12336
cherrypy/lib/sessions.py,sha256=ZyU8DpZzFI_MkK7_HPbJTqPpqDyH_3rCPA4V6fsn5IM,30969
cherrypy/lib/static.py,sha256=7020LxgSJjtQcMpdpgijwY39HtgmDVpbXq5s16Pip-U,16613
cherrypy/lib/xmlrpcutil.py,sha256=UZqJsoBboSHjgsSiOfFcDwlubtSruhYyxpKdxutazQk,1684
cherrypy/process/__init__.py,sha256=RjaRqUG5U-ZhxAs7GBWN9PFR5hIK-9a9x3ZFwFjyW4Y,547
cherrypy/process/plugins.py,sha256=0fiwpkpsHqHNH1CNJkn6N8e8fflNGJB-Xx0Qsp2Zc2M,26881
cherrypy/process/servers.py,sha256=jbxvxakxHnwe-UteHK4sHwtNUs4SaXOtN-KaervmSS8,13420
cherrypy/process/win32.py,sha256=xKNi9HGaCr9PyLUdxOVsr6EI6g8rkr_k_nIY28Ndbc8,5789
cherrypy/process/wspbus.py,sha256=SqUu_BwEi8fW7A5nbKUgHM-TWlm7KsyzfV_lzTNOWos,21507
cherrypy/scaffold/__init__.py,sha256=XC3QKJ7LBW2zyf5gIUcJX7XHPEJwv1-72ylawf3j_bM,1996
cherrypy/scaffold/apache-fcgi.conf,sha256=0M10HHX8i2Or3r-gHoDglSQK4dZHd4Jhx4WhamJtuwc,930
cherrypy/scaffold/example.conf,sha256=EAqr2Sb1B1osc198dY1FV2A0wgnBmGsJf99_-GGexVU,62
cherrypy/scaffold/site.conf,sha256=pjUhF-ir1xzSsV7LqXGfyR6Ns_r_n3ATWw8OlfbgT3w,426
cherrypy/scaffold/static/made_with_cherrypy_small.png,sha256=VlSRvYj-pZzls-peicQhWpbqkdsZHtNhPtSfZV12BFQ,6347
cherrypy/test/__init__.py,sha256=fu7zoW-l3O5Ry66gQjUaVd47lt4OqTVMWax1WJyyfY8,394
cherrypy/test/_test_decorators.py,sha256=51tH-kM4H5JNRzS_eIUfvh50X3qtrasmqBiEg6CR3zQ,952
cherrypy/test/_test_states_demo.py,sha256=lVpbqHgHcIfdz1-i6xDbmzmD87V3JbwD_e4lK0Bagnw,1876
cherrypy/test/benchmark.py,sha256=wi-ir2An3824eVGyJFle4qpfZ9D5VBNOI7qkOdMZ4Ig,12697
cherrypy/test/checkerdemo.py,sha256=dc24EC2tHmManEdznyOt4NIYp_eiKLJM7NgKO05Nts0,1861
cherrypy/test/fastcgi.conf,sha256=0YsIPLmOg-NdGGqCCPpBERKGYy_zBU6LkRDyR41nvBE,686
cherrypy/test/fcgi.conf,sha256=neiD1sjiFblAJLUdlOSKiZ1uVl1eK3zM2_7LZQigkTs,486
cherrypy/test/helper.py,sha256=EA-fbXA00twEmJQVMVBHLfgX20BVmkmlY88tfueBDiM,16358
cherrypy/test/logtest.py,sha256=r-SatZp5AVVJi_eoZj26GH2bZvOQAbxOl30ZUZNDqfU,8364
cherrypy/test/modfastcgi.py,sha256=SblRPAhbo9uJLHISamWE7AyW2slrlWms76VvpnCqzLo,4607
cherrypy/test/modfcgid.py,sha256=7a5iRZ9VQyr5O2Pajgo_73j6rbMr_i02FG6sO1-Q-lM,4192
cherrypy/test/modpy.py,sha256=F6Ar3uxr1ckp5V5l6FNJEZnNHSB0SDtR5hGmYU010oY,4933
cherrypy/test/modwsgi.py,sha256=tJzjPqpAzp2ntbGUl7TBur9rI8JGLfOQrxmhejMxknY,4788
cherrypy/test/sessiondemo.py,sha256=DMdk_PpKXRONBNDYmOBgcFSGrAll_MZgZMTQUbiRK14,5529
cherrypy/test/static/404.html,sha256=9jnU0KKbdzHVq9EvhkGOEly92qNJbtCSXOsH0KFTL4U,92
cherrypy/test/static/dirback.jpg,sha256=eS_X3BSeu8OSu-GTYndM1tJkWoW_oVJp1O_mmUUGeo8,16585
cherrypy/test/static/index.html,sha256=cB6ALrLhcxEGyMNgOHzmnBvmRnPCW_u3ebZUqdCiHkQ,14
cherrypy/test/style.css,sha256=2Ypw_ziOWlY4dTZJlwsrorDLqLA1z485lgagaGemtKQ,17
cherrypy/test/test.pem,sha256=x6LrLPw2dBRyZwHXk6FhdSDNM3-Cv7DBXc8o4A19RhI,2254
cherrypy/test/test_auth_basic.py,sha256=JlLdHeyaEutMzWK7LX3HfSOVQWAKZXoUNL67sSgUMZQ,4499
cherrypy/test/test_auth_digest.py,sha256=pp49xOWFS_i8N_s8MQ6h_exbOxlQZjfjT-cxDvi701E,4454
cherrypy/test/test_bus.py,sha256=K_6pYUz4q6xBOh5tMK67d3n_X1VF1LJhS1mRsGh0JnQ,9960
cherrypy/test/test_caching.py,sha256=s6aA_P6mcasaEfPwX50c9A-UqwSiYdzMVp5GFQH08uQ,14386
cherrypy/test/test_config.py,sha256=lUxRUCBmDVBo8LK7yi8w5qvFcS3vw4YpFwl66TdRskQ,8836
cherrypy/test/test_config_server.py,sha256=D7jLqZawCOh2z6vGU-WjkupiJA0BxPywb8LuILL2JGA,4037
cherrypy/test/test_conn.py,sha256=g2e2CCaB_2UiFWVLniVZbk2YNrXqN9_J0M1FymMZ_F8,30744
cherrypy/test/test_core.py,sha256=hWZWp5TwAtLEMe_EQrptfU1LugEICnW6LApt1ViGBs8,30393
cherrypy/test/test_dynamicobjectmapping.py,sha256=99AQgjvnoFX_8wB1sOD5R61xR2YbdMv2KCVeKszf5hs,12226
cherrypy/test/test_encoding.py,sha256=lGUiNKrWQlHma4fV-mjR0vv-xylrw_v1E6j61CT6vBc,17535
cherrypy/test/test_etags.py,sha256=mzuKNjFXx67gHoqS_jaGEzjxJ025mzlLgepsW2DYETI,3093
cherrypy/test/test_http.py,sha256=wpIbX0b4Q-pUSzP8q8mqA0DGe-KLA6QgeRg-GpcBTAI,11153
cherrypy/test/test_httputil.py,sha256=gA3u7bt1vV2Av3T1I0CaEGEdYV9kGDEPD80al1IbPE8,2412
cherrypy/test/test_iterator.py,sha256=siygtCR27EoDoOwyr8Bvk-Gt31dJTLAsS-V3WRYE4PM,5754
cherrypy/test/test_json.py,sha256=rVfzyCwSMf79bcZ8aYBA_180FJxcHY9jFT5_0M6-pSc,2860
cherrypy/test/test_logging.py,sha256=bEWnt7R-UJ_dYOsDQ1EiEwwmpLLUhPj9X2MfQNcfxoQ,9141
cherrypy/test/test_mime.py,sha256=-6HpcAIGtN56nWRQclxvSKhNLW5e_bYRO6kDtM3DlN8,4538
cherrypy/test/test_misc_tools.py,sha256=Ixjq2IAJZ1BTuV-i_AUKw_OsjYru_i9RPm1gIWyWt_E,7094
cherrypy/test/test_native.py,sha256=rtow-ShYRkd2oEBtDksU6e06_L0BvZToFJFngeeGF34,971
cherrypy/test/test_objectmapping.py,sha256=KTMqAizhWBGCgDFp1n8msno4ylYPnmvWZ1cWHzzUgO0,14504
cherrypy/test/test_params.py,sha256=p4DfugiKWxF9nPX5Gs7arGhUmpx_eeZhWyS5yCXCUj4,1862
cherrypy/test/test_plugins.py,sha256=h0C9ri4xinHk70AQIOJgRfzjKylGZ9FhbX2Ifvvixto,340
cherrypy/test/test_proxy.py,sha256=XPdi3O_izRLtvu3UTJE-WTBVmn4DEng7irrfUD69srU,5630
cherrypy/test/test_refleaks.py,sha256=HK55E9JtRFc28FhnnTLV9DrM1k82ZmPEVdHYyp525K0,1555
cherrypy/test/test_request_obj.py,sha256=UMccBZZO-HpMz5gnK8uwczw1vwgrv8JOL7BKhnhtOFc,37395
cherrypy/test/test_routes.py,sha256=m0MvSqurFk42PuSp5vF8ue40-vnhPNwC2EGTqkDozo4,2583
cherrypy/test/test_session.py,sha256=Ugi_y_ACOtl1caCLDClUiPOoPk8nz0yEKRHsJP3xu-s,18080
cherrypy/test/test_sessionauthenticate.py,sha256=zhVUpN3FWPaZbMKQcTrDQiaI-RXjlwrJi7ssqbzhmU8,2013
cherrypy/test/test_states.py,sha256=z0Apr1wKhTmuPqCNRx87RMLSgxsPsoAL5osLN7oQ3Kg,16740
cherrypy/test/test_static.py,sha256=4VQpLumcxWGNFwaLuCgOS11pyla1gpGgQNlKhMOopws,16702
cherrypy/test/test_tools.py,sha256=HMfwXvON6heQjjoqnb7ch-PuDiZ6OdLv-zHft9NNuZk,17833
cherrypy/test/test_tutorials.py,sha256=FVI8_-H-owh83hWZfnUGx0saapUyAVTlxuU1aQPvqGs,6928
cherrypy/test/test_virtualhost.py,sha256=ap_e1gM7PERVN4mU70zc5RD1pVoSdN-te-B_uIAlV8g,4021
cherrypy/test/test_wsgi_ns.py,sha256=PuoUe2EUwZk5z0yLIw9NdRkd_P4VKU7Cckj8n0QKSJo,2812
cherrypy/test/test_wsgi_unix_socket.py,sha256=lzHqySH39uDJSn1GGnj67syM6HrhVQcM_7TlKma0o4A,2218
cherrypy/test/test_wsgi_vhost.py,sha256=4uZ8_luFHiQJ6uxQeHFJtjemug8UiPcKmnwwclj0dkw,1034
cherrypy/test/test_wsgiapps.py,sha256=1SdQGuWVcVCTiSvizDdIOekuvQLCybRXUKF2dpV2NTs,3997
cherrypy/test/test_xmlrpc.py,sha256=DQfgzjIMcQP_gOi8el1QQ5dkfPaRbyZ6CDdPL9ZIgWo,4584
cherrypy/test/webtest.py,sha256=uRwMw_why3KeXGZXdHE7-GfJag4ziL9KZmDGx4Q7Jbg,262
cherrypy/tutorial/README.rst,sha256=v77BbhuiK44TxqeYPk3PwqV09Dg5AKWFdp7re04KdEo,617
cherrypy/tutorial/__init__.py,sha256=cmLXfvQI0L6trCXwDzR0WE1bu4JZYt301HJRNhjZOBc,85
cherrypy/tutorial/custom_error.html,sha256=9cMEb83zwct9i-fJlyl7yvBSNexF7yEIWOoxH8lpllQ,404
cherrypy/tutorial/pdf_file.pdf,sha256=-WuAfJ9i1vbUT9EcKKvUaNGfbPQsNeFx-qsB4gsxWUg,11961
cherrypy/tutorial/tut01_helloworld.py,sha256=Zr6KxJYAfr55kfHK6ga47fVC0_fJSUlgRSadnNm2zUk,1012
cherrypy/tutorial/tut02_expose_methods.py,sha256=ikz6QOGLknEZm0k-f9BR18deItQ3UO8yYg1NW13kB8g,801
cherrypy/tutorial/tut03_get_and_post.py,sha256=bY_cTha4zIkokv585yioQkM-S2a7GfetTE3ovl3-7cw,1587
cherrypy/tutorial/tut04_complex_site.py,sha256=PCxyUVKG-L_YUgt1D5Q5t0_IUiGXbcGyf_L00K8cH8c,2948
cherrypy/tutorial/tut05_derived_objects.py,sha256=u0LBnUTW8DnexEAtowhJ0sjeAqp6rS065TM2QXfDY1I,2141
cherrypy/tutorial/tut06_default_method.py,sha256=3Wx34fL_4P3M_dhu6RQdpXQeSriIJpSsy72IqpNQ6ns,2264
cherrypy/tutorial/tut07_sessions.py,sha256=fQo-v_ol5CjXiq4vdsm7Dh1us6DVDzbqAIouRkmLeR8,1228
cherrypy/tutorial/tut08_generators_and_yield.py,sha256=m5GfOtNDoGxMd1rw5tCRg3o9cTyt-e1gR-ALoLRLiQw,1288
cherrypy/tutorial/tut09_files.py,sha256=qcelN09_k62zWVC-daId5I4i-fw6TWA4WddUbG4j8B8,3463
cherrypy/tutorial/tut10_http_errors.py,sha256=6GllO8SI-6Xs6R8hRwHe7jUzGxYJxLlxwxgaOJC9i8Y,2706
cherrypy/tutorial/tutorial.conf,sha256=9ENgfRDyopHuignr_aHeMaWoC562xThbmlgF6zg4oEE,96

View file

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.3.2)
Root-Is-Purelib: true
Tag: py3-none-any

View file

@@ -0,0 +1,2 @@
[console_scripts]
cherryd = cherrypy.__main__:run

View file

@@ -0,0 +1 @@
cherrypy

376
libs/cherrypy/__init__.py Normal file
View file

@@ -0,0 +1,376 @@
"""CherryPy is a pythonic, object-oriented HTTP framework.
CherryPy consists of not one, but four separate API layers.
The APPLICATION LAYER is the simplest. CherryPy applications are written as
a tree of classes and methods, where each branch in the tree corresponds to
a branch in the URL path. Each method is a 'page handler', which receives
GET and POST params as keyword arguments, and returns or yields the (HTML)
body of the response. The special method name 'index' is used for paths
that end in a slash, and the special method name 'default' is used to
handle multiple paths via a single handler. This layer also includes:
* the 'exposed' attribute (and cherrypy.expose)
* cherrypy.quickstart()
* _cp_config attributes
* cherrypy.tools (including cherrypy.session)
* cherrypy.url()
The ENVIRONMENT LAYER is used by developers at all levels. It provides
information about the current request and response, plus the application
and server environment, via a (default) set of top-level objects:
* cherrypy.request
* cherrypy.response
* cherrypy.engine
* cherrypy.server
* cherrypy.tree
* cherrypy.config
* cherrypy.thread_data
* cherrypy.log
* cherrypy.HTTPError, NotFound, and HTTPRedirect
* cherrypy.lib
The EXTENSION LAYER allows advanced users to construct and share their own
plugins. It consists of:
* Hook API
* Tool API
* Toolbox API
* Dispatch API
* Config Namespace API
Finally, there is the CORE LAYER, which uses the core API's to construct
the default components which are available at higher layers. You can think
of the default components as the 'reference implementation' for CherryPy.
Megaframeworks (and advanced users) may replace the default components
with customized or extended components. The core API's are:
* Application API
* Engine API
* Request API
* Server API
* WSGI API
These API's are described in the `CherryPy specification
<https://github.com/cherrypy/cherrypy/wiki/CherryPySpec>`_.
"""
try:
import importlib.metadata as importlib_metadata
except ImportError:
# fall back for python <= 3.7
# This try/except can be removed once Python <= 3.7 support is dropped
import importlib_metadata
from threading import local as _local
from ._cperror import (
HTTPError, HTTPRedirect, InternalRedirect,
NotFound, CherryPyException,
)
from . import _cpdispatch as dispatch
from ._cptools import default_toolbox as tools, Tool
from ._helper import expose, popargs, url
from . import _cprequest, _cpserver, _cptree, _cplogging, _cpconfig
import cherrypy.lib.httputil as _httputil
from ._cptree import Application
from . import _cpwsgi as wsgi
from . import process
try:
from .process import win32
engine = win32.Win32Bus()
engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
del win32
except ImportError:
engine = process.bus
from . import _cpchecker
__all__ = (
'HTTPError', 'HTTPRedirect', 'InternalRedirect',
'NotFound', 'CherryPyException',
'dispatch', 'tools', 'Tool', 'Application',
'wsgi', 'process', 'tree', 'engine',
'quickstart', 'serving', 'request', 'response', 'thread_data',
'log', 'expose', 'popargs', 'url', 'config',
)
__import__('cherrypy._cptools')
__import__('cherrypy._cprequest')
tree = _cptree.Tree()
try:
__version__ = importlib_metadata.version('cherrypy')
except Exception:
__version__ = 'unknown'
engine.listeners['before_request'] = set()
engine.listeners['after_request'] = set()
engine.autoreload = process.plugins.Autoreloader(engine)
engine.autoreload.subscribe()
engine.thread_manager = process.plugins.ThreadManager(engine)
engine.thread_manager.subscribe()
engine.signal_handler = process.plugins.SignalHandler(engine)
class _HandleSignalsPlugin(object):
"""Handle signals from other processes.
Based on the configured platform handlers above.
"""
def __init__(self, bus):
self.bus = bus
def subscribe(self):
"""Add the handlers based on the platform."""
if hasattr(self.bus, 'signal_handler'):
self.bus.signal_handler.subscribe()
if hasattr(self.bus, 'console_control_handler'):
self.bus.console_control_handler.subscribe()
engine.signals = _HandleSignalsPlugin(engine)
server = _cpserver.Server()
server.subscribe()
def quickstart(root=None, script_name='', config=None):
"""Mount the given root, start the builtin server (and engine), then block.
root: an instance of a "controller class" (a collection of page handler
methods) which represents the root of the application.
script_name: a string containing the "mount point" of the application.
This should start with a slash, and be the path portion of the URL
at which to mount the given root. For example, if root.index() will
handle requests to "http://www.example.com:8080/dept/app1/", then
the script_name argument would be "/dept/app1".
It MUST NOT end in a slash. If the script_name refers to the root
of the URI, it MUST be an empty string (not "/").
config: a file or dict containing application config. If this contains
a [global] section, those entries will be used in the global
(site-wide) config.
"""
if config:
_global_conf_alias.update(config)
tree.mount(root, script_name, config)
engine.signals.subscribe()
engine.start()
engine.block()
class _Serving(_local):
"""An interface for registering request and response objects.
Rather than have a separate "thread local" object for the request
and the response, this class works as a single threadlocal container
for both objects (and any others which developers wish to define).
In this way, we can easily dump those objects when we stop/start a
new HTTP conversation, yet still refer to them as module-level
globals in a thread-safe way.
"""
request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
_httputil.Host('127.0.0.1', 1111))
"""The request object for the current thread.
In the main thread, and any threads which are not receiving HTTP
requests, this is None.
"""
response = _cprequest.Response()
"""The response object for the current thread.
In the main thread, and any threads which are not receiving HTTP
requests, this is None.
"""
def load(self, request, response):
self.request = request
self.response = response
def clear(self):
"""Remove all attributes of self."""
self.__dict__.clear()
serving = _Serving()
class _ThreadLocalProxy(object):
__slots__ = ['__attrname__', '__dict__']
def __init__(self, attrname):
self.__attrname__ = attrname
def __getattr__(self, name):
child = getattr(serving, self.__attrname__)
return getattr(child, name)
def __setattr__(self, name, value):
if name in ('__attrname__', ):
object.__setattr__(self, name, value)
else:
child = getattr(serving, self.__attrname__)
setattr(child, name, value)
def __delattr__(self, name):
child = getattr(serving, self.__attrname__)
delattr(child, name)
@property
def __dict__(self):
child = getattr(serving, self.__attrname__)
d = child.__class__.__dict__.copy()
d.update(child.__dict__)
return d
def __getitem__(self, key):
child = getattr(serving, self.__attrname__)
return child[key]
def __setitem__(self, key, value):
child = getattr(serving, self.__attrname__)
child[key] = value
def __delitem__(self, key):
child = getattr(serving, self.__attrname__)
del child[key]
def __contains__(self, key):
child = getattr(serving, self.__attrname__)
return key in child
def __len__(self):
child = getattr(serving, self.__attrname__)
return len(child)
def __nonzero__(self):
child = getattr(serving, self.__attrname__)
return bool(child)
# Python 3
__bool__ = __nonzero__
# Create request and response object (the same objects will be used
# throughout the entire life of the webserver, but will redirect
# to the "serving" object)
request = _ThreadLocalProxy('request')
response = _ThreadLocalProxy('response')
# Create thread_data object as a thread-specific all-purpose storage
class _ThreadData(_local):
"""A container for thread-specific data."""
thread_data = _ThreadData()
# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
# The only other way would be to change what is returned from type(request)
# and that's not possible in pure Python (you'd have to fake ob_type).
def _cherrypy_pydoc_resolve(thing, forceload=0):
"""Given an object or a path to an object, get the object and its name."""
if isinstance(thing, _ThreadLocalProxy):
thing = getattr(serving, thing.__attrname__)
return _pydoc._builtin_resolve(thing, forceload)
try:
import pydoc as _pydoc
_pydoc._builtin_resolve = _pydoc.resolve
_pydoc.resolve = _cherrypy_pydoc_resolve
except ImportError:
pass
class _GlobalLogManager(_cplogging.LogManager):
"""A site-wide LogManager; routes to app.log or global log as appropriate.
This :class:`LogManager<cherrypy._cplogging.LogManager>` implements
cherrypy.log() and cherrypy.log.access(). If either
function is called during a request, the message will be sent to the
logger for the current Application. If they are called outside of a
request, the message will be sent to the site-wide logger.
"""
def __call__(self, *args, **kwargs):
"""Log the given message to the app.log or global log.
Log the given message to the app.log or global log as
appropriate.
"""
# Do NOT use try/except here. See
# https://github.com/cherrypy/cherrypy/issues/945
if hasattr(request, 'app') and hasattr(request.app, 'log'):
log = request.app.log
else:
log = self
return log.error(*args, **kwargs)
def access(self):
"""Log an access message to the app.log or global log.
Log the given message to the app.log or global log as
appropriate.
"""
try:
return request.app.log.access()
except AttributeError:
return _cplogging.LogManager.access(self)
log = _GlobalLogManager()
# Set a default screen handler on the global log.
log.screen = True
log.error_file = ''
# Using an access file makes CP about 10% slower. Leave off by default.
log.access_file = ''
@engine.subscribe('log')
def _buslog(msg, level):
log.error(msg, 'ENGINE', severity=level)
# Use _global_conf_alias so quickstart can use 'config' as an arg
# without shadowing cherrypy.config.
config = _global_conf_alias = _cpconfig.Config()
config.defaults = {
'tools.log_tracebacks.on': True,
'tools.log_headers.on': True,
'tools.trailing_slash.on': True,
'tools.encode.on': True
}
config.namespaces['log'] = lambda k, v: setattr(log, k, v)
config.namespaces['checker'] = lambda k, v: setattr(checker, k, v)
# Must reset to get our defaults applied.
config.reset()
checker = _cpchecker.Checker()
engine.subscribe('start', checker)
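
A minimal sketch of the application layer described in the module docstring above: a tree of classes whose exposed methods act as page handlers, mounted and served with cherrypy.quickstart(). The HelloWorld class and its response text are illustrative placeholders, not part of the vendored library.

import cherrypy

class HelloWorld:
    @cherrypy.expose
    def index(self):
        # 'index' answers the path that ends in a slash, e.g. "/"
        return 'Hello world!'

if __name__ == '__main__':
    # script_name defaults to '' (never '/') when mounting at the URI root.
    cherrypy.quickstart(HelloWorld())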

View file

@@ -0,0 +1,5 @@
"""CherryPy'd cherryd daemon runner."""
from cherrypy.daemon import run
__name__ == '__main__' and run()

326
libs/cherrypy/_cpchecker.py Normal file
View file

@@ -0,0 +1,326 @@
"""Checker for CherryPy sites and mounted apps."""
import os
import warnings
import builtins
import cherrypy
class Checker(object):
"""A checker for CherryPy sites and their mounted applications.
When this object is called at engine startup, it executes each
of its own methods whose names start with ``check_``. If you wish
to disable selected checks, simply add a line in your global
config which sets the appropriate method to False::
[global]
checker.check_skipped_app_config = False
You may also dynamically add or replace ``check_*`` methods in this way.
"""
on = True
"""If True (the default), run all checks; if False, turn off all checks."""
def __init__(self):
"""Initialize Checker instance."""
self._populate_known_types()
def __call__(self):
"""Run all check_* methods."""
if self.on:
oldformatwarning = warnings.formatwarning
warnings.formatwarning = self.formatwarning
try:
for name in dir(self):
if name.startswith('check_'):
method = getattr(self, name)
if method and hasattr(method, '__call__'):
method()
finally:
warnings.formatwarning = oldformatwarning
def formatwarning(self, message, category, filename, lineno, line=None):
"""Format a warning."""
return 'CherryPy Checker:\n%s\n\n' % message
# This value should be set inside _cpconfig.
global_config_contained_paths = False
def check_app_config_entries_dont_start_with_script_name(self):
"""Check for App config with sections that repeat script_name."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
if not app.config:
continue
if sn == '':
continue
sn_atoms = sn.strip('/').split('/')
for key in app.config.keys():
key_atoms = key.strip('/').split('/')
if key_atoms[:len(sn_atoms)] == sn_atoms:
warnings.warn(
'The application mounted at %r has config '
'entries that start with its script name: %r' % (sn,
key))
def check_site_config_entries_in_app_config(self):
"""Check for mounted Applications that have site-scoped config."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
msg = []
for section, entries in app.config.items():
if section.startswith('/'):
for key, value in entries.items():
for n in ('engine.', 'server.', 'tree.', 'checker.'):
if key.startswith(n):
msg.append('[%s] %s = %s' %
(section, key, value))
if msg:
msg.insert(0,
'The application mounted at %r contains the '
'following config entries, which are only allowed '
'in site-wide config. Move them to a [global] '
'section and pass them to cherrypy.config.update() '
'instead of tree.mount().' % sn)
warnings.warn(os.linesep.join(msg))
def check_skipped_app_config(self):
"""Check for mounted Applications that have no config."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
if not app.config:
msg = 'The Application mounted at %r has an empty config.' % sn
if self.global_config_contained_paths:
msg += (' It looks like the config you passed to '
'cherrypy.config.update() contains application-'
'specific sections. You must explicitly pass '
'application config via '
'cherrypy.tree.mount(..., config=app_config)')
warnings.warn(msg)
return
def check_app_config_brackets(self):
"""Check for App config with extraneous brackets in section names."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
if not app.config:
continue
for key in app.config.keys():
if key.startswith('[') or key.endswith(']'):
warnings.warn(
'The application mounted at %r has config '
'section names with extraneous brackets: %r. '
'Config *files* need brackets; config *dicts* '
'(e.g. passed to tree.mount) do not.' % (sn, key))
def check_static_paths(self):
"""Check Application config for incorrect static paths."""
# Use the dummy Request object in the main thread.
request = cherrypy.request
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
request.app = app
for section in app.config:
# get_resource will populate request.config
request.get_resource(section + '/dummy.html')
conf = request.config.get
if conf('tools.staticdir.on', False):
msg = ''
root = conf('tools.staticdir.root')
dir = conf('tools.staticdir.dir')
if dir is None:
msg = 'tools.staticdir.dir is not set.'
else:
fulldir = ''
if os.path.isabs(dir):
fulldir = dir
if root:
msg = ('dir is an absolute path, even '
'though a root is provided.')
testdir = os.path.join(root, dir[1:])
if os.path.exists(testdir):
msg += (
'\nIf you meant to serve the '
'filesystem folder at %r, remove the '
'leading slash from dir.' % (testdir,))
else:
if not root:
msg = (
'dir is a relative path and '
'no root provided.')
else:
fulldir = os.path.join(root, dir)
if not os.path.isabs(fulldir):
msg = ('%r is not an absolute path.' % (
fulldir,))
if fulldir and not os.path.exists(fulldir):
if msg:
msg += '\n'
msg += ('%r (root + dir) is not an existing '
'filesystem path.' % fulldir)
if msg:
warnings.warn('%s\nsection: [%s]\nroot: %r\ndir: %r'
% (msg, section, root, dir))
# -------------------------- Compatibility -------------------------- #
obsolete = {
'server.default_content_type': 'tools.response_headers.headers',
'log_access_file': 'log.access_file',
'log_config_options': None,
'log_file': 'log.error_file',
'log_file_not_found': None,
'log_request_headers': 'tools.log_headers.on',
'log_to_screen': 'log.screen',
'show_tracebacks': 'request.show_tracebacks',
'throw_errors': 'request.throw_errors',
'profiler.on': ('cherrypy.tree.mount(profiler.make_app('
'cherrypy.Application(Root())))'),
}
deprecated = {}
def _compat(self, config):
"""Process config and warn on each obsolete or deprecated entry."""
for section, conf in config.items():
if isinstance(conf, dict):
for k in conf:
if k in self.obsolete:
warnings.warn('%r is obsolete. Use %r instead.\n'
'section: [%s]' %
(k, self.obsolete[k], section))
elif k in self.deprecated:
warnings.warn('%r is deprecated. Use %r instead.\n'
'section: [%s]' %
(k, self.deprecated[k], section))
else:
if section in self.obsolete:
warnings.warn('%r is obsolete. Use %r instead.'
% (section, self.obsolete[section]))
elif section in self.deprecated:
warnings.warn('%r is deprecated. Use %r instead.'
% (section, self.deprecated[section]))
def check_compatibility(self):
"""Process config and warn on each obsolete or deprecated entry."""
self._compat(cherrypy.config)
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
self._compat(app.config)
# ------------------------ Known Namespaces ------------------------ #
extra_config_namespaces = []
def _known_ns(self, app):
ns = ['wsgi']
ns.extend(app.toolboxes)
ns.extend(app.namespaces)
ns.extend(app.request_class.namespaces)
ns.extend(cherrypy.config.namespaces)
ns += self.extra_config_namespaces
for section, conf in app.config.items():
is_path_section = section.startswith('/')
if is_path_section and isinstance(conf, dict):
for k in conf:
atoms = k.split('.')
if len(atoms) > 1:
if atoms[0] not in ns:
# Spit out a special warning if a known
# namespace is preceded by "cherrypy."
if atoms[0] == 'cherrypy' and atoms[1] in ns:
msg = (
'The config entry %r is invalid; '
'try %r instead.\nsection: [%s]'
% (k, '.'.join(atoms[1:]), section))
else:
msg = (
'The config entry %r is invalid, '
'because the %r config namespace '
'is unknown.\n'
'section: [%s]' % (k, atoms[0], section))
warnings.warn(msg)
elif atoms[0] == 'tools':
if atoms[1] not in dir(cherrypy.tools):
msg = (
'The config entry %r may be invalid, '
'because the %r tool was not found.\n'
'section: [%s]' % (k, atoms[1], section))
warnings.warn(msg)
def check_config_namespaces(self):
"""Process config and warn on each unknown config namespace."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
self._known_ns(app)
# -------------------------- Config Types -------------------------- #
known_config_types = {}
def _populate_known_types(self):
b = [x for x in vars(builtins).values()
if type(x) is type(str)]
def traverse(obj, namespace):
for name in dir(obj):
# Hack for 3.2's warning about body_params
if name == 'body_params':
continue
vtype = type(getattr(obj, name, None))
if vtype in b:
self.known_config_types[namespace + '.' + name] = vtype
traverse(cherrypy.request, 'request')
traverse(cherrypy.response, 'response')
traverse(cherrypy.server, 'server')
traverse(cherrypy.engine, 'engine')
traverse(cherrypy.log, 'log')
def _known_types(self, config):
msg = ('The config entry %r in section %r is of type %r, '
'which does not match the expected type %r.')
for section, conf in config.items():
if not isinstance(conf, dict):
conf = {section: conf}
for k, v in conf.items():
if v is not None:
expected_type = self.known_config_types.get(k, None)
vtype = type(v)
if expected_type and vtype != expected_type:
warnings.warn(msg % (k, section, vtype.__name__,
expected_type.__name__))
def check_config_types(self):
"""Assert that config values are of the same type as default values."""
self._known_types(cherrypy.config)
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
self._known_types(app.config)
# -------------------- Specific config warnings -------------------- #
def check_localhost(self):
"""Warn if any socket_host is 'localhost'.
See #711.
"""
for k, v in cherrypy.config.items():
if k == 'server.socket_host' and v == 'localhost':
warnings.warn("The use of 'localhost' as a socket host can "
'cause problems on newer systems, since '
"'localhost' can map to either an IPv4 or an "
"IPv6 address. You should use '127.0.0.1' "
"or '[::1]' instead.")

View file

@@ -0,0 +1,59 @@
"""Compatibility code for using CherryPy with various versions of Python.
To retain compatibility with older Python versions, this module provides a
useful abstraction over the differences between Python versions, sometimes by
preferring a newer idiom, sometimes an older one, and sometimes a custom one.
In particular, Python 2 uses str and '' for byte strings, while Python 3
uses str and '' for unicode strings. We will call each of these the 'native
string' type for each version. Because of this major difference, this module
provides
two functions: 'ntob', which translates native strings (of type 'str') into
byte strings regardless of Python version, and 'ntou', which translates native
strings to unicode strings.
Try not to use the compatibility functions 'ntob', 'ntou', 'tonative'.
They were created with Python 2.3-2.5 compatibility in mind.
Instead, use unicode literals (from __future__) and bytes literals
and their .encode/.decode methods as needed.
"""
import http.client
def ntob(n, encoding='ISO-8859-1'):
"""Return the given native string as a byte string in the given
encoding.
"""
assert_native(n)
# In Python 3, the native string type is unicode
return n.encode(encoding)
def ntou(n, encoding='ISO-8859-1'):
"""Return the given native string as a unicode string with the given
encoding.
"""
assert_native(n)
# In Python 3, the native string type is unicode
return n
def tonative(n, encoding='ISO-8859-1'):
"""Return the given string as a native string in the given encoding."""
# In Python 3, the native string type is unicode
if isinstance(n, bytes):
return n.decode(encoding)
return n
def assert_native(n):
if not isinstance(n, str):
raise TypeError('n must be a native str (got %s)' % type(n).__name__)
# Some platforms don't expose HTTPSConnection, so handle it separately
HTTPSConnection = getattr(http.client, 'HTTPSConnection', None)
text_or_bytes = str, bytes
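
A small sketch of the compatibility helpers documented above; the sample strings are arbitrary:

from cherrypy._cpcompat import ntob, ntou, tonative

assert ntob('résumé', 'utf-8') == 'résumé'.encode('utf-8')  # native str -> bytes
assert ntou('résumé') == 'résumé'   # already unicode on Python 3
assert tonative(b'abc') == 'abc'    # bytes -> native str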

295
libs/cherrypy/_cpconfig.py Normal file
View file

@@ -0,0 +1,295 @@
"""Configuration system for CherryPy.
Configuration in CherryPy is implemented via dictionaries. Keys are strings
which name the mapped value, which may be of any type.
Architecture
------------
CherryPy Requests are part of an Application, which runs in a global context,
and configuration data may apply to any of those three scopes:
Global
Configuration entries which apply everywhere are stored in
cherrypy.config.
Application
Entries which apply to each mounted application are stored
on the Application object itself, as 'app.config'. This is a two-level
dict where each key is a path, or "relative URL" (for example, "/" or
"/path/to/my/page"), and each value is a config dict. Usually, this
data is provided in the call to tree.mount(root(), config=conf),
although you may also use app.merge(conf).
Request
Each Request object possesses a single 'Request.config' dict.
Early in the request process, this dict is populated by merging global
config entries, Application entries (whose path equals or is a parent
of Request.path_info), and any config acquired while looking up the
page handler (see next).
Declaration
-----------
Configuration data may be supplied as a Python dictionary, as a filename,
or as an open file object. When you supply a filename or file, CherryPy
uses Python's builtin ConfigParser; you declare Application config by
writing each path as a section header::
[/path/to/my/page]
request.stream = True
To declare global configuration entries, place them in a [global] section.
You may also declare config entries directly on the classes and methods
(page handlers) that make up your CherryPy application via the ``_cp_config``
attribute, set with the ``cherrypy.config`` decorator. For example::
@cherrypy.config(**{'tools.gzip.on': True})
class Demo:
@cherrypy.expose
@cherrypy.config(**{'request.show_tracebacks': False})
def index(self):
return "Hello world"
.. note::
This behavior is only guaranteed for the default dispatcher.
Other dispatchers may have different restrictions on where
you can attach config attributes.
Namespaces
----------
Configuration keys are separated into namespaces by the first "." in the key.
Current namespaces:
engine
Controls the 'application engine', including autoreload.
These can only be declared in the global config.
tree
Grafts cherrypy.Application objects onto cherrypy.tree.
These can only be declared in the global config.
hooks
Declares additional request-processing functions.
log
Configures the logging for each application.
These can only be declared in the global or / config.
request
Adds attributes to each Request.
response
Adds attributes to each Response.
server
Controls the default HTTP server via cherrypy.server.
These can only be declared in the global config.
tools
Runs and configures additional request-processing packages.
wsgi
Adds WSGI middleware to an Application's "pipeline".
These can only be declared in the app's root config ("/").
checker
Controls the 'checker', which looks for common errors in
app state (including config) when the engine starts.
Global config only.
The only key that does not exist in a namespace is the "environment" entry.
This special entry 'imports' other config entries from a template stored in
cherrypy._cpconfig.environments[environment]. It only applies to the global
config, and only when you use cherrypy.config.update.
You can define your own namespaces to be called at the Global, Application,
or Request level, by adding a named handler to cherrypy.config.namespaces,
app.namespaces, or app.request_class.namespaces. The name can
be any string, and the handler must be either a callable or a (Python 2.5
style) context manager.
"""
import cherrypy
from cherrypy._cpcompat import text_or_bytes
from cherrypy.lib import reprconf
def _if_filename_register_autoreload(ob):
"""Register for autoreload if ob is a string (presumed filename)."""
is_filename = isinstance(ob, text_or_bytes)
is_filename and cherrypy.engine.autoreload.files.add(ob)
def merge(base, other):
"""Merge one app config (from a dict, file, or filename) into another.
If the given config is a filename, it will be appended to the list
of files to monitor for "autoreload" changes.
"""
_if_filename_register_autoreload(other)
# Load other into base
for section, value_map in reprconf.Parser.load(other).items():
if not isinstance(value_map, dict):
raise ValueError(
'Application config must include section headers, but the '
"config you tried to merge doesn't have any sections. "
'Wrap your config in another dict with paths as section '
"headers, for example: {'/': config}.")
base.setdefault(section, {}).update(value_map)
class Config(reprconf.Config):
"""The 'global' configuration data for the entire CherryPy process."""
def update(self, config):
"""Update self from a dict, file or filename."""
_if_filename_register_autoreload(config)
super(Config, self).update(config)
def _apply(self, config):
"""Update self from a dict."""
if isinstance(config.get('global'), dict):
if len(config) > 1:
cherrypy.checker.global_config_contained_paths = True
config = config['global']
if 'tools.staticdir.dir' in config:
config['tools.staticdir.section'] = 'global'
super(Config, self)._apply(config)
@staticmethod
def __call__(**kwargs):
"""Decorate for page handlers to set _cp_config."""
def tool_decorator(f):
_Vars(f).setdefault('_cp_config', {}).update(kwargs)
return f
return tool_decorator
class _Vars(object):
"""Adapter allowing setting a default attribute on a function or class."""
def __init__(self, target):
self.target = target
def setdefault(self, key, default):
if not hasattr(self.target, key):
setattr(self.target, key, default)
return getattr(self.target, key)
# Sphinx begin config.environments
Config.environments = environments = {
'staging': {
'engine.autoreload.on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'request.show_mismatched_params': False,
},
'production': {
'engine.autoreload.on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'request.show_mismatched_params': False,
'log.screen': False,
},
'embedded': {
# For use with CherryPy embedded in another deployment stack.
'engine.autoreload.on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'request.show_mismatched_params': False,
'log.screen': False,
'engine.SIGHUP': None,
'engine.SIGTERM': None,
},
'test_suite': {
'engine.autoreload.on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': True,
'request.show_mismatched_params': True,
'log.screen': False,
},
}
# Sphinx end config.environments
def _server_namespace_handler(k, v):
"""Config handler for the "server" namespace."""
atoms = k.split('.', 1)
if len(atoms) > 1:
# Special-case config keys of the form 'server.servername.socket_port'
# to configure additional HTTP servers.
if not hasattr(cherrypy, 'servers'):
cherrypy.servers = {}
servername, k = atoms
if servername not in cherrypy.servers:
from cherrypy import _cpserver
cherrypy.servers[servername] = _cpserver.Server()
# On by default, but 'on = False' can unsubscribe it (see below).
cherrypy.servers[servername].subscribe()
if k == 'on':
if v:
cherrypy.servers[servername].subscribe()
else:
cherrypy.servers[servername].unsubscribe()
else:
setattr(cherrypy.servers[servername], k, v)
else:
setattr(cherrypy.server, k, v)
Config.namespaces['server'] = _server_namespace_handler
def _engine_namespace_handler(k, v):
"""Config handler for the "engine" namespace."""
engine = cherrypy.engine
if k in {'SIGHUP', 'SIGTERM'}:
engine.subscribe(k, v)
return
if '.' in k:
plugin, attrname = k.split('.', 1)
plugin = getattr(engine, plugin)
op = 'subscribe' if v else 'unsubscribe'
sub_unsub = getattr(plugin, op, None)
if attrname == 'on' and callable(sub_unsub):
sub_unsub()
return
setattr(plugin, attrname, v)
else:
setattr(engine, k, v)
Config.namespaces['engine'] = _engine_namespace_handler
def _tree_namespace_handler(k, v):
"""Namespace handler for the 'tree' config namespace."""
if isinstance(v, dict):
for script_name, app in v.items():
cherrypy.tree.graft(app, script_name)
msg = 'Mounted: %s on %s' % (app, script_name or '/')
cherrypy.engine.log(msg)
else:
cherrypy.tree.graft(v, v.script_name)
cherrypy.engine.log('Mounted: %s on %s' % (v, v.script_name or '/'))
Config.namespaces['tree'] = _tree_namespace_handler
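
A hedged sketch of the two config scopes described in the module docstring above: global entries go through cherrypy.config.update() (including the special 'environment' template), while per-application entries are a dict of path sections passed to tree.mount(). The Root controller and the port number are illustrative.

import cherrypy

class Root:
    @cherrypy.expose
    def index(self):
        return 'ok'

# Global (site-wide) configuration.
cherrypy.config.update({
    'environment': 'production',
    'server.socket_port': 8080,
})

# Application configuration, keyed by path section.
app_conf = {
    '/': {
        'tools.gzip.on': True,
        'request.show_tracebacks': False,
    },
}
cherrypy.tree.mount(Root(), '', config=app_conf)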

View file

@@ -0,0 +1,676 @@
"""CherryPy dispatchers.
A 'dispatcher' is the object which looks up the 'page handler' callable
and collects config for the current request based on the path_info,
other request attributes, and the application architecture. The core
calls the dispatcher as early as possible, passing it a 'path_info'
argument.
The default dispatcher discovers the page handler by matching path_info
to a hierarchical arrangement of objects, starting at request.app.root.
"""
import string
import sys
import types
try:
classtype = (type, types.ClassType)
except AttributeError:
classtype = type
import cherrypy
class PageHandler(object):
"""Callable which sets response.body."""
def __init__(self, callable, *args, **kwargs):
self.callable = callable
self.args = args
self.kwargs = kwargs
@property
def args(self):
"""The ordered args should be accessible from post dispatch hooks."""
return cherrypy.serving.request.args
@args.setter
def args(self, args):
cherrypy.serving.request.args = args
return cherrypy.serving.request.args
@property
def kwargs(self):
"""The named kwargs should be accessible from post dispatch hooks."""
return cherrypy.serving.request.kwargs
@kwargs.setter
def kwargs(self, kwargs):
cherrypy.serving.request.kwargs = kwargs
return cherrypy.serving.request.kwargs
def __call__(self):
try:
return self.callable(*self.args, **self.kwargs)
except TypeError:
x = sys.exc_info()[1]
try:
test_callable_spec(self.callable, self.args, self.kwargs)
except cherrypy.HTTPError:
raise sys.exc_info()[1]
except Exception:
raise x
raise
def test_callable_spec(callable, callable_args, callable_kwargs):
"""Inspect callable and test to see if the given args are suitable for it.
When an error occurs during the handler's invoking stage there are 2
erroneous cases:
1. Too many parameters passed to a function which doesn't define
one of *args or **kwargs.
2. Too little parameters are passed to the function.
There are 3 sources of parameters to a cherrypy handler.
1. query string parameters are passed as keyword parameters to the
handler.
2. body parameters are also passed as keyword parameters.
3. when partial matching occurs, the final path atoms are passed as
positional args.
Both the query string and path atoms are part of the URI. If they are
incorrect, then a 404 Not Found should be raised. Conversely the body
parameters are part of the request; if they are invalid a 400 Bad Request.
"""
show_mismatched_params = getattr(
cherrypy.serving.request, 'show_mismatched_params', False)
try:
(args, varargs, varkw, defaults) = getargspec(callable)
except TypeError:
if isinstance(callable, object) and hasattr(callable, '__call__'):
(args, varargs, varkw,
defaults) = getargspec(callable.__call__)
else:
# If it wasn't one of our own types, re-raise
# the original error
raise
if args and (
# For callable objects, which have a __call__(self) method
hasattr(callable, '__call__') or
# For normal methods
inspect.ismethod(callable)
):
# Strip 'self'
args = args[1:]
arg_usage = dict([(arg, 0,) for arg in args])
vararg_usage = 0
varkw_usage = 0
extra_kwargs = set()
for i, value in enumerate(callable_args):
try:
arg_usage[args[i]] += 1
except IndexError:
vararg_usage += 1
for key in callable_kwargs.keys():
try:
arg_usage[key] += 1
except KeyError:
varkw_usage += 1
extra_kwargs.add(key)
# figure out which args have defaults.
args_with_defaults = args[-len(defaults or []):]
for i, val in enumerate(defaults or []):
# Defaults take effect only when the arg hasn't been used yet.
if arg_usage[args_with_defaults[i]] == 0:
arg_usage[args_with_defaults[i]] += 1
missing_args = []
multiple_args = []
for key, usage in arg_usage.items():
if usage == 0:
missing_args.append(key)
elif usage > 1:
multiple_args.append(key)
if missing_args:
# In the case where the method allows body arguments
# there are 3 potential errors:
# 1. not enough query string parameters -> 404
# 2. not enough body parameters -> 400
# 3. not enough path parts (partial matches) -> 404
#
# We can't actually tell which case it is,
# so I'm raising a 404 because that covers 2/3 of the
# possibilities
#
# In the case where the method does not allow body
# arguments it's definitely a 404.
message = None
if show_mismatched_params:
message = 'Missing parameters: %s' % ','.join(missing_args)
raise cherrypy.HTTPError(404, message=message)
# the extra positional arguments come from the path - 404 Not Found
if not varargs and vararg_usage > 0:
raise cherrypy.HTTPError(404)
body_params = cherrypy.serving.request.body.params or {}
body_params = set(body_params.keys())
qs_params = set(callable_kwargs.keys()) - body_params
if multiple_args:
if qs_params.intersection(set(multiple_args)):
# If any of the multiple parameters came from the query string then
# it's a 404 Not Found
error = 404
else:
# Otherwise it's a 400 Bad Request
error = 400
message = None
if show_mismatched_params:
message = 'Multiple values for parameters: '\
'%s' % ','.join(multiple_args)
raise cherrypy.HTTPError(error, message=message)
if not varkw and varkw_usage > 0:
# If there were extra query string parameters, it's a 404 Not Found
extra_qs_params = set(qs_params).intersection(extra_kwargs)
if extra_qs_params:
message = None
if show_mismatched_params:
message = 'Unexpected query string '\
'parameters: %s' % ', '.join(extra_qs_params)
raise cherrypy.HTTPError(404, message=message)
# If there were any extra body parameters, it's a 400 Bad Request
extra_body_params = set(body_params).intersection(extra_kwargs)
if extra_body_params:
message = None
if show_mismatched_params:
message = 'Unexpected body parameters: '\
'%s' % ', '.join(extra_body_params)
raise cherrypy.HTTPError(400, message=message)
try:
import inspect
except ImportError:
def test_callable_spec(callable, args, kwargs): # noqa: F811
return None
else:
def getargspec(callable):
return inspect.getfullargspec(callable)[:4]
class LateParamPageHandler(PageHandler):
"""When passing cherrypy.request.params to the page handler, we do not
want to capture that dict too early; we want to give tools like the
decoding tool a chance to modify the params dict in-between the lookup
of the handler and the actual calling of the handler. This subclass
takes that into account, and allows request.params to be 'bound late'
(it's more complicated than that, but that's the effect).
"""
@property
def kwargs(self):
"""Page handler kwargs (with cherrypy.request.params copied in)."""
kwargs = cherrypy.serving.request.params.copy()
if self._kwargs:
kwargs.update(self._kwargs)
return kwargs
@kwargs.setter
def kwargs(self, kwargs):
cherrypy.serving.request.kwargs = kwargs
self._kwargs = kwargs
if sys.version_info < (3, 0):
punctuation_to_underscores = string.maketrans(
string.punctuation, '_' * len(string.punctuation))
def validate_translator(t):
if not isinstance(t, str) or len(t) != 256:
raise ValueError(
'The translate argument must be a str of len 256.')
else:
punctuation_to_underscores = str.maketrans(
string.punctuation, '_' * len(string.punctuation))
def validate_translator(t):
if not isinstance(t, dict):
raise ValueError('The translate argument must be a dict.')
class Dispatcher(object):
"""CherryPy Dispatcher which walks a tree of objects to find a handler.
The tree is rooted at cherrypy.request.app.root, and each
hierarchical component in the path_info argument is matched to a
corresponding nested attribute of the root object. Matching handlers
must have an 'exposed' attribute which evaluates to True. The
special method name "index" matches a URI which ends in a slash
("/"). The special method name "default" may match a portion of the
path_info (but only when no longer substring of the path_info
matches some other object).
This is the default, built-in dispatcher for CherryPy.
"""
dispatch_method_name = '_cp_dispatch'
"""
The name of the dispatch method that nodes may optionally implement
to provide their own dynamic dispatch algorithm.
"""
def __init__(self, dispatch_method_name=None,
translate=punctuation_to_underscores):
validate_translator(translate)
self.translate = translate
if dispatch_method_name:
self.dispatch_method_name = dispatch_method_name
def __call__(self, path_info):
"""Set handler and config for the current request."""
request = cherrypy.serving.request
func, vpath = self.find_handler(path_info)
if func:
# Decode any leftover %2F in the virtual_path atoms.
vpath = [x.replace('%2F', '/') for x in vpath]
request.handler = LateParamPageHandler(func, *vpath)
else:
request.handler = cherrypy.NotFound()
def find_handler(self, path):
"""Return the appropriate page handler, plus any virtual path.
This will return two objects. The first will be a callable,
which can be used to generate page output. Any parameters from
the query string or request body will be sent to that callable
as keyword arguments.
The callable is found by traversing the application's tree,
starting from cherrypy.request.app.root, and matching path
components to successive objects in the tree. For example, the
URL "/path/to/handler" might return root.path.to.handler.
The second object returned will be a list of names which are
'virtual path' components: parts of the URL which are dynamic,
and were not used when looking up the handler. These virtual
path components are passed to the handler as positional
arguments.
"""
request = cherrypy.serving.request
app = request.app
root = app.root
dispatch_name = self.dispatch_method_name
# Get config for the root object/path.
fullpath = [x for x in path.strip('/').split('/') if x] + ['index']
fullpath_len = len(fullpath)
segleft = fullpath_len
nodeconf = {}
if hasattr(root, '_cp_config'):
nodeconf.update(root._cp_config)
if '/' in app.config:
nodeconf.update(app.config['/'])
object_trail = [['root', root, nodeconf, segleft]]
node = root
iternames = fullpath[:]
while iternames:
name = iternames[0]
# map to legal Python identifiers (e.g. replace '.' with '_')
objname = name.translate(self.translate)
nodeconf = {}
subnode = getattr(node, objname, None)
pre_len = len(iternames)
if subnode is None:
dispatch = getattr(node, dispatch_name, None)
if dispatch and hasattr(dispatch, '__call__') and not \
getattr(dispatch, 'exposed', False) and \
pre_len > 1:
# Don't expose the hidden 'index' token to _cp_dispatch
# We skip this if pre_len == 1 since it makes no sense
# to call a dispatcher when we have no tokens left.
index_name = iternames.pop()
subnode = dispatch(vpath=iternames)
iternames.append(index_name)
else:
# We didn't find a path, but keep processing in case there
# is a default() handler.
iternames.pop(0)
else:
# We found the path, remove the vpath entry
iternames.pop(0)
segleft = len(iternames)
if segleft > pre_len:
# No path segment was removed. Raise an error.
raise cherrypy.CherryPyException(
'A vpath segment was added. Custom dispatchers may only '
'remove elements. While trying to process '
'{0} in {1}'.format(name, fullpath)
)
elif segleft == pre_len:
# Assume that the handler used the current path segment, but
# did not pop it. This allows things like
# return getattr(self, vpath[0], None)
iternames.pop(0)
segleft -= 1
node = subnode
if node is not None:
# Get _cp_config attached to this node.
if hasattr(node, '_cp_config'):
nodeconf.update(node._cp_config)
# Mix in values from app.config for this path.
existing_len = fullpath_len - pre_len
if existing_len != 0:
curpath = '/' + '/'.join(fullpath[0:existing_len])
else:
curpath = ''
new_segs = fullpath[fullpath_len - pre_len:fullpath_len - segleft]
for seg in new_segs:
curpath += '/' + seg
if curpath in app.config:
nodeconf.update(app.config[curpath])
object_trail.append([name, node, nodeconf, segleft])
def set_conf():
"""Collapse all object_trail config into cherrypy.request.config.
"""
base = cherrypy.config.copy()
# Note that we merge the config from each node
# even if that node was None.
for name, obj, conf, segleft in object_trail:
base.update(conf)
if 'tools.staticdir.dir' in conf:
base['tools.staticdir.section'] = '/' + \
'/'.join(fullpath[0:fullpath_len - segleft])
return base
# Try successive objects (reverse order)
num_candidates = len(object_trail) - 1
for i in range(num_candidates, -1, -1):
name, candidate, nodeconf, segleft = object_trail[i]
if candidate is None:
continue
# Try a "default" method on the current leaf.
if hasattr(candidate, 'default'):
defhandler = candidate.default
if getattr(defhandler, 'exposed', False):
# Insert any extra _cp_config from the default handler.
conf = getattr(defhandler, '_cp_config', {})
object_trail.insert(
i + 1, ['default', defhandler, conf, segleft])
request.config = set_conf()
# See https://github.com/cherrypy/cherrypy/issues/613
request.is_index = path.endswith('/')
return defhandler, fullpath[fullpath_len - segleft:-1]
# Uncomment the next line to restrict positional params to
# "default".
# if i < num_candidates - 2: continue
# Try the current leaf.
if getattr(candidate, 'exposed', False):
request.config = set_conf()
if i == num_candidates:
# We found the extra ".index". Mark request so tools
# can redirect if path_info has no trailing slash.
request.is_index = True
else:
# We're not at an 'index' handler. Mark request so tools
# can redirect if path_info has NO trailing slash.
# Note that this also includes handlers which take
# positional parameters (virtual paths).
request.is_index = False
return candidate, fullpath[fullpath_len - segleft:-1]
# We didn't find anything
request.config = set_conf()
return None, []
class MethodDispatcher(Dispatcher):
"""Additional dispatch based on cherrypy.request.method.upper().
Methods named GET, POST, etc will be called on an exposed class. The
method names must be all caps; the appropriate Allow header will be
output showing all capitalized method names as allowable HTTP verbs.
Note that the containing class must be exposed, not the methods.
"""
def __call__(self, path_info):
"""Set handler and config for the current request."""
request = cherrypy.serving.request
resource, vpath = self.find_handler(path_info)
if resource:
# Set Allow header
avail = [m for m in dir(resource) if m.isupper()]
if 'GET' in avail and 'HEAD' not in avail:
avail.append('HEAD')
avail.sort()
cherrypy.serving.response.headers['Allow'] = ', '.join(avail)
# Find the subhandler
meth = request.method.upper()
func = getattr(resource, meth, None)
if func is None and meth == 'HEAD':
func = getattr(resource, 'GET', None)
if func:
# Grab any _cp_config on the subhandler.
if hasattr(func, '_cp_config'):
request.config.update(func._cp_config)
# Decode any leftover %2F in the virtual_path atoms.
vpath = [x.replace('%2F', '/') for x in vpath]
request.handler = LateParamPageHandler(func, *vpath)
else:
request.handler = cherrypy.HTTPError(405)
else:
request.handler = cherrypy.NotFound()
class RoutesDispatcher(object):
"""A Routes based dispatcher for CherryPy."""
def __init__(self, full_result=False, **mapper_options):
"""Routes dispatcher.
Set full_result to True if you wish the controller and the
action to be passed on to the page handler parameters. By
default they won't be.
"""
import routes
self.full_result = full_result
self.controllers = {}
self.mapper = routes.Mapper(**mapper_options)
self.mapper.controller_scan = self.controllers.keys
def connect(self, name, route, controller, **kwargs):
self.controllers[name] = controller
self.mapper.connect(name, route, controller=name, **kwargs)
def redirect(self, url):
raise cherrypy.HTTPRedirect(url)
def __call__(self, path_info):
"""Set handler and config for the current request."""
func = self.find_handler(path_info)
if func:
cherrypy.serving.request.handler = LateParamPageHandler(func)
else:
cherrypy.serving.request.handler = cherrypy.NotFound()
def find_handler(self, path_info):
"""Find the right page handler, and set request.config."""
import routes
request = cherrypy.serving.request
config = routes.request_config()
config.mapper = self.mapper
if hasattr(request, 'wsgi_environ'):
config.environ = request.wsgi_environ
config.host = request.headers.get('Host', None)
config.protocol = request.scheme
config.redirect = self.redirect
result = self.mapper.match(path_info)
config.mapper_dict = result
params = {}
if result:
params = result.copy()
if not self.full_result:
params.pop('controller', None)
params.pop('action', None)
request.params.update(params)
# Get config for the root object/path.
request.config = base = cherrypy.config.copy()
curpath = ''
def merge(nodeconf):
if 'tools.staticdir.dir' in nodeconf:
nodeconf['tools.staticdir.section'] = curpath or '/'
base.update(nodeconf)
app = request.app
root = app.root
if hasattr(root, '_cp_config'):
merge(root._cp_config)
if '/' in app.config:
merge(app.config['/'])
# Mix in values from app.config.
atoms = [x for x in path_info.split('/') if x]
if atoms:
last = atoms.pop()
else:
last = None
for atom in atoms:
curpath = '/'.join((curpath, atom))
if curpath in app.config:
merge(app.config[curpath])
handler = None
if result:
controller = result.get('controller')
controller = self.controllers.get(controller, controller)
if controller:
if isinstance(controller, classtype):
controller = controller()
# Get config from the controller.
if hasattr(controller, '_cp_config'):
merge(controller._cp_config)
action = result.get('action')
if action is not None:
handler = getattr(controller, action, None)
# Get config from the handler
if hasattr(handler, '_cp_config'):
merge(handler._cp_config)
else:
handler = controller
# Do the last path atom here so it can
# override the controller's _cp_config.
if last:
curpath = '/'.join((curpath, last))
if curpath in app.config:
merge(app.config[curpath])
return handler
def XMLRPCDispatcher(next_dispatcher=Dispatcher()):
from cherrypy.lib import xmlrpcutil
def xmlrpc_dispatch(path_info):
path_info = xmlrpcutil.patched_path(path_info)
return next_dispatcher(path_info)
return xmlrpc_dispatch
def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True,
**domains):
"""Select a different handler based on the Host header.
This can be useful when running multiple sites within one CP server.
It allows several domains to point to different parts of a single
website structure. For example::
http://www.domain.example -> root
http://www.domain2.example -> root/domain2/
http://www.domain2.example:443 -> root/secure
can be accomplished via the following config::
[/]
request.dispatch = cherrypy.dispatch.VirtualHost(
**{'www.domain2.example': '/domain2',
'www.domain2.example:443': '/secure',
})
next_dispatcher
The next dispatcher object in the dispatch chain.
The VirtualHost dispatcher adds a prefix to the URL and calls
another dispatcher. Defaults to cherrypy.dispatch.Dispatcher().
use_x_forwarded_host
If True (the default), any "X-Forwarded-Host"
request header will be used instead of the "Host" header. This
is commonly added by HTTP servers (such as Apache) when proxying.
``**domains``
A dict of {host header value: virtual prefix} pairs.
The incoming "Host" request header is looked up in this dict,
and, if a match is found, the corresponding "virtual prefix"
value will be prepended to the URL path before calling the
next dispatcher. Note that you often need separate entries
for "example.com" and "www.example.com". In addition, "Host"
headers may contain the port number.
"""
from cherrypy.lib import httputil
def vhost_dispatch(path_info):
request = cherrypy.serving.request
header = request.headers.get
domain = header('Host', '')
if use_x_forwarded_host:
domain = header('X-Forwarded-Host', domain)
prefix = domains.get(domain, '')
if prefix:
path_info = httputil.urljoin(prefix, path_info)
result = next_dispatcher(path_info)
# Touch up staticdir config. See
# https://github.com/cherrypy/cherrypy/issues/614.
section = request.config.get('tools.staticdir.section')
if section:
section = section[len(prefix):]
request.config['tools.staticdir.section'] = section
return result
return vhost_dispatch
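# --- Editor's illustrative sketch; not part of the CherryPy source. ----------
# A self-contained example of the VirtualHost dispatcher documented above.
# The host name, mount layout and the Root/Domain2 classes are assumptions
# made up for illustration only.
import cherrypy


class Domain2:
    @cherrypy.expose
    def index(self):
        return 'domain2 content'


class Root:
    domain2 = Domain2()

    @cherrypy.expose
    def index(self):
        return 'main site content'


if __name__ == '__main__':
    config = {
        '/': {
            'request.dispatch': cherrypy.dispatch.VirtualHost(
                **{'www.domain2.example': '/domain2'}),
        },
    }
    cherrypy.quickstart(Root(), '/', config)
# ------------------------------------------------------------------------------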

618
libs/cherrypy/_cperror.py Normal file
View file

@@ -0,0 +1,618 @@
"""Exception classes for CherryPy.
CherryPy provides (and uses) exceptions for declaring that the HTTP response
should be a status other than the default "200 OK". You can ``raise`` them like
normal Python exceptions. You can also call them and they will raise
themselves; this means you can set an
:class:`HTTPError<cherrypy._cperror.HTTPError>`
or :class:`HTTPRedirect<cherrypy._cperror.HTTPRedirect>` as the
:attr:`request.handler<cherrypy._cprequest.Request.handler>`.
.. _redirectingpost:
Redirecting POST
================
When you GET a resource and are redirected by the server to another Location,
there's generally no problem since GET is both a "safe method" (there should
be no side-effects) and an "idempotent method" (multiple calls are no different
than a single call).
POST, however, is neither safe nor idempotent--if you
charge a credit card, you don't want to be charged twice by a redirect!
For this reason, *none* of the 3xx responses permit a user-agent (browser) to
resubmit a POST on redirection without first confirming the action with the
user:
===== ================================= ===========
300 Multiple Choices Confirm with the user
301 Moved Permanently Confirm with the user
302 Found (Object moved temporarily) Confirm with the user
303 See Other GET the new URI; no confirmation
304 Not modified for conditional GET only;
POST should not raise this error
305 Use Proxy Confirm with the user
307 Temporary Redirect Confirm with the user
308 Permanent Redirect No confirmation
===== ================================= ===========
However, browsers have historically implemented these restrictions poorly;
in particular, many browsers do not force the user to confirm 301, 302
or 307 when redirecting POST. For this reason, CherryPy defaults to 303,
which most user-agents appear to have implemented correctly. Therefore, if
you raise HTTPRedirect for a POST request, the user-agent will most likely
attempt to GET the new URI (without asking for confirmation from the user).
We realize this is confusing for developers, but it's the safest thing we
could do. You are of course free to raise ``HTTPRedirect(uri, status=302)``
or any other 3xx status if you know what you're doing, but given the
environment, we couldn't let any of those be the default.
Custom Error Handling
=====================
.. image:: /refman/cperrors.gif
Anticipated HTTP responses
--------------------------
The 'error_page' config namespace can be used to provide custom HTML output for
expected responses (like 404 Not Found). Supply a filename from which the
output will be read. The contents will be interpolated with the values
%(status)s, %(message)s, %(traceback)s, and %(version)s using plain old Python
`string formatting
<http://docs.python.org/2/library/stdtypes.html#string-formatting-operations>`_.
::
_cp_config = {
'error_page.404': os.path.join(localDir, "static/index.html")
}
Beginning in version 3.1, you may also provide a function or other callable as
an error_page entry. It will be passed the same status, message, traceback and
version arguments that are interpolated into templates::
def error_page_402(status, message, traceback, version):
return "Error %s - Well, I'm very sorry but you haven't paid!" % status
cherrypy.config.update({'error_page.402': error_page_402})
Also in 3.1, in addition to the numbered error codes, you may also supply
"error_page.default" to handle all codes which do not have their own error_page
entry.
Unanticipated errors
--------------------
CherryPy also has a generic error handling mechanism: whenever an unanticipated
error occurs in your code, it will call
:func:`Request.error_response<cherrypy._cprequest.Request.error_response>` to
set the response status, headers, and body. By default, this is the same
output as
:class:`HTTPError(500) <cherrypy._cperror.HTTPError>`. If you want to provide
some other behavior, you generally replace "request.error_response".
Here is some sample code that shows how to display a custom error message and
send an e-mail containing the error::
from cherrypy import _cperror
def handle_error():
cherrypy.response.status = 500
cherrypy.response.body = [
"<html><body>Sorry, an error occurred</body></html>"
]
sendMail('error@domain.com',
'Error in your web app',
_cperror.format_exc())
@cherrypy.config(**{'request.error_response': handle_error})
class Root:
pass
Note that you have to explicitly set
:attr:`response.body <cherrypy._cprequest.Response.body>`
and not simply return an error message as a result.
"""
import io
import contextlib
import urllib.parse
from sys import exc_info as _exc_info
from traceback import format_exception as _format_exception
from xml.sax import saxutils
import html
from more_itertools import always_iterable
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy._cpcompat import tonative
from cherrypy._helper import classproperty
from cherrypy.lib import httputil as _httputil
class CherryPyException(Exception):
"""A base class for CherryPy exceptions."""
pass
class InternalRedirect(CherryPyException):
"""Exception raised to switch to the handler for a different URL.
This exception will redirect processing to another path within the
site (without informing the client). Provide the new path as an
argument when raising the exception. Provide any params in the
querystring for the new URL.
"""
def __init__(self, path, query_string=''):
self.request = cherrypy.serving.request
self.query_string = query_string
if '?' in path:
# Separate any params included in the path
path, self.query_string = path.split('?', 1)
# Note that urljoin will "do the right thing" whether url is:
# 1. a URL relative to root (e.g. "/dummy")
# 2. a URL relative to the current path
# Note that any query string will be discarded.
path = urllib.parse.urljoin(self.request.path_info, path)
# Set a 'path' member attribute so that code which traps this
# error can have access to it.
self.path = path
CherryPyException.__init__(self, path, self.query_string)
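# --- Editor's illustrative sketch; not part of the CherryPy source. ----------
# How a page handler might use InternalRedirect: the client receives the
# response produced by the "/maintenance" handler, while the browser's URL is
# left unchanged. The handler names are assumptions for the example.
import cherrypy


class Root:
    @cherrypy.expose
    def index(self):
        raise cherrypy.InternalRedirect('/maintenance')

    @cherrypy.expose
    def maintenance(self):
        return 'Back soon.'
# ------------------------------------------------------------------------------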
class HTTPRedirect(CherryPyException):
"""Exception raised when the request should be redirected.
This exception will force a HTTP redirect to the URL or URL's you give it.
The new URL must be passed as the first argument to the Exception,
e.g., HTTPRedirect(newUrl). Multiple URLs are allowed in a list.
If a URL is absolute, it will be used as-is. If it is relative, it is
assumed to be relative to the current cherrypy.request.path_info.
If one of the provided URLs is a unicode object, it will be encoded
using the default encoding or the one passed as a parameter.
There are multiple types of redirect, from which you can select via the
``status`` argument. If you do not provide a ``status`` arg, it defaults to
303 (or 302 if responding with HTTP/1.0).
Examples::
raise cherrypy.HTTPRedirect("")
raise cherrypy.HTTPRedirect("/abs/path", 307)
raise cherrypy.HTTPRedirect(["path1", "path2?a=1&b=2"], 301)
See :ref:`redirectingpost` for additional caveats.
"""
urls = None
"""The list of URL's to emit."""
encoding = 'utf-8'
"""The encoding when passed urls are not native strings."""
def __init__(self, urls, status=None, encoding=None):
self.urls = abs_urls = [
# Note that urljoin will "do the right thing" whether url is:
# 1. a complete URL with host (e.g. "http://www.example.com/test")
# 2. a URL relative to root (e.g. "/dummy")
# 3. a URL relative to the current path
# Note that any query string in cherrypy.request is discarded.
urllib.parse.urljoin(
cherrypy.url(),
tonative(url, encoding or self.encoding),
)
for url in always_iterable(urls)
]
status = (
int(status)
if status is not None
else self.default_status
)
if not 300 <= status <= 399:
raise ValueError('status must be between 300 and 399.')
CherryPyException.__init__(self, abs_urls, status)
@classproperty
def default_status(cls):
"""The default redirect status for the request.
RFC 2616 indicates a 301 response code fits our goal; however,
browser support for 301 is quite messy. Use 302/303 instead. See
http://www.alanflavell.org.uk/www/post-redirect.html
"""
return 303 if cherrypy.serving.request.protocol >= (1, 1) else 302
@property
def status(self):
"""The integer HTTP status code to emit."""
_, status = self.args[:2]
return status
def set_response(self):
"""Modify cherrypy.response status, headers, and body to represent
self.
CherryPy uses this internally, but you can also use it to create
an HTTPRedirect object and set its output without *raising* the
exception.
"""
response = cherrypy.serving.response
response.status = status = self.status
if status in (300, 301, 302, 303, 307, 308):
response.headers['Content-Type'] = 'text/html;charset=utf-8'
# "The ... URI SHOULD be given by the Location field
# in the response."
response.headers['Location'] = self.urls[0]
# "Unless the request method was HEAD, the entity of the response
# SHOULD contain a short hypertext note with a hyperlink to the
# new URI(s)."
msg = {
300: 'This resource can be found at ',
301: 'This resource has permanently moved to ',
302: 'This resource resides temporarily at ',
303: 'This resource can be found at ',
307: 'This resource has moved temporarily to ',
308: 'This resource has been moved to ',
}[status]
msg += '<a href=%s>%s</a>.'
msgs = [
msg % (saxutils.quoteattr(u), html.escape(u, quote=False))
for u in self.urls
]
response.body = ntob('<br />\n'.join(msgs), 'utf-8')
# Previous code may have set C-L, so we have to reset it
# (allow finalize to set it).
response.headers.pop('Content-Length', None)
elif status == 304:
# Not Modified.
# "The response MUST include the following header fields:
# Date, unless its omission is required by section 14.18.1"
# The "Date" header should have been set in Response.__init__
# "...the response SHOULD NOT include other entity-headers."
for key in ('Allow', 'Content-Encoding', 'Content-Language',
'Content-Length', 'Content-Location', 'Content-MD5',
'Content-Range', 'Content-Type', 'Expires',
'Last-Modified'):
if key in response.headers:
del response.headers[key]
# "The 304 response MUST NOT contain a message-body."
response.body = None
# Previous code may have set C-L, so we have to reset it.
response.headers.pop('Content-Length', None)
elif status == 305:
# Use Proxy.
# self.urls[0] should be the URI of the proxy.
response.headers['Location'] = ntob(self.urls[0], 'utf-8')
response.body = None
# Previous code may have set C-L, so we have to reset it.
response.headers.pop('Content-Length', None)
else:
raise ValueError('The %s status code is unknown.' % status)
def __call__(self):
"""Use this exception as a request.handler (raise self)."""
raise self
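# --- Editor's illustrative sketch; not part of the CherryPy source. ----------
# Because __call__ above simply raises self, an HTTPRedirect instance can be
# installed as request.handler, for example from a hook point. The tool name
# and paths are assumptions; enable it via "tools.legacy_redirect.on" config.
import cherrypy


def legacy_redirect():
    if cherrypy.serving.request.path_info == '/old-page':
        # The redirect raises itself when invoked as the page handler.
        cherrypy.serving.request.handler = cherrypy.HTTPRedirect(
            '/new-page', 301)


cherrypy.tools.legacy_redirect = cherrypy.Tool('before_handler', legacy_redirect)
# ------------------------------------------------------------------------------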
def clean_headers(status):
"""Remove any headers which should not apply to an error response."""
response = cherrypy.serving.response
# Remove headers which applied to the original content,
# but do not apply to the error page.
respheaders = response.headers
for key in ['Accept-Ranges', 'Age', 'ETag', 'Location', 'Retry-After',
'Vary', 'Content-Encoding', 'Content-Length', 'Expires',
'Content-Location', 'Content-MD5', 'Last-Modified']:
if key in respheaders:
del respheaders[key]
if status != 416:
# A server sending a response with status code 416 (Requested
# range not satisfiable) SHOULD include a Content-Range field
# with a byte-range-resp-spec of "*". The instance-length
# specifies the current length of the selected resource.
# A response with status code 206 (Partial Content) MUST NOT
# include a Content-Range field with a byte-range- resp-spec of "*".
if 'Content-Range' in respheaders:
del respheaders['Content-Range']
class HTTPError(CherryPyException):
"""Exception used to return an HTTP error code (4xx-5xx) to the client.
This exception can be used to automatically send a response using a
http status code, with an appropriate error page. It takes an optional
``status`` argument (which must be between 400 and 599); it defaults to 500
("Internal Server Error"). It also takes an optional ``message`` argument,
which will be returned in the response body. See
`RFC2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4>`_
for a complete list of available error codes and when to use them.
Examples::
raise cherrypy.HTTPError(403)
raise cherrypy.HTTPError(
"403 Forbidden", "You are not allowed to access this resource.")
"""
status = None
"""The HTTP status code.
May be of type int or str (with a Reason-Phrase).
"""
code = None
"""The integer HTTP status code."""
reason = None
"""The HTTP Reason-Phrase string."""
def __init__(self, status=500, message=None):
self.status = status
try:
self.code, self.reason, defaultmsg = _httputil.valid_status(status)
except ValueError:
raise self.__class__(500, _exc_info()[1].args[0])
if self.code < 400 or self.code > 599:
raise ValueError('status must be between 400 and 599.')
# See http://www.python.org/dev/peps/pep-0352/
# self.message = message
self._message = message or defaultmsg
CherryPyException.__init__(self, status, message)
def set_response(self):
"""Modify cherrypy.response status, headers, and body to represent
self.
CherryPy uses this internally, but you can also use it to create
an HTTPError object and set its output without *raising* the
exception.
"""
response = cherrypy.serving.response
clean_headers(self.code)
# In all cases, finalize will be called after this method,
# so don't bother cleaning up response values here.
response.status = self.status
tb = None
if cherrypy.serving.request.show_tracebacks:
tb = format_exc()
response.headers.pop('Content-Length', None)
content = self.get_error_page(self.status, traceback=tb,
message=self._message)
response.body = content
_be_ie_unfriendly(self.code)
def get_error_page(self, *args, **kwargs):
return get_error_page(*args, **kwargs)
def __call__(self):
"""Use this exception as a request.handler (raise self)."""
raise self
@classmethod
@contextlib.contextmanager
def handle(cls, exception, status=500, message=''):
"""Translate exception into an HTTPError."""
try:
yield
except exception as exc:
raise cls(status, message or str(exc))
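# --- Editor's illustrative sketch; not part of the CherryPy source. ----------
# Using the handle() context manager defined above to translate an ordinary
# exception into an HTTP status. The handler and lookup dict are made up.
import cherrypy


class Root:
    @cherrypy.expose
    def item(self, key):
        data = {'spam': 'eggs'}
        with cherrypy.HTTPError.handle(KeyError, 404, 'No such item'):
            return data[key]
# ------------------------------------------------------------------------------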
class NotFound(HTTPError):
"""Exception raised when a URL could not be mapped to any handler (404).
This is equivalent to raising :class:`HTTPError("404 Not Found")
<cherrypy._cperror.HTTPError>`.
"""
def __init__(self, path=None):
if path is None:
request = cherrypy.serving.request
path = request.script_name + request.path_info
self.args = (path,)
HTTPError.__init__(self, 404, "The path '%s' was not found." % path)
_HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC
"-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"></meta>
<title>%(status)s</title>
<style type="text/css">
#powered_by {
margin-top: 20px;
border-top: 2px solid black;
font-style: italic;
}
#traceback {
color: red;
}
</style>
</head>
<body>
<h2>%(status)s</h2>
<p>%(message)s</p>
<pre id="traceback">%(traceback)s</pre>
<div id="powered_by">
<span>
Powered by <a href="http://www.cherrypy.dev">CherryPy %(version)s</a>
</span>
</div>
</body>
</html>
'''
def get_error_page(status, **kwargs):
"""Return an HTML page, containing a pretty error response.
status should be an int or a str. kwargs will be interpolated into
the page template.
"""
try:
code, reason, message = _httputil.valid_status(status)
except ValueError:
raise cherrypy.HTTPError(500, _exc_info()[1].args[0])
# We can't use setdefault here, because some
# callers send None for kwarg values.
if kwargs.get('status') is None:
kwargs['status'] = '%s %s' % (code, reason)
if kwargs.get('message') is None:
kwargs['message'] = message
if kwargs.get('traceback') is None:
kwargs['traceback'] = ''
if kwargs.get('version') is None:
kwargs['version'] = cherrypy.__version__
for k, v in kwargs.items():
if v is None:
kwargs[k] = ''
else:
kwargs[k] = html.escape(kwargs[k], quote=False)
# Use a custom template or callable for the error page?
pages = cherrypy.serving.request.error_page
error_page = pages.get(code) or pages.get('default')
# Default template, can be overridden below.
template = _HTTPErrorTemplate
if error_page:
try:
if hasattr(error_page, '__call__'):
# The caller function may be setting headers manually,
# so we delegate to it completely. We may be returning
# an iterator as well as a string here.
#
# We *must* make sure any content is not unicode.
result = error_page(**kwargs)
if cherrypy.lib.is_iterator(result):
from cherrypy.lib.encoding import UTF8StreamEncoder
return UTF8StreamEncoder(result)
elif isinstance(result, str):
return result.encode('utf-8')
else:
if not isinstance(result, bytes):
raise ValueError(
'error page function did not '
'return a bytestring, str or an '
'iterator - returned object of type %s.'
% (type(result).__name__))
return result
else:
# Load the template from this path.
with io.open(error_page, newline='') as f:
template = f.read()
except Exception:
e = _format_exception(*_exc_info())[-1]
m = kwargs['message']
if m:
m += '<br />'
m += 'In addition, the custom error page failed:\n<br />%s' % e
kwargs['message'] = m
response = cherrypy.serving.response
response.headers['Content-Type'] = 'text/html;charset=utf-8'
result = template % kwargs
return result.encode('utf-8')
_ie_friendly_error_sizes = {
400: 512, 403: 256, 404: 512, 405: 256,
406: 512, 408: 512, 409: 512, 410: 256,
500: 512, 501: 512, 505: 512,
}
def _be_ie_unfriendly(status):
response = cherrypy.serving.response
# For some statuses, Internet Explorer 5+ shows "friendly error
# messages" instead of our response.body if the body is smaller
# than a given size. Fix this by returning a body over that size
# (by adding whitespace).
# See http://support.microsoft.com/kb/q218155/
s = _ie_friendly_error_sizes.get(status, 0)
if s:
s += 1
# Since we are issuing an HTTP error status, we assume that
# the entity is short, and we should just collapse it.
content = response.collapse_body()
content_length = len(content)
if content_length and content_length < s:
# IN ADDITION: the response must be written to IE
# in one chunk or it will still get replaced! Bah.
content = content + (b' ' * (s - content_length))
response.body = content
response.headers['Content-Length'] = str(len(content))
def format_exc(exc=None):
"""Return exc (or sys.exc_info if None), formatted."""
try:
if exc is None:
exc = _exc_info()
if exc == (None, None, None):
return ''
import traceback
return ''.join(traceback.format_exception(*exc))
finally:
del exc
def bare_error(extrabody=None):
"""Produce status, headers, body for a critical error.
Returns a triple without calling any other questionable functions,
so it should be as error-free as possible. Call it from an HTTP
server if you get errors outside of the request.
If extrabody is None, a friendly but rather unhelpful error message
is set in the body. If extrabody is a string, it will be appended
as-is to the body.
"""
# The whole point of this function is to be a last line-of-defense
# in handling errors. That is, it must not raise any errors itself;
# it cannot be allowed to fail. Therefore, don't add to it!
# In particular, don't call any other CP functions.
body = b'Unrecoverable error in the server.'
if extrabody is not None:
if not isinstance(extrabody, bytes):
extrabody = extrabody.encode('utf-8')
body += b'\n' + extrabody
return (b'500 Internal Server Error',
[(b'Content-Type', b'text/plain'),
(b'Content-Length', ntob(str(len(body)), 'ISO-8859-1'))],
[body])

458
libs/cherrypy/_cplogging.py Normal file
View file

@@ -0,0 +1,458 @@
"""
Simple config
=============
Although CherryPy uses the :mod:`Python logging module <logging>`, it does so
behind the scenes so that simple logging is simple, but complicated logging
is still possible. "Simple" logging means that you can log to the screen
(i.e. console/stdout) or to a file, and that you can easily have separate
error and access log files.
Here are the simplified logging settings. You use these by adding lines to
your config file or dict. You should set these at either the global level or
per application (see next), but generally not both.
* ``log.screen``: Set this to True to have both "error" and "access" messages
printed to stdout.
* ``log.access_file``: Set this to an absolute filename where you want
"access" messages written.
* ``log.error_file``: Set this to an absolute filename where you want "error"
messages written.
Many events are automatically logged; to log your own application events, call
:func:`cherrypy.log`.
Architecture
============
Separate scopes
---------------
CherryPy provides log managers at both the global and application layers.
This means you can have one set of logging rules for your entire site,
and another set of rules specific to each application. The global log
manager is found at :func:`cherrypy.log`, and the log manager for each
application is found at :attr:`app.log<cherrypy._cptree.Application.log>`.
If you're inside a request, the latter is reachable from
``cherrypy.request.app.log``; if you're outside a request, you'll have to
obtain a reference to the ``app``: either the return value of
:func:`tree.mount()<cherrypy._cptree.Tree.mount>` or, if you used
:func:`quickstart()<cherrypy.quickstart>` instead, via
``cherrypy.tree.apps['/']``.
By default, the global logs are named "cherrypy.error" and "cherrypy.access",
and the application logs are named "cherrypy.error.2378745" and
"cherrypy.access.2378745" (the number is the id of the Application object).
This means that the application logs "bubble up" to the site logs, so if your
application has no log handlers, the site-level handlers will still log the
messages.
Errors vs. Access
-----------------
Each log manager handles both "access" messages (one per HTTP request) and
"error" messages (everything else). Note that the "error" log is not just for
errors! The format of access messages is highly formalized, but the error log
isn't--it receives messages from a variety of sources (including full error
tracebacks, if enabled).
If you are logging the access log and error log to the same source, then there
is a possibility that a specially crafted error message may replicate an access
log message as described in CWE-117. In this case it is the application
developer's responsibility to manually escape data before
using CherryPy's log()
functionality, or they may create an application that is vulnerable to CWE-117.
This can be achieved by using a custom handler to escape any special
characters, attached as described below.
Custom Handlers
===============
The simple settings above work by manipulating Python's standard :mod:`logging`
module. So when you need something more complex, the full power of the standard
module is yours to exploit. You can borrow or create custom handlers, formats,
filters, and much more. Here's an example that skips the standard FileHandler
and uses a RotatingFileHandler instead:
::
#python
log = app.log
# Remove the default FileHandlers if present.
log.error_file = ""
log.access_file = ""
maxBytes = getattr(log, "rot_maxBytes", 10000000)
backupCount = getattr(log, "rot_backupCount", 1000)
# Make a new RotatingFileHandler for the error log.
fname = getattr(log, "rot_error_file", "error.log")
h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount)
h.setLevel(DEBUG)
h.setFormatter(_cplogging.logfmt)
log.error_log.addHandler(h)
# Make a new RotatingFileHandler for the access log.
fname = getattr(log, "rot_access_file", "access.log")
h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount)
h.setLevel(DEBUG)
h.setFormatter(_cplogging.logfmt)
log.access_log.addHandler(h)
The ``rot_*`` attributes are pulled straight from the application log object.
Since "log.*" config entries simply set attributes on the log object, you can
add custom attributes to your heart's content. Note that these handlers are
used ''instead'' of the default, simple handlers outlined above (so don't set
the "log.error_file" config entry, for example).
"""
import datetime
import logging
import os
import sys
import cherrypy
from cherrypy import _cperror
# Silence the no-handlers "warning" (stderr write!) in stdlib logging
logging.Logger.manager.emittedNoHandlerWarning = 1
logfmt = logging.Formatter('%(message)s')
class NullHandler(logging.Handler):
"""A no-op logging handler to silence the logging.lastResort handler."""
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
class LogManager(object):
"""An object to assist both simple and advanced logging.
``cherrypy.log`` is an instance of this class.
"""
appid = None
"""The id() of the Application object which owns this log manager.
If this is a global log manager, appid is None.
"""
error_log = None
"""The actual :class:`logging.Logger` instance for error messages."""
access_log = None
"""The actual :class:`logging.Logger` instance for access messages."""
access_log_format = '{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}"'
logger_root = None
"""The "top-level" logger name.
This string will be used as the first segment in the Logger names.
The default is "cherrypy", for example, in which case the Logger names
will be of the form::
cherrypy.error.<appid>
cherrypy.access.<appid>
"""
def __init__(self, appid=None, logger_root='cherrypy'):
self.logger_root = logger_root
self.appid = appid
if appid is None:
self.error_log = logging.getLogger('%s.error' % logger_root)
self.access_log = logging.getLogger('%s.access' % logger_root)
else:
self.error_log = logging.getLogger(
'%s.error.%s' % (logger_root, appid))
self.access_log = logging.getLogger(
'%s.access.%s' % (logger_root, appid))
self.error_log.setLevel(logging.INFO)
self.access_log.setLevel(logging.INFO)
# Silence the no-handlers "warning" (stderr write!) in stdlib logging
self.error_log.addHandler(NullHandler())
self.access_log.addHandler(NullHandler())
cherrypy.engine.subscribe('graceful', self.reopen_files)
def reopen_files(self):
"""Close and reopen all file handlers."""
for log in (self.error_log, self.access_log):
for h in log.handlers:
if isinstance(h, logging.FileHandler):
h.acquire()
h.stream.close()
h.stream = open(h.baseFilename, h.mode)
h.release()
def error(self, msg='', context='', severity=logging.INFO,
traceback=False):
"""Write the given ``msg`` to the error log.
This is not just for errors! Applications may call this at any time
to log application-specific information.
If ``traceback`` is True, the traceback of the current exception
(if any) will be appended to ``msg``.
"""
exc_info = None
if traceback:
exc_info = _cperror._exc_info()
self.error_log.log(
severity,
' '.join((self.time(), context, msg)),
exc_info=exc_info,
)
def __call__(self, *args, **kwargs):
"""An alias for ``error``."""
return self.error(*args, **kwargs)
def access(self):
"""Write to the access log (in Apache/NCSA Combined Log format).
See the
`apache documentation
<http://httpd.apache.org/docs/current/logs.html#combined>`_
for format details.
CherryPy calls this automatically for you. Note there are no arguments;
it collects the data itself from
:class:`cherrypy.request<cherrypy._cprequest.Request>`.
Like Apache started doing in 2.0.46, non-printable and other special
characters in %r (and we expand that to all parts) are escaped using
\\xhh sequences, where hh stands for the hexadecimal representation
of the raw byte. Exceptions from this rule are " and \\, which are
escaped by prepending a backslash, and all whitespace characters,
which are written in their C-style notation (\\n, \\t, etc).
"""
request = cherrypy.serving.request
remote = request.remote
response = cherrypy.serving.response
outheaders = response.headers
inheaders = request.headers
if response.output_status is None:
status = '-'
else:
status = response.output_status.split(b' ', 1)[0]
status = status.decode('ISO-8859-1')
atoms = {'h': remote.name or remote.ip,
'l': '-',
'u': getattr(request, 'login', None) or '-',
't': self.time(),
'r': request.request_line,
's': status,
'b': dict.get(outheaders, 'Content-Length', '') or '-',
'f': dict.get(inheaders, 'Referer', ''),
'a': dict.get(inheaders, 'User-Agent', ''),
'o': dict.get(inheaders, 'Host', '-'),
'i': request.unique_id,
'z': LazyRfc3339UtcTime(),
}
for k, v in atoms.items():
if not isinstance(v, str):
v = str(v)
v = v.replace('"', '\\"').encode('utf8')
# Fortunately, repr(str) escapes unprintable chars, \n, \t, etc
# and backslash for us. All we have to do is strip the quotes.
v = repr(v)[2:-1]
# in python 3.0 the repr of bytes (as returned by encode)
# uses double \'s. But then the logger escapes them yet again,
# resulting in quadruple slashes. Remove the extra one here.
v = v.replace('\\\\', '\\')
# Escape double-quote.
atoms[k] = v
try:
self.access_log.log(
logging.INFO, self.access_log_format.format(**atoms))
except Exception:
self(traceback=True)
def time(self):
"""Return now() in Apache Common Log Format (no timezone)."""
now = datetime.datetime.now()
monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
month = monthnames[now.month - 1].capitalize()
return ('[%02d/%s/%04d:%02d:%02d:%02d]' %
(now.day, month, now.year, now.hour, now.minute, now.second))
def _get_builtin_handler(self, log, key):
for h in log.handlers:
if getattr(h, '_cpbuiltin', None) == key:
return h
# ------------------------- Screen handlers ------------------------- #
def _set_screen_handler(self, log, enable, stream=None):
h = self._get_builtin_handler(log, 'screen')
if enable:
if not h:
if stream is None:
stream = sys.stderr
h = logging.StreamHandler(stream)
h.setFormatter(logfmt)
h._cpbuiltin = 'screen'
log.addHandler(h)
elif h:
log.handlers.remove(h)
@property
def screen(self):
"""Turn stderr/stdout logging on or off.
If you set this to True, it'll add the appropriate StreamHandler
for you. If you set it to False, it will remove the handler.
"""
h = self._get_builtin_handler
has_h = h(self.error_log, 'screen') or h(self.access_log, 'screen')
return bool(has_h)
@screen.setter
def screen(self, newvalue):
self._set_screen_handler(self.error_log, newvalue, stream=sys.stderr)
self._set_screen_handler(self.access_log, newvalue, stream=sys.stdout)
# -------------------------- File handlers -------------------------- #
def _add_builtin_file_handler(self, log, fname):
h = logging.FileHandler(fname)
h.setFormatter(logfmt)
h._cpbuiltin = 'file'
log.addHandler(h)
def _set_file_handler(self, log, filename):
h = self._get_builtin_handler(log, 'file')
if filename:
if h:
if h.baseFilename != os.path.abspath(filename):
h.close()
log.handlers.remove(h)
self._add_builtin_file_handler(log, filename)
else:
self._add_builtin_file_handler(log, filename)
else:
if h:
h.close()
log.handlers.remove(h)
@property
def error_file(self):
"""The filename for self.error_log.
If you set this to a string, it'll add the appropriate FileHandler for
you. If you set it to ``None`` or ``''``, it will remove the handler.
"""
h = self._get_builtin_handler(self.error_log, 'file')
if h:
return h.baseFilename
return ''
@error_file.setter
def error_file(self, newvalue):
self._set_file_handler(self.error_log, newvalue)
@property
def access_file(self):
"""The filename for self.access_log.
If you set this to a string, it'll add the appropriate FileHandler for
you. If you set it to ``None`` or ``''``, it will remove the handler.
"""
h = self._get_builtin_handler(self.access_log, 'file')
if h:
return h.baseFilename
return ''
@access_file.setter
def access_file(self, newvalue):
self._set_file_handler(self.access_log, newvalue)
# ------------------------- WSGI handlers ------------------------- #
def _set_wsgi_handler(self, log, enable):
h = self._get_builtin_handler(log, 'wsgi')
if enable:
if not h:
h = WSGIErrorHandler()
h.setFormatter(logfmt)
h._cpbuiltin = 'wsgi'
log.addHandler(h)
elif h:
log.handlers.remove(h)
@property
def wsgi(self):
"""Write errors to wsgi.errors.
If you set this to True, it'll add the appropriate
:class:`WSGIErrorHandler<cherrypy._cplogging.WSGIErrorHandler>` for you
(which writes errors to ``wsgi.errors``).
If you set it to False, it will remove the handler.
"""
return bool(self._get_builtin_handler(self.error_log, 'wsgi'))
@wsgi.setter
def wsgi(self, newvalue):
self._set_wsgi_handler(self.error_log, newvalue)
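# --- Editor's illustrative sketch; not part of the CherryPy source. ----------
# The "simple config" knobs from the module docstring map onto the properties
# above (screen, error_file, access_file). The file paths are assumptions.
import cherrypy

if __name__ == '__main__':
    cherrypy.config.update({
        'log.screen': True,
        'log.error_file': '/var/log/myapp/error.log',
        'log.access_file': '/var/log/myapp/access.log',
    })
# ------------------------------------------------------------------------------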
class WSGIErrorHandler(logging.Handler):
"A handler class which writes logging records to environ['wsgi.errors']."
def flush(self):
"""Flushes the stream."""
try:
stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
except (AttributeError, KeyError):
pass
else:
stream.flush()
def emit(self, record):
"""Emit a record."""
try:
stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
except (AttributeError, KeyError):
pass
else:
try:
msg = self.format(record)
fs = '%s\n'
import types
# if no unicode support...
if not hasattr(types, 'UnicodeType'):
stream.write(fs % msg)
else:
try:
stream.write(fs % msg)
except UnicodeError:
stream.write(fs % msg.encode('UTF-8'))
self.flush()
except Exception:
self.handleError(record)
class LazyRfc3339UtcTime(object):
def __str__(self):
"""Return datetime in RFC3339 UTC Format."""
iso_formatted_now = datetime.datetime.now(
datetime.timezone.utc,
).isoformat('T')
return f'{iso_formatted_now!s}Z'

351
libs/cherrypy/_cpmodpy.py Normal file
View file

@@ -0,0 +1,351 @@
"""Native adapter for serving CherryPy via mod_python.
Basic usage:
##########################################
# Application in a module called myapp.py
##########################################
import cherrypy
class Root:
@cherrypy.expose
def index(self):
return 'Hi there, Ho there, Hey there'
# We will use this method from the mod_python configuration
# as the entry point to our application
def setup_server():
cherrypy.tree.mount(Root())
cherrypy.config.update({'environment': 'production',
'log.screen': False,
'show_tracebacks': False})
##########################################
# mod_python settings for apache2
# This should reside in your httpd.conf
# or a file that will be loaded at
# apache startup
##########################################
# Start
DocumentRoot "/"
Listen 8080
LoadModule python_module /usr/lib/apache2/modules/mod_python.so
<Location "/">
PythonPath "sys.path+['/path/to/my/application']"
SetHandler python-program
PythonHandler cherrypy._cpmodpy::handler
PythonOption cherrypy.setup myapp::setup_server
PythonDebug On
</Location>
# End
The actual path to your mod_python.so is dependent on your
environment. In this case we suppose a global mod_python
installation on a Linux distribution such as Ubuntu.
We do set the PythonPath configuration setting so that
your application can be found by the user running
the apache2 instance. Of course if your application
resides in the global site-package this won't be needed.
Then restart apache2 and access http://127.0.0.1:8080
"""
import io
import logging
import os
import re
import sys
from more_itertools import always_iterable
import cherrypy
from cherrypy._cperror import format_exc, bare_error
from cherrypy.lib import httputil
# ------------------------------ Request-handling
def setup(req):
from mod_python import apache
# Run any setup functions defined by a "PythonOption cherrypy.setup"
# directive.
options = req.get_options()
if 'cherrypy.setup' in options:
for function in options['cherrypy.setup'].split():
atoms = function.split('::', 1)
if len(atoms) == 1:
mod = __import__(atoms[0], globals(), locals())
else:
modname, fname = atoms
mod = __import__(modname, globals(), locals(), [fname])
func = getattr(mod, fname)
func()
cherrypy.config.update({'log.screen': False,
'tools.ignore_headers.on': True,
'tools.ignore_headers.headers': ['Range'],
})
engine = cherrypy.engine
if hasattr(engine, 'signal_handler'):
engine.signal_handler.unsubscribe()
if hasattr(engine, 'console_control_handler'):
engine.console_control_handler.unsubscribe()
engine.autoreload.unsubscribe()
cherrypy.server.unsubscribe()
@engine.subscribe('log')
def _log(msg, level):
newlevel = apache.APLOG_ERR
if logging.DEBUG >= level:
newlevel = apache.APLOG_DEBUG
elif logging.INFO >= level:
newlevel = apache.APLOG_INFO
elif logging.WARNING >= level:
newlevel = apache.APLOG_WARNING
# On Windows, req.server is required or the msg will vanish. See
# http://www.modpython.org/pipermail/mod_python/2003-October/014291.html
# Also, "When server is not specified...LogLevel does not apply..."
apache.log_error(msg, newlevel, req.server)
engine.start()
def cherrypy_cleanup(data):
engine.exit()
try:
# apache.register_cleanup wasn't available until 3.1.4.
apache.register_cleanup(cherrypy_cleanup)
except AttributeError:
req.server.register_cleanup(req, cherrypy_cleanup)
class _ReadOnlyRequest:
expose = ('read', 'readline', 'readlines')
def __init__(self, req):
for method in self.expose:
self.__dict__[method] = getattr(req, method)
recursive = False
_isSetUp = False
def handler(req):
from mod_python import apache
try:
global _isSetUp
if not _isSetUp:
setup(req)
_isSetUp = True
# Obtain a Request object from CherryPy
local = req.connection.local_addr
local = httputil.Host(
local[0], local[1], req.connection.local_host or '')
remote = req.connection.remote_addr
remote = httputil.Host(
remote[0], remote[1], req.connection.remote_host or '')
scheme = req.parsed_uri[0] or 'http'
req.get_basic_auth_pw()
try:
# apache.mpm_query only became available in mod_python 3.1
q = apache.mpm_query
threaded = q(apache.AP_MPMQ_IS_THREADED)
forked = q(apache.AP_MPMQ_IS_FORKED)
except AttributeError:
bad_value = ("You must provide a PythonOption '%s', "
"either 'on' or 'off', when running a version "
'of mod_python < 3.1')
options = req.get_options()
threaded = options.get('multithread', '').lower()
if threaded == 'on':
threaded = True
elif threaded == 'off':
threaded = False
else:
raise ValueError(bad_value % 'multithread')
forked = options.get('multiprocess', '').lower()
if forked == 'on':
forked = True
elif forked == 'off':
forked = False
else:
raise ValueError(bad_value % 'multiprocess')
sn = cherrypy.tree.script_name(req.uri or '/')
if sn is None:
send_response(req, '404 Not Found', [], '')
else:
app = cherrypy.tree.apps[sn]
method = req.method
path = req.uri
qs = req.args or ''
reqproto = req.protocol
headers = list(req.headers_in.copy().items())
rfile = _ReadOnlyRequest(req)
prev = None
try:
redirections = []
while True:
request, response = app.get_serving(local, remote, scheme,
'HTTP/1.1')
request.login = req.user
request.multithread = bool(threaded)
request.multiprocess = bool(forked)
request.app = app
request.prev = prev
# Run the CherryPy Request object and obtain the response
try:
request.run(method, path, qs, reqproto, headers, rfile)
break
except cherrypy.InternalRedirect:
ir = sys.exc_info()[1]
app.release_serving()
prev = request
if not recursive:
if ir.path in redirections:
raise RuntimeError(
'InternalRedirector visited the same URL '
'twice: %r' % ir.path)
else:
# Add the *previous* path_info + qs to
# redirections.
if qs:
qs = '?' + qs
redirections.append(sn + path + qs)
# Munge environment and try again.
method = 'GET'
path = ir.path
qs = ir.query_string
rfile = io.BytesIO()
send_response(
req, response.output_status, response.header_list,
response.body, response.stream)
finally:
app.release_serving()
except Exception:
tb = format_exc()
cherrypy.log(tb, 'MOD_PYTHON', severity=logging.ERROR)
s, h, b = bare_error()
send_response(req, s, h, b)
return apache.OK
def send_response(req, status, headers, body, stream=False):
# Set response status
req.status = int(status[:3])
# Set response headers
req.content_type = 'text/plain'
for header, value in headers:
if header.lower() == 'content-type':
req.content_type = value
continue
req.headers_out.add(header, value)
if stream:
# Flush now so the status and headers are sent immediately.
req.flush()
# Set response body
for seg in always_iterable(body):
req.write(seg)
# --------------- Startup tools for CherryPy + mod_python --------------- #
try:
import subprocess
def popen(fullcmd):
p = subprocess.Popen(fullcmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
close_fds=True)
return p.stdout
except ImportError:
def popen(fullcmd):
pipein, pipeout = os.popen4(fullcmd)
return pipeout
def read_process(cmd, args=''):
fullcmd = '%s %s' % (cmd, args)
pipeout = popen(fullcmd)
try:
firstline = pipeout.readline()
cmd_not_found = re.search(
b'(not recognized|No such file|not found)',
firstline,
re.IGNORECASE
)
if cmd_not_found:
raise IOError('%s must be on your system path.' % cmd)
output = firstline + pipeout.read()
finally:
pipeout.close()
return output
class ModPythonServer(object):
template = """
# Apache2 server configuration file for running CherryPy with mod_python.
DocumentRoot "/"
Listen %(port)s
LoadModule python_module modules/mod_python.so
<Location %(loc)s>
SetHandler python-program
PythonHandler %(handler)s
PythonDebug On
%(opts)s
</Location>
"""
def __init__(self, loc='/', port=80, opts=None, apache_path='apache',
handler='cherrypy._cpmodpy::handler'):
self.loc = loc
self.port = port
self.opts = opts
self.apache_path = apache_path
self.handler = handler
def start(self):
opts = ''.join([' PythonOption %s %s\n' % (k, v)
for k, v in self.opts])
conf_data = self.template % {'port': self.port,
'loc': self.loc,
'opts': opts,
'handler': self.handler,
}
mpconf = os.path.join(os.path.dirname(__file__), 'cpmodpy.conf')
with open(mpconf, 'wb') as f:
f.write(conf_data)
response = read_process(self.apache_path, '-k start -f %s' % mpconf)
self.ready = True
return response
def stop(self):
os.popen('apache -k stop')
self.ready = False

168
libs/cherrypy/_cpnative_server.py Normal file
View file

@@ -0,0 +1,168 @@
"""Native adapter for serving CherryPy via its builtin server."""
import logging
import sys
import io
import cheroot.server
import cherrypy
from cherrypy._cperror import format_exc, bare_error
from cherrypy.lib import httputil
from ._cpcompat import tonative
class NativeGateway(cheroot.server.Gateway):
"""Native gateway implementation allowing to bypass WSGI."""
recursive = False
def respond(self):
"""Obtain response from CherryPy machinery and then send it."""
req = self.req
try:
# Obtain a Request object from CherryPy
local = req.server.bind_addr # FIXME: handle UNIX sockets
local = tonative(local[0]), local[1]
local = httputil.Host(local[0], local[1], '')
remote = tonative(req.conn.remote_addr), req.conn.remote_port
remote = httputil.Host(remote[0], remote[1], '')
scheme = tonative(req.scheme)
sn = cherrypy.tree.script_name(tonative(req.uri or '/'))
if sn is None:
self.send_response('404 Not Found', [], [''])
else:
app = cherrypy.tree.apps[sn]
method = tonative(req.method)
path = tonative(req.path)
qs = tonative(req.qs or '')
headers = (
(tonative(h), tonative(v))
for h, v in req.inheaders.items()
)
rfile = req.rfile
prev = None
try:
redirections = []
while True:
request, response = app.get_serving(
local, remote, scheme, 'HTTP/1.1')
request.multithread = True
request.multiprocess = False
request.app = app
request.prev = prev
# Run the CherryPy Request object and obtain the
# response
try:
request.run(
method, path, qs,
tonative(req.request_protocol),
headers, rfile,
)
break
except cherrypy.InternalRedirect:
ir = sys.exc_info()[1]
app.release_serving()
prev = request
if not self.recursive:
if ir.path in redirections:
raise RuntimeError(
'InternalRedirector visited the same '
'URL twice: %r' % ir.path)
else:
# Add the *previous* path_info + qs to
# redirections.
if qs:
qs = '?' + qs
redirections.append(sn + path + qs)
# Munge environment and try again.
method = 'GET'
path = ir.path
qs = ir.query_string
rfile = io.BytesIO()
self.send_response(
response.output_status, response.header_list,
response.body)
finally:
app.release_serving()
except Exception:
tb = format_exc()
# print tb
cherrypy.log(tb, 'NATIVE_ADAPTER', severity=logging.ERROR)
s, h, b = bare_error()
self.send_response(s, h, b)
def send_response(self, status, headers, body):
"""Send response to HTTP request."""
req = self.req
# Set response status
req.status = status or b'500 Server Error'
# Set response headers
for header, value in headers:
req.outheaders.append((header, value))
if (req.ready and not req.sent_headers):
req.sent_headers = True
req.send_headers()
# Set response body
for seg in body:
req.write(seg)
class CPHTTPServer(cheroot.server.HTTPServer):
"""Wrapper for cheroot.server.HTTPServer.
cheroot has been designed to not reference CherryPy in any way, so
that it can be used in other frameworks and applications. Therefore,
we wrap it here, so we can apply some attributes from config ->
cherrypy.server -> HTTPServer.
"""
def __init__(self, server_adapter=cherrypy.server):
"""Initialize CPHTTPServer."""
self.server_adapter = server_adapter
server_name = (self.server_adapter.socket_host or
self.server_adapter.socket_file or
None)
cheroot.server.HTTPServer.__init__(
self, server_adapter.bind_addr, NativeGateway,
minthreads=server_adapter.thread_pool,
maxthreads=server_adapter.thread_pool_max,
server_name=server_name)
self.max_request_header_size = (
self.server_adapter.max_request_header_size or 0)
self.max_request_body_size = (
self.server_adapter.max_request_body_size or 0)
self.request_queue_size = self.server_adapter.socket_queue_size
self.timeout = self.server_adapter.socket_timeout
self.shutdown_timeout = self.server_adapter.shutdown_timeout
self.protocol = self.server_adapter.protocol_version
self.nodelay = self.server_adapter.nodelay
ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
if self.server_adapter.ssl_context:
adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain,
self.server_adapter.ssl_ciphers)
self.ssl_adapter.context = self.server_adapter.ssl_context
elif self.server_adapter.ssl_certificate:
adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain,
self.server_adapter.ssl_ciphers)

996
libs/cherrypy/_cpreqbody.py Normal file
View file

@@ -0,0 +1,996 @@
"""Request body processing for CherryPy.
.. versionadded:: 3.2
Application authors have complete control over the parsing of HTTP request
entities. In short,
:attr:`cherrypy.request.body<cherrypy._cprequest.Request.body>`
is now always set to an instance of
:class:`RequestBody<cherrypy._cpreqbody.RequestBody>`,
and *that* class is a subclass of :class:`Entity<cherrypy._cpreqbody.Entity>`.
When an HTTP request includes an entity body, it is often desirable to
provide that information to applications in a form other than the raw bytes.
Different content types demand different approaches. Examples:
* For a GIF file, we want the raw bytes in a stream.
* An HTML form is better parsed into its component fields, and each text field
decoded from bytes to unicode.
* A JSON body should be deserialized into a Python dict or list.
When the request contains a Content-Type header, the media type is used as a
key to look up a value in the
:attr:`request.body.processors<cherrypy._cpreqbody.Entity.processors>` dict.
If the full media
type is not found, then the major type is tried; for example, if no processor
is found for the 'image/jpeg' type, then we look for a processor for the
'image' types altogether. If neither the full type nor the major type has a
matching processor, then a default processor is used
(:func:`default_proc<cherrypy._cpreqbody.Entity.default_proc>`). For most
types, this means no processing is done, and the body is left unread as a
raw byte stream. Processors are configurable in an 'on_start_resource' hook.
Some processors, especially those for the 'text' types, attempt to decode bytes
to unicode. If the Content-Type request header includes a 'charset' parameter,
this is used to decode the entity. Otherwise, one or more default charsets may
be attempted, although this decision is up to each processor. If a processor
successfully decodes an Entity or Part, it should set the
:attr:`charset<cherrypy._cpreqbody.Entity.charset>` attribute
on the Entity or Part to the name of the successful charset, so that
applications can easily re-encode or transcode the value if they wish.
If the Content-Type of the request entity is of major type 'multipart', then
the above parsing process, and possibly a decoding process, is performed for
each part.
For both the full entity and multipart parts, a Content-Disposition header may
be used to fill :attr:`name<cherrypy._cpreqbody.Entity.name>` and
:attr:`filename<cherrypy._cpreqbody.Entity.filename>` attributes on the
request.body or the Part.
.. _custombodyprocessors:
Custom Processors
=================
You can add your own processors for any specific or major MIME type. Simply add
it to the :attr:`processors<cherrypy._cprequest.Entity.processors>` dict in a
hook/tool that runs at ``on_start_resource`` or ``before_request_body``.
Here's the built-in JSON tool for an example::
def json_in(force=True, debug=False):
request = cherrypy.serving.request
def json_processor(entity):
'''Read application/json data into request.json.'''
if not entity.headers.get("Content-Length", ""):
raise cherrypy.HTTPError(411)
body = entity.fp.read()
try:
request.json = json_decode(body)
except ValueError:
raise cherrypy.HTTPError(400, 'Invalid JSON document')
if force:
request.body.processors.clear()
request.body.default_proc = cherrypy.HTTPError(
415, 'Expected an application/json content type')
request.body.processors['application/json'] = json_processor
We begin by defining a new ``json_processor`` function to stick in the
``processors`` dictionary. All processor functions take a single argument,
the ``Entity`` instance they are to process. It will be called whenever a
request is received (for those URI's where the tool is turned on) which
has a ``Content-Type`` of "application/json".
First, it checks for a valid ``Content-Length`` (raising 411 if not valid),
then reads the remaining bytes on the socket. The ``fp`` object knows its
own length, so it won't hang waiting for data that never arrives. It will
return when all data has been read. Then, we decode those bytes using
Python's built-in ``json`` module, and stick the decoded result onto
``request.json`` . If it cannot be decoded, we raise 400.
If the "force" argument is True (the default), the ``Tool`` clears the
``processors`` dict so that request entities of other ``Content-Types``
aren't parsed at all. Since there's no entry for those invalid MIME
types, the ``default_proc`` method of ``cherrypy.request.body`` is
called. But this does nothing by default (usually to provide the page
handler an opportunity to handle it.)
But in our case, we want to raise 415, so we replace
``request.body.default_proc``
with the error (``HTTPError`` instances, when called, raise themselves).
If we were defining a custom processor, we can do so without making a ``Tool``.
Just add the config entry::
request.body.processors = {'application/json': json_processor}
Note that you can only replace the ``processors`` dict wholesale this way,
not update the existing one.
"""
try:
from io import DEFAULT_BUFFER_SIZE
except ImportError:
DEFAULT_BUFFER_SIZE = 8192
import re
import sys
import tempfile
from urllib.parse import unquote
import cheroot.server
import cherrypy
from cherrypy._cpcompat import ntou
from cherrypy.lib import httputil
def unquote_plus(bs):
"""Bytes version of urllib.parse.unquote_plus."""
bs = bs.replace(b'+', b' ')
atoms = bs.split(b'%')
for i in range(1, len(atoms)):
item = atoms[i]
try:
pct = int(item[:2], 16)
atoms[i] = bytes([pct]) + item[2:]
except ValueError:
pass
return b''.join(atoms)
# ------------------------------- Processors -------------------------------- #
def process_urlencoded(entity):
"""Read application/x-www-form-urlencoded data into entity.params."""
qs = entity.fp.read()
for charset in entity.attempt_charsets:
try:
params = {}
for aparam in qs.split(b'&'):
for pair in aparam.split(b';'):
if not pair:
continue
atoms = pair.split(b'=', 1)
if len(atoms) == 1:
atoms.append(b'')
key = unquote_plus(atoms[0]).decode(charset)
value = unquote_plus(atoms[1]).decode(charset)
if key in params:
if not isinstance(params[key], list):
params[key] = [params[key]]
params[key].append(value)
else:
params[key] = value
except UnicodeDecodeError:
pass
else:
entity.charset = charset
break
else:
raise cherrypy.HTTPError(
400, 'The request entity could not be decoded. The following '
'charsets were attempted: %s' % repr(entity.attempt_charsets))
# Now that all values have been successfully parsed and decoded,
# apply them to the entity.params dict.
for key, value in params.items():
if key in entity.params:
if not isinstance(entity.params[key], list):
entity.params[key] = [entity.params[key]]
entity.params[key].append(value)
else:
entity.params[key] = value
def process_multipart(entity):
"""Read all multipart parts into entity.parts."""
ib = ''
if 'boundary' in entity.content_type.params:
# http://tools.ietf.org/html/rfc2046#section-5.1.1
# "The grammar for parameters on the Content-type field is such that it
# is often necessary to enclose the boundary parameter values in quotes
# on the Content-type line"
ib = entity.content_type.params['boundary'].strip('"')
if not re.match('^[ -~]{0,200}[!-~]$', ib):
raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
ib = ('--' + ib).encode('ascii')
# Find the first marker
while True:
b = entity.readline()
if not b:
return
b = b.strip()
if b == ib:
break
# Read all parts
while True:
part = entity.part_class.from_fp(entity.fp, ib)
entity.parts.append(part)
part.process()
if part.fp.done:
break
def process_multipart_form_data(entity):
"""Read all multipart/form-data parts into entity.parts or entity.params.
"""
process_multipart(entity)
kept_parts = []
for part in entity.parts:
if part.name is None:
kept_parts.append(part)
else:
if part.filename is None:
# It's a regular field
value = part.fullvalue()
else:
# It's a file upload. Retain the whole part so consumer code
# has access to its .file and .filename attributes.
value = part
if part.name in entity.params:
if not isinstance(entity.params[part.name], list):
entity.params[part.name] = [entity.params[part.name]]
entity.params[part.name].append(value)
else:
entity.params[part.name] = value
entity.parts = kept_parts
def _old_process_multipart(entity):
"""The behavior of 3.2 and lower.
Deprecated and will be changed in 3.3.
"""
process_multipart(entity)
params = entity.params
for part in entity.parts:
if part.name is None:
key = ntou('parts')
else:
key = part.name
if part.filename is None:
# It's a regular field
value = part.fullvalue()
else:
# It's a file upload. Retain the whole part so consumer code
# has access to its .file and .filename attributes.
value = part
if key in params:
if not isinstance(params[key], list):
params[key] = [params[key]]
params[key].append(value)
else:
params[key] = value
# -------------------------------- Entities --------------------------------- #
class Entity(object):
"""An HTTP request body, or MIME multipart body.
This class collects information about the HTTP request entity. When a
given entity is of MIME type "multipart", each part is parsed into its own
Entity instance, and the set of parts stored in
:attr:`entity.parts<cherrypy._cpreqbody.Entity.parts>`.
Between the ``before_request_body`` and ``before_handler`` tools, CherryPy
tries to process the request body (if any) by calling
:func:`request.body.process<cherrypy._cpreqbody.RequestBody.process>`.
This uses the ``content_type`` of the Entity to look up a suitable
processor in
:attr:`Entity.processors<cherrypy._cpreqbody.Entity.processors>`,
a dict.
If a matching processor cannot be found for the complete Content-Type,
it tries again using the major type. For example, if a request with an
entity of type "image/jpeg" arrives, but no processor can be found for
that complete type, then one is sought for the major type "image". If a
processor is still not found, then the
:func:`default_proc<cherrypy._cpreqbody.Entity.default_proc>` method
of the Entity is called (which does nothing by default; you can
override this too).
CherryPy includes processors for the "application/x-www-form-urlencoded"
type, the "multipart/form-data" type, and the "multipart" major type.
CherryPy 3.2 processes these types almost exactly as older versions.
Parts are passed as arguments to the page handler using their
``Content-Disposition.name`` if given, otherwise in a generic "parts"
argument. Each such part is either a string, or the
:class:`Part<cherrypy._cpreqbody.Part>` itself if it's a file. (In this
case it will have ``file`` and ``filename`` attributes, or possibly a
``value`` attribute). Each Part is itself a subclass of
Entity, and has its own ``process`` method and ``processors`` dict.
There is a separate processor for the "multipart" major type which is more
flexible, and simply stores all multipart parts in
:attr:`request.body.parts<cherrypy._cpreqbody.Entity.parts>`. You can
enable it with::
cherrypy.request.body.processors['multipart'] = \
_cpreqbody.process_multipart
in an ``on_start_resource`` tool.
"""
# http://tools.ietf.org/html/rfc2046#section-4.1.2:
# "The default character set, which must be assumed in the
# absence of a charset parameter, is US-ASCII."
# However, many browsers send data in utf-8 with no charset.
attempt_charsets = ['utf-8']
r"""A list of strings, each of which should be a known encoding.
When the Content-Type of the request body warrants it, each of the given
encodings will be tried in order. The first one to successfully decode the
entity without raising an error is stored as
:attr:`entity.charset<cherrypy._cpreqbody.Entity.charset>`. This defaults
to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by
`HTTP/1.1
<http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1>`_),
but ``['us-ascii', 'utf-8']`` for multipart parts.
"""
charset = None
"""The successful decoding; see "attempt_charsets" above."""
content_type = None
"""The value of the Content-Type request header.
If the Entity is part of a multipart payload, this will be the
Content-Type given in the MIME headers for this part.
"""
default_content_type = 'application/x-www-form-urlencoded'
"""This defines a default ``Content-Type`` to use if no Content-Type header
is given.
The empty string is used for RequestBody, which results in the
request body not being read or parsed at all. This is by design; a missing
``Content-Type`` header in the HTTP request entity is an error at best,
and a security hole at worst. For multipart parts, however, the MIME spec
declares that a part with no Content-Type defaults to "text/plain"
(see :class:`Part<cherrypy._cpreqbody.Part>`).
"""
filename = None
"""The ``Content-Disposition.filename`` header, if available."""
fp = None
"""The readable socket file object."""
headers = None
"""A dict of request/multipart header names and values.
This is a copy of the ``request.headers`` for the ``request.body``;
for multipart parts, it is the set of headers for that part.
"""
length = None
"""The value of the ``Content-Length`` header, if provided."""
name = None
"""The "name" parameter of the ``Content-Disposition`` header, if any."""
params = None
"""
If the request Content-Type is 'application/x-www-form-urlencoded' or
multipart, this will be a dict of the params pulled from the entity
body; that is, it will be the portion of request.params that comes
from the message body (sometimes called "POST params", although they
can be sent with various HTTP method verbs). This value is set between
the 'before_request_body' and 'before_handler' hooks (assuming that
process_request_body is True)."""
processors = {'application/x-www-form-urlencoded': process_urlencoded,
'multipart/form-data': process_multipart_form_data,
'multipart': process_multipart,
}
"""A dict of Content-Type names to processor methods."""
parts = None
"""A list of Part instances if ``Content-Type`` is of major type
"multipart"."""
part_class = None
"""The class used for multipart parts.
You can replace this with custom subclasses to alter the processing
of multipart parts.
"""
def __init__(self, fp, headers, params=None, parts=None):
# Make an instance-specific copy of the class processors
# so Tools, etc. can replace them per-request.
self.processors = self.processors.copy()
self.fp = fp
self.headers = headers
if params is None:
params = {}
self.params = params
if parts is None:
parts = []
self.parts = parts
# Content-Type
self.content_type = headers.elements('Content-Type')
if self.content_type:
self.content_type = self.content_type[0]
else:
self.content_type = httputil.HeaderElement.from_str(
self.default_content_type)
# Copy the class 'attempt_charsets', prepending any Content-Type
# charset
dec = self.content_type.params.get('charset', None)
if dec:
self.attempt_charsets = [dec] + [c for c in self.attempt_charsets
if c != dec]
else:
self.attempt_charsets = self.attempt_charsets[:]
# Length
self.length = None
clen = headers.get('Content-Length', None)
# If Transfer-Encoding is 'chunked', ignore any Content-Length.
if (
clen is not None and
'chunked' not in headers.get('Transfer-Encoding', '')
):
try:
self.length = int(clen)
except ValueError:
pass
# Content-Disposition
self.name = None
self.filename = None
disp = headers.elements('Content-Disposition')
if disp:
disp = disp[0]
if 'name' in disp.params:
self.name = disp.params['name']
if self.name.startswith('"') and self.name.endswith('"'):
self.name = self.name[1:-1]
if 'filename' in disp.params:
self.filename = disp.params['filename']
if (
self.filename.startswith('"') and
self.filename.endswith('"')
):
self.filename = self.filename[1:-1]
if 'filename*' in disp.params:
# @see https://tools.ietf.org/html/rfc5987
encoding, lang, filename = disp.params['filename*'].split("'")
self.filename = unquote(str(filename), encoding)
def read(self, size=None, fp_out=None):
return self.fp.read(size, fp_out)
def readline(self, size=None):
return self.fp.readline(size)
def readlines(self, sizehint=None):
return self.fp.readlines(sizehint)
def __iter__(self):
return self
def __next__(self):
line = self.readline()
if not line:
raise StopIteration
return line
def next(self):
return self.__next__()
def read_into_file(self, fp_out=None):
"""Read the request body into fp_out (or make_file() if None).
Return fp_out.
"""
if fp_out is None:
fp_out = self.make_file()
self.read(fp_out=fp_out)
return fp_out
def make_file(self):
"""Return a file-like object into which the request body will be read.
By default, this will return a TemporaryFile. Override as needed.
See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`.
"""
return tempfile.TemporaryFile()
def fullvalue(self):
"""Return this entity as a string, whether stored in a file or not."""
if self.file:
# It was stored in a tempfile. Read it.
self.file.seek(0)
value = self.file.read()
self.file.seek(0)
else:
value = self.value
value = self.decode_entity(value)
return value
def decode_entity(self, value):
"""Return a given byte encoded value as a string."""
for charset in self.attempt_charsets:
try:
value = value.decode(charset)
except UnicodeDecodeError:
pass
else:
self.charset = charset
return value
else:
raise cherrypy.HTTPError(
400,
'The request entity could not be decoded. The following '
'charsets were attempted: %s' % repr(self.attempt_charsets)
)
def process(self):
"""Execute the best-match processor for the given media type."""
proc = None
ct = self.content_type.value
try:
proc = self.processors[ct]
except KeyError:
toptype = ct.split('/', 1)[0]
try:
proc = self.processors[toptype]
except KeyError:
pass
if proc is None:
self.default_proc()
else:
proc(self)
def default_proc(self):
"""Called if a more-specific processor is not found for the
``Content-Type``.
"""
# Leave the fp alone for someone else to read. This works fine
# for request.body, but the Part subclasses need to override this
# so they can move on to the next part.
pass
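# Illustrative sketch (not part of the vendored source): because __init__()
# copies the class-level 'processors' dict onto every instance, a processor
# for another media type can be slotted in per request from an
# on_start_resource tool. CherryPy already ships tools.json_in for JSON; this
# hand-rolled variant only demonstrates the mechanism, and the tool name
# 'demo_json_body' is hypothetical.
import json
import cherrypy

def _json_processor(entity):
    raw = entity.fp.read()  # consumes the length-bounded reader during process()
    try:
        cherrypy.serving.request.json = json.loads(raw.decode('utf-8'))
    except ValueError:
        raise cherrypy.HTTPError(400, 'Invalid JSON document')

def _install_json_processor():
    cherrypy.serving.request.body.processors['application/json'] = \
        _json_processor

cherrypy.tools.demo_json_body = cherrypy.Tool(
    'on_start_resource', _install_json_processor)
# enable per path with config, e.g. {'/': {'tools.demo_json_body.on': True}}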
class Part(Entity):
"""A MIME part entity, part of a multipart entity."""
# "The default character set, which must be assumed in the absence of a
# charset parameter, is US-ASCII."
attempt_charsets = ['us-ascii', 'utf-8']
r"""A list of strings, each of which should be a known encoding.
When the Content-Type of the request body warrants it, each of the given
encodings will be tried in order. The first one to successfully decode the
entity without raising an error is stored as
:attr:`entity.charset<cherrypy._cpreqbody.Entity.charset>`. This defaults
to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by
`HTTP/1.1
<http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1>`_),
but ``['us-ascii', 'utf-8']`` for multipart parts.
"""
boundary = None
"""The MIME multipart boundary."""
default_content_type = 'text/plain'
"""This defines a default ``Content-Type`` to use if no Content-Type header
is given. The empty string is used for RequestBody, which results in the
request body not being read or parsed at all. This is by design; a missing
``Content-Type`` header in the HTTP request entity is an error at best,
and a security hole at worst. For multipart parts, however (this class),
the MIME spec declares that a part with no Content-Type defaults to
"text/plain".
"""
# This is the default in stdlib cgi. We may want to increase it.
maxrambytes = 1000
"""The threshold of bytes after which point the ``Part`` will store
its data in a file (generated by
:func:`make_file<cherrypy._cpreqbody.Entity.make_file>`)
instead of a string. Defaults to 1000, just like the :mod:`cgi`
module in Python's standard library.
"""
def __init__(self, fp, headers, boundary):
Entity.__init__(self, fp, headers)
self.boundary = boundary
self.file = None
self.value = None
@classmethod
def from_fp(cls, fp, boundary):
headers = cls.read_headers(fp)
return cls(fp, headers, boundary)
@classmethod
def read_headers(cls, fp):
headers = httputil.HeaderMap()
while True:
line = fp.readline()
if not line:
# No more data--illegal end of headers
raise EOFError('Illegal end of headers.')
if line == b'\r\n':
# Normal end of headers
break
if not line.endswith(b'\r\n'):
raise ValueError('MIME requires CRLF terminators: %r' % line)
if line[0] in b' \t':
# It's a continuation line.
v = line.strip().decode('ISO-8859-1')
else:
k, v = line.split(b':', 1)
k = k.strip().decode('ISO-8859-1')
v = v.strip().decode('ISO-8859-1')
existing = headers.get(k)
if existing:
v = ', '.join((existing, v))
headers[k] = v
return headers
def read_lines_to_boundary(self, fp_out=None):
"""Read bytes from self.fp and return or write them to a file.
If the 'fp_out' argument is None (the default), all bytes read
are returned in a single byte string.
If the 'fp_out' argument is not None, it must be a file-like
object that supports the 'write' method; all bytes read will be
written to the fp, and that fp is returned.
"""
endmarker = self.boundary + b'--'
delim = b''
prev_lf = True
lines = []
seen = 0
while True:
line = self.fp.readline(1 << 16)
if not line:
raise EOFError('Illegal end of multipart body.')
if line.startswith(b'--') and prev_lf:
strippedline = line.strip()
if strippedline == self.boundary:
break
if strippedline == endmarker:
self.fp.finish()
break
line = delim + line
if line.endswith(b'\r\n'):
delim = b'\r\n'
line = line[:-2]
prev_lf = True
elif line.endswith(b'\n'):
delim = b'\n'
line = line[:-1]
prev_lf = True
else:
delim = b''
prev_lf = False
if fp_out is None:
lines.append(line)
seen += len(line)
if seen > self.maxrambytes:
fp_out = self.make_file()
for line in lines:
fp_out.write(line)
else:
fp_out.write(line)
if fp_out is None:
result = b''.join(lines)
return result
else:
fp_out.seek(0)
return fp_out
def default_proc(self):
"""Called if a more-specific processor is not found for the
``Content-Type``.
"""
if self.filename:
# Always read into a file if a .filename was given.
self.file = self.read_into_file()
else:
result = self.read_lines_to_boundary()
if isinstance(result, bytes):
self.value = result
else:
self.file = result
def read_into_file(self, fp_out=None):
"""Read the request body into fp_out (or make_file() if None).
Return fp_out.
"""
if fp_out is None:
fp_out = self.make_file()
self.read_lines_to_boundary(fp_out=fp_out)
return fp_out
Entity.part_class = Part
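# Illustrative sketch (not part of the vendored source): the part_class hook
# assigned just above lets uploads be spooled somewhere other than anonymous
# temporary files. The directory below is hypothetical and must exist.
import tempfile

class DiskSpooledPart(Part):
    maxrambytes = 0  # spill each part to a file as soon as data arrives

    def make_file(self):
        return tempfile.NamedTemporaryFile(dir='/var/tmp/uploads',
                                           delete=False)

# e.g. from an on_start_resource hook:
#     cherrypy.serving.request.body.part_class = DiskSpooledPart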
inf = float('inf')
class SizedReader:
def __init__(self, fp, length, maxbytes, bufsize=DEFAULT_BUFFER_SIZE,
has_trailers=False):
# Wrap our fp in a buffer so peek() works
self.fp = fp
self.length = length
self.maxbytes = maxbytes
self.buffer = b''
self.bufsize = bufsize
self.bytes_read = 0
self.done = False
self.has_trailers = has_trailers
def read(self, size=None, fp_out=None):
"""Read bytes from the request body and return or write them to a file.
A number of bytes less than or equal to the 'size' argument are
read off the socket. The actual number of bytes read is tracked
in self.bytes_read. The number may be smaller than 'size' when
1) the client sends fewer bytes, 2) the 'Content-Length' request
header specifies fewer bytes than requested, or 3) the number of
bytes read exceeds self.maxbytes (in which case, 413 is raised).
If the 'fp_out' argument is None (the default), all bytes read
are returned in a single byte string.
If the 'fp_out' argument is not None, it must be a file-like
object that supports the 'write' method; all bytes read will be
written to the fp, and None is returned.
"""
if self.length is None:
if size is None:
remaining = inf
else:
remaining = size
else:
remaining = self.length - self.bytes_read
if size and size < remaining:
remaining = size
if remaining == 0:
self.finish()
if fp_out is None:
return b''
else:
return None
chunks = []
# Read bytes from the buffer.
if self.buffer:
if remaining is inf:
data = self.buffer
self.buffer = b''
else:
data = self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
datalen = len(data)
remaining -= datalen
# Check lengths.
self.bytes_read += datalen
if self.maxbytes and self.bytes_read > self.maxbytes:
raise cherrypy.HTTPError(413)
# Store the data.
if fp_out is None:
chunks.append(data)
else:
fp_out.write(data)
# Read bytes from the socket.
while remaining > 0:
chunksize = min(remaining, self.bufsize)
try:
data = self.fp.read(chunksize)
except Exception:
e = sys.exc_info()[1]
if e.__class__.__name__ == 'MaxSizeExceeded':
# Post data is too big
raise cherrypy.HTTPError(
413, 'Maximum request length: %r' % e.args[1])
else:
raise
if not data:
self.finish()
break
datalen = len(data)
remaining -= datalen
# Check lengths.
self.bytes_read += datalen
if self.maxbytes and self.bytes_read > self.maxbytes:
raise cherrypy.HTTPError(413)
# Store the data.
if fp_out is None:
chunks.append(data)
else:
fp_out.write(data)
if fp_out is None:
return b''.join(chunks)
def readline(self, size=None):
"""Read a line from the request body and return it."""
chunks = []
while size is None or size > 0:
chunksize = self.bufsize
if size is not None and size < self.bufsize:
chunksize = size
data = self.read(chunksize)
if not data:
break
pos = data.find(b'\n') + 1
if pos:
chunks.append(data[:pos])
remainder = data[pos:]
self.buffer += remainder
self.bytes_read -= len(remainder)
break
else:
chunks.append(data)
return b''.join(chunks)
def readlines(self, sizehint=None):
"""Read lines from the request body and return them."""
if self.length is not None:
if sizehint is None:
sizehint = self.length - self.bytes_read
else:
sizehint = min(sizehint, self.length - self.bytes_read)
lines = []
seen = 0
while True:
line = self.readline()
if not line:
break
lines.append(line)
seen += len(line)
if seen >= sizehint:
break
return lines
def finish(self):
self.done = True
if self.has_trailers and hasattr(self.fp, 'read_trailer_lines'):
self.trailers = {}
try:
for line in self.fp.read_trailer_lines():
if line[0] in b' \t':
# It's a continuation line.
v = line.strip()
else:
try:
k, v = line.split(b':', 1)
except ValueError:
raise ValueError('Illegal header line.')
k = k.strip().title()
v = v.strip()
if k in cheroot.server.comma_separated_headers:
existing = self.trailers.get(k)
if existing:
v = b', '.join((existing, v))
self.trailers[k] = v
except Exception:
e = sys.exc_info()[1]
if e.__class__.__name__ == 'MaxSizeExceeded':
# Post data is too big
raise cherrypy.HTTPError(
413, 'Maximum request length: %r' % e.args[1])
else:
raise
class RequestBody(Entity):
"""The entity of the HTTP request."""
bufsize = 8 * 1024
"""The buffer size used when reading the socket."""
# Don't parse the request body at all if the client didn't provide
# a Content-Type header. See
# https://github.com/cherrypy/cherrypy/issues/790
default_content_type = ''
"""This defines a default ``Content-Type`` to use if no Content-Type header
is given. The empty string is used for RequestBody, which results in the
request body not being read or parsed at all. This is by design; a missing
``Content-Type`` header in the HTTP request entity is an error at best,
and a security hole at worst. For multipart parts, however, the MIME spec
declares that a part with no Content-Type defaults to "text/plain"
(see :class:`Part<cherrypy._cpreqbody.Part>`).
"""
maxbytes = None
"""Raise ``MaxSizeExceeded`` if more bytes than this are read from
the socket.
"""
def __init__(self, fp, headers, params=None, request_params=None):
Entity.__init__(self, fp, headers, params)
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
# When no explicit charset parameter is provided by the
# sender, media subtypes of the "text" type are defined
# to have a default charset value of "ISO-8859-1" when
# received via HTTP.
if self.content_type.value.startswith('text/'):
for c in ('ISO-8859-1', 'iso-8859-1', 'Latin-1', 'latin-1'):
if c in self.attempt_charsets:
break
else:
self.attempt_charsets.append('ISO-8859-1')
# Temporary fix while deprecating passing .parts as .params.
self.processors['multipart'] = _old_process_multipart
if request_params is None:
request_params = {}
self.request_params = request_params
def process(self):
"""Process the request entity based on its Content-Type."""
# "The presence of a message-body in a request is signaled by the
# inclusion of a Content-Length or Transfer-Encoding header field in
# the request's message-headers."
# It is possible to send a POST request with no body, for example;
# however, app developers are responsible in that case to set
# cherrypy.request.process_body to False so this method isn't called.
h = cherrypy.serving.request.headers
if 'Content-Length' not in h and 'Transfer-Encoding' not in h:
raise cherrypy.HTTPError(411)
self.fp = SizedReader(self.fp, self.length,
self.maxbytes, bufsize=self.bufsize,
has_trailers='Trailer' in h)
super(RequestBody, self).process()
# Body params should also be a part of the request_params
# add them in here.
request_params = self.request_params
for key, value in self.params.items():
if key in request_params:
if not isinstance(request_params[key], list):
request_params[key] = [request_params[key]]
request_params[key].append(value)
else:
request_params[key] = value
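# Illustrative sketch (not part of the vendored source): for media types with
# no registered processor (application/octet-stream, say), default_proc()
# leaves the sized reader untouched, so a handler can consume the raw bytes
# itself. Names below are hypothetical.
import cherrypy

class RawIngest(object):
    @cherrypy.expose
    def ingest(self):
        raw = cherrypy.request.body.read()  # bytes, bounded by maxbytes
        return 'received %d bytes' % len(raw)

# cherrypy.quickstart(RawIngest())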

970
libs/cherrypy/_cprequest.py Normal file
View file

@@ -0,0 +1,970 @@
import sys
import time
import collections
import operator
from http.cookies import SimpleCookie, CookieError
import uuid
from more_itertools import consume
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy import _cpreqbody
from cherrypy._cperror import format_exc, bare_error
from cherrypy.lib import httputil, reprconf, encoding
class Hook(object):
"""A callback and its metadata: failsafe, priority, and kwargs."""
callback = None
"""
The bare callable that this Hook object is wrapping, which will
be called when the Hook is called."""
failsafe = False
"""
If True, the callback is guaranteed to run even if other callbacks
from the same call point raise exceptions."""
priority = 50
"""Defines the order of execution for a list of Hooks.
Priority numbers should be limited to the closed interval [0, 100],
but values outside this range are acceptable, as are fractional
values.
"""
kwargs = {}
"""
A set of keyword arguments that will be passed to the
callable on each call."""
def __init__(self, callback, failsafe=None, priority=None, **kwargs):
self.callback = callback
if failsafe is None:
failsafe = getattr(callback, 'failsafe', False)
self.failsafe = failsafe
if priority is None:
priority = getattr(callback, 'priority', 50)
self.priority = priority
self.kwargs = kwargs
def __lt__(self, other):
"""
Hooks sort by priority, ascending, such that
hooks of lower priority are run first.
"""
return self.priority < other.priority
def __call__(self):
"""Run self.callback(**self.kwargs)."""
return self.callback(**self.kwargs)
def __repr__(self):
cls = self.__class__
return ('%s.%s(callback=%r, failsafe=%r, priority=%r, %s)'
% (cls.__module__, cls.__name__, self.callback,
self.failsafe, self.priority,
', '.join(['%s=%r' % (k, v)
for k, v in self.kwargs.items()])))
class HookMap(dict):
"""A map of call points to lists of callbacks (Hook objects)."""
def __new__(cls, points=None):
d = dict.__new__(cls)
for p in points or []:
d[p] = []
return d
def __init__(self, *a, **kw):
pass
def attach(self, point, callback, failsafe=None, priority=None, **kwargs):
"""Append a new Hook made from the supplied arguments."""
self[point].append(Hook(callback, failsafe, priority, **kwargs))
def run(self, point):
"""Execute all registered Hooks (callbacks) for the given point."""
self.run_hooks(iter(sorted(self[point])))
@classmethod
def run_hooks(cls, hooks):
"""Execute the indicated hooks, trapping errors.
Hooks with ``.failsafe == True`` are guaranteed to run
even if others at the same hookpoint fail. In this case,
log the failure and proceed on to the next hook. The only
way to stop all processing from one of these hooks is
to raise a BaseException like SystemExit or
KeyboardInterrupt and stop the whole server.
"""
assert isinstance(hooks, collections.abc.Iterator)
quiet_errors = (
cherrypy.HTTPError,
cherrypy.HTTPRedirect,
cherrypy.InternalRedirect,
)
safe = filter(operator.attrgetter('failsafe'), hooks)
for hook in hooks:
try:
hook()
except quiet_errors:
cls.run_hooks(safe)
raise
except Exception:
cherrypy.log(traceback=True, severity=40)
cls.run_hooks(safe)
raise
def __copy__(self):
newmap = self.__class__()
# We can't just use 'update' because we want copies of the
# mutable values (each is a list) as well.
for k, v in self.items():
newmap[k] = v[:]
return newmap
copy = __copy__
def __repr__(self):
cls = self.__class__
return '%s.%s(points=%r)' % (
cls.__module__,
cls.__name__,
list(self)
)
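# Illustrative sketch (not part of the vendored source): code running early in
# a request (a tool, say) can attach extra callbacks to the per-request
# HookMap; marking one failsafe keeps it running even if another hook at the
# same point raises. The header name is hypothetical.
import cherrypy

def tag_response():
    cherrypy.serving.response.headers['X-Handled-By'] = 'hook-demo'

def attach_tag_hook():
    cherrypy.serving.request.hooks.attach(
        'before_finalize', tag_response, failsafe=True, priority=60)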
# Config namespace handlers
def hooks_namespace(k, v):
"""Attach bare hooks declared in config."""
# Use split again to allow multiple hooks for a single
# hookpoint per path (e.g. "hooks.before_handler.1").
# Little-known fact you only get from reading source ;)
hookpoint = k.split('.', 1)[0]
if isinstance(v, str):
v = cherrypy.lib.reprconf.attributes(v)
if not isinstance(v, Hook):
v = Hook(v)
cherrypy.serving.request.hooks[hookpoint].append(v)
def request_namespace(k, v):
"""Attach request attributes declared in config."""
# Provides config entries to set request.body attrs (like
# attempt_charsets).
if k[:5] == 'body.':
setattr(cherrypy.serving.request.body, k[5:], v)
else:
setattr(cherrypy.serving.request, k, v)
def response_namespace(k, v):
"""Attach response attributes declared in config."""
# Provides config entries to set default response headers
# http://cherrypy.dev/ticket/889
if k[:8] == 'headers.':
cherrypy.serving.response.headers[k.split('.', 1)[1]] = v
else:
setattr(cherrypy.serving.response, k, v)
def error_page_namespace(k, v):
"""Attach error pages declared in config."""
if k != 'default':
k = int(k)
cherrypy.serving.request.error_page[k] = v
hookpoints = ['on_start_resource', 'before_request_body',
'before_handler', 'before_finalize',
'on_end_resource', 'on_end_request',
'before_error_response', 'after_error_response']
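# Illustrative sketch (not part of the vendored source): hooks_namespace()
# above means bare callables can be attached straight from app config; the
# mount point and handler tree are hypothetical.
import cherrypy

def note_stage():
    cherrypy.log('before_handler for %s' % cherrypy.serving.request.path_info)

demo_conf = {'/': {'hooks.before_handler': note_stage}}
# cherrypy.quickstart(Root(), '/', demo_conf)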
class Request(object):
"""An HTTP request.
This object represents the metadata of an HTTP request message; that
is, it contains attributes which describe the environment in which
the request URL, headers, and body were sent (if you want tools to
interpret the headers and body, those are elsewhere, mostly in
Tools). This 'metadata' consists of socket data, transport
characteristics, and the Request-Line. This object also contains
data regarding the configuration in effect for the given URL, and
the execution plan for generating a response.
"""
prev = None
"""The previous Request object (if any).
This should be None unless we are processing an InternalRedirect.
"""
# Conversation/connection attributes
local = httputil.Host('127.0.0.1', 80)
'An httputil.Host(ip, port, hostname) object for the server socket.'
remote = httputil.Host('127.0.0.1', 1111)
'An httputil.Host(ip, port, hostname) object for the client socket.'
scheme = 'http'
"""The protocol used between client and server.
In most cases, this will be either 'http' or 'https'.
"""
server_protocol = 'HTTP/1.1'
"""
The HTTP version for which the HTTP server is at least
conditionally compliant."""
base = ''
"""The (scheme://host) portion of the requested URL.
In some cases (e.g. when proxying via mod_rewrite), this may contain
path segments which cherrypy.url uses when constructing URLs, but
which otherwise are ignored by CherryPy. Regardless, this value MUST
NOT end in a slash.
"""
# Request-Line attributes
request_line = ''
"""The complete Request-Line received from the client.
This is a single string consisting of the request method, URI, and
protocol version (joined by spaces). Any final CRLF is removed.
"""
method = 'GET'
"""Indicates the HTTP method to be performed on the resource identified by
the Request-URI.
Common methods include GET, HEAD, POST, PUT, and DELETE. CherryPy
allows any extension method; however, various HTTP servers and
gateways may restrict the set of allowable methods. CherryPy
applications SHOULD restrict the set (on a per-URI basis).
"""
query_string = ''
"""
The query component of the Request-URI, a string of information to be
interpreted by the resource. The query portion of a URI follows the
path component, and is separated by a '?'. For example, the URI
'http://www.cherrypy.dev/wiki?a=3&b=4' has the query component,
'a=3&b=4'."""
query_string_encoding = 'utf8'
"""
The encoding expected for query string arguments after %-HEX-HEX decoding.
If a query string is provided that cannot be decoded with this encoding,
404 is raised (since technically it's a different URI). If you want
arbitrary encodings to not error, set this to 'Latin-1'; you can then
encode back to bytes and re-decode to whatever encoding you like later.
"""
protocol = (1, 1)
"""The HTTP protocol version corresponding to the set
of features which should be allowed in the response. If BOTH
the client's request message AND the server's level of HTTP
compliance is HTTP/1.1, this attribute will be the tuple (1, 1).
If either is 1.0, this attribute will be the tuple (1, 0).
Lower HTTP protocol versions are not explicitly supported."""
params = {}
"""
A dict which combines query string (GET) and request entity (POST)
variables. This is populated in two stages: GET params are added
before the 'on_start_resource' hook, and POST params are added
between the 'before_request_body' and 'before_handler' hooks.
"""
# Message attributes
header_list = []
"""A list of the HTTP request headers as (name, value) tuples.
In general, you should use request.headers (a dict) instead.
"""
headers = httputil.HeaderMap()
"""A dict-like object containing the request headers.
Keys are header
names (in Title-Case format); however, you may get and set them in
a case-insensitive manner. That is, headers['Content-Type'] and
headers['content-type'] refer to the same value. Values are header
values (decoded according to :rfc:`2047` if necessary). See also:
httputil.HeaderMap, httputil.HeaderElement.
"""
cookie = SimpleCookie()
"""See help(Cookie)."""
rfile = None
"""
If the request included an entity (body), it will be available
as a stream in this attribute. However, the rfile will normally
be read for you between the 'before_request_body' hook and the
'before_handler' hook, and the resulting string is placed into
either request.params or the request.body attribute.
You may disable the automatic consumption of the rfile by setting
request.process_request_body to False, either in config for the desired
path, or in an 'on_start_resource' or 'before_request_body' hook.
WARNING: In almost every case, you should not attempt to read from the
rfile stream after CherryPy's automatic mechanism has read it. If you
turn off the automatic parsing of rfile, you should read exactly the
number of bytes specified in request.headers['Content-Length'].
Ignoring either of these warnings may result in a hung request thread
or in corruption of the next (pipelined) request.
"""
process_request_body = True
"""
If True, the rfile (if any) is automatically read and parsed,
and the result placed into request.params or request.body."""
methods_with_bodies = ('POST', 'PUT', 'PATCH')
"""
A sequence of HTTP methods for which CherryPy will automatically
attempt to read a body from the rfile. If you are going to change
this property, modify it on the configuration (recommended)
or on the "hook point" `on_start_resource`.
"""
body = None
"""
If the request Content-Type is 'application/x-www-form-urlencoded'
or multipart, this will be None. Otherwise, this will be an instance
of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
can .read()); this value is set between the 'before_request_body' and
'before_handler' hooks (assuming that process_request_body is True).
"""
# Dispatch attributes
dispatch = cherrypy.dispatch.Dispatcher()
"""
The object which looks up the 'page handler' callable and collects
config for the current request based on the path_info, other
request attributes, and the application architecture. The core
calls the dispatcher as early as possible, passing it a 'path_info'
argument.
The default dispatcher discovers the page handler by matching
path_info to a hierarchical arrangement of objects, starting at
request.app.root. See help(cherrypy.dispatch) for more information.
"""
script_name = ''
"""The 'mount point' of the application which is handling this request.
This attribute MUST NOT end in a slash. If the script_name refers to
the root of the URI, it MUST be an empty string (not "/").
"""
path_info = '/'
"""The 'relative path' portion of the Request-URI.
This is relative to the script_name ('mount point') of the
application which is handling this request.
"""
login = None
"""
When authentication is used during the request processing this is
set to 'False' if it failed and to the 'username' value if it succeeded.
The default 'None' implies that no authentication happened."""
# Note that cherrypy.url uses "if request.app:" to determine whether
# the call is during a real HTTP request or not. So leave this None.
app = None
"""The cherrypy.Application object which is handling this request."""
handler = None
"""
The function, method, or other callable which CherryPy will call to
produce the response. The discovery of the handler and the arguments
it will receive are determined by the request.dispatch object.
By default, the handler is discovered by walking a tree of objects
starting at request.app.root, and is then passed all HTTP params
(from the query string and POST body) as keyword arguments."""
toolmaps = {}
"""
A nested dict of all Toolboxes and Tools in effect for this request,
of the form: {Toolbox.namespace: {Tool.name: config dict}}."""
config = None
"""A flat dict of all configuration entries which apply to the current
request.
These entries are collected from global config, application config
(based on request.path_info), and from handler config (exactly how
is governed by the request.dispatch object in effect for this
request; by default, handler config can be attached anywhere in the
tree between request.app.root and the final handler, and inherits
downward).
"""
is_index = None
"""
This will be True if the current request is mapped to an 'index'
resource handler (also, a 'default' handler if path_info ends with
a slash). The value may be used to automatically redirect the
user-agent to a 'more canonical' URL which either adds or removes
the trailing slash. See cherrypy.tools.trailing_slash."""
hooks = HookMap(hookpoints)
"""A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
Each key is a str naming the hook point, and each value is a list
of hooks which will be called at that hook point during this request.
The list of hooks is generally populated as early as possible (mostly
from Tools specified in config), but may be extended at any time.
See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.
"""
error_response = cherrypy.HTTPError(500).set_response
"""
The no-arg callable which will handle unexpected, untrapped errors
during request processing. This is not used for expected exceptions
(like NotFound, HTTPError, or HTTPRedirect) which are raised in
response to expected conditions (those should be customized either
via request.error_page or by overriding HTTPError.set_response).
By default, error_response uses HTTPError(500) to return a generic
error response to the user-agent."""
error_page = {}
"""A dict of {error code: response filename or callable} pairs.
The error code must be an int representing a given HTTP error code,
or the string 'default', which will be used if no matching entry is
found for a given numeric code.
If a filename is provided, the file should contain a Python string-
formatting template, and can expect by default to receive format
values with the mapping keys %(status)s, %(message)s, %(traceback)s,
and %(version)s. The set of format mappings can be extended by
overriding HTTPError.set_response.
If a callable is provided, it will be called by default with keyword
arguments 'status', 'message', 'traceback', and 'version', as for a
string-formatting template. The callable must return a string or
iterable of strings which will be set to response.body. It may also
override headers or perform any other processing.
If no entry is given for an error code, and no 'default' entry
exists, a default template will be used.
"""
show_tracebacks = True
"""
If True, unexpected errors encountered during request processing will
include a traceback in the response body."""
show_mismatched_params = True
"""
If True, mismatched parameters encountered during PageHandler invocation
processing will be included in the response body."""
throws = (KeyboardInterrupt, SystemExit, cherrypy.InternalRedirect)
"""The sequence of exceptions which Request.run does not trap."""
throw_errors = False
"""
If True, Request.run will not trap any errors (except HTTPRedirect and
HTTPError, which are more properly called 'exceptions', not errors)."""
closed = False
"""True once the close method has been called, False otherwise."""
stage = None
"""A string containing the stage reached in the request-handling process.
This is useful when debugging a live server with hung requests.
"""
unique_id = None
"""A lazy object generating and memorizing UUID4 on ``str()`` render."""
namespaces = reprconf.NamespaceSet(
**{'hooks': hooks_namespace,
'request': request_namespace,
'response': response_namespace,
'error_page': error_page_namespace,
'tools': cherrypy.tools,
})
def __init__(self, local_host, remote_host, scheme='http',
server_protocol='HTTP/1.1'):
"""Populate a new Request object.
local_host should be an httputil.Host object with the server
info. remote_host should be an httputil.Host object with the
client info. scheme should be a string, either "http" or
"https".
"""
self.local = local_host
self.remote = remote_host
self.scheme = scheme
self.server_protocol = server_protocol
self.closed = False
# Put a *copy* of the class error_page into self.
self.error_page = self.error_page.copy()
# Put a *copy* of the class namespaces into self.
self.namespaces = self.namespaces.copy()
self.stage = None
self.unique_id = LazyUUID4()
def close(self):
"""Run cleanup code.
(Core)
"""
if not self.closed:
self.closed = True
self.stage = 'on_end_request'
self.hooks.run('on_end_request')
self.stage = 'close'
def run(self, method, path, query_string, req_protocol, headers, rfile):
r"""Process the Request. (Core)
method, path, query_string, and req_protocol should be pulled directly
from the Request-Line (e.g. "GET /path?key=val HTTP/1.0").
path
This should be %XX-unquoted, but query_string should not be.
When using Python 2, they both MUST be byte strings,
not unicode strings.
When using Python 3, they both MUST be unicode strings,
not byte strings, and preferably not bytes \x00-\xFF
disguised as unicode.
headers
A list of (name, value) tuples.
rfile
A file-like object containing the HTTP request entity.
When run() is done, the returned object should have 3 attributes:
* status, e.g. "200 OK"
* header_list, a list of (name, value) tuples
* body, an iterable yielding strings
Consumer code (HTTP servers) should then access these response
attributes to build the outbound stream.
"""
response = cherrypy.serving.response
self.stage = 'run'
try:
self.error_response = cherrypy.HTTPError(500).set_response
self.method = method
path = path or '/'
self.query_string = query_string or ''
self.params = {}
# Compare request and server HTTP protocol versions, in case our
# server does not support the requested protocol. Limit our output
# to min(req, server). We want the following output:
# request server actual written supported response
# protocol protocol response protocol feature set
# a 1.0 1.0 1.0 1.0
# b 1.0 1.1 1.1 1.0
# c 1.1 1.0 1.0 1.0
# d 1.1 1.1 1.1 1.1
# Notice that, in (b), the response will be "HTTP/1.1" even though
# the client only understands 1.0. RFC 2616 10.5.6 says we should
# only return 505 if the _major_ version is different.
rp = int(req_protocol[5]), int(req_protocol[7])
sp = int(self.server_protocol[5]), int(self.server_protocol[7])
self.protocol = min(rp, sp)
response.headers.protocol = self.protocol
# Rebuild first line of the request (e.g. "GET /path HTTP/1.0").
url = path
if query_string:
url += '?' + query_string
self.request_line = '%s %s %s' % (method, url, req_protocol)
self.header_list = list(headers)
self.headers = httputil.HeaderMap()
self.rfile = rfile
self.body = None
self.cookie = SimpleCookie()
self.handler = None
# path_info should be the path from the
# app root (script_name) to the handler.
self.script_name = self.app.script_name
self.path_info = pi = path[len(self.script_name):]
self.stage = 'respond'
self.respond(pi)
except self.throws:
raise
except Exception:
if self.throw_errors:
raise
else:
# Failure in setup, error handler or finalize. Bypass them.
# Can't use handle_error because we may not have hooks yet.
cherrypy.log(traceback=True, severity=40)
if self.show_tracebacks:
body = format_exc()
else:
body = ''
r = bare_error(body)
response.output_status, response.header_list, response.body = r
if self.method == 'HEAD':
# HEAD requests MUST NOT return a message-body in the response.
response.body = []
try:
cherrypy.log.access()
except Exception:
cherrypy.log.error(traceback=True)
return response
def respond(self, path_info):
"""Generate a response for the resource at self.path_info.
(Core)
"""
try:
try:
try:
self._do_respond(path_info)
except (cherrypy.HTTPRedirect, cherrypy.HTTPError):
inst = sys.exc_info()[1]
inst.set_response()
self.stage = 'before_finalize (HTTPError)'
self.hooks.run('before_finalize')
cherrypy.serving.response.finalize()
finally:
self.stage = 'on_end_resource'
self.hooks.run('on_end_resource')
except self.throws:
raise
except Exception:
if self.throw_errors:
raise
self.handle_error()
def _do_respond(self, path_info):
response = cherrypy.serving.response
if self.app is None:
raise cherrypy.NotFound()
self.hooks = self.__class__.hooks.copy()
self.toolmaps = {}
# Get the 'Host' header, so we can HTTPRedirect properly.
self.stage = 'process_headers'
self.process_headers()
self.stage = 'get_resource'
self.get_resource(path_info)
self.body = _cpreqbody.RequestBody(
self.rfile, self.headers, request_params=self.params)
self.namespaces(self.config)
self.stage = 'on_start_resource'
self.hooks.run('on_start_resource')
# Parse the querystring
self.stage = 'process_query_string'
self.process_query_string()
# Process the body
if self.process_request_body:
if self.method not in self.methods_with_bodies:
self.process_request_body = False
self.stage = 'before_request_body'
self.hooks.run('before_request_body')
if self.process_request_body:
self.body.process()
# Run the handler
self.stage = 'before_handler'
self.hooks.run('before_handler')
if self.handler:
self.stage = 'handler'
response.body = self.handler()
# Finalize
self.stage = 'before_finalize'
self.hooks.run('before_finalize')
response.finalize()
def process_query_string(self):
"""Parse the query string into Python structures.
(Core)
"""
try:
p = httputil.parse_query_string(
self.query_string, encoding=self.query_string_encoding)
except UnicodeDecodeError:
raise cherrypy.HTTPError(
404, 'The given query string could not be processed. Query '
'strings for this resource must be encoded with %r.' %
self.query_string_encoding)
self.params.update(p)
def process_headers(self):
"""Parse HTTP header data into Python structures.
(Core)
"""
# Process the headers into self.headers
headers = self.headers
for name, value in self.header_list:
# Call title() now (and use dict.__method__(headers))
# so title doesn't have to be called twice.
name = name.title()
value = value.strip()
headers[name] = httputil.decode_TEXT_maybe(value)
# Some clients, notably Konqueror, supply multiple
# cookies on different lines with the same key. To
# handle this case, store all cookies in self.cookie.
if name == 'Cookie':
try:
self.cookie.load(value)
except CookieError as exc:
raise cherrypy.HTTPError(400, str(exc))
if not dict.__contains__(headers, 'Host'):
# All Internet-based HTTP/1.1 servers MUST respond with a 400
# (Bad Request) status code to any HTTP/1.1 request message
# which lacks a Host header field.
if self.protocol >= (1, 1):
msg = "HTTP/1.1 requires a 'Host' request header."
raise cherrypy.HTTPError(400, msg)
else:
headers['Host'] = httputil.SanitizedHost(dict.get(headers, 'Host'))
host = dict.get(headers, 'Host')
if not host:
host = self.local.name or self.local.ip
self.base = '%s://%s' % (self.scheme, host)
def get_resource(self, path):
"""Call a dispatcher (which sets self.handler and .config).
(Core)
"""
# First, see if there is a custom dispatch at this URI. Custom
# dispatchers can only be specified in app.config, not in _cp_config
# (since custom dispatchers may not even have an app.root).
dispatch = self.app.find_config(
path, 'request.dispatch', self.dispatch)
# dispatch() should set self.handler and self.config
dispatch(path)
def handle_error(self):
"""Handle the last unanticipated exception.
(Core)
"""
try:
self.hooks.run('before_error_response')
if self.error_response:
self.error_response()
self.hooks.run('after_error_response')
cherrypy.serving.response.finalize()
except cherrypy.HTTPRedirect:
inst = sys.exc_info()[1]
inst.set_response()
cherrypy.serving.response.finalize()
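# Illustrative sketch (not part of the vendored source): per the error_page
# docstring above, entries are keyed by numeric status or 'default' and may be
# a template filename or a callable taking status/message/traceback/version.
# The template path is hypothetical.
import cherrypy

def friendly_404(status, message, traceback, version):
    return 'Nothing lives at this URL (%s).' % status

demo_conf = {'/': {'error_page.404': friendly_404,
                   'error_page.default': 'errors/default.html'}}
# cherrypy.quickstart(Root(), '/', demo_conf)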
class ResponseBody(object):
"""The body of the HTTP response (the response entity)."""
unicode_err = ('Page handlers MUST return bytes. Use tools.encode '
'if you wish to return unicode.')
def __get__(self, obj, objclass=None):
if obj is None:
# When calling on the class instead of an instance...
return self
else:
return obj._body
def __set__(self, obj, value):
# Convert the given value to an iterable object.
if isinstance(value, str):
raise ValueError(self.unicode_err)
elif isinstance(value, list):
# every item in a list must be bytes...
if any(isinstance(item, str) for item in value):
raise ValueError(self.unicode_err)
obj._body = encoding.prepare_iter(value)
class Response(object):
"""An HTTP Response, including status, headers, and body."""
status = ''
"""The HTTP Status-Code and Reason-Phrase."""
header_list = []
"""A list of the HTTP response headers as (name, value) tuples.
In general, you should use response.headers (a dict) instead. This
attribute is generated from response.headers and is not valid until
after the finalize phase.
"""
headers = httputil.HeaderMap()
"""
A dict-like object containing the response headers. Keys are header
names (in Title-Case format); however, you may get and set them in
a case-insensitive manner. That is, headers['Content-Type'] and
headers['content-type'] refer to the same value. Values are header
values (decoded according to :rfc:`2047` if necessary).
.. seealso:: classes :class:`HeaderMap`, :class:`HeaderElement`
"""
cookie = SimpleCookie()
"""See help(Cookie)."""
body = ResponseBody()
"""The body (entity) of the HTTP response."""
time = None
"""The value of time.time() when created.
Use in HTTP dates.
"""
stream = False
"""If False, buffer the response body."""
def __init__(self):
self.status = None
self.header_list = None
self._body = []
self.time = time.time()
self.headers = httputil.HeaderMap()
# Since we know all our keys are titled strings, we can
# bypass HeaderMap.update and get a big speed boost.
dict.update(self.headers, {
'Content-Type': 'text/html',
'Server': 'CherryPy/' + cherrypy.__version__,
'Date': httputil.HTTPDate(self.time),
})
self.cookie = SimpleCookie()
def collapse_body(self):
"""Collapse self.body to a single string; replace it and return it."""
new_body = b''.join(self.body)
self.body = new_body
return new_body
def _flush_body(self):
"""Discard self.body but consume any generator such that any
finalization can occur, such as is required by caching.tee_output()."""
consume(iter(self.body))
def finalize(self):
"""Transform headers (and cookies) into self.header_list.
(Core)
"""
try:
code, reason, _ = httputil.valid_status(self.status)
except ValueError:
raise cherrypy.HTTPError(500, sys.exc_info()[1].args[0])
headers = self.headers
self.status = '%s %s' % (code, reason)
self.output_status = ntob(str(code), 'ascii') + \
b' ' + headers.encode(reason)
if self.stream:
# The upshot: wsgiserver will chunk the response if
# you pop Content-Length (or set it explicitly to None).
# Note that lib.static sets C-L to the file's st_size.
if dict.get(headers, 'Content-Length') is None:
dict.pop(headers, 'Content-Length', None)
elif code < 200 or code in (204, 205, 304):
# "All 1xx (informational), 204 (no content),
# and 304 (not modified) responses MUST NOT
# include a message-body."
dict.pop(headers, 'Content-Length', None)
self._flush_body()
self.body = b''
else:
# Responses which are not streamed should have a Content-Length,
# but allow user code to set Content-Length if desired.
if dict.get(headers, 'Content-Length') is None:
content = self.collapse_body()
dict.__setitem__(headers, 'Content-Length', len(content))
# Transform our header dict into a list of tuples.
self.header_list = h = headers.output()
cookie = self.cookie.output()
if cookie:
for line in cookie.split('\r\n'):
name, value = line.split(': ', 1)
if isinstance(name, str):
name = name.encode('ISO-8859-1')
if isinstance(value, str):
value = headers.encode(value)
h.append((name, value))
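# Illustrative sketch (not part of the vendored source): when
# 'response.stream' is turned on (via the response_namespace handler above),
# finalize() drops Content-Length and the body generator is sent to the
# client as it is produced. The handler below is hypothetical.
import cherrypy

class Streamer(object):
    @cherrypy.expose
    def tail(self):
        cherrypy.response.headers['Content-Type'] = 'text/plain'

        def generate():
            for i in range(5):
                yield ('line %d\n' % i).encode('utf-8')
        return generate()
    tail._cp_config = {'response.stream': True}

# cherrypy.quickstart(Streamer())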
class LazyUUID4(object):
def __str__(self):
"""Return UUID4 and keep it for future calls."""
return str(self.uuid4)
@property
def uuid4(self):
"""Provide unique id on per-request basis using UUID4.
It's evaluated lazily on render.
"""
try:
self._uuid4
except AttributeError:
# evaluate on first access
self._uuid4 = uuid.uuid4()
return self._uuid4

253
libs/cherrypy/_cpserver.py Normal file
View file

@@ -0,0 +1,253 @@
"""Manage HTTP servers with CherryPy."""
import cherrypy
from cherrypy.lib.reprconf import attributes
from cherrypy._cpcompat import text_or_bytes
from cherrypy.process.servers import ServerAdapter
__all__ = ('Server', )
class Server(ServerAdapter):
"""An adapter for an HTTP server.
You can set attributes (like socket_host and socket_port)
on *this* object (which is probably cherrypy.server), and call
quickstart. For example::
cherrypy.server.socket_port = 80
cherrypy.quickstart()
"""
socket_port = 8080
"""The TCP port on which to listen for connections."""
_socket_host = '127.0.0.1'
@property
def socket_host(self): # noqa: D401; irrelevant for properties
"""The hostname or IP address on which to listen for connections.
Host values may be any IPv4 or IPv6 address, or any valid hostname.
The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if
your hosts file prefers IPv6). The string '0.0.0.0' is a special
IPv4 entry meaning "any active interface" (INADDR_ANY), and '::'
is the similar IN6ADDR_ANY for IPv6. Neither the empty string nor None
is allowed.
"""
return self._socket_host
@socket_host.setter
def socket_host(self, value):
if value == '':
raise ValueError("The empty string ('') is not an allowed value. "
"Use '0.0.0.0' instead to listen on all active "
'interfaces (INADDR_ANY).')
self._socket_host = value
socket_file = None
"""If given, the name of the UNIX socket to use instead of TCP/IP.
When this option is not None, the `socket_host` and `socket_port` options
are ignored.
"""
socket_queue_size = 5
"""The 'backlog' argument to socket.listen(); specifies the maximum number
of queued connections (default 5)."""
socket_timeout = 10
"""The timeout in seconds for accepted connections (default 10)."""
accepted_queue_size = -1
"""The maximum number of requests which will be queued up before
the server refuses to accept it (default -1, meaning no limit)."""
accepted_queue_timeout = 10
"""The timeout in seconds for attempting to add a request to the
queue when the queue is full (default 10)."""
shutdown_timeout = 5
"""The time to wait for HTTP worker threads to clean up."""
protocol_version = 'HTTP/1.1'
"""The version string to write in the Status-Line of all HTTP responses,
for example, "HTTP/1.1" (the default). Depending on the HTTP server used,
this should also limit the supported features used in the response."""
thread_pool = 10
"""The number of worker threads to start up in the pool."""
thread_pool_max = -1
"""The maximum size of the worker-thread pool.
Use -1 to indicate no limit.
"""
max_request_header_size = 500 * 1024
"""The maximum number of bytes allowable in the request headers.
If exceeded, the HTTP server should return "413 Request Entity Too
Large".
"""
max_request_body_size = 100 * 1024 * 1024
"""The maximum number of bytes allowable in the request body.
If exceeded, the HTTP server should return "413 Request Entity Too
Large".
"""
instance = None
"""If not None, this should be an HTTP server instance (such as
cheroot.wsgi.Server) which cherrypy.server will control.
Use this when you need
more control over object instantiation than is available in the various
configuration options."""
ssl_context = None
"""When using PyOpenSSL, an instance of SSL.Context."""
ssl_certificate = None
"""The filename of the SSL certificate to use."""
ssl_certificate_chain = None
"""When using PyOpenSSL, the certificate chain to pass to
Context.load_verify_locations."""
ssl_private_key = None
"""The filename of the private key to use with SSL."""
ssl_ciphers = None
"""The ciphers list of SSL."""
ssl_module = 'builtin'
"""The name of a registered SSL adaptation module to use with
the builtin WSGI server. Builtin options are: 'builtin' (to
use the SSL library built into recent versions of Python).
You may also register your own classes in the
cheroot.server.ssl_adapters dict.
"""
statistics = False
"""Turns statistics-gathering on or off for aware HTTP servers."""
nodelay = True
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
wsgi_version = (1, 0)
"""The WSGI version tuple to use with the builtin WSGI server.
The provided options are (1, 0) [which includes support for PEP
3333, which declares it covers WSGI version 1.0.1 but still mandates
the wsgi.version (1, 0)] and ('u', 0), an experimental unicode
version. You may create and register your own experimental versions
of the WSGI protocol by adding custom classes to the
cheroot.server.wsgi_gateways dict.
"""
peercreds = False
"""If True, peer cred lookup for UNIX domain socket will put to WSGI env.
This information will then be available through WSGI env vars:
* X_REMOTE_PID
* X_REMOTE_UID
* X_REMOTE_GID
"""
peercreds_resolve = False
"""If True, username/group will be looked up in the OS from peercreds.
This information will then be available through WSGI env vars:
* REMOTE_USER
* X_REMOTE_USER
* X_REMOTE_GROUP
"""
def __init__(self):
"""Initialize Server instance."""
self.bus = cherrypy.engine
self.httpserver = None
self.interrupt = None
self.running = False
def httpserver_from_self(self, httpserver=None):
"""Return a (httpserver, bind_addr) pair based on self attributes."""
if httpserver is None:
httpserver = self.instance
if httpserver is None:
from cherrypy import _cpwsgi_server
httpserver = _cpwsgi_server.CPWSGIServer(self)
if isinstance(httpserver, text_or_bytes):
# Is anyone using this? Can I add an arg?
httpserver = attributes(httpserver)(self)
return httpserver, self.bind_addr
def start(self):
"""Start the HTTP server."""
if not self.httpserver:
self.httpserver, self.bind_addr = self.httpserver_from_self()
super(Server, self).start()
start.priority = 75
@property
def bind_addr(self):
"""Return bind address.
A (host, port) tuple for TCP sockets or a str for Unix domain
sockets.
"""
if self.socket_file:
return self.socket_file
if self.socket_host is None and self.socket_port is None:
return None
return (self.socket_host, self.socket_port)
@bind_addr.setter
def bind_addr(self, value):
if value is None:
self.socket_file = None
self.socket_host = None
self.socket_port = None
elif isinstance(value, text_or_bytes):
self.socket_file = value
self.socket_host = None
self.socket_port = None
else:
try:
self.socket_host, self.socket_port = value
self.socket_file = None
except ValueError:
raise ValueError('bind_addr must be a (host, port) tuple '
'(for TCP sockets) or a string (for Unix '
'domain sockets), not %r' % value)
def base(self):
"""Return the base for this server.
i.e. scheme://host[:port] or the socket file.
"""
if self.socket_file:
return self.socket_file
host = self.socket_host
if host in ('0.0.0.0', '::'):
# 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY.
# Look up the host name, which should be the
# safest thing to spit out in a URL.
import socket
host = socket.gethostname()
port = self.socket_port
if self.ssl_certificate:
scheme = 'https'
if port != 443:
host += ':%s' % port
else:
scheme = 'http'
if port != 80:
host += ':%s' % port
return '%s://%s' % (scheme, host)
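# Illustrative sketch (not part of the vendored source): the attributes
# documented above map onto 'server.*' keys in the global config, so a
# typical deployment tweak looks like this (host/port values are
# hypothetical):
import cherrypy

cherrypy.config.update({
    'server.socket_host': '0.0.0.0',
    'server.socket_port': 8080,
    'server.thread_pool': 30,
    'server.max_request_body_size': 100 * 1024 * 1024,
})
# cherrypy.quickstart(Root())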

497
libs/cherrypy/_cptools.py Normal file
View file

@@ -0,0 +1,497 @@
"""CherryPy tools. A "tool" is any helper, adapted to CP.
Tools are usually designed to be used in a variety of ways (although
some may only offer one if they choose):
Library calls
All tools are callables that can be used wherever needed.
The arguments are straightforward and should be detailed within the
docstring.
Function decorators
All tools, when called, may be used as decorators which configure
individual CherryPy page handlers (methods on the CherryPy tree).
That is, "@tools.anytool()" should "turn on" the tool via the
decorated function's _cp_config attribute.
CherryPy config
If a tool exposes a "_setup" callable, it will be called
once per Request (if the feature is "turned on" via config).
Tools may be implemented as any object with a namespace. The builtins
are generally either modules or instances of the tools.Tool class.
"""
import cherrypy
from cherrypy._helper import expose
from cherrypy.lib import cptools, encoding, static, jsontools
from cherrypy.lib import sessions as _sessions, xmlrpcutil as _xmlrpc
from cherrypy.lib import caching as _caching
from cherrypy.lib import auth_basic, auth_digest
def _getargs(func):
"""Return the names of all static arguments to the given function."""
# Use this instead of importing inspect for less mem overhead.
import types
if isinstance(func, types.MethodType):
func = func.__func__
co = func.__code__
return co.co_varnames[:co.co_argcount]
_attr_error = (
'CherryPy Tools cannot be turned on directly. Instead, turn them '
'on via config, or use them as decorators on your page handlers.'
)
class Tool(object):
"""A registered function for use with CherryPy request-processing hooks.
help(tool.callable) should give you more information about this
Tool.
"""
namespace = 'tools'
def __init__(self, point, callable, name=None, priority=50):
self._point = point
self.callable = callable
self._name = name
self._priority = priority
self.__doc__ = self.callable.__doc__
self._setargs()
@property
def on(self):
raise AttributeError(_attr_error)
@on.setter
def on(self, value):
raise AttributeError(_attr_error)
def _setargs(self):
"""Copy func parameter names to obj attributes."""
try:
for arg in _getargs(self.callable):
setattr(self, arg, None)
except (TypeError, AttributeError):
if hasattr(self.callable, '__call__'):
for arg in _getargs(self.callable.__call__):
setattr(self, arg, None)
# IronPython 1.0 raises NotImplementedError because
# inspect.getargspec tries to access Python bytecode
# in co_code attribute.
except NotImplementedError:
pass
# IronPython 1B1 may raise IndexError in some cases,
# but if we trap it here it doesn't prevent CP from working.
except IndexError:
pass
def _merged_args(self, d=None):
"""Return a dict of configuration entries for this Tool."""
if d:
conf = d.copy()
else:
conf = {}
tm = cherrypy.serving.request.toolmaps[self.namespace]
if self._name in tm:
conf.update(tm[self._name])
if 'on' in conf:
del conf['on']
return conf
def __call__(self, *args, **kwargs):
"""Compile-time decorator (turn on the tool in config).
For example::
@expose
@tools.proxy()
def whats_my_base(self):
return cherrypy.request.base
"""
if args:
raise TypeError('The %r Tool does not accept positional '
'arguments; you must use keyword arguments.'
% self._name)
def tool_decorator(f):
if not hasattr(f, '_cp_config'):
f._cp_config = {}
subspace = self.namespace + '.' + self._name + '.'
f._cp_config[subspace + 'on'] = True
for k, v in kwargs.items():
f._cp_config[subspace + k] = v
return f
return tool_decorator
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call
this method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop('priority', None)
if p is None:
p = getattr(self.callable, 'priority', self._priority)
cherrypy.serving.request.hooks.attach(self._point, self.callable,
priority=p, **conf)
class HandlerTool(Tool):
"""Tool which is called 'before main', that may skip normal handlers.
If the tool successfully handles the request (by setting
response.body), it should return True. This will cause CherryPy to
skip any 'normal' page handler. If the tool did not handle the
request, it should return False to tell CherryPy to continue on and
call the normal page handler. If the tool is declared AS a page
handler (see the 'handler' method), returning False will raise
NotFound.
"""
def __init__(self, callable, name=None):
Tool.__init__(self, 'before_handler', callable, name)
def handler(self, *args, **kwargs):
"""Use this tool as a CherryPy page handler.
For example::
class Root:
nav = tools.staticdir.handler(section="/nav", dir="nav",
root=absDir)
"""
@expose
def handle_func(*a, **kw):
handled = self.callable(*args, **self._merged_args(kwargs))
if not handled:
raise cherrypy.NotFound()
return cherrypy.serving.response.body
return handle_func
def _wrapper(self, **kwargs):
if self.callable(**kwargs):
cherrypy.serving.request.handler = None
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call
this method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop('priority', None)
if p is None:
p = getattr(self.callable, 'priority', self._priority)
cherrypy.serving.request.hooks.attach(self._point, self._wrapper,
priority=p, **conf)
class HandlerWrapperTool(Tool):
"""Tool which wraps request.handler in a provided wrapper function.
The 'newhandler' arg must be a handler wrapper function that takes a
'next_handler' argument, plus ``*args`` and ``**kwargs``. Like all
page handler
functions, it must return an iterable for use as cherrypy.response.body.
For example, to allow your 'inner' page handlers to return dicts
which then get interpolated into a template::
def interpolator(next_handler, *args, **kwargs):
filename = cherrypy.request.config.get('template')
cherrypy.response.template = env.get_template(filename)
response_dict = next_handler(*args, **kwargs)
return cherrypy.response.template.render(**response_dict)
cherrypy.tools.jinja = HandlerWrapperTool(interpolator)
"""
def __init__(self, newhandler, point='before_handler', name=None,
priority=50):
self.newhandler = newhandler
self._point = point
self._name = name
self._priority = priority
def callable(self, *args, **kwargs):
innerfunc = cherrypy.serving.request.handler
def wrap(*args, **kwargs):
return self.newhandler(innerfunc, *args, **kwargs)
cherrypy.serving.request.handler = wrap
class ErrorTool(Tool):
"""Tool which is used to replace the default request.error_response."""
def __init__(self, callable, name=None):
Tool.__init__(self, None, callable, name)
def _wrapper(self):
self.callable(**self._merged_args())
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call
this method when the tool is "turned on" in config.
"""
cherrypy.serving.request.error_response = self._wrapper
# Builtin tools #
class SessionTool(Tool):
"""Session Tool for CherryPy.
sessions.locking
When 'implicit' (the default), the session will be locked for you,
just before running the page handler.
When 'early', the session will be locked before reading the request
body. This is off by default for safety reasons; for example,
a large upload would block the session, denying an AJAX
progress meter
(`issue <https://github.com/cherrypy/cherrypy/issues/630>`_).
When 'explicit' (or any other value), you need to call
cherrypy.session.acquire_lock() yourself before using
session data.
"""
def __init__(self):
# _sessions.init must be bound after headers are read
Tool.__init__(self, 'before_request_body', _sessions.init)
def _lock_session(self):
cherrypy.serving.session.acquire_lock()
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call
this method when the tool is "turned on" in config.
"""
hooks = cherrypy.serving.request.hooks
conf = self._merged_args()
p = conf.pop('priority', None)
if p is None:
p = getattr(self.callable, 'priority', self._priority)
hooks.attach(self._point, self.callable, priority=p, **conf)
locking = conf.pop('locking', 'implicit')
if locking == 'implicit':
hooks.attach('before_handler', self._lock_session)
elif locking == 'early':
# Lock before the request body (but after _sessions.init runs!)
hooks.attach('before_request_body', self._lock_session,
priority=60)
else:
# Don't lock
pass
hooks.attach('before_finalize', _sessions.save)
hooks.attach('on_end_request', _sessions.close)
def regenerate(self):
"""Drop the current session and make a new one (with a new id)."""
sess = cherrypy.serving.session
sess.regenerate()
# Grab cookie-relevant tool args
relevant = 'path', 'path_header', 'name', 'timeout', 'domain', 'secure'
conf = dict(
(k, v)
for k, v in self._merged_args().items()
if k in relevant
)
_sessions.set_response_cookie(**conf)
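# Editor's sketch, not part of the upstream file: the session tool and its
# locking mode are switched on through app config. The option names are the
# real tool arguments; the timeout value and mount path are assumptions.
#
#     session_conf = {
#         '/': {
#             'tools.sessions.on': True,
#             'tools.sessions.timeout': 60,        # minutes (assumed value)
#             'tools.sessions.locking': 'explicit',
#         }
#     }
#     # With 'explicit' locking the page handler must call
#     # cherrypy.session.acquire_lock() itself before using session data.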
class XMLRPCController(object):
"""A Controller (page handler collection) for XML-RPC.
To use it, have your controllers subclass this base class (it will
turn on the tool for you).
You can also supply the following optional config entries::
tools.xmlrpc.encoding: 'utf-8'
tools.xmlrpc.allow_none: 0
XML-RPC is a rather discontinuous layer over HTTP; dispatching to the
appropriate handler must first be performed according to the URL, and
then a second dispatch step must take place according to the RPC method
specified in the request body. It also allows a superfluous "/RPC2"
prefix in the URL, supplies its own handler args in the body, and
requires a 200 OK "Fault" response instead of 404 when the desired
method is not found.
Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone.
This Controller acts as the dispatch target for the first half (based
on the URL); it then reads the RPC method from the request body and
does its own second dispatch step based on that method. It also reads
body params, and returns a Fault on error.
The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2
in your URLs, you can safely skip turning on the XMLRPCDispatcher.
Otherwise, you need to declare it in config::
request.dispatch: cherrypy.dispatch.XMLRPCDispatcher()
"""
# Note we're hard-coding this into the 'tools' namespace. We could do
# a huge amount of work to make it relocatable, but the only reason why
# would be if someone actually disabled the default_toolbox. Meh.
_cp_config = {'tools.xmlrpc.on': True}
@expose
def default(self, *vpath, **params):
rpcparams, rpcmethod = _xmlrpc.process_body()
subhandler = self
for attr in str(rpcmethod).split('.'):
subhandler = getattr(subhandler, attr, None)
if subhandler and getattr(subhandler, 'exposed', False):
body = subhandler(*(vpath + rpcparams), **params)
else:
# https://github.com/cherrypy/cherrypy/issues/533
# if a method is not found, an xmlrpclib.Fault should be returned
# raising an exception here will do that; see
# cherrypy.lib.xmlrpcutil.on_error
raise Exception('method "%s" is not supported' % attr)
conf = cherrypy.serving.request.toolmaps['tools'].get('xmlrpc', {})
_xmlrpc.respond(body,
conf.get('encoding', 'utf-8'),
conf.get('allow_none', 0))
return cherrypy.serving.response.body
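# Editor's sketch, not part of the upstream file: a minimal XML-RPC endpoint
# built on the controller above. The subclass, method name and mount point are
# assumptions for the example.
#
#     class RPCRoot(XMLRPCController):
#         @expose
#         def ping(self):
#             return 'pong'
#
#     cherrypy.tree.mount(RPCRoot(), '/rpc')
#     # a client could then call:
#     #   xmlrpc.client.ServerProxy('http://localhost:8080/rpc').ping()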
class SessionAuthTool(HandlerTool):
pass
class CachingTool(Tool):
"""Caching Tool for CherryPy."""
def _wrapper(self, **kwargs):
request = cherrypy.serving.request
if _caching.get(**kwargs):
request.handler = None
else:
if request.cacheable:
# Note the devious technique here of adding hooks on the fly
request.hooks.attach('before_finalize', _caching.tee_output,
priority=100)
_wrapper.priority = 90
def _setup(self):
"""Hook caching into cherrypy.request."""
conf = self._merged_args()
p = conf.pop('priority', None)
cherrypy.serving.request.hooks.attach('before_handler', self._wrapper,
priority=p, **conf)
class Toolbox(object):
"""A collection of Tools.
This object also functions as a config namespace handler for itself.
Custom toolboxes should be added to each Application's toolboxes
dict.
"""
def __init__(self, namespace):
self.namespace = namespace
def __setattr__(self, name, value):
# If the Tool._name is None, supply it from the attribute name.
if isinstance(value, Tool):
if value._name is None:
value._name = name
value.namespace = self.namespace
object.__setattr__(self, name, value)
def __enter__(self):
"""Populate request.toolmaps from tools specified in config."""
cherrypy.serving.request.toolmaps[self.namespace] = map = {}
def populate(k, v):
toolname, arg = k.split('.', 1)
bucket = map.setdefault(toolname, {})
bucket[arg] = v
return populate
def __exit__(self, exc_type, exc_val, exc_tb):
"""Run tool._setup() for each tool in our toolmap."""
map = cherrypy.serving.request.toolmaps.get(self.namespace)
if map:
for name, settings in map.items():
if settings.get('on', False):
tool = getattr(self, name)
tool._setup()
def register(self, point, **kwargs):
"""
Return a decorator which registers the function
at the given hook point.
"""
def decorator(func):
attr_name = kwargs.get('name', func.__name__)
tool = Tool(point, func, **kwargs)
setattr(self, attr_name, tool)
return func
return decorator
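# Editor's sketch, not part of the upstream file: registering a custom tool on
# the default toolbox via the decorator above. The hook point, priority and
# header name are assumptions for the example.
#
#     @cherrypy.tools.register('before_finalize', priority=60)
#     def add_example_header():
#         cherrypy.response.headers['X-Example'] = 'demo'
#
#     # enable it per path with {'tools.add_example_header.on': True} in config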
default_toolbox = _d = Toolbox('tools')
_d.session_auth = SessionAuthTool(cptools.session_auth)
_d.allow = Tool('on_start_resource', cptools.allow)
_d.proxy = Tool('before_request_body', cptools.proxy, priority=30)
_d.response_headers = Tool('on_start_resource', cptools.response_headers)
_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback)
_d.log_headers = Tool('before_error_response', cptools.log_request_headers)
_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100)
_d.err_redirect = ErrorTool(cptools.redirect)
_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75)
_d.decode = Tool('before_request_body', encoding.decode)
# the order of encoding, gzip, caching is important
_d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70)
_d.gzip = Tool('before_finalize', encoding.gzip, priority=80)
_d.staticdir = HandlerTool(static.staticdir)
_d.staticfile = HandlerTool(static.staticfile)
_d.sessions = SessionTool()
_d.xmlrpc = ErrorTool(_xmlrpc.on_error)
_d.caching = CachingTool('before_handler', _caching.get, 'caching')
_d.expires = Tool('before_finalize', _caching.expires)
_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers)
_d.referer = Tool('before_request_body', cptools.referer)
_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60)
_d.flatten = Tool('before_finalize', cptools.flatten)
_d.accept = Tool('on_start_resource', cptools.accept)
_d.redirect = Tool('on_start_resource', cptools.redirect)
_d.autovary = Tool('on_start_resource', cptools.autovary, priority=0)
_d.json_in = Tool('before_request_body', jsontools.json_in, priority=30)
_d.json_out = Tool('before_handler', jsontools.json_out, priority=30)
_d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1)
_d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1)
_d.params = Tool('before_handler', cptools.convert_params, priority=15)
del _d, cptools, encoding, static
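# Editor's sketch, not part of the upstream file: the two most common ways to
# switch on a built-in tool from the toolbox above, as a decorator and through
# config. The controller, paths and header values are assumptions.
#
#     import cherrypy
#
#     class Root:
#         @cherrypy.expose
#         @cherrypy.tools.gzip()                    # decorator form
#         def index(self):
#             return 'hello ' * 100
#
#     conf = {
#         '/': {
#             'tools.response_headers.on': True,    # config form
#             'tools.response_headers.headers': [('X-Example', 'demo')],
#         }
#     }
#     cherrypy.quickstart(Root(), '/', conf)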

314
libs/cherrypy/_cptree.py Normal file
View file

@@ -0,0 +1,314 @@
"""CherryPy Application and Tree objects."""
import os
import cherrypy
from cherrypy import _cpconfig, _cplogging, _cprequest, _cpwsgi, tools
from cherrypy.lib import httputil, reprconf
class Application(object):
"""A CherryPy Application.
Servers and gateways should not instantiate Request objects
directly. Instead, they should ask an Application object for a
request object.
An instance of this class may also be used as a WSGI callable (WSGI
application object) for itself.
"""
root = None
"""The top-most container of page handlers for this app.
Handlers should be arranged in a hierarchy of attributes, matching
the expected URI hierarchy; the default dispatcher then searches
this hierarchy for a matching handler. When using a dispatcher other
than the default, this value may be None.
"""
config = {}
"""A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict
of {key: value} pairs."""
namespaces = reprconf.NamespaceSet()
toolboxes = {'tools': cherrypy.tools}
log = None
"""A LogManager instance.
See _cplogging.
"""
wsgiapp = None
"""A CPWSGIApp instance.
See _cpwsgi.
"""
request_class = _cprequest.Request
response_class = _cprequest.Response
relative_urls = False
def __init__(self, root, script_name='', config=None):
"""Initialize Application with given root."""
self.log = _cplogging.LogManager(id(self), cherrypy.log.logger_root)
self.root = root
self.script_name = script_name
self.wsgiapp = _cpwsgi.CPWSGIApp(self)
self.namespaces = self.namespaces.copy()
self.namespaces['log'] = lambda k, v: setattr(self.log, k, v)
self.namespaces['wsgi'] = self.wsgiapp.namespace_handler
self.config = self.__class__.config.copy()
if config:
self.merge(config)
def __repr__(self):
"""Generate a representation of the Application instance."""
return '%s.%s(%r, %r)' % (self.__module__, self.__class__.__name__,
self.root, self.script_name)
script_name_doc = """The URI "mount point" for this app. A mount point
is that portion of the URI which is constant for all URIs that are
serviced by this application; it does not include scheme, host, or proxy
("virtual host") portions of the URI.
For example, if script_name is "/my/cool/app", then the URL
"http://www.example.com/my/cool/app/page1" might be handled by a
"page1" method on the root object.
The value of script_name MUST NOT end in a slash. If the script_name
refers to the root of the URI, it MUST be an empty string (not "/").
If script_name is explicitly set to None, then the script_name will be
provided for each call from request.wsgi_environ['SCRIPT_NAME'].
"""
@property
def script_name(self): # noqa: D401; irrelevant for properties
"""The URI "mount point" for this app.
A mount point is that portion of the URI which is constant for
all URIs that are serviced by this application; it does not
include scheme, host, or proxy ("virtual host") portions of the
URI.
For example, if script_name is "/my/cool/app", then the URL
"http://www.example.com/my/cool/app/page1" might be handled by a
"page1" method on the root object.
The value of script_name MUST NOT end in a slash. If the script_name
refers to the root of the URI, it MUST be an empty string (not "/").
If script_name is explicitly set to None, then the script_name will be
provided for each call from request.wsgi_environ['SCRIPT_NAME'].
"""
if self._script_name is not None:
return self._script_name
# A `_script_name` with a value of None signals that the script name
# should be pulled from WSGI environ.
return cherrypy.serving.request.wsgi_environ['SCRIPT_NAME'].rstrip('/')
@script_name.setter
def script_name(self, value):
if value:
value = value.rstrip('/')
self._script_name = value
def merge(self, config):
"""Merge the given config into self.config."""
_cpconfig.merge(self.config, config)
# Handle namespaces specified in config.
self.namespaces(self.config.get('/', {}))
def find_config(self, path, key, default=None):
"""Return the most-specific value for key along path, or default."""
trail = path or '/'
while trail:
nodeconf = self.config.get(trail, {})
if key in nodeconf:
return nodeconf[key]
lastslash = trail.rfind('/')
if lastslash == -1:
break
elif lastslash == 0 and trail != '/':
trail = '/'
else:
trail = trail[:lastslash]
return default
def get_serving(self, local, remote, scheme, sproto):
"""Create and return a Request and Response object."""
req = self.request_class(local, remote, scheme, sproto)
req.app = self
for name, toolbox in self.toolboxes.items():
req.namespaces[name] = toolbox
resp = self.response_class()
cherrypy.serving.load(req, resp)
cherrypy.engine.publish('acquire_thread')
cherrypy.engine.publish('before_request')
return req, resp
def release_serving(self):
"""Release the current serving (request and response)."""
req = cherrypy.serving.request
cherrypy.engine.publish('after_request')
try:
req.close()
except Exception:
cherrypy.log(traceback=True, severity=40)
cherrypy.serving.clear()
def __call__(self, environ, start_response):
"""Call a WSGI-callable."""
return self.wsgiapp(environ, start_response)
class Tree(object):
"""A registry of CherryPy applications, mounted at diverse points.
An instance of this class may also be used as a WSGI callable (WSGI
application object), in which case it dispatches to all mounted
apps.
"""
apps = {}
"""
A dict of the form {script name: application}, where "script name"
is a string declaring the URI mount point (no trailing slash), and
"application" is an instance of cherrypy.Application (or an arbitrary
WSGI callable if you happen to be using a WSGI server)."""
def __init__(self):
"""Initialize registry Tree."""
self.apps = {}
def mount(self, root, script_name='', config=None):
"""Mount a new app from a root object, script_name, and config.
root
An instance of a "controller class" (a collection of page
handler methods) which represents the root of the application.
This may also be an Application instance, or None if using
a dispatcher other than the default.
script_name
A string containing the "mount point" of the application.
This should start with a slash, and be the path portion of the
URL at which to mount the given root. For example, if root.index()
will handle requests to "http://www.example.com:8080/dept/app1/",
then the script_name argument would be "/dept/app1".
It MUST NOT end in a slash. If the script_name refers to the
root of the URI, it MUST be an empty string (not "/").
config
A file or dict containing application config.
"""
if script_name is None:
raise TypeError(
"The 'script_name' argument may not be None. Application "
'objects may, however, possess a script_name of None (in '
'order to inspect the WSGI environ for SCRIPT_NAME upon each '
'request). You cannot mount such Applications on this Tree; '
'you must pass them to a WSGI server interface directly.')
# Next line both 1) strips trailing slash and 2) maps "/" -> "".
script_name = script_name.rstrip('/')
if isinstance(root, Application):
app = root
if script_name != '' and script_name != app.script_name:
raise ValueError(
'Cannot specify a different script name and pass an '
'Application instance to cherrypy.mount')
script_name = app.script_name
else:
app = Application(root, script_name)
# If mounted at "", add favicon.ico
needs_favicon = (
script_name == ''
and root is not None
and not hasattr(root, 'favicon_ico')
)
if needs_favicon:
favicon = os.path.join(
os.getcwd(),
os.path.dirname(__file__),
'favicon.ico',
)
root.favicon_ico = tools.staticfile.handler(favicon)
if config:
app.merge(config)
self.apps[script_name] = app
return app
def graft(self, wsgi_callable, script_name=''):
"""Mount a wsgi callable at the given script_name."""
# Next line both 1) strips trailing slash and 2) maps "/" -> "".
script_name = script_name.rstrip('/')
self.apps[script_name] = wsgi_callable
def script_name(self, path=None):
"""Return the script_name of the app at the given path, or None.
If path is None, cherrypy.request is used.
"""
if path is None:
try:
request = cherrypy.serving.request
path = httputil.urljoin(request.script_name,
request.path_info)
except AttributeError:
return None
while True:
if path in self.apps:
return path
if path == '':
return None
# Move one node up the tree and try again.
path = path[:path.rfind('/')]
def __call__(self, environ, start_response):
"""Pre-initialize WSGI env and call WSGI-callable."""
# If you're calling this, then you're probably setting SCRIPT_NAME
# to '' (some WSGI servers always set SCRIPT_NAME to '').
# Try to look up the app using the full path.
env1x = environ
path = httputil.urljoin(env1x.get('SCRIPT_NAME', ''),
env1x.get('PATH_INFO', ''))
sn = self.script_name(path or '/')
if sn is None:
start_response('404 Not Found', [])
return []
app = self.apps[sn]
# Correct the SCRIPT_NAME and PATH_INFO environ entries.
environ = environ.copy()
environ['SCRIPT_NAME'] = sn
environ['PATH_INFO'] = path[len(sn.rstrip('/')):]
return app(environ, start_response)
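# Editor's sketch, not part of the upstream file: mounting two apps on the
# shared tree defined above. The controller classes and mount points are
# assumptions for the example.
#
#     import cherrypy
#
#     class Root:
#         @cherrypy.expose
#         def index(self):
#             return 'main site'
#
#     class Api:
#         @cherrypy.expose
#         def status(self):
#             return 'ok'
#
#     cherrypy.tree.mount(Root(), '')        # served at /
#     cherrypy.tree.mount(Api(), '/api')     # served at /api/status
#     # cherrypy.tree is itself a WSGI callable, so it can also be handed
#     # directly to an external WSGI server instead of cherrypy.engine.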

459
libs/cherrypy/_cpwsgi.py Normal file
View file

@@ -0,0 +1,459 @@
"""WSGI interface (see PEP 333 and 3333).
Note that WSGI environ keys and values are 'native strings'; that is,
whatever the type of "" is. For Python 2, that's a byte string; for
Python 3, it's a unicode string. But PEP 3333 says: "even if Python's
str type is actually Unicode "under the hood", the content of native
strings must still be translatable to bytes via the Latin-1 encoding!"
"""
import sys as _sys
import io
import cherrypy as _cherrypy
from cherrypy._cpcompat import ntou
from cherrypy import _cperror
from cherrypy.lib import httputil
from cherrypy.lib import is_closable_iterator
def downgrade_wsgi_ux_to_1x(environ):
"""Return a new environ dict for WSGI 1.x from the given WSGI u.x environ.
"""
env1x = {}
url_encoding = environ[ntou('wsgi.url_encoding')]
for k, v in environ.copy().items():
if k in [ntou('PATH_INFO'), ntou('SCRIPT_NAME'), ntou('QUERY_STRING')]:
v = v.encode(url_encoding)
elif isinstance(v, str):
v = v.encode('ISO-8859-1')
env1x[k.encode('ISO-8859-1')] = v
return env1x
class VirtualHost(object):
"""Select a different WSGI application based on the Host header.
This can be useful when running multiple sites within one CP server.
It allows several domains to point to different applications. For example::
root = Root()
RootApp = cherrypy.Application(root)
Domain2App = cherrypy.Application(root)
SecureApp = cherrypy.Application(Secure())
vhost = cherrypy._cpwsgi.VirtualHost(
RootApp,
domains={
'www.domain2.example': Domain2App,
'www.domain2.example:443': SecureApp,
},
)
cherrypy.tree.graft(vhost)
"""
default = None
"""Required.
The default WSGI application.
"""
use_x_forwarded_host = True
"""If True (the default), any "X-Forwarded-Host"
request header will be used instead of the "Host" header. This
is commonly added by HTTP servers (such as Apache) when proxying."""
domains = {}
"""A dict of {host header value: application} pairs.
The incoming "Host" request header is looked up in this dict, and,
if a match is found, the corresponding WSGI application will be
called instead of the default. Note that you often need separate
entries for "example.com" and "www.example.com". In addition, "Host"
headers may contain the port number.
"""
def __init__(self, default, domains=None, use_x_forwarded_host=True):
self.default = default
self.domains = domains or {}
self.use_x_forwarded_host = use_x_forwarded_host
def __call__(self, environ, start_response):
domain = environ.get('HTTP_HOST', '')
if self.use_x_forwarded_host:
domain = environ.get('HTTP_X_FORWARDED_HOST', domain)
nextapp = self.domains.get(domain)
if nextapp is None:
nextapp = self.default
return nextapp(environ, start_response)
class InternalRedirector(object):
"""WSGI middleware that handles raised cherrypy.InternalRedirect."""
def __init__(self, nextapp, recursive=False):
self.nextapp = nextapp
self.recursive = recursive
def __call__(self, environ, start_response):
redirections = []
while True:
environ = environ.copy()
try:
return self.nextapp(environ, start_response)
except _cherrypy.InternalRedirect:
ir = _sys.exc_info()[1]
sn = environ.get('SCRIPT_NAME', '')
path = environ.get('PATH_INFO', '')
qs = environ.get('QUERY_STRING', '')
# Add the *previous* path_info + qs to redirections.
old_uri = sn + path
if qs:
old_uri += '?' + qs
redirections.append(old_uri)
if not self.recursive:
# Check to see if the new URI has been redirected to
# already
new_uri = sn + ir.path
if ir.query_string:
new_uri += '?' + ir.query_string
if new_uri in redirections:
ir.request.close()
tmpl = (
'InternalRedirector visited the same URL twice: %r'
)
raise RuntimeError(tmpl % new_uri)
# Munge the environment and try again.
environ['REQUEST_METHOD'] = 'GET'
environ['PATH_INFO'] = ir.path
environ['QUERY_STRING'] = ir.query_string
environ['wsgi.input'] = io.BytesIO()
environ['CONTENT_LENGTH'] = '0'
environ['cherrypy.previous_request'] = ir.request
class ExceptionTrapper(object):
"""WSGI middleware that traps exceptions."""
def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)):
self.nextapp = nextapp
self.throws = throws
def __call__(self, environ, start_response):
return _TrappedResponse(
self.nextapp,
environ,
start_response,
self.throws
)
class _TrappedResponse(object):
response = iter([])
def __init__(self, nextapp, environ, start_response, throws):
self.nextapp = nextapp
self.environ = environ
self.start_response = start_response
self.throws = throws
self.started_response = False
self.response = self.trap(
self.nextapp, self.environ, self.start_response,
)
self.iter_response = iter(self.response)
def __iter__(self):
self.started_response = True
return self
def __next__(self):
return self.trap(next, self.iter_response)
def close(self):
if hasattr(self.response, 'close'):
self.response.close()
def trap(self, func, *args, **kwargs):
try:
return func(*args, **kwargs)
except self.throws:
raise
except StopIteration:
raise
except Exception:
tb = _cperror.format_exc()
_cherrypy.log(tb, severity=40)
if not _cherrypy.request.show_tracebacks:
tb = ''
s, h, b = _cperror.bare_error(tb)
if True:
# What fun.
s = s.decode('ISO-8859-1')
h = [
(k.decode('ISO-8859-1'), v.decode('ISO-8859-1'))
for k, v in h
]
if self.started_response:
# Empty our iterable (so future calls raise StopIteration)
self.iter_response = iter([])
else:
self.iter_response = iter(b)
try:
self.start_response(s, h, _sys.exc_info())
except Exception:
# "The application must not trap any exceptions raised by
# start_response, if it called start_response with exc_info.
# Instead, it should allow such exceptions to propagate
# back to the server or gateway."
# But we still log and call close() to clean up ourselves.
_cherrypy.log(traceback=True, severity=40)
raise
if self.started_response:
return b''.join(b)
else:
return b
# WSGI-to-CP Adapter #
class AppResponse(object):
"""WSGI response iterable for CherryPy applications."""
def __init__(self, environ, start_response, cpapp):
self.cpapp = cpapp
try:
self.environ = environ
self.run()
r = _cherrypy.serving.response
outstatus = r.output_status
if not isinstance(outstatus, bytes):
raise TypeError('response.output_status is not a byte string.')
outheaders = []
for k, v in r.header_list:
if not isinstance(k, bytes):
tmpl = 'response.header_list key %r is not a byte string.'
raise TypeError(tmpl % k)
if not isinstance(v, bytes):
tmpl = (
'response.header_list value %r is not a byte string.'
)
raise TypeError(tmpl % v)
outheaders.append((k, v))
if True:
# According to PEP 3333, when using Python 3, the response
# status and headers must be bytes masquerading as unicode;
# that is, they must be of type "str" but are restricted to
# code points in the "latin-1" set.
outstatus = outstatus.decode('ISO-8859-1')
outheaders = [
(k.decode('ISO-8859-1'), v.decode('ISO-8859-1'))
for k, v in outheaders
]
self.iter_response = iter(r.body)
self.write = start_response(outstatus, outheaders)
except BaseException:
self.close()
raise
def __iter__(self):
return self
def __next__(self):
return next(self.iter_response)
def close(self):
"""Close and de-reference the current request and response.
(Core)
"""
streaming = _cherrypy.serving.response.stream
self.cpapp.release_serving()
# We avoid the expense of examining the iterator to see if it's
# closable unless we are streaming the response, as that's the
# only situation where we are going to have an iterator which
# may not have been exhausted yet.
if streaming and is_closable_iterator(self.iter_response):
iter_close = self.iter_response.close
try:
iter_close()
except Exception:
_cherrypy.log(traceback=True, severity=40)
def run(self):
"""Create a Request object using environ."""
env = self.environ.get
local = httputil.Host(
'',
int(env('SERVER_PORT', 80) or -1),
env('SERVER_NAME', ''),
)
remote = httputil.Host(
env('REMOTE_ADDR', ''),
int(env('REMOTE_PORT', -1) or -1),
env('REMOTE_HOST', ''),
)
scheme = env('wsgi.url_scheme')
sproto = env('ACTUAL_SERVER_PROTOCOL', 'HTTP/1.1')
request, resp = self.cpapp.get_serving(local, remote, scheme, sproto)
# LOGON_USER is served by IIS, and is the name of the
# user after having been mapped to a local account.
# Both IIS and Apache set REMOTE_USER, when possible.
request.login = env('LOGON_USER') or env('REMOTE_USER') or None
request.multithread = self.environ['wsgi.multithread']
request.multiprocess = self.environ['wsgi.multiprocess']
request.wsgi_environ = self.environ
request.prev = env('cherrypy.previous_request', None)
meth = self.environ['REQUEST_METHOD']
path = httputil.urljoin(
self.environ.get('SCRIPT_NAME', ''),
self.environ.get('PATH_INFO', ''),
)
qs = self.environ.get('QUERY_STRING', '')
path, qs = self.recode_path_qs(path, qs) or (path, qs)
rproto = self.environ.get('SERVER_PROTOCOL')
headers = self.translate_headers(self.environ)
rfile = self.environ['wsgi.input']
request.run(meth, path, qs, rproto, headers, rfile)
headerNames = {
'HTTP_CGI_AUTHORIZATION': 'Authorization',
'CONTENT_LENGTH': 'Content-Length',
'CONTENT_TYPE': 'Content-Type',
'REMOTE_HOST': 'Remote-Host',
'REMOTE_ADDR': 'Remote-Addr',
}
def recode_path_qs(self, path, qs):
# This isn't perfect; if the given PATH_INFO is in the
# wrong encoding, it may fail to match the appropriate config
# section URI. But meh.
old_enc = self.environ.get('wsgi.url_encoding', 'ISO-8859-1')
new_enc = self.cpapp.find_config(
self.environ.get('PATH_INFO', ''),
'request.uri_encoding', 'utf-8',
)
if new_enc.lower() == old_enc.lower():
return
# Even though the path and qs are unicode, the WSGI server
# is required by PEP 3333 to coerce them to ISO-8859-1
# masquerading as unicode. So we have to encode back to
# bytes and then decode again using the "correct" encoding.
try:
return (
path.encode(old_enc).decode(new_enc),
qs.encode(old_enc).decode(new_enc),
)
except (UnicodeEncodeError, UnicodeDecodeError):
# Just pass them through without transcoding and hope.
pass
def translate_headers(self, environ):
"""Translate CGI-environ header names to HTTP header names."""
for cgiName in environ:
# We assume all incoming header keys are uppercase already.
if cgiName in self.headerNames:
yield self.headerNames[cgiName], environ[cgiName]
elif cgiName[:5] == 'HTTP_':
# Hackish attempt at recovering original header names.
translatedHeader = cgiName[5:].replace('_', '-')
yield translatedHeader, environ[cgiName]
class CPWSGIApp(object):
"""A WSGI application object for a CherryPy Application."""
pipeline = [
('ExceptionTrapper', ExceptionTrapper),
('InternalRedirector', InternalRedirector),
]
"""A list of (name, wsgiapp) pairs.
Each 'wsgiapp' MUST be a constructor that takes an initial,
positional 'nextapp' argument, plus optional keyword arguments, and
returns a WSGI application (that takes environ and start_response
arguments). The 'name' can be any you choose, and will correspond to
keys in self.config.
"""
head = None
"""Rather than nest all apps in the pipeline on each call, it's only
done the first time, and the result is memoized into self.head. Set
this to None again if you change self.pipeline after calling self."""
config = {}
"""A dict whose keys match names listed in the pipeline.
Each value is a further dict which will be passed to the
corresponding named WSGI callable (from the pipeline) as keyword
arguments.
"""
response_class = AppResponse
"""The class to instantiate and return as the next app in the WSGI chain.
"""
def __init__(self, cpapp, pipeline=None):
self.cpapp = cpapp
self.pipeline = self.pipeline[:]
if pipeline:
self.pipeline.extend(pipeline)
self.config = self.config.copy()
def tail(self, environ, start_response):
"""WSGI application callable for the actual CherryPy application.
You probably shouldn't call this; call self.__call__ instead, so
that any WSGI middleware in self.pipeline can run first.
"""
return self.response_class(environ, start_response, self.cpapp)
def __call__(self, environ, start_response):
head = self.head
if head is None:
# Create and nest the WSGI apps in our pipeline (in reverse order).
# Then memoize the result in self.head.
head = self.tail
for name, callable in self.pipeline[::-1]:
conf = self.config.get(name, {})
head = callable(head, **conf)
self.head = head
return head(environ, start_response)
def namespace_handler(self, k, v):
"""Config handler for the 'wsgi' namespace."""
if k == 'pipeline':
# Note this allows multiple 'wsgi.pipeline' config entries
# (but each entry will be processed in a 'random' order).
# It should also allow developers to set default middleware
# in code (passed to self.__init__) that deployers can add to
# (but not remove) via config.
self.pipeline.extend(v)
elif k == 'response_class':
self.response_class = v
else:
name, arg = k.split('.', 1)
bucket = self.config.setdefault(name, {})
bucket[arg] = v
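# Editor's sketch, not part of the upstream file: extending the per-app WSGI
# pipeline through the 'wsgi' config namespace handled above. The middleware
# class and mount config are assumptions for the example.
#
#     class LowercasePath:
#         def __init__(self, nextapp):
#             self.nextapp = nextapp
#         def __call__(self, environ, start_response):
#             environ['PATH_INFO'] = environ.get('PATH_INFO', '').lower()
#             return self.nextapp(environ, start_response)
#
#     conf = {'/': {'wsgi.pipeline': [('lcpath', LowercasePath)]}}
#     cherrypy.tree.mount(Root(), '/', conf)   # Root as in the earlier sketch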

Some files were not shown because too many files have changed in this diff