Merge branch 'feature/UpdateTornado' into dev

JackDandy 2024-06-06 21:17:19 +01:00
commit 64ca9b6f00
11 changed files with 97 additions and 83 deletions

View file

@@ -7,6 +7,7 @@
 * Update filelock 3.12.4 (c1163ae) to 3.14.0 (8556141)
 * Update idna library 3.4 (cab054c) to 3.7 (1d365e1)
 * Update Requests library 2.31.0 (8812812) to 2.32.3 (0e322af)
+* Update Tornado Web Server 6.4 (b3f2a4b) to 6.4.1 (2a0e1d1)
 * Update urllib3 2.0.7 (56f01e0) to 2.2.1 (54d6edf)

View file

@@ -22,8 +22,8 @@
 # is zero for an official release, positive for a development branch,
 # or negative for a release candidate or beta (after the base version
 # number has been incremented)
-version = "6.4"
-version_info = (6, 4, 0, 0)
+version = "6.4.1"
+version_info = (6, 4, 0, 1)

 import importlib
 import typing

View file

@@ -118,6 +118,7 @@ def run_on_executor(*args: Any, **kwargs: Any) -> Callable:
        The ``callback`` argument was removed.
     """
+
     # Fully type-checking decorators is tricky, and this one is
     # discouraged anyway so it doesn't have all the generic magic.
     def run_on_executor_decorator(fn: Callable) -> Callable[..., Future]:

View file

@@ -19,6 +19,7 @@ import collections
 import functools
 import logging
 import pycurl
+import re
 import threading
 import time
 from io import BytesIO
@@ -44,6 +45,8 @@ if typing.TYPE_CHECKING:

 curl_log = logging.getLogger("tornado.curl_httpclient")

+CR_OR_LF_RE = re.compile(b"\r|\n")
+

 class CurlAsyncHTTPClient(AsyncHTTPClient):
     def initialize(  # type: ignore
@@ -347,14 +350,15 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
         if "Pragma" not in request.headers:
             request.headers["Pragma"] = ""

-        curl.setopt(
-            pycurl.HTTPHEADER,
-            [
-                b"%s: %s"
-                % (native_str(k).encode("ASCII"), native_str(v).encode("ISO8859-1"))
-                for k, v in request.headers.get_all()
-            ],
-        )
+        encoded_headers = [
+            b"%s: %s"
+            % (native_str(k).encode("ASCII"), native_str(v).encode("ISO8859-1"))
+            for k, v in request.headers.get_all()
+        ]
+        for line in encoded_headers:
+            if CR_OR_LF_RE.search(line):
+                raise ValueError("Illegal characters in header (CR or LF): %r" % line)
+        curl.setopt(pycurl.HTTPHEADER, encoded_headers)

         curl.setopt(
             pycurl.HEADERFUNCTION,
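The practical effect of the new `CR_OR_LF_RE` guard can be shown in isolation. The following is a minimal standalone sketch (not part of the diff) of how an encoded header line containing a carriage return or line feed is rejected before it is handed to libcurl; the `encode_and_check` helper is illustrative only.

```python
import re

# Same pattern as added above: any CR or LF byte inside an encoded header
# line is treated as attempted header injection and rejected.
CR_OR_LF_RE = re.compile(b"\r|\n")


def encode_and_check(name: str, value: str) -> bytes:
    # Mirrors the b"%s: %s" encoding used in the hunk above.
    line = b"%s: %s" % (name.encode("ascii"), value.encode("iso8859-1"))
    if CR_OR_LF_RE.search(line):
        raise ValueError("Illegal characters in header (CR or LF): %r" % line)
    return line


print(encode_and_check("X-Token", "abc123"))        # b'X-Token: abc123'
# encode_and_check("X-Token", "x\r\nHost: evil")    # raises ValueError
```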

View file

@@ -66,6 +66,7 @@ function to extend this mechanism.
 via ``singledispatch``.

 """
+
 import asyncio
 import builtins
 import collections
@@ -165,13 +166,11 @@ def _fake_ctx_run(f: Callable[..., _T], *args: Any, **kw: Any) -> _T:
 @overload
 def coroutine(
     func: Callable[..., "Generator[Any, Any, _T]"]
-) -> Callable[..., "Future[_T]"]:
-    ...
+) -> Callable[..., "Future[_T]"]: ...


 @overload
-def coroutine(func: Callable[..., _T]) -> Callable[..., "Future[_T]"]:
-    ...
+def coroutine(func: Callable[..., _T]) -> Callable[..., "Future[_T]"]: ...


 def coroutine(

View file

@@ -38,6 +38,8 @@ from tornado.util import GzipDecompressor

 from typing import cast, Optional, Type, Awaitable, Callable, Union, Tuple

+CR_OR_LF_RE = re.compile(b"\r|\n")
+

 class _QuietException(Exception):
     def __init__(self) -> None:
@@ -389,14 +391,11 @@ class HTTP1Connection(httputil.HTTPConnection):
             self._request_start_line = start_line
             lines.append(utf8("%s %s HTTP/1.1" % (start_line[0], start_line[1])))
             # Client requests with a non-empty body must have either a
-            # Content-Length or a Transfer-Encoding.
+            # Content-Length or a Transfer-Encoding. If Content-Length is not
+            # present we'll add our Transfer-Encoding below.
             self._chunking_output = (
                 start_line.method in ("POST", "PUT", "PATCH")
                 and "Content-Length" not in headers
-                and (
-                    "Transfer-Encoding" not in headers
-                    or headers["Transfer-Encoding"] == "chunked"
-                )
             )
         else:
             assert isinstance(start_line, httputil.ResponseStartLine)
@@ -418,9 +417,6 @@ class HTTP1Connection(httputil.HTTPConnection):
                 and (start_line.code < 100 or start_line.code >= 200)
                 # No need to chunk the output if a Content-Length is specified.
                 and "Content-Length" not in headers
-                # Applications are discouraged from touching Transfer-Encoding,
-                # but if they do, leave it alone.
-                and "Transfer-Encoding" not in headers
             )
             # If connection to a 1.1 client will be closed, inform client
             if (
@@ -453,8 +449,8 @@ class HTTP1Connection(httputil.HTTPConnection):
         )
         lines.extend(line.encode("latin1") for line in header_lines)
         for line in lines:
-            if b"\n" in line:
-                raise ValueError("Newline in header: " + repr(line))
+            if CR_OR_LF_RE.search(line):
+                raise ValueError("Illegal characters (CR or LF) in header: %r" % line)
         future = None
         if self.stream.closed():
             future = self._write_future = Future()
@@ -560,7 +556,7 @@ class HTTP1Connection(httputil.HTTPConnection):
             return connection_header != "close"
         elif (
            "Content-Length" in headers
-            or headers.get("Transfer-Encoding", "").lower() == "chunked"
+            or is_transfer_encoding_chunked(headers)
            or getattr(start_line, "method", None) in ("HEAD", "GET")
         ):
             # start_line may be a request or response start line; only
@@ -598,13 +594,6 @@ class HTTP1Connection(httputil.HTTPConnection):
         delegate: httputil.HTTPMessageDelegate,
     ) -> Optional[Awaitable[None]]:
         if "Content-Length" in headers:
-            if "Transfer-Encoding" in headers:
-                # Response cannot contain both Content-Length and
-                # Transfer-Encoding headers.
-                # http://tools.ietf.org/html/rfc7230#section-3.3.3
-                raise httputil.HTTPInputError(
-                    "Response with both Transfer-Encoding and Content-Length"
-                )
             if "," in headers["Content-Length"]:
                 # Proxies sometimes cause Content-Length headers to get
                 # duplicated. If all the values are identical then we can
@@ -631,20 +620,22 @@ class HTTP1Connection(httputil.HTTPConnection):
         else:
             content_length = None

+        is_chunked = is_transfer_encoding_chunked(headers)
+
         if code == 204:
             # This response code is not allowed to have a non-empty body,
             # and has an implicit length of zero instead of read-until-close.
             # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
-            if "Transfer-Encoding" in headers or content_length not in (None, 0):
+            if is_chunked or content_length not in (None, 0):
                 raise httputil.HTTPInputError(
                     "Response with code %d should not have body" % code
                 )
             content_length = 0

+        if is_chunked:
+            return self._read_chunked_body(delegate)
         if content_length is not None:
             return self._read_fixed_body(content_length, delegate)
-        if headers.get("Transfer-Encoding", "").lower() == "chunked":
-            return self._read_chunked_body(delegate)
         if self.is_client:
             return self._read_body_until_close(delegate)
         return None
@@ -867,3 +858,33 @@ def parse_hex_int(s: str) -> int:
     if HEXDIGITS.fullmatch(s) is None:
         raise ValueError("not a hexadecimal integer: %r" % s)
     return int(s, 16)
+
+
+def is_transfer_encoding_chunked(headers: httputil.HTTPHeaders) -> bool:
+    """Returns true if the headers specify Transfer-Encoding: chunked.
+
+    Raise httputil.HTTPInputError if any other transfer encoding is used.
+    """
+    # Note that transfer-encoding is an area in which postel's law can lead
+    # us astray. If a proxy and a backend server are liberal in what they accept,
+    # but accept slightly different things, this can lead to mismatched framing
+    # and request smuggling issues. Therefore we are as strict as possible here
+    # (even technically going beyond the requirements of the RFCs: a value of
+    # ",chunked" is legal but doesn't appear in practice for legitimate traffic)
+    if "Transfer-Encoding" not in headers:
+        return False
+    if "Content-Length" in headers:
+        # Message cannot contain both Content-Length and
+        # Transfer-Encoding headers.
+        # http://tools.ietf.org/html/rfc7230#section-3.3.3
+        raise httputil.HTTPInputError(
+            "Message with both Transfer-Encoding and Content-Length"
+        )
+    if headers["Transfer-Encoding"].lower() == "chunked":
+        return True
+    # We do not support any transfer-encodings other than chunked, and we do not
+    # expect to add any support because the concept of transfer-encoding has
+    # been removed in HTTP/2.
+    raise httputil.HTTPInputError(
+        "Unsupported Transfer-Encoding %s" % headers["Transfer-Encoding"]
+    )
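Together these hunks funnel all Transfer-Encoding handling through the new strict helper. Below is a minimal standalone sketch (not part of the diff) of the decision it makes, using a plain dict and a stand-in exception class instead of tornado's case-insensitive `httputil.HTTPHeaders` and `HTTPInputError`:

```python
class HTTPInputError(Exception):
    """Stand-in for tornado.httputil.HTTPInputError."""


def is_transfer_encoding_chunked(headers: dict) -> bool:
    # Plain dict used for illustration; tornado's HTTPHeaders is case-insensitive.
    if "Transfer-Encoding" not in headers:
        return False
    if "Content-Length" in headers:
        # Ambiguous framing: a proxy and a backend could disagree about which
        # header wins, the classic request-smuggling setup, so reject outright.
        raise HTTPInputError("Message with both Transfer-Encoding and Content-Length")
    if headers["Transfer-Encoding"].lower() == "chunked":
        return True
    # Anything other than chunked (gzip, identity, "chunked, gzip", ...) is refused.
    raise HTTPInputError("Unsupported Transfer-Encoding %s" % headers["Transfer-Encoding"])


print(is_transfer_encoding_chunked({"Transfer-Encoding": "chunked"}))  # True
print(is_transfer_encoding_chunked({"Content-Length": "42"}))          # False
# is_transfer_encoding_chunked({"Transfer-Encoding": "gzip"})          # raises HTTPInputError
# is_transfer_encoding_chunked(
#     {"Transfer-Encoding": "chunked", "Content-Length": "42"})        # raises HTTPInputError
```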

View file

@@ -62,6 +62,9 @@ if typing.TYPE_CHECKING:
     from asyncio import Future  # noqa: F401
     import unittest  # noqa: F401

+# To be used with str.strip() and related methods.
+HTTP_WHITESPACE = " \t"
+

 @lru_cache(1000)
 def _normalize_header(name: str) -> str:
@@ -171,7 +174,7 @@ class HTTPHeaders(collections.abc.MutableMapping):
             # continuation of a multi-line header
             if self._last_key is None:
                 raise HTTPInputError("first header line cannot start with whitespace")
-            new_part = " " + line.lstrip()
+            new_part = " " + line.lstrip(HTTP_WHITESPACE)
             self._as_list[self._last_key][-1] += new_part
             self._dict[self._last_key] += new_part
         else:
@@ -179,7 +182,7 @@ class HTTPHeaders(collections.abc.MutableMapping):
                 name, value = line.split(":", 1)
             except ValueError:
                 raise HTTPInputError("no colon in header line")
-            self.add(name, value.strip())
+            self.add(name, value.strip(HTTP_WHITESPACE))

     @classmethod
     def parse(cls, headers: str) -> "HTTPHeaders":
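The switch from bare `strip()`/`lstrip()` to `HTTP_WHITESPACE` means only the space and tab that RFC 7230 treats as optional whitespace are trimmed; other characters Python considers whitespace are now preserved in header values. A small standalone sketch (not part of the diff) of the difference:

```python
# Only space and tab are optional whitespace around HTTP header values.
HTTP_WHITESPACE = " \t"

# Vertical tab and form feed around a value: Python's default strip() removes
# them, while the restricted strip keeps them visible to later validation.
raw_value = "\x0bgzip\x0c"

print(repr(raw_value.strip()))                 # 'gzip'
print(repr(raw_value.strip(HTTP_WHITESPACE)))  # '\x0bgzip\x0c'
```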

View file

@@ -1376,7 +1376,7 @@ class SSLIOStream(IOStream):
                 return
             elif err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN):
                 return self.close(exc_info=err)
-            elif err.args[0] == ssl.SSL_ERROR_SSL:
+            elif err.args[0] in (ssl.SSL_ERROR_SSL, ssl.SSL_ERROR_SYSCALL):
                 try:
                     peer = self.socket.getpeername()
                 except Exception:

View file

@@ -429,9 +429,9 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
                 self.request.method == "POST"
                 and "Content-Type" not in self.request.headers
             ):
-                self.request.headers[
-                    "Content-Type"
-                ] = "application/x-www-form-urlencoded"
+                self.request.headers["Content-Type"] = (
+                    "application/x-www-form-urlencoded"
+                )
             if self.request.decompress_response:
                 self.request.headers["Accept-Encoding"] = "gzip"
             req_path = (self.parsed.path or "/") + (

View file

@@ -84,39 +84,6 @@ def get_async_test_timeout() -> float:
     return 5


-class _TestMethodWrapper(object):
-    """Wraps a test method to raise an error if it returns a value.
-
-    This is mainly used to detect undecorated generators (if a test
-    method yields it must use a decorator to consume the generator),
-    but will also detect other kinds of return values (these are not
-    necessarily errors, but we alert anyway since there is no good
-    reason to return a value from a test).
-    """
-
-    def __init__(self, orig_method: Callable) -> None:
-        self.orig_method = orig_method
-        self.__wrapped__ = orig_method
-
-    def __call__(self, *args: Any, **kwargs: Any) -> None:
-        result = self.orig_method(*args, **kwargs)
-        if isinstance(result, Generator) or inspect.iscoroutine(result):
-            raise TypeError(
-                "Generator and coroutine test methods should be"
-                " decorated with tornado.testing.gen_test"
-            )
-        elif result is not None:
-            raise ValueError("Return value from test method ignored: %r" % result)
-
-    def __getattr__(self, name: str) -> Any:
-        """Proxy all unknown attributes to the original method.
-
-        This is important for some of the decorators in the `unittest`
-        module, such as `unittest.skipIf`.
-        """
-        return getattr(self.orig_method, name)
-
-
 class AsyncTestCase(unittest.TestCase):
     """`~unittest.TestCase` subclass for testing `.IOLoop`-based
     asynchronous code.
@@ -173,12 +140,6 @@ class AsyncTestCase(unittest.TestCase):
         self.__stop_args = None  # type: Any
         self.__timeout = None  # type: Optional[object]

-        # It's easy to forget the @gen_test decorator, but if you do
-        # the test will silently be ignored because nothing will consume
-        # the generator. Replace the test method with a wrapper that will
-        # make sure it's not an undecorated generator.
-        setattr(self, methodName, _TestMethodWrapper(getattr(self, methodName)))
-
         # Not used in this class itself, but used by @gen_test
         self._test_generator = None  # type: Optional[Union[Generator, Coroutine]]
@@ -289,6 +250,30 @@ class AsyncTestCase(unittest.TestCase):
         self.__rethrow()
         return ret

+    def _callTestMethod(self, method: Callable) -> None:
+        """Run the given test method, raising an error if it returns non-None.
+
+        Failure to decorate asynchronous test methods with ``@gen_test`` can lead to tests
+        incorrectly passing.
+
+        Remove this override when Python 3.10 support is dropped. This check (in the form of a
+        DeprecationWarning) became a part of the standard library in 3.11.
+
+        Note that ``_callTestMethod`` is not documented as a public interface. However, it is
+        present in all supported versions of Python (3.8+), and if it goes away in the future that's
+        OK because we can just remove this override as noted above.
+        """
+        # Calling super()._callTestMethod would hide the return value, even in python 3.8-3.10
+        # where the check isn't being done for us.
+        result = method()
+        if isinstance(result, Generator) or inspect.iscoroutine(result):
+            raise TypeError(
+                "Generator and coroutine test methods should be"
+                " decorated with tornado.testing.gen_test"
+            )
+        elif result is not None:
+            raise ValueError("Return value from test method ignored: %r" % result)
+
     def stop(self, _arg: Any = None, **kwargs: Any) -> None:
         """Stops the `.IOLoop`, causing one pending (or future) call to `wait()`
         to return.
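Both the removed `_TestMethodWrapper` and the new `_callTestMethod` override exist to catch the same mistake: an asynchronous test method that is not decorated with `@gen_test` hands back a coroutine that nothing awaits. A short sketch (not part of the diff, assumes Tornado is installed) of the two cases:

```python
import unittest

from tornado.testing import AsyncTestCase, gen_test


class ForgotDecorator(AsyncTestCase):
    async def test_undecorated(self):
        # Without @gen_test this call returns a coroutine; the override above
        # turns that into a TypeError instead of letting the test appear to pass.
        self.assertEqual(1 + 1, 2)


class Decorated(AsyncTestCase):
    @gen_test
    async def test_decorated(self):
        # gen_test runs the coroutine to completion on the test's IOLoop.
        self.assertEqual(1 + 1, 2)


if __name__ == "__main__":
    unittest.main()
```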

View file

@@ -1392,9 +1392,9 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
             # from the server).
             # TODO: set server parameters for deflate extension
             # if requested in self.compression_options.
-            request.headers[
-                "Sec-WebSocket-Extensions"
-            ] = "permessage-deflate; client_max_window_bits"
+            request.headers["Sec-WebSocket-Extensions"] = (
+                "permessage-deflate; client_max_window_bits"
+            )

         # Websocket connection is currently unable to follow redirects
         request.follow_redirects = False