Update Tornado Web Server 6.2.0 (a4f08a3) → 6.3.dev1 (7186b86).

JackDandy 2023-03-07 15:15:39 +00:00
parent 80879bc91d
commit 9d8462f4ef
23 changed files with 516 additions and 384 deletions

View file

@ -2,6 +2,7 @@
* Update html5lib 1.1 (f87487a) to 1.2-dev (3e500bb)
* Update package resource API 63.2.0 (3ae44cd) to 67.5.1 (f51eccd)
* Update Tornado Web Server 6.2.0 (a4f08a3) to 6.3.0 (7186b86)
* Update urllib3 1.26.13 (25fbd5f) to 1.26.14 (a06c05c)
* Change remove calls to legacy py2 fix encoding function
* Change requirements for pure py3

View file

@ -22,5 +22,46 @@
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "6.2"
version_info = (6, 2, 0, 0)
version = "6.3.dev1"
version_info = (6, 3, 0, -100)
import importlib
import typing
__all__ = [
"auth",
"autoreload",
"concurrent",
"curl_httpclient",
"escape",
"gen",
"http1connection",
"httpclient",
"httpserver",
"httputil",
"ioloop",
"iostream",
"locale",
"locks",
"log",
"netutil",
"options",
"platform",
"process",
"queues",
"routing",
"simple_httpclient",
"tcpclient",
"tcpserver",
"template",
"testing",
"util",
"web",
]
# Copied from https://peps.python.org/pep-0562/
def __getattr__(name: str) -> typing.Any:
if name in __all__:
return importlib.import_module("." + name, __name__)
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
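
The new module-level ``__getattr__`` implements PEP 562 lazy imports: ``import tornado`` no longer pulls in the individual submodules, and any name listed in ``__all__`` is imported on first attribute access. A minimal sketch of the effect (not part of the diff):

import sys
import tornado

print("tornado.web" in sys.modules)         # False: nothing imported yet
handler_base = tornado.web.RequestHandler   # __getattr__ imports "tornado.web" on demand
print("tornado.web" in sys.modules)         # True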

View file

@ -15,66 +15,66 @@
"""Data used by the tornado.locale module."""
LOCALE_NAMES = {
"af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"},
"am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"},
"ar_AR": {"name_en": u"Arabic", "name": u"العربية"},
"bg_BG": {"name_en": u"Bulgarian", "name": u"Български"},
"bn_IN": {"name_en": u"Bengali", "name": u"বাংলা"},
"bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"},
"ca_ES": {"name_en": u"Catalan", "name": u"Català"},
"cs_CZ": {"name_en": u"Czech", "name": u"Čeština"},
"cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"},
"da_DK": {"name_en": u"Danish", "name": u"Dansk"},
"de_DE": {"name_en": u"German", "name": u"Deutsch"},
"el_GR": {"name_en": u"Greek", "name": u"Ελληνικά"},
"en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"},
"en_US": {"name_en": u"English (US)", "name": u"English (US)"},
"es_ES": {"name_en": u"Spanish (Spain)", "name": u"Español (España)"},
"es_LA": {"name_en": u"Spanish", "name": u"Español"},
"et_EE": {"name_en": u"Estonian", "name": u"Eesti"},
"eu_ES": {"name_en": u"Basque", "name": u"Euskara"},
"fa_IR": {"name_en": u"Persian", "name": u"فارسی"},
"fi_FI": {"name_en": u"Finnish", "name": u"Suomi"},
"fr_CA": {"name_en": u"French (Canada)", "name": u"Français (Canada)"},
"fr_FR": {"name_en": u"French", "name": u"Français"},
"ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"},
"gl_ES": {"name_en": u"Galician", "name": u"Galego"},
"he_IL": {"name_en": u"Hebrew", "name": u"עברית"},
"hi_IN": {"name_en": u"Hindi", "name": u"हिन्दी"},
"hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"},
"hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"},
"id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"},
"is_IS": {"name_en": u"Icelandic", "name": u"Íslenska"},
"it_IT": {"name_en": u"Italian", "name": u"Italiano"},
"ja_JP": {"name_en": u"Japanese", "name": u"日本語"},
"ko_KR": {"name_en": u"Korean", "name": u"한국어"},
"lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvių"},
"lv_LV": {"name_en": u"Latvian", "name": u"Latviešu"},
"mk_MK": {"name_en": u"Macedonian", "name": u"Македонски"},
"ml_IN": {"name_en": u"Malayalam", "name": u"മലയാളം"},
"ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"},
"nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokmål)"},
"nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"},
"nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"},
"pa_IN": {"name_en": u"Punjabi", "name": u"ਪੰਜਾਬੀ"},
"pl_PL": {"name_en": u"Polish", "name": u"Polski"},
"pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Português (Brasil)"},
"pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Português (Portugal)"},
"ro_RO": {"name_en": u"Romanian", "name": u"Română"},
"ru_RU": {"name_en": u"Russian", "name": u"Русский"},
"sk_SK": {"name_en": u"Slovak", "name": u"Slovenčina"},
"sl_SI": {"name_en": u"Slovenian", "name": u"Slovenščina"},
"sq_AL": {"name_en": u"Albanian", "name": u"Shqip"},
"sr_RS": {"name_en": u"Serbian", "name": u"Српски"},
"sv_SE": {"name_en": u"Swedish", "name": u"Svenska"},
"sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"},
"ta_IN": {"name_en": u"Tamil", "name": u"தமிழ்"},
"te_IN": {"name_en": u"Telugu", "name": u"తెలుగు"},
"th_TH": {"name_en": u"Thai", "name": u"ภาษาไทย"},
"tl_PH": {"name_en": u"Filipino", "name": u"Filipino"},
"tr_TR": {"name_en": u"Turkish", "name": u"Türkçe"},
"uk_UA": {"name_en": u"Ukraini ", "name": u"Українська"},
"vi_VN": {"name_en": u"Vietnamese", "name": u"Tiếng Việt"},
"zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"中文(简体)"},
"zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"中文(繁體)"},
"af_ZA": {"name_en": "Afrikaans", "name": "Afrikaans"},
"am_ET": {"name_en": "Amharic", "name": "አማርኛ"},
"ar_AR": {"name_en": "Arabic", "name": "العربية"},
"bg_BG": {"name_en": "Bulgarian", "name": "Български"},
"bn_IN": {"name_en": "Bengali", "name": "বাংলা"},
"bs_BA": {"name_en": "Bosnian", "name": "Bosanski"},
"ca_ES": {"name_en": "Catalan", "name": "Català"},
"cs_CZ": {"name_en": "Czech", "name": "Čeština"},
"cy_GB": {"name_en": "Welsh", "name": "Cymraeg"},
"da_DK": {"name_en": "Danish", "name": "Dansk"},
"de_DE": {"name_en": "German", "name": "Deutsch"},
"el_GR": {"name_en": "Greek", "name": "Ελληνικά"},
"en_GB": {"name_en": "English (UK)", "name": "English (UK)"},
"en_US": {"name_en": "English (US)", "name": "English (US)"},
"es_ES": {"name_en": "Spanish (Spain)", "name": "Español (España)"},
"es_LA": {"name_en": "Spanish", "name": "Español"},
"et_EE": {"name_en": "Estonian", "name": "Eesti"},
"eu_ES": {"name_en": "Basque", "name": "Euskara"},
"fa_IR": {"name_en": "Persian", "name": "فارسی"},
"fi_FI": {"name_en": "Finnish", "name": "Suomi"},
"fr_CA": {"name_en": "French (Canada)", "name": "Français (Canada)"},
"fr_FR": {"name_en": "French", "name": "Français"},
"ga_IE": {"name_en": "Irish", "name": "Gaeilge"},
"gl_ES": {"name_en": "Galician", "name": "Galego"},
"he_IL": {"name_en": "Hebrew", "name": "עברית"},
"hi_IN": {"name_en": "Hindi", "name": "हिन्दी"},
"hr_HR": {"name_en": "Croatian", "name": "Hrvatski"},
"hu_HU": {"name_en": "Hungarian", "name": "Magyar"},
"id_ID": {"name_en": "Indonesian", "name": "Bahasa Indonesia"},
"is_IS": {"name_en": "Icelandic", "name": "Íslenska"},
"it_IT": {"name_en": "Italian", "name": "Italiano"},
"ja_JP": {"name_en": "Japanese", "name": "日本語"},
"ko_KR": {"name_en": "Korean", "name": "한국어"},
"lt_LT": {"name_en": "Lithuanian", "name": "Lietuvių"},
"lv_LV": {"name_en": "Latvian", "name": "Latviešu"},
"mk_MK": {"name_en": "Macedonian", "name": "Македонски"},
"ml_IN": {"name_en": "Malayalam", "name": "മലയാളം"},
"ms_MY": {"name_en": "Malay", "name": "Bahasa Melayu"},
"nb_NO": {"name_en": "Norwegian (bokmal)", "name": "Norsk (bokmål)"},
"nl_NL": {"name_en": "Dutch", "name": "Nederlands"},
"nn_NO": {"name_en": "Norwegian (nynorsk)", "name": "Norsk (nynorsk)"},
"pa_IN": {"name_en": "Punjabi", "name": "ਪੰਜਾਬੀ"},
"pl_PL": {"name_en": "Polish", "name": "Polski"},
"pt_BR": {"name_en": "Portuguese (Brazil)", "name": "Português (Brasil)"},
"pt_PT": {"name_en": "Portuguese (Portugal)", "name": "Português (Portugal)"},
"ro_RO": {"name_en": "Romanian", "name": "Română"},
"ru_RU": {"name_en": "Russian", "name": "Русский"},
"sk_SK": {"name_en": "Slovak", "name": "Slovenčina"},
"sl_SI": {"name_en": "Slovenian", "name": "Slovenščina"},
"sq_AL": {"name_en": "Albanian", "name": "Shqip"},
"sr_RS": {"name_en": "Serbian", "name": "Српски"},
"sv_SE": {"name_en": "Swedish", "name": "Svenska"},
"sw_KE": {"name_en": "Swahili", "name": "Kiswahili"},
"ta_IN": {"name_en": "Tamil", "name": "தமிழ்"},
"te_IN": {"name_en": "Telugu", "name": "తెలుగు"},
"th_TH": {"name_en": "Thai", "name": "ภาษาไทย"},
"tl_PH": {"name_en": "Filipino", "name": "Filipino"},
"tr_TR": {"name_en": "Turkish", "name": "Türkçe"},
"uk_UA": {"name_en": "Ukraini ", "name": "Українська"},
"vi_VN": {"name_en": "Vietnamese", "name": "Tiếng Việt"},
"zh_CN": {"name_en": "Chinese (Simplified)", "name": "中文(简体)"},
"zh_TW": {"name_en": "Chinese (Traditional)", "name": "中文(繁體)"},
}

View file

@ -42,7 +42,7 @@ Example usage for Google OAuth:
user = await self.get_authenticated_user(
redirect_uri='http://your.site.com/auth/google',
code=self.get_argument('code'))
# Save the user with e.g. set_secure_cookie
# Save the user with e.g. set_signed_cookie
else:
self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
@ -136,7 +136,7 @@ class OpenIdMixin(object):
args = dict(
(k, v[-1]) for k, v in handler.request.arguments.items()
) # type: Dict[str, Union[str, bytes]]
args["openid.mode"] = u"check_authentication"
args["openid.mode"] = "check_authentication"
url = self._OPENID_ENDPOINT # type: ignore
if http_client is None:
http_client = self.get_auth_http_client()
@ -211,14 +211,14 @@ class OpenIdMixin(object):
for key in handler.request.arguments:
if (
key.startswith("openid.ns.")
and handler.get_argument(key) == u"http://openid.net/srv/ax/1.0"
and handler.get_argument(key) == "http://openid.net/srv/ax/1.0"
):
ax_ns = key[10:]
break
def get_ax_arg(uri: str) -> str:
if not ax_ns:
return u""
return ""
prefix = "openid." + ax_ns + ".type."
ax_name = None
for name in handler.request.arguments.keys():
@ -227,8 +227,8 @@ class OpenIdMixin(object):
ax_name = "openid." + ax_ns + ".value." + part
break
if not ax_name:
return u""
return handler.get_argument(ax_name, u"")
return ""
return handler.get_argument(ax_name, "")
email = get_ax_arg("http://axschema.org/contact/email")
name = get_ax_arg("http://axschema.org/namePerson")
@ -247,7 +247,7 @@ class OpenIdMixin(object):
if name:
user["name"] = name
elif name_parts:
user["name"] = u" ".join(name_parts)
user["name"] = " ".join(name_parts)
elif email:
user["name"] = email.split("@")[0]
if email:
@ -694,7 +694,7 @@ class TwitterMixin(OAuthMixin):
async def get(self):
if self.get_argument("oauth_token", None):
user = await self.get_authenticated_user()
# Save the user using e.g. set_secure_cookie()
# Save the user using e.g. set_signed_cookie()
else:
await self.authorize_redirect()
@ -855,8 +855,28 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
_OAUTH_NO_CALLBACKS = False
_OAUTH_SETTINGS_KEY = "google_oauth"
def get_google_oauth_settings(self) -> Dict[str, str]:
"""Return the Google OAuth 2.0 credentials that you created with
[Google Cloud
Platform](https://console.cloud.google.com/apis/credentials). The dict
format is::
{
"key": "your_client_id", "secret": "your_client_secret"
}
If your credentials are stored differently (e.g. in a db) you can
override this method for custom provision.
"""
handler = cast(RequestHandler, self)
return handler.settings[self._OAUTH_SETTINGS_KEY]
async def get_authenticated_user(
self, redirect_uri: str, code: str
self,
redirect_uri: str,
code: str,
client_id: Optional[str] = None,
client_secret: Optional[str] = None,
) -> Dict[str, Any]:
"""Handles the login for the Google user, returning an access token.
@ -883,11 +903,11 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
"https://www.googleapis.com/oauth2/v1/userinfo",
access_token=access["access_token"])
# Save the user and access token with
# e.g. set_secure_cookie.
# e.g. set_signed_cookie.
else:
self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
client_id=self.settings['google_oauth']['key'],
client_id=self.get_google_oauth_settings()['key'],
scope=['profile', 'email'],
response_type='code',
extra_params={'approval_prompt': 'auto'})
@ -899,14 +919,20 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
The ``callback`` argument was removed. Use the returned awaitable object instead.
""" # noqa: E501
handler = cast(RequestHandler, self)
if client_id is None or client_secret is None:
settings = self.get_google_oauth_settings()
if client_id is None:
client_id = settings["key"]
if client_secret is None:
client_secret = settings["secret"]
http = self.get_auth_http_client()
body = urllib.parse.urlencode(
{
"redirect_uri": redirect_uri,
"code": code,
"client_id": handler.settings[self._OAUTH_SETTINGS_KEY]["key"],
"client_secret": handler.settings[self._OAUTH_SETTINGS_KEY]["secret"],
"client_id": client_id,
"client_secret": client_secret,
"grant_type": "authorization_code",
}
)
@ -951,7 +977,7 @@ class FacebookGraphMixin(OAuth2Mixin):
client_id=self.settings["facebook_api_key"],
client_secret=self.settings["facebook_secret"],
code=self.get_argument("code"))
# Save the user with e.g. set_secure_cookie
# Save the user with e.g. set_signed_cookie
else:
self.authorize_redirect(
redirect_uri='/auth/facebookgraph/',
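
The ``GoogleOAuth2Mixin`` changes above add ``get_google_oauth_settings()`` and optional ``client_id``/``client_secret`` parameters to ``get_authenticated_user``. A hedged sketch of how a handler might use them; the handler name and credential values are illustrative, not taken from the diff:

from tornado.auth import GoogleOAuth2Mixin
from tornado.web import RequestHandler

class GoogleLoginHandler(RequestHandler, GoogleOAuth2Mixin):
    def get_google_oauth_settings(self):
        # Credentials could come from a database or secrets manager instead
        # of self.settings["google_oauth"].
        return {"key": "your_client_id", "secret": "your_client_secret"}

    async def get(self):
        if self.get_argument("code", None):
            user = await self.get_authenticated_user(
                redirect_uri="http://your.site.com/auth/google",
                code=self.get_argument("code"),
            )
            # Save the user with e.g. set_signed_cookie
        else:
            self.authorize_redirect(
                redirect_uri="http://your.site.com/auth/google",
                client_id=self.get_google_oauth_settings()["key"],
                scope=["profile", "email"],
                response_type="code",
                extra_params={"approval_prompt": "auto"},
            )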

View file

@ -36,11 +36,11 @@ from tornado.httpclient import (
)
from tornado.log import app_log
from typing import Dict, Any, Callable, Union, Tuple, Optional
from typing import Dict, Any, Callable, Union, Optional
import typing
if typing.TYPE_CHECKING:
from typing import Deque # noqa: F401
from typing import Deque, Tuple # noqa: F401
curl_log = logging.getLogger("tornado.curl_httpclient")

View file

@ -368,7 +368,7 @@ def linkify(
# have a status bar, such as Safari by default)
params += ' title="%s"' % href
return u'<a href="%s"%s>%s</a>' % (href, params, url)
return '<a href="%s"%s>%s</a>' % (href, params, url)
# First HTML-escape so that our strings are all safe.
# The regex is modified to avoid character entities other than &amp; so

View file

@ -743,7 +743,7 @@ class Runner(object):
self.running = False
self.finished = False
self.io_loop = IOLoop.current()
if self.handle_yield(first_yielded):
if self.ctx_run(self.handle_yield, first_yielded):
gen = result_future = first_yielded = None # type: ignore
self.ctx_run(self.run)
@ -763,21 +763,25 @@ class Runner(object):
return
self.future = None
try:
exc_info = None
try:
value = future.result()
except Exception:
exc_info = sys.exc_info()
future = None
except Exception as e:
# Save the exception for later. It's important that
# gen.throw() not be called inside this try/except block
# because that makes sys.exc_info behave unexpectedly.
exc: Optional[Exception] = e
else:
exc = None
finally:
future = None
if exc_info is not None:
if exc is not None:
try:
yielded = self.gen.throw(*exc_info) # type: ignore
yielded = self.gen.throw(exc)
finally:
# Break up a reference to itself
# for faster GC on CPython.
exc_info = None
# Break up a circular reference for faster GC on
# CPython.
del exc
else:
yielded = self.gen.send(value)

View file

@ -83,7 +83,7 @@ class IOLoop(Configurable):
import functools
import socket
import tornado.ioloop
import tornado
from tornado.iostream import IOStream
async def handle_connection(connection, address):
@ -123,8 +123,7 @@ class IOLoop(Configurable):
and instead initialize the `asyncio` event loop and use `IOLoop.current()`.
In some cases, such as in test frameworks when initializing an `IOLoop`
to be run in a secondary thread, it may be appropriate to construct
an `IOLoop` with ``IOLoop(make_current=False)``. Constructing an `IOLoop`
without the ``make_current=False`` argument is deprecated since Tornado 6.2.
an `IOLoop` with ``IOLoop(make_current=False)``.
In general, an `IOLoop` cannot survive a fork or be shared across processes
in any way. When multiple processes are being used, each process should
@ -145,12 +144,10 @@ class IOLoop(Configurable):
cannot be used on Python 3 except to redundantly specify the `asyncio`
event loop.
.. deprecated:: 6.2
It is deprecated to create an event loop that is "current" but not
running. This means it is deprecated to pass
``make_current=True`` to the ``IOLoop`` constructor, or to create
an ``IOLoop`` while no asyncio event loop is running unless
``make_current=False`` is used.
.. versionchanged:: 6.3
``make_current=True`` is now the default when creating an IOLoop -
previously the default was to make the event loop current if there wasn't
already a current one.
"""
# These constants were originally based on constants from the epoll module.
@ -263,17 +260,20 @@ class IOLoop(Configurable):
"""
try:
loop = asyncio.get_event_loop()
except (RuntimeError, AssertionError):
except RuntimeError:
if not instance:
return None
raise
# Create a new asyncio event loop for this thread.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return IOLoop._ioloop_for_asyncio[loop]
except KeyError:
if instance:
from tornado.platform.asyncio import AsyncIOMainLoop
current = AsyncIOMainLoop(make_current=True) # type: Optional[IOLoop]
current = AsyncIOMainLoop() # type: Optional[IOLoop]
else:
current = None
return current
@ -295,12 +295,17 @@ class IOLoop(Configurable):
This method also sets the current `asyncio` event loop.
.. deprecated:: 6.2
The concept of an event loop that is "current" without
currently running is deprecated in asyncio since Python
3.10. All related functionality in Tornado is also
deprecated. Instead, start the event loop with `asyncio.run`
before interacting with it.
Setting and clearing the current event loop through Tornado is
deprecated. Use ``asyncio.set_event_loop`` instead if you need this.
"""
warnings.warn(
"make_current is deprecated; start the event loop first",
DeprecationWarning,
stacklevel=2,
)
self._make_current()
def _make_current(self) -> None:
# The asyncio event loops override this method.
raise NotImplementedError()
@ -344,16 +349,9 @@ class IOLoop(Configurable):
return AsyncIOLoop
def initialize(self, make_current: Optional[bool] = None) -> None:
if make_current is None:
if IOLoop.current(instance=False) is None:
self.make_current()
elif make_current:
current = IOLoop.current(instance=False)
# AsyncIO loops can already be current by this point.
if current is not None and current is not self:
raise RuntimeError("current IOLoop already exists")
self.make_current()
def initialize(self, make_current: bool = True) -> None:
if make_current:
self._make_current()
def close(self, all_fds: bool = False) -> None:
"""Closes the `IOLoop`, freeing any resources used.

View file

@ -195,11 +195,9 @@ class _StreamBuffer(object):
pos += size
size = 0
else:
# Amortized O(1) shrink for Python 2
pos += size
if len(b) <= 2 * pos:
del typing.cast(bytearray, b)[:pos]
pos = 0
del typing.cast(bytearray, b)[:pos]
pos = 0
size = 0
assert size == 0
@ -254,7 +252,6 @@ class BaseIOStream(object):
self.max_write_buffer_size = max_write_buffer_size
self.error = None # type: Optional[BaseException]
self._read_buffer = bytearray()
self._read_buffer_pos = 0
self._read_buffer_size = 0
self._user_read_buffer = False
self._after_user_read_buffer = None # type: Optional[bytearray]
@ -451,21 +448,17 @@ class BaseIOStream(object):
available_bytes = self._read_buffer_size
n = len(buf)
if available_bytes >= n:
end = self._read_buffer_pos + n
buf[:] = memoryview(self._read_buffer)[self._read_buffer_pos : end]
del self._read_buffer[:end]
buf[:] = memoryview(self._read_buffer)[:n]
del self._read_buffer[:n]
self._after_user_read_buffer = self._read_buffer
elif available_bytes > 0:
buf[:available_bytes] = memoryview(self._read_buffer)[
self._read_buffer_pos :
]
buf[:available_bytes] = memoryview(self._read_buffer)[:]
# Set up the supplied buffer as our temporary read buffer.
# The original (if it had any data remaining) has been
# saved for later.
self._user_read_buffer = True
self._read_buffer = buf
self._read_buffer_pos = 0
self._read_buffer_size = available_bytes
self._read_bytes = n
self._read_partial = partial
@ -818,7 +811,6 @@ class BaseIOStream(object):
if self._user_read_buffer:
self._read_buffer = self._after_user_read_buffer or bytearray()
self._after_user_read_buffer = None
self._read_buffer_pos = 0
self._read_buffer_size = len(self._read_buffer)
self._user_read_buffer = False
result = size # type: Union[int, bytes]
@ -931,20 +923,17 @@ class BaseIOStream(object):
# since large merges are relatively expensive and get undone in
# _consume().
if self._read_buffer:
loc = self._read_buffer.find(
self._read_delimiter, self._read_buffer_pos
)
loc = self._read_buffer.find(self._read_delimiter)
if loc != -1:
loc -= self._read_buffer_pos
delimiter_len = len(self._read_delimiter)
self._check_max_bytes(self._read_delimiter, loc + delimiter_len)
return loc + delimiter_len
self._check_max_bytes(self._read_delimiter, self._read_buffer_size)
elif self._read_regex is not None:
if self._read_buffer:
m = self._read_regex.search(self._read_buffer, self._read_buffer_pos)
m = self._read_regex.search(self._read_buffer)
if m is not None:
loc = m.end() - self._read_buffer_pos
loc = m.end()
self._check_max_bytes(self._read_regex, loc)
return loc
self._check_max_bytes(self._read_regex, self._read_buffer_size)
@ -1001,19 +990,9 @@ class BaseIOStream(object):
return b""
assert loc <= self._read_buffer_size
# Slice the bytearray buffer into bytes, without intermediate copying
b = (
memoryview(self._read_buffer)[
self._read_buffer_pos : self._read_buffer_pos + loc
]
).tobytes()
self._read_buffer_pos += loc
b = (memoryview(self._read_buffer)[:loc]).tobytes()
self._read_buffer_size -= loc
# Amortized O(1) shrink
# (this heuristic is implemented natively in Python 3.4+
# but is replicated here for Python 2)
if self._read_buffer_pos > self._read_buffer_size:
del self._read_buffer[: self._read_buffer_pos]
self._read_buffer_pos = 0
del self._read_buffer[:loc]
return b
def _check_closed(self) -> None:
@ -1092,9 +1071,8 @@ class IOStream(BaseIOStream):
.. testcode::
import tornado.ioloop
import tornado.iostream
import socket
import tornado
async def main():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)

View file

@ -268,7 +268,7 @@ class Locale(object):
def __init__(self, code: str) -> None:
self.code = code
self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown")
self.name = LOCALE_NAMES.get(code, {}).get("name", "Unknown")
self.rtl = False
for prefix in ["fa", "ar", "he"]:
if self.code.startswith(prefix):
@ -406,7 +406,7 @@ class Locale(object):
str_time = "%d:%02d" % (local_date.hour, local_date.minute)
elif self.code == "zh_CN":
str_time = "%s%d:%02d" % (
(u"\u4e0a\u5348", u"\u4e0b\u5348")[local_date.hour >= 12],
("\u4e0a\u5348", "\u4e0b\u5348")[local_date.hour >= 12],
local_date.hour % 12 or 12,
local_date.minute,
)
@ -458,7 +458,7 @@ class Locale(object):
return ""
if len(parts) == 1:
return parts[0]
comma = u" \u0648 " if self.code.startswith("fa") else u", "
comma = " \u0648 " if self.code.startswith("fa") else ", "
return _("%(commas)s and %(last)s") % {
"commas": comma.join(parts[:-1]),
"last": parts[len(parts) - 1],

View file

@ -44,10 +44,10 @@ if hasattr(ssl, "OP_NO_COMPRESSION"):
# module-import time, the import lock is already held by the main thread,
# leading to deadlock. Avoid it by caching the idna encoder on the main
# thread now.
u"foo".encode("idna")
"foo".encode("idna")
# For undiagnosed reasons, 'latin1' codec may also need to be preloaded.
u"foo".encode("latin1")
"foo".encode("latin1")
# Default backlog used when calling sock.listen()
_DEFAULT_BACKLOG = 128
@ -115,7 +115,7 @@ def bind_sockets(
sys.platform == "darwin"
and address == "localhost"
and af == socket.AF_INET6
and sockaddr[3] != 0
and sockaddr[3] != 0 # type: ignore
):
# Mac OS X includes a link-local address fe80::1%lo0 in the
# getaddrinfo results for 'localhost'. However, the firewall

View file

@ -56,7 +56,7 @@ Your ``main()`` method can parse the command line or parse a config file with
either `parse_command_line` or `parse_config_file`::
import myapp.db, myapp.server
import tornado.options
import tornado
if __name__ == '__main__':
tornado.options.parse_command_line()
@ -427,7 +427,9 @@ class OptionParser(object):
% (option.name, option.type.__name__)
)
if type(config[name]) == str and option.type != str:
if type(config[name]) == str and (
option.type != str or option.multiple
):
option.parse(config[name])
else:
option.set(config[name])
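
The ``parse_config_file`` change above means a plain string in a config file is now parsed (split on commas) for options declared with ``multiple=True``, even when their type is ``str``. A hedged, self-contained sketch; the option and file names are illustrative:

from tornado.options import define, options, parse_config_file

define("servers", type=str, multiple=True, default=[])

# Write a throwaway config file so the example stands alone.
with open("example_config.py", "w") as f:
    f.write('servers = "alpha.example.com,beta.example.com"\n')

parse_config_file("example_config.py")
print(options.servers)  # ['alpha.example.com', 'beta.example.com']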

View file

@ -36,10 +36,10 @@ import warnings
from tornado.gen import convert_yielded
from tornado.ioloop import IOLoop, _Selectable
from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Tuple, Dict
from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Dict
if typing.TYPE_CHECKING:
from typing import Set # noqa: F401
from typing import Set, Tuple # noqa: F401
from typing_extensions import Protocol
class _HasFileno(Protocol):
@ -74,20 +74,6 @@ def _atexit_callback() -> None:
atexit.register(_atexit_callback)
if sys.version_info >= (3, 10):
def _get_event_loop() -> asyncio.AbstractEventLoop:
try:
return asyncio.get_running_loop()
except RuntimeError:
pass
return asyncio.get_event_loop_policy().get_event_loop()
else:
from asyncio import get_event_loop as _get_event_loop
class BaseAsyncIOLoop(IOLoop):
def initialize( # type: ignore
@ -206,15 +192,7 @@ class BaseAsyncIOLoop(IOLoop):
handler_func(fileobj, events)
def start(self) -> None:
try:
old_loop = _get_event_loop()
except (RuntimeError, AssertionError):
old_loop = None # type: ignore
try:
asyncio.set_event_loop(self.asyncio_loop)
self.asyncio_loop.run_forever()
finally:
asyncio.set_event_loop(old_loop)
self.asyncio_loop.run_forever()
def stop(self) -> None:
self.asyncio_loop.stop()
@ -298,7 +276,7 @@ class AsyncIOMainLoop(BaseAsyncIOLoop):
def initialize(self, **kwargs: Any) -> None: # type: ignore
super().initialize(asyncio.get_event_loop(), **kwargs)
def make_current(self) -> None:
def _make_current(self) -> None:
# AsyncIOMainLoop already refers to the current asyncio loop so
# nothing to do here.
pass
@ -349,12 +327,7 @@ class AsyncIOLoop(BaseAsyncIOLoop):
self._clear_current()
super().close(all_fds=all_fds)
def make_current(self) -> None:
warnings.warn(
"make_current is deprecated; start the event loop first",
DeprecationWarning,
stacklevel=2,
)
def _make_current(self) -> None:
if not self.is_current:
try:
self.old_asyncio = asyncio.get_event_loop()
@ -672,10 +645,18 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop):
self._writers[fd] = functools.partial(callback, *args)
self._wake_selector()
def remove_reader(self, fd: "_FileDescriptorLike") -> None:
del self._readers[fd]
def remove_reader(self, fd: "_FileDescriptorLike") -> bool:
try:
del self._readers[fd]
except KeyError:
return False
self._wake_selector()
return True
def remove_writer(self, fd: "_FileDescriptorLike") -> None:
del self._writers[fd]
def remove_writer(self, fd: "_FileDescriptorLike") -> bool:
try:
del self._writers[fd]
except KeyError:
return False
self._wake_selector()
return True

View file

@ -15,14 +15,15 @@ if typing.TYPE_CHECKING:
class CaresResolver(Resolver):
"""Name resolver based on the c-ares library.
This is a non-blocking and non-threaded resolver. It may not produce
the same results as the system resolver, but can be used for non-blocking
This is a non-blocking and non-threaded resolver. It may not produce the
same results as the system resolver, but can be used for non-blocking
resolution when threads cannot be used.
c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``,
so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is
the default for ``tornado.simple_httpclient``, but other libraries
may default to ``AF_UNSPEC``.
``pycares`` will not return a mix of ``AF_INET`` and ``AF_INET6`` when
``family`` is ``AF_UNSPEC``, so it is only recommended for use in
``AF_INET`` (i.e. IPv4). This is the default for
``tornado.simple_httpclient``, but other libraries may default to
``AF_UNSPEC``.
.. versionchanged:: 5.0
The ``io_loop`` argument (deprecated since version 4.1) has been removed.

View file

@ -381,7 +381,7 @@ class PriorityQueue(Queue):
def _put(self, item: _T) -> None:
heapq.heappush(self._queue, item)
def _get(self) -> _T:
def _get(self) -> _T: # type: ignore[type-var]
return heapq.heappop(self._queue)
@ -418,5 +418,5 @@ class LifoQueue(Queue):
def _put(self, item: _T) -> None:
self._queue.append(item)
def _get(self) -> _T:
def _get(self) -> _T: # type: ignore[type-var]
return self._queue.pop()

View file

@ -547,7 +547,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
value: Optional[BaseException],
tb: Optional[TracebackType],
) -> bool:
if self.final_callback:
if self.final_callback is not None:
self._remove_timeout()
if isinstance(value, StreamClosedError):
if value.real_error is None:

View file

@ -21,6 +21,7 @@ import socket
import numbers
import datetime
import ssl
import typing
from tornado.concurrent import Future, future_add_done_callback
from tornado.ioloop import IOLoop
@ -29,7 +30,10 @@ from tornado import gen
from tornado.netutil import Resolver
from tornado.gen import TimeoutError
from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional, Set
from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional
if typing.TYPE_CHECKING:
from typing import Set # noqa(F401)
_INITIAL_CONNECT_TIMEOUT = 0.3

View file

@ -246,9 +246,7 @@ class TCPServer(object):
.. deprecated:: 6.2
Use either ``listen()`` or ``add_sockets()`` instead of ``bind()``
and ``start()``. The ``bind()/start()`` pattern depends on
interfaces that have been deprecated in Python 3.10 and will be
removed in future versions of Python.
and ``start()``.
"""
sockets = bind_sockets(
port,
@ -295,9 +293,7 @@ class TCPServer(object):
.. deprecated:: 6.2
Use either ``listen()`` or ``add_sockets()`` instead of ``bind()``
and ``start()``. The ``bind()/start()`` pattern depends on
interfaces that have been deprecated in Python 3.10 and will be
removed in future versions of Python.
and ``start()``.
"""
assert not self._started
self._started = True
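
The shortened deprecation notes above still steer users toward a single ``listen()`` call rather than ``bind()``/``start()``. A hedged sketch of that pattern (the echo server is illustrative, not part of the diff):

import asyncio
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer

class EchoServer(TCPServer):
    async def handle_stream(self, stream, address):
        try:
            while True:
                data = await stream.read_until(b"\n")
                await stream.write(data)
        except StreamClosedError:
            pass

async def main():
    server = EchoServer()
    server.listen(8888)           # replaces the bind(8888) + start() pattern
    await asyncio.Event().wait()  # serve until cancelled

asyncio.run(main())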

View file

@ -135,7 +135,8 @@ class AsyncTestCase(unittest.TestCase):
By default, a new `.IOLoop` is constructed for each test and is available
as ``self.io_loop``. If the code being tested requires a
global `.IOLoop`, subclasses should override `get_new_ioloop` to return it.
reused global `.IOLoop`, subclasses should override `get_new_ioloop` to return it,
although this is deprecated as of Tornado 6.3.
The `.IOLoop`'s ``start`` and ``stop`` methods should not be
called directly. Instead, use `self.stop <stop>` and `self.wait
@ -162,17 +163,6 @@ class AsyncTestCase(unittest.TestCase):
response = self.wait()
# Test contents of response
self.assertIn("FriendFeed", response.body)
.. deprecated:: 6.2
AsyncTestCase and AsyncHTTPTestCase are deprecated due to changes
in future versions of Python (after 3.10). The interfaces used
in this class are incompatible with the deprecation and intended
removal of certain methods related to the idea of a "current"
event loop while no event loop is actually running. Use
`unittest.IsolatedAsyncioTestCase` instead. Note that this class
does not emit DeprecationWarnings until better migration guidance
can be provided.
"""
def __init__(self, methodName: str = "runTest") -> None:
@ -193,49 +183,22 @@ class AsyncTestCase(unittest.TestCase):
self._test_generator = None # type: Optional[Union[Generator, Coroutine]]
def setUp(self) -> None:
setup_with_context_manager(self, warnings.catch_warnings())
warnings.filterwarnings(
"ignore",
message="There is no current event loop",
category=DeprecationWarning,
module=r"tornado\..*",
)
py_ver = sys.version_info
if ((3, 10, 0) <= py_ver < (3, 10, 9)) or ((3, 11, 0) <= py_ver <= (3, 11, 1)):
# Early releases in the Python 3.10 and 3.11 series had deprecation
# warnings that were later reverted; we must suppress them here.
setup_with_context_manager(self, warnings.catch_warnings())
warnings.filterwarnings(
"ignore",
message="There is no current event loop",
category=DeprecationWarning,
module=r"tornado\..*",
)
super().setUp()
# NOTE: this code attempts to navigate deprecation warnings introduced
# in Python 3.10. The idea of an implicit current event loop is
# deprecated in that version, with the intention that tests like this
# explicitly create a new event loop and run on it. However, other
# packages such as pytest-asyncio (as of version 0.16.0) still rely on
# the implicit current event loop and we want to be compatible with them
# (even when run on 3.10, but not, of course, on the future version of
# python that removes the get/set_event_loop methods completely).
#
# Deprecation warnings were introduced inconsistently:
# asyncio.get_event_loop warns, but
# asyncio.get_event_loop_policy().get_event_loop does not. Similarly,
# none of the set_event_loop methods warn, although comments on
# https://bugs.python.org/issue39529 indicate that they are also
# intended for future removal.
#
# Therefore, we first attempt to access the event loop with the
# (non-warning) policy method, and if it fails, fall back to creating a
# new event loop. We do not have effective test coverage of the
# new event loop case; this will have to be watched when/if
# get_event_loop is actually removed.
self.should_close_asyncio_loop = False
try:
self.asyncio_loop = asyncio.get_event_loop_policy().get_event_loop()
except Exception:
self.asyncio_loop = asyncio.new_event_loop()
self.should_close_asyncio_loop = True
async def get_loop() -> IOLoop:
return self.get_new_ioloop()
self.io_loop = self.asyncio_loop.run_until_complete(get_loop())
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.io_loop.make_current()
if type(self).get_new_ioloop is not AsyncTestCase.get_new_ioloop:
warnings.warn("get_new_ioloop is deprecated", DeprecationWarning)
self.io_loop = self.get_new_ioloop()
asyncio.set_event_loop(self.io_loop.asyncio_loop) # type: ignore[attr-defined]
def tearDown(self) -> None:
# Native coroutines tend to produce warnings if they're not
@ -270,17 +233,13 @@ class AsyncTestCase(unittest.TestCase):
# Clean up Subprocess, so it can be used again with a new ioloop.
Subprocess.uninitialize()
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.io_loop.clear_current()
asyncio.set_event_loop(None)
if not isinstance(self.io_loop, _NON_OWNED_IOLOOPS):
# Try to clean up any file descriptors left open in the ioloop.
# This avoids leaks, especially when tests are run repeatedly
# in the same process with autoreload (because curl does not
# set FD_CLOEXEC on its file descriptors)
self.io_loop.close(all_fds=True)
if self.should_close_asyncio_loop:
self.asyncio_loop.close()
super().tearDown()
# In case an exception escaped or the StackContext caught an exception
# when there wasn't a wait() to re-raise it, do so here.
@ -298,6 +257,9 @@ class AsyncTestCase(unittest.TestCase):
singletons using the default `.IOLoop`) or if a per-test event
loop is being provided by another system (such as
``pytest-asyncio``).
.. deprecated:: 6.3
This method will be removed in Tornado 7.0.
"""
return IOLoop(make_current=False)
@ -435,10 +397,6 @@ class AsyncHTTPTestCase(AsyncTestCase):
like ``http_client.fetch()``, into a synchronous operation. If you need
to do other asynchronous operations in tests, you'll probably need to use
``stop()`` and ``wait()`` yourself.
.. deprecated:: 6.2
`AsyncTestCase` and `AsyncHTTPTestCase` are deprecated due to changes
in Python 3.10; see comments on `AsyncTestCase` for more details.
"""
def setUp(self) -> None:
@ -672,7 +630,7 @@ def gen_test( # noqa: F811
if self._test_generator is not None and getattr(
self._test_generator, "cr_running", True
):
self._test_generator.throw(type(e), e)
self._test_generator.throw(e)
# In case the test contains an overly broad except
# clause, we may get back here.
# Coroutine was stopped or didn't raise a useful stack trace,
@ -724,28 +682,37 @@ class ExpectLog(logging.Filter):
) -> None:
"""Constructs an ExpectLog context manager.
:param logger: Logger object (or name of logger) to watch. Pass
an empty string to watch the root logger.
:param regex: Regular expression to match. Any log entries on
the specified logger that match this regex will be suppressed.
:param required: If true, an exception will be raised if the end of
the ``with`` statement is reached without matching any log entries.
:param logger: Logger object (or name of logger) to watch. Pass an
empty string to watch the root logger.
:param regex: Regular expression to match. Any log entries on the
specified logger that match this regex will be suppressed.
:param required: If true, an exception will be raised if the end of the
``with`` statement is reached without matching any log entries.
:param level: A constant from the ``logging`` module indicating the
expected log level. If this parameter is provided, only log messages
at this level will be considered to match. Additionally, the
supplied ``logger`` will have its level adjusted if necessary
(for the duration of the ``ExpectLog`` to enable the expected
message.
supplied ``logger`` will have its level adjusted if necessary (for
the duration of the ``ExpectLog``) to enable the expected message.
.. versionchanged:: 6.1
Added the ``level`` parameter.
.. deprecated:: 6.3
In Tornado 7.0, only ``WARNING`` and higher logging levels will be
matched by default. To match ``INFO`` and lower levels, the ``level``
argument must be used. This is changing to minimize differences
between ``tornado.testing.main`` (which enables ``INFO`` logs by
default) and most other test runners (including those in IDEs)
which have ``INFO`` logs disabled by default.
"""
if isinstance(logger, basestring_type):
logger = logging.getLogger(logger)
self.logger = logger
self.regex = re.compile(regex)
self.required = required
self.matched = False
# matched and deprecated_level_matched are a counter for the respective event.
self.matched = 0
self.deprecated_level_matched = 0
self.logged_stack = False
self.level = level
self.orig_level = None # type: Optional[int]
@ -755,13 +722,20 @@ class ExpectLog(logging.Filter):
self.logged_stack = True
message = record.getMessage()
if self.regex.match(message):
if self.level is None and record.levelno < logging.WARNING:
# We're inside the logging machinery here so generating a DeprecationWarning
# here won't be reported cleanly (if warnings-as-errors is enabled, the error
# just gets swallowed by the logging module), and even if it were it would
# have the wrong stack trace. Just remember this fact and report it in
# __exit__ instead.
self.deprecated_level_matched += 1
if self.level is not None and record.levelno != self.level:
app_log.warning(
"Got expected log message %r at unexpected level (%s vs %s)"
% (message, logging.getLevelName(self.level), record.levelname)
)
return True
self.matched = True
self.matched += 1
return False
return True
@ -783,6 +757,15 @@ class ExpectLog(logging.Filter):
self.logger.removeFilter(self)
if not typ and self.required and not self.matched:
raise Exception("did not get expected log message")
if (
not typ
and self.required
and (self.deprecated_level_matched >= self.matched)
):
warnings.warn(
"ExpectLog matched at INFO or below without level argument",
DeprecationWarning,
)
# From https://nedbatchelder.com/blog/201508/using_context_managers_in_test_setup.html
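
Given the ``ExpectLog`` deprecation above, tests that expect ``INFO``-level messages should pass ``level`` explicitly. A hedged sketch (the test class and message are illustrative):

import logging
import unittest

from tornado.log import gen_log
from tornado.testing import AsyncTestCase, ExpectLog, gen_test

class ExpectLogExample(AsyncTestCase):
    @gen_test
    async def test_info_message(self):
        # Without level=, matching below WARNING is deprecated as of 6.3.
        with ExpectLog(gen_log, "cache warmed", level=logging.INFO):
            gen_log.info("cache warmed")

if __name__ == "__main__":
    unittest.main()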

View file

@ -23,7 +23,7 @@ Here is a simple "Hello, world" example app:
.. testcode::
import asyncio
import tornado.web
import tornado
class MainHandler(tornado.web.RequestHandler):
def get(self):
@ -166,7 +166,7 @@ May be overridden by passing a ``version`` keyword argument.
"""
DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
"""The oldest signed value accepted by `.RequestHandler.get_signed_cookie`.
May be overridden by passing a ``min_version`` keyword argument.
@ -210,7 +210,7 @@ class RequestHandler(object):
self,
application: "Application",
request: httputil.HTTPServerRequest,
**kwargs: Any
**kwargs: Any,
) -> None:
super().__init__()
@ -603,21 +603,28 @@ class RequestHandler(object):
expires: Optional[Union[float, Tuple, datetime.datetime]] = None,
path: str = "/",
expires_days: Optional[float] = None,
**kwargs: Any
# Keyword-only args start here for historical reasons.
*,
max_age: Optional[int] = None,
httponly: bool = False,
secure: bool = False,
samesite: Optional[str] = None,
) -> None:
"""Sets an outgoing cookie name/value with the given options.
Newly-set cookies are not immediately visible via `get_cookie`;
they are not present until the next request.
expires may be a numeric timestamp as returned by `time.time`,
a time tuple as returned by `time.gmtime`, or a
`datetime.datetime` object.
Most arguments are passed directly to `http.cookies.Morsel`.
See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie
for more information.
``expires`` may be a numeric timestamp as returned by `time.time`,
a time tuple as returned by `time.gmtime`, or a
`datetime.datetime` object. ``expires_days`` is provided as a convenience
to set an expiration time in days from today (if both are set, ``expires``
is used).
Additional keyword arguments are set on the cookies.Morsel
directly.
See https://docs.python.org/3/library/http.cookies.html#http.cookies.Morsel
for available attributes.
"""
# The cookie library only accepts type str, in both python 2 and 3
name = escape.native_str(name)
@ -641,56 +648,82 @@ class RequestHandler(object):
morsel["expires"] = httputil.format_timestamp(expires)
if path:
morsel["path"] = path
for k, v in kwargs.items():
if k == "max_age":
k = "max-age"
if max_age:
# Note change from _ to -.
morsel["max-age"] = str(max_age)
if httponly:
# Note that SimpleCookie ignores the value here. The presence of an
# httponly (or secure) key is treated as true.
morsel["httponly"] = True
if secure:
morsel["secure"] = True
if samesite:
morsel["samesite"] = samesite
# skip falsy values for httponly and secure flags because
# SimpleCookie sets them regardless
if k in ["httponly", "secure"] and not v:
continue
morsel[k] = v
def clear_cookie(
self, name: str, path: str = "/", domain: Optional[str] = None
) -> None:
def clear_cookie(self, name: str, **kwargs: Any) -> None:
"""Deletes the cookie with the given name.
Due to limitations of the cookie protocol, you must pass the same
path and domain to clear a cookie as were used when that cookie
was set (but there is no way to find out on the server side
which values were used for a given cookie).
This method accepts the same arguments as `set_cookie`, except for
``expires`` and ``max_age``. Clearing a cookie requires the same
``domain`` and ``path`` arguments as when it was set. In some cases the
``samesite`` and ``secure`` arguments are also required to match. Other
arguments are ignored.
Similar to `set_cookie`, the effect of this method will not be
seen until the following request.
.. versionchanged:: 6.3
Now accepts all keyword arguments that ``set_cookie`` does.
The ``samesite`` and ``secure`` flags have recently become
required for clearing ``samesite="none"`` cookies.
"""
for excluded_arg in ["expires", "max_age"]:
if excluded_arg in kwargs:
raise TypeError(
f"clear_cookie() got an unexpected keyword argument '{excluded_arg}'"
)
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires, domain=domain)
self.set_cookie(name, value="", expires=expires, **kwargs)
def clear_all_cookies(self, path: str = "/", domain: Optional[str] = None) -> None:
"""Deletes all the cookies the user sent with this request.
def clear_all_cookies(self, **kwargs: Any) -> None:
"""Attempt to delete all the cookies the user sent with this request.
See `clear_cookie` for more information on the path and domain
parameters.
See `clear_cookie` for more information on keyword arguments. Due to
limitations of the cookie protocol, it is impossible to determine on the
server side which values are necessary for the ``domain``, ``path``,
``samesite``, or ``secure`` arguments, so this method can only be
successful if you consistently use the same values for these arguments
when setting cookies.
Similar to `set_cookie`, the effect of this method will not be
seen until the following request.
Similar to `set_cookie`, the effect of this method will not be seen
until the following request.
.. versionchanged:: 3.2
Added the ``path`` and ``domain`` parameters.
.. versionchanged:: 6.3
Now accepts all keyword arguments that ``set_cookie`` does.
.. deprecated:: 6.3
The increasingly complex rules governing cookies have made it
impossible for a ``clear_all_cookies`` method to work reliably
since all we know about cookies are their names. Applications
should generally use ``clear_cookie`` one at a time instead.
"""
for name in self.request.cookies:
self.clear_cookie(name, path=path, domain=domain)
self.clear_cookie(name, **kwargs)
def set_secure_cookie(
def set_signed_cookie(
self,
name: str,
value: Union[str, bytes],
expires_days: Optional[float] = 30,
version: Optional[int] = None,
**kwargs: Any
**kwargs: Any,
) -> None:
"""Signs and timestamps a cookie so it cannot be forged.
@ -698,11 +731,11 @@ class RequestHandler(object):
to use this method. It should be a long, random sequence of bytes
to be used as the HMAC secret for the signature.
To read a cookie set with this method, use `get_secure_cookie()`.
To read a cookie set with this method, use `get_signed_cookie()`.
Note that the ``expires_days`` parameter sets the lifetime of the
cookie in the browser, but is independent of the ``max_age_days``
parameter to `get_secure_cookie`.
parameter to `get_signed_cookie`.
A value of None limits the lifetime to the current browser session.
Secure cookies may contain arbitrary byte values, not just unicode
@ -715,22 +748,30 @@ class RequestHandler(object):
Added the ``version`` argument. Introduced cookie version 2
and made it the default.
.. versionchanged:: 6.3
Renamed from ``set_secure_cookie`` to ``set_signed_cookie`` to
avoid confusion with other uses of "secure" in cookie attributes
and prefixes. The old name remains as an alias.
"""
self.set_cookie(
name,
self.create_signed_value(name, value, version=version),
expires_days=expires_days,
**kwargs
**kwargs,
)
set_secure_cookie = set_signed_cookie
def create_signed_value(
self, name: str, value: Union[str, bytes], version: Optional[int] = None
) -> bytes:
"""Signs and timestamps a string so it cannot be forged.
Normally used via set_secure_cookie, but provided as a separate
Normally used via set_signed_cookie, but provided as a separate
method for non-cookie uses. To decode a value not stored
as a cookie use the optional value argument to get_secure_cookie.
as a cookie use the optional value argument to get_signed_cookie.
.. versionchanged:: 3.2.1
@ -749,7 +790,7 @@ class RequestHandler(object):
secret, name, value, version=version, key_version=key_version
)
def get_secure_cookie(
def get_signed_cookie(
self,
name: str,
value: Optional[str] = None,
@ -763,12 +804,19 @@ class RequestHandler(object):
Similar to `get_cookie`, this method only returns cookies that
were present in the request. It does not see outgoing cookies set by
`set_secure_cookie` in this handler.
`set_signed_cookie` in this handler.
.. versionchanged:: 3.2.1
Added the ``min_version`` argument. Introduced cookie version 2;
both versions 1 and 2 are accepted by default.
.. versionchanged:: 6.3
Renamed from ``get_secure_cookie`` to ``get_signed_cookie`` to
avoid confusion with other uses of "secure" in cookie attributes
and prefixes. The old name remains as an alias.
"""
self.require_setting("cookie_secret", "secure cookies")
if value is None:
@ -781,12 +829,22 @@ class RequestHandler(object):
min_version=min_version,
)
def get_secure_cookie_key_version(
get_secure_cookie = get_signed_cookie
def get_signed_cookie_key_version(
self, name: str, value: Optional[str] = None
) -> Optional[int]:
"""Returns the signing key version of the secure cookie.
The version is returned as int.
.. versionchanged:: 6.3
Renamed from ``get_secure_cookie_key_version`` to
``get_signed_cookie_key_version`` to avoid confusion with other
uses of "secure" in cookie attributes and prefixes. The old name
remains as an alias.
"""
self.require_setting("cookie_secret", "secure cookies")
if value is None:
@ -795,6 +853,8 @@ class RequestHandler(object):
return None
return get_signature_key_version(value)
get_secure_cookie_key_version = get_signed_cookie_key_version
def redirect(
self, url: str, permanent: bool = False, status: Optional[int] = None
) -> None:
@ -1321,7 +1381,7 @@ class RequestHandler(object):
and is cached for future access::
def get_current_user(self):
user_cookie = self.get_secure_cookie("user")
user_cookie = self.get_signed_cookie("user")
if user_cookie:
return json.loads(user_cookie)
return None
@ -1331,7 +1391,7 @@ class RequestHandler(object):
@gen.coroutine
def prepare(self):
user_id_cookie = self.get_secure_cookie("user_id")
user_id_cookie = self.get_signed_cookie("user_id")
if user_id_cookie:
self.current_user = yield load_user(user_id_cookie)
@ -1643,7 +1703,7 @@ class RequestHandler(object):
# Find all weak and strong etag values from If-None-Match header
# because RFC 7232 allows multiple etag values in a single header.
etags = re.findall(
br'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
rb'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
)
if not computed_etag or not etags:
return False
@ -1676,20 +1736,16 @@ class RequestHandler(object):
)
# If XSRF cookies are turned on, reject form submissions without
# the proper cookie
if (
self.request.method
not in (
"GET",
"HEAD",
"OPTIONS",
)
and self.application.settings.get("xsrf_cookies")
):
if self.request.method not in (
"GET",
"HEAD",
"OPTIONS",
) and self.application.settings.get("xsrf_cookies"):
self.check_xsrf_cookie()
result = self.prepare()
if result is not None:
result = await result
result = await result # type: ignore
if self._prepared_future is not None:
# Tell the Application we've finished with prepare()
# and are ready for the body to arrive.
@ -1848,7 +1904,7 @@ def stream_request_body(cls: Type[_RequestHandlerType]) -> Type[_RequestHandlerT
* The regular HTTP method (``post``, ``put``, etc) will be called after
the entire body has been read.
See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/master/demos/file_upload/>`_
See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/stable/demos/file_upload/>`_
for example usage.
""" # noqa: E501
if not issubclass(cls, RequestHandler):
@ -2046,7 +2102,7 @@ class Application(ReversibleRouter):
handlers: Optional[_RuleList] = None,
default_host: Optional[str] = None,
transforms: Optional[List[Type["OutputTransform"]]] = None,
**settings: Any
**settings: Any,
) -> None:
if transforms is None:
self.transforms = [] # type: List[Type[OutputTransform]]
@ -2106,7 +2162,7 @@ class Application(ReversibleRouter):
backlog: int = tornado.netutil._DEFAULT_BACKLOG,
flags: Optional[int] = None,
reuse_port: bool = False,
**kwargs: Any
**kwargs: Any,
) -> HTTPServer:
"""Starts an HTTP server for this application on the given port.
@ -2393,7 +2449,7 @@ class HTTPError(Exception):
status_code: int = 500,
log_message: Optional[str] = None,
*args: Any,
**kwargs: Any
**kwargs: Any,
) -> None:
self.status_code = status_code
self.log_message = log_message
@ -3441,7 +3497,7 @@ def create_signed_value(
# A leading version number in decimal
# with no leading zeros, followed by a pipe.
_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
_signed_value_version_re = re.compile(rb"^([1-9][0-9]*)\|(.*)$")
def _get_version(value: bytes) -> int:
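
To summarize the cookie API changes in this file: the signed-cookie helpers gain ``*_signed_cookie`` names (the old ``*_secure_cookie`` names remain as aliases), ``set_cookie`` grows keyword-only ``max_age``/``httponly``/``secure``/``samesite`` arguments, and ``clear_cookie`` forwards the same keywords. A hedged sketch; the handler and cookie contents are illustrative:

import json
import tornado.web

class SessionHandler(tornado.web.RequestHandler):
    def get_current_user(self):
        user_cookie = self.get_signed_cookie("user")  # was get_secure_cookie
        return json.loads(user_cookie) if user_cookie else None

    def post(self):
        self.set_signed_cookie(                       # was set_secure_cookie
            "user",
            json.dumps({"name": self.get_argument("name")}),
            httponly=True,
            secure=True,
            samesite="None",
        )

    def delete(self):
        # clear_cookie accepts the same keywords; samesite/secure must match
        # the values used when the cookie was set.
        self.clear_cookie("user", secure=True, samesite="None")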

View file

@ -23,7 +23,6 @@ import hashlib
import os
import sys
import struct
import tornado.escape
import tornado.web
from urllib.parse import urlparse
import zlib
@ -34,6 +33,7 @@ from tornado import gen, httpclient, httputil
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.iostream import StreamClosedError, IOStream
from tornado.log import gen_log, app_log
from tornado.netutil import Resolver
from tornado import simple_httpclient
from tornado.queues import Queue
from tornado.tcpclient import TCPClient
@ -822,7 +822,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self._masked_frame = None
self._frame_mask = None # type: Optional[bytes]
self._frame_length = None
self._fragmented_message_buffer = None # type: Optional[bytes]
self._fragmented_message_buffer = None # type: Optional[bytearray]
self._fragmented_message_opcode = None
self._waiting = None # type: object
self._compression_options = params.compression_options
@ -1177,10 +1177,10 @@ class WebSocketProtocol13(WebSocketProtocol):
# nothing to continue
self._abort()
return
self._fragmented_message_buffer += data
self._fragmented_message_buffer.extend(data)
if is_final_frame:
opcode = self._fragmented_message_opcode
data = self._fragmented_message_buffer
data = bytes(self._fragmented_message_buffer)
self._fragmented_message_buffer = None
else: # start of new data message
if self._fragmented_message_buffer is not None:
@ -1189,7 +1189,7 @@ class WebSocketProtocol13(WebSocketProtocol):
return
if not is_final_frame:
self._fragmented_message_opcode = opcode
self._fragmented_message_buffer = data
self._fragmented_message_buffer = bytearray(data)
if is_final_frame:
handled_future = self._handle_message(opcode, data)
@ -1362,6 +1362,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
ping_timeout: Optional[float] = None,
max_message_size: int = _default_max_message_size,
subprotocols: Optional[List[str]] = [],
resolver: Optional[Resolver] = None,
) -> None:
self.connect_future = Future() # type: Future[WebSocketClientConnection]
self.read_queue = Queue(1) # type: Queue[Union[None, str, bytes]]
@ -1402,7 +1403,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
# Websocket connection is currently unable to follow redirects
request.follow_redirects = False
self.tcp_client = TCPClient()
self.tcp_client = TCPClient(resolver=resolver)
super().__init__(
None,
request,

View file

@ -27,12 +27,15 @@ container.
"""
import sys
import concurrent.futures
from io import BytesIO
import tornado
import sys
from tornado.concurrent import dummy_executor
from tornado import escape
from tornado import httputil
from tornado.ioloop import IOLoop
from tornado.log import access_log
from typing import List, Tuple, Optional, Callable, Any, Dict, Text
@ -54,20 +57,28 @@ def to_wsgi_str(s: bytes) -> str:
class WSGIContainer(object):
r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server.
r"""Makes a WSGI-compatible application runnable on Tornado's HTTP server.
.. warning::
WSGI is a *synchronous* interface, while Tornado's concurrency model
is based on single-threaded asynchronous execution. This means that
running a WSGI app with Tornado's `WSGIContainer` is *less scalable*
than running the same app in a multi-threaded WSGI server like
``gunicorn`` or ``uwsgi``. Use `WSGIContainer` only when there are
benefits to combining Tornado and WSGI in the same process that
outweigh the reduced scalability.
is based on single-threaded *asynchronous* execution. Many of Tornado's
distinguishing features are not available in WSGI mode, including efficient
long-polling and websockets. The primary purpose of `WSGIContainer` is
to support both WSGI applications and native Tornado ``RequestHandlers`` in
a single process. WSGI-only applications are likely to be better off
with a dedicated WSGI server such as ``gunicorn`` or ``uwsgi``.
Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to
run it. For example::
Wrap a WSGI application in a `WSGIContainer` to make it implement the Tornado
`.HTTPServer` ``request_callback`` interface. The `WSGIContainer` object can
then be passed to classes from the `tornado.routing` module,
`tornado.web.FallbackHandler`, or to `.HTTPServer` directly.
This class is intended to let other frameworks (Django, Flask, etc)
run on the Tornado HTTP server and I/O loop.
Realistic usage will be more complicated, but the simplest possible example uses a
hand-written WSGI application with `.HTTPServer`::
def simple_app(environ, start_response):
status = "200 OK"
@ -83,18 +94,46 @@ class WSGIContainer(object):
asyncio.run(main())
This class is intended to let other frameworks (Django, web.py, etc)
run on the Tornado HTTP server and I/O loop.
The recommended pattern is to use the `tornado.routing` module to set up routing
rules between your WSGI application and, typically, a `tornado.web.Application`.
Alternatively, `tornado.web.Application` can be used as the top-level router
and `tornado.web.FallbackHandler` can embed a `WSGIContainer` within it.
The `tornado.web.FallbackHandler` class is often useful for mixing
Tornado and WSGI apps in the same server. See
https://github.com/bdarnell/django-tornado-demo for a complete example.
If the ``executor`` argument is provided, the WSGI application will be executed
on that executor. This must be an instance of `concurrent.futures.Executor`,
typically a ``ThreadPoolExecutor`` (``ProcessPoolExecutor`` is not supported).
If no ``executor`` is given, the application will run on the event loop thread in
Tornado 6.3; this will change to use an internal thread pool by default in
Tornado 7.0.
.. warning::
By default, the WSGI application is executed on the event loop's thread. This
limits the server to one request at a time (per process), making it less scalable
than most other WSGI servers. It is therefore highly recommended that you pass
a ``ThreadPoolExecutor`` when constructing the `WSGIContainer`, after verifying
that your application is thread-safe. The default will change to use a
``ThreadPoolExecutor`` in Tornado 7.0.
.. versionadded:: 6.3
The ``executor`` parameter.
.. deprecated:: 6.3
The default behavior of running the WSGI application on the event loop thread
is deprecated and will change in Tornado 7.0 to use a thread pool by default.
"""
def __init__(self, wsgi_application: "WSGIAppType") -> None:
def __init__(
self,
wsgi_application: "WSGIAppType",
executor: Optional[concurrent.futures.Executor] = None,
) -> None:
self.wsgi_application = wsgi_application
self.executor = dummy_executor if executor is None else executor
def __call__(self, request: httputil.HTTPServerRequest) -> None:
IOLoop.current().spawn_callback(self.handle_request, request)
async def handle_request(self, request: httputil.HTTPServerRequest) -> None:
data = {} # type: Dict[str, Any]
response = [] # type: List[bytes]
@ -113,15 +152,33 @@ class WSGIContainer(object):
data["headers"] = headers
return response.append
app_response = self.wsgi_application(
WSGIContainer.environ(request), start_response
loop = IOLoop.current()
app_response = await loop.run_in_executor(
self.executor,
self.wsgi_application,
self.environ(request),
start_response,
)
try:
response.extend(app_response)
body = b"".join(response)
app_response_iter = iter(app_response)
def next_chunk() -> Optional[bytes]:
try:
return next(app_response_iter)
except StopIteration:
# StopIteration is special and is not allowed to pass through
# coroutines normally.
return None
while True:
chunk = await loop.run_in_executor(self.executor, next_chunk)
if chunk is None:
break
response.append(chunk)
finally:
if hasattr(app_response, "close"):
app_response.close() # type: ignore
body = b"".join(response)
if not data:
raise Exception("WSGI app did not call start_response")
@ -147,9 +204,12 @@ class WSGIContainer(object):
request.connection.finish()
self._log(status_code, request)
@staticmethod
def environ(request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
"""Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment."""
def environ(self, request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
"""Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
.. versionchanged:: 6.3
No longer a static method.
"""
hostport = request.host.split(":")
if len(hostport) == 2:
host = hostport[0]
@ -172,7 +232,7 @@ class WSGIContainer(object):
"wsgi.url_scheme": request.protocol,
"wsgi.input": BytesIO(escape.utf8(request.body)),
"wsgi.errors": sys.stderr,
"wsgi.multithread": False,
"wsgi.multithread": self.executor is not dummy_executor,
"wsgi.multiprocess": True,
"wsgi.run_once": False,
}
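
The new ``executor`` parameter above is the supported way to keep a WSGI app off the event loop thread. A hedged sketch combining it with the ``simple_app`` example from the docstring:

import asyncio
from concurrent.futures import ThreadPoolExecutor

import tornado.httpserver
import tornado.wsgi

def simple_app(environ, start_response):
    start_response("200 OK", [("Content-type", "text/plain")])
    return [b"Hello world!\n"]

async def main():
    container = tornado.wsgi.WSGIContainer(
        simple_app, executor=ThreadPoolExecutor(max_workers=4)
    )
    server = tornado.httpserver.HTTPServer(container)
    server.listen(8888)
    await asyncio.Event().wait()

asyncio.run(main())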

View file

@ -320,7 +320,7 @@ class BaseHandler(RouteHandler):
def get_current_user(self):
if sickgear.WEB_USERNAME or sickgear.WEB_PASSWORD:
return self.get_secure_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT))
return self.get_signed_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT))
return True
def get_image(self, image):
@ -401,7 +401,7 @@ class LoginHandler(BaseHandler):
httponly=True)
if sickgear.ENABLE_HTTPS:
params.update(dict(secure=True))
self.set_secure_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT),
self.set_signed_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT),
sickgear.COOKIE_SECRET, **params)
self.redirect(self.get_argument('next', '/home/'))
else: