Mirror of https://github.com/SickGear/SickGear.git (synced 2024-11-25 14:25:05 +00:00)

Merge branch 'feature/UpdateTornado' into dev

Commit 3f3dc75edc: 23 changed files with 516 additions and 384 deletions
@@ -2,6 +2,7 @@
 * Update html5lib 1.1 (f87487a) to 1.2-dev (3e500bb)
 * Update package resource API 63.2.0 (3ae44cd) to 67.5.1 (f51eccd)
+* Update Tornado Web Server 6.2.0 (a4f08a3) to 6.3.0 (7186b86)
 * Update urllib3 1.26.13 (25fbd5f) to 1.26.14 (a06c05c)
 * Change remove calls to legacy py2 fix encoding function
 * Change requirements for pure py3
@@ -22,5 +22,46 @@
 # is zero for an official release, positive for a development branch,
 # or negative for a release candidate or beta (after the base version
 # number has been incremented)
-version = "6.2"
-version_info = (6, 2, 0, 0)
+version = "6.3.dev1"
+version_info = (6, 3, 0, -100)
+
+import importlib
+import typing
+
+__all__ = [
+    "auth",
+    "autoreload",
+    "concurrent",
+    "curl_httpclient",
+    "escape",
+    "gen",
+    "http1connection",
+    "httpclient",
+    "httpserver",
+    "httputil",
+    "ioloop",
+    "iostream",
+    "locale",
+    "locks",
+    "log",
+    "netutil",
+    "options",
+    "platform",
+    "process",
+    "queues",
+    "routing",
+    "simple_httpclient",
+    "tcpclient",
+    "tcpserver",
+    "template",
+    "testing",
+    "util",
+    "web",
+]
+
+
+# Copied from https://peps.python.org/pep-0562/
+def __getattr__(name: str) -> typing.Any:
+    if name in __all__:
+        return importlib.import_module("." + name, __name__)
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
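The module-level __getattr__ added to the tornado package __init__ above follows PEP 562: submodules named in __all__ are imported only on first attribute access, so a bare "import tornado" stays cheap while "tornado.web" still resolves without an explicit "import tornado.web". A minimal sketch of the same pattern for a hypothetical package named mypkg (the package and submodule names are illustrative, not part of this change):

# mypkg/__init__.py -- hypothetical package using PEP 562 lazy submodule imports
import importlib
import typing

__all__ = ["client", "server"]  # submodules that exist on disk next to this file


def __getattr__(name: str) -> typing.Any:
    # Called only when normal attribute lookup on the package fails, i.e. the
    # first time mypkg.client or mypkg.server is touched. importlib binds the
    # loaded submodule onto the package, so later accesses skip this hook.
    if name in __all__:
        return importlib.import_module("." + name, __name__)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

With the Tornado change in place, code such as "import tornado" followed by "tornado.web.RequestHandler" works the same way, with tornado.web loaded on demand.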
@@ -15,66 +15,66 @@
 """Data used by the tornado.locale module."""

 LOCALE_NAMES = {
-    "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"},
+    "af_ZA": {"name_en": "Afrikaans", "name": "Afrikaans"},
-    "am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"},
+    "am_ET": {"name_en": "Amharic", "name": "አማርኛ"},
-    "ar_AR": {"name_en": u"Arabic", "name": u"العربية"},
+    "ar_AR": {"name_en": "Arabic", "name": "العربية"},
-    "bg_BG": {"name_en": u"Bulgarian", "name": u"Български"},
+    "bg_BG": {"name_en": "Bulgarian", "name": "Български"},
-    "bn_IN": {"name_en": u"Bengali", "name": u"বাংলা"},
+    "bn_IN": {"name_en": "Bengali", "name": "বাংলা"},
-    "bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"},
+    "bs_BA": {"name_en": "Bosnian", "name": "Bosanski"},
-    "ca_ES": {"name_en": u"Catalan", "name": u"Català"},
+    "ca_ES": {"name_en": "Catalan", "name": "Català"},
-    "cs_CZ": {"name_en": u"Czech", "name": u"Čeština"},
+    "cs_CZ": {"name_en": "Czech", "name": "Čeština"},
-    "cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"},
+    "cy_GB": {"name_en": "Welsh", "name": "Cymraeg"},
-    "da_DK": {"name_en": u"Danish", "name": u"Dansk"},
+    "da_DK": {"name_en": "Danish", "name": "Dansk"},
-    "de_DE": {"name_en": u"German", "name": u"Deutsch"},
+    "de_DE": {"name_en": "German", "name": "Deutsch"},
-    "el_GR": {"name_en": u"Greek", "name": u"Ελληνικά"},
+    "el_GR": {"name_en": "Greek", "name": "Ελληνικά"},
-    "en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"},
+    "en_GB": {"name_en": "English (UK)", "name": "English (UK)"},
-    "en_US": {"name_en": u"English (US)", "name": u"English (US)"},
+    "en_US": {"name_en": "English (US)", "name": "English (US)"},
-    "es_ES": {"name_en": u"Spanish (Spain)", "name": u"Español (España)"},
+    "es_ES": {"name_en": "Spanish (Spain)", "name": "Español (España)"},
-    "es_LA": {"name_en": u"Spanish", "name": u"Español"},
+    "es_LA": {"name_en": "Spanish", "name": "Español"},
-    "et_EE": {"name_en": u"Estonian", "name": u"Eesti"},
+    "et_EE": {"name_en": "Estonian", "name": "Eesti"},
-    "eu_ES": {"name_en": u"Basque", "name": u"Euskara"},
+    "eu_ES": {"name_en": "Basque", "name": "Euskara"},
-    "fa_IR": {"name_en": u"Persian", "name": u"فارسی"},
+    "fa_IR": {"name_en": "Persian", "name": "فارسی"},
-    "fi_FI": {"name_en": u"Finnish", "name": u"Suomi"},
+    "fi_FI": {"name_en": "Finnish", "name": "Suomi"},
-    "fr_CA": {"name_en": u"French (Canada)", "name": u"Français (Canada)"},
+    "fr_CA": {"name_en": "French (Canada)", "name": "Français (Canada)"},
-    "fr_FR": {"name_en": u"French", "name": u"Français"},
+    "fr_FR": {"name_en": "French", "name": "Français"},
-    "ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"},
+    "ga_IE": {"name_en": "Irish", "name": "Gaeilge"},
-    "gl_ES": {"name_en": u"Galician", "name": u"Galego"},
+    "gl_ES": {"name_en": "Galician", "name": "Galego"},
-    "he_IL": {"name_en": u"Hebrew", "name": u"עברית"},
+    "he_IL": {"name_en": "Hebrew", "name": "עברית"},
-    "hi_IN": {"name_en": u"Hindi", "name": u"हिन्दी"},
+    "hi_IN": {"name_en": "Hindi", "name": "हिन्दी"},
-    "hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"},
+    "hr_HR": {"name_en": "Croatian", "name": "Hrvatski"},
-    "hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"},
+    "hu_HU": {"name_en": "Hungarian", "name": "Magyar"},
-    "id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"},
+    "id_ID": {"name_en": "Indonesian", "name": "Bahasa Indonesia"},
-    "is_IS": {"name_en": u"Icelandic", "name": u"Íslenska"},
+    "is_IS": {"name_en": "Icelandic", "name": "Íslenska"},
-    "it_IT": {"name_en": u"Italian", "name": u"Italiano"},
+    "it_IT": {"name_en": "Italian", "name": "Italiano"},
-    "ja_JP": {"name_en": u"Japanese", "name": u"日本語"},
+    "ja_JP": {"name_en": "Japanese", "name": "日本語"},
-    "ko_KR": {"name_en": u"Korean", "name": u"한국어"},
+    "ko_KR": {"name_en": "Korean", "name": "한국어"},
-    "lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvių"},
+    "lt_LT": {"name_en": "Lithuanian", "name": "Lietuvių"},
-    "lv_LV": {"name_en": u"Latvian", "name": u"Latviešu"},
+    "lv_LV": {"name_en": "Latvian", "name": "Latviešu"},
-    "mk_MK": {"name_en": u"Macedonian", "name": u"Македонски"},
+    "mk_MK": {"name_en": "Macedonian", "name": "Македонски"},
-    "ml_IN": {"name_en": u"Malayalam", "name": u"മലയാളം"},
+    "ml_IN": {"name_en": "Malayalam", "name": "മലയാളം"},
-    "ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"},
+    "ms_MY": {"name_en": "Malay", "name": "Bahasa Melayu"},
-    "nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokmål)"},
+    "nb_NO": {"name_en": "Norwegian (bokmal)", "name": "Norsk (bokmål)"},
-    "nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"},
+    "nl_NL": {"name_en": "Dutch", "name": "Nederlands"},
-    "nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"},
+    "nn_NO": {"name_en": "Norwegian (nynorsk)", "name": "Norsk (nynorsk)"},
-    "pa_IN": {"name_en": u"Punjabi", "name": u"ਪੰਜਾਬੀ"},
+    "pa_IN": {"name_en": "Punjabi", "name": "ਪੰਜਾਬੀ"},
-    "pl_PL": {"name_en": u"Polish", "name": u"Polski"},
+    "pl_PL": {"name_en": "Polish", "name": "Polski"},
-    "pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Português (Brasil)"},
+    "pt_BR": {"name_en": "Portuguese (Brazil)", "name": "Português (Brasil)"},
-    "pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Português (Portugal)"},
+    "pt_PT": {"name_en": "Portuguese (Portugal)", "name": "Português (Portugal)"},
-    "ro_RO": {"name_en": u"Romanian", "name": u"Română"},
+    "ro_RO": {"name_en": "Romanian", "name": "Română"},
-    "ru_RU": {"name_en": u"Russian", "name": u"Русский"},
+    "ru_RU": {"name_en": "Russian", "name": "Русский"},
-    "sk_SK": {"name_en": u"Slovak", "name": u"Slovenčina"},
+    "sk_SK": {"name_en": "Slovak", "name": "Slovenčina"},
-    "sl_SI": {"name_en": u"Slovenian", "name": u"Slovenščina"},
+    "sl_SI": {"name_en": "Slovenian", "name": "Slovenščina"},
-    "sq_AL": {"name_en": u"Albanian", "name": u"Shqip"},
+    "sq_AL": {"name_en": "Albanian", "name": "Shqip"},
-    "sr_RS": {"name_en": u"Serbian", "name": u"Српски"},
+    "sr_RS": {"name_en": "Serbian", "name": "Српски"},
-    "sv_SE": {"name_en": u"Swedish", "name": u"Svenska"},
+    "sv_SE": {"name_en": "Swedish", "name": "Svenska"},
-    "sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"},
+    "sw_KE": {"name_en": "Swahili", "name": "Kiswahili"},
-    "ta_IN": {"name_en": u"Tamil", "name": u"தமிழ்"},
+    "ta_IN": {"name_en": "Tamil", "name": "தமிழ்"},
-    "te_IN": {"name_en": u"Telugu", "name": u"తెలుగు"},
+    "te_IN": {"name_en": "Telugu", "name": "తెలుగు"},
-    "th_TH": {"name_en": u"Thai", "name": u"ภาษาไทย"},
+    "th_TH": {"name_en": "Thai", "name": "ภาษาไทย"},
-    "tl_PH": {"name_en": u"Filipino", "name": u"Filipino"},
+    "tl_PH": {"name_en": "Filipino", "name": "Filipino"},
-    "tr_TR": {"name_en": u"Turkish", "name": u"Türkçe"},
+    "tr_TR": {"name_en": "Turkish", "name": "Türkçe"},
-    "uk_UA": {"name_en": u"Ukraini ", "name": u"Українська"},
+    "uk_UA": {"name_en": "Ukraini ", "name": "Українська"},
-    "vi_VN": {"name_en": u"Vietnamese", "name": u"Tiếng Việt"},
+    "vi_VN": {"name_en": "Vietnamese", "name": "Tiếng Việt"},
-    "zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"中文(简体)"},
+    "zh_CN": {"name_en": "Chinese (Simplified)", "name": "中文(简体)"},
-    "zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"中文(繁體)"},
+    "zh_TW": {"name_en": "Chinese (Traditional)", "name": "中文(繁體)"},
 }
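Every change in this hunk only drops the u string prefix, which has been a no-op on Python 3 (it was re-accepted in 3.3 purely for source compatibility); the locale names themselves are unchanged. A quick check of that equivalence:

# The u"" prefix is accepted but meaningless on Python 3: both literals
# produce the same str value and the same type.
assert u"Français" == "Français"
assert type(u"Français") is str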
@@ -42,7 +42,7 @@ Example usage for Google OAuth:
             user = await self.get_authenticated_user(
                 redirect_uri='http://your.site.com/auth/google',
                 code=self.get_argument('code'))
-            # Save the user with e.g. set_secure_cookie
+            # Save the user with e.g. set_signed_cookie
         else:
             self.authorize_redirect(
                 redirect_uri='http://your.site.com/auth/google',

@@ -136,7 +136,7 @@ class OpenIdMixin(object):
         args = dict(
             (k, v[-1]) for k, v in handler.request.arguments.items()
         )  # type: Dict[str, Union[str, bytes]]
-        args["openid.mode"] = u"check_authentication"
+        args["openid.mode"] = "check_authentication"
         url = self._OPENID_ENDPOINT  # type: ignore
         if http_client is None:
             http_client = self.get_auth_http_client()

@@ -211,14 +211,14 @@ class OpenIdMixin(object):
         for key in handler.request.arguments:
             if (
                 key.startswith("openid.ns.")
-                and handler.get_argument(key) == u"http://openid.net/srv/ax/1.0"
+                and handler.get_argument(key) == "http://openid.net/srv/ax/1.0"
             ):
                 ax_ns = key[10:]
                 break

         def get_ax_arg(uri: str) -> str:
             if not ax_ns:
-                return u""
+                return ""
             prefix = "openid." + ax_ns + ".type."
             ax_name = None
             for name in handler.request.arguments.keys():

@@ -227,8 +227,8 @@ class OpenIdMixin(object):
                     ax_name = "openid." + ax_ns + ".value." + part
                     break
             if not ax_name:
-                return u""
-            return handler.get_argument(ax_name, u"")
+                return ""
+            return handler.get_argument(ax_name, "")

         email = get_ax_arg("http://axschema.org/contact/email")
         name = get_ax_arg("http://axschema.org/namePerson")

@@ -247,7 +247,7 @@ class OpenIdMixin(object):
         if name:
             user["name"] = name
         elif name_parts:
-            user["name"] = u" ".join(name_parts)
+            user["name"] = " ".join(name_parts)
         elif email:
             user["name"] = email.split("@")[0]
         if email:

@@ -694,7 +694,7 @@ class TwitterMixin(OAuthMixin):
             async def get(self):
                 if self.get_argument("oauth_token", None):
                     user = await self.get_authenticated_user()
-                    # Save the user using e.g. set_secure_cookie()
+                    # Save the user using e.g. set_signed_cookie()
                 else:
                     await self.authorize_redirect()

@@ -855,8 +855,28 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
     _OAUTH_NO_CALLBACKS = False
     _OAUTH_SETTINGS_KEY = "google_oauth"

+    def get_google_oauth_settings(self) -> Dict[str, str]:
+        """Return the Google OAuth 2.0 credentials that you created with
+        [Google Cloud
+        Platform](https://console.cloud.google.com/apis/credentials). The dict
+        format is::
+
+            {
+                "key": "your_client_id", "secret": "your_client_secret"
+            }
+
+        If your credentials are stored differently (e.g. in a db) you can
+        override this method for custom provision.
+        """
+        handler = cast(RequestHandler, self)
+        return handler.settings[self._OAUTH_SETTINGS_KEY]
+
     async def get_authenticated_user(
-        self, redirect_uri: str, code: str
+        self,
+        redirect_uri: str,
+        code: str,
+        client_id: Optional[str] = None,
+        client_secret: Optional[str] = None,
     ) -> Dict[str, Any]:
         """Handles the login for the Google user, returning an access token.

@@ -883,11 +903,11 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
                         "https://www.googleapis.com/oauth2/v1/userinfo",
                         access_token=access["access_token"])
                     # Save the user and access token with
-                    # e.g. set_secure_cookie.
+                    # e.g. set_signed_cookie.
                 else:
                     self.authorize_redirect(
                         redirect_uri='http://your.site.com/auth/google',
-                        client_id=self.settings['google_oauth']['key'],
+                        client_id=self.get_google_oauth_settings()['key'],
                         scope=['profile', 'email'],
                         response_type='code',
                         extra_params={'approval_prompt': 'auto'})

@@ -899,14 +919,20 @@ class GoogleOAuth2Mixin(OAuth2Mixin):

         The ``callback`` argument was removed. Use the returned awaitable object instead.
         """  # noqa: E501
-        handler = cast(RequestHandler, self)
+        if client_id is None or client_secret is None:
+            settings = self.get_google_oauth_settings()
+            if client_id is None:
+                client_id = settings["key"]
+            if client_secret is None:
+                client_secret = settings["secret"]
         http = self.get_auth_http_client()
         body = urllib.parse.urlencode(
             {
                 "redirect_uri": redirect_uri,
                 "code": code,
-                "client_id": handler.settings[self._OAUTH_SETTINGS_KEY]["key"],
-                "client_secret": handler.settings[self._OAUTH_SETTINGS_KEY]["secret"],
+                "client_id": client_id,
+                "client_secret": client_secret,
                 "grant_type": "authorization_code",
             }
         )

@@ -951,7 +977,7 @@ class FacebookGraphMixin(OAuth2Mixin):
                         client_id=self.settings["facebook_api_key"],
                         client_secret=self.settings["facebook_secret"],
                         code=self.get_argument("code"))
-                    # Save the user with e.g. set_secure_cookie
+                    # Save the user with e.g. set_signed_cookie
                 else:
                     self.authorize_redirect(
                         redirect_uri='/auth/facebookgraph/',
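With the new get_google_oauth_settings hook and the optional client_id/client_secret parameters above, Google credentials no longer have to live in Application.settings["google_oauth"]. A sketch of one way to override the hook, assuming a hypothetical load_credentials_from_db lookup (the helper and handler names are illustrative, not from this diff):

from typing import Dict

from tornado.auth import GoogleOAuth2Mixin
from tornado.web import RequestHandler


def load_credentials_from_db(provider: str) -> Dict[str, str]:
    # Hypothetical stand-in for a real credential-store lookup.
    return {"client_id": "your_client_id", "client_secret": "your_client_secret"}


class DBGoogleLoginHandler(RequestHandler, GoogleOAuth2Mixin):
    def get_google_oauth_settings(self) -> Dict[str, str]:
        # The mixin expects the {"key": ..., "secret": ...} shape described
        # in the docstring added by this commit.
        creds = load_credentials_from_db("google")
        return {"key": creds["client_id"], "secret": creds["client_secret"]}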
@@ -36,11 +36,11 @@ from tornado.httpclient import (
 )
 from tornado.log import app_log

-from typing import Dict, Any, Callable, Union, Tuple, Optional
+from typing import Dict, Any, Callable, Union, Optional
 import typing

 if typing.TYPE_CHECKING:
-    from typing import Deque  # noqa: F401
+    from typing import Deque, Tuple  # noqa: F401

 curl_log = logging.getLogger("tornado.curl_httpclient")
@@ -368,7 +368,7 @@ def linkify(
             # have a status bar, such as Safari by default)
             params += ' title="%s"' % href

-        return u'<a href="%s"%s>%s</a>' % (href, params, url)
+        return '<a href="%s"%s>%s</a>' % (href, params, url)

     # First HTML-escape so that our strings are all safe.
     # The regex is modified to avoid character entites other than & so
@@ -743,7 +743,7 @@ class Runner(object):
         self.running = False
         self.finished = False
         self.io_loop = IOLoop.current()
-        if self.handle_yield(first_yielded):
+        if self.ctx_run(self.handle_yield, first_yielded):
             gen = result_future = first_yielded = None  # type: ignore
             self.ctx_run(self.run)

@@ -763,21 +763,25 @@ class Runner(object):
                     return
                 self.future = None
                 try:
-                    exc_info = None
-
                     try:
                         value = future.result()
-                    except Exception:
-                        exc_info = sys.exc_info()
-                        future = None
+                    except Exception as e:
+                        # Save the exception for later. It's important that
+                        # gen.throw() not be called inside this try/except block
+                        # because that makes sys.exc_info behave unexpectedly.
+                        exc: Optional[Exception] = e
+                    else:
+                        exc = None
+                    finally:
+                        future = None

-                    if exc_info is not None:
+                    if exc is not None:
                         try:
-                            yielded = self.gen.throw(*exc_info)  # type: ignore
+                            yielded = self.gen.throw(exc)
                         finally:
-                            # Break up a reference to itself
-                            # for faster GC on CPython.
-                            exc_info = None
+                            # Break up a circular reference for faster GC on
+                            # CPython.
+                            del exc
                     else:
                         yielded = self.gen.send(value)
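The switch from self.gen.throw(*exc_info) to self.gen.throw(exc) relies on the single-argument form of generator.throw(), which takes an exception instance directly; the three-argument (type, value, traceback) form is legacy and is deprecated in Python 3.12. A small illustration of the call the Runner now makes:

def gen_func():
    try:
        yield "first"
    except ValueError as e:
        yield f"caught {e}"


g = gen_func()
print(next(g))                       # "first"
print(g.throw(ValueError("boom")))   # single-argument throw: "caught boom"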
@@ -83,7 +83,7 @@ class IOLoop(Configurable):
         import functools
         import socket

-        import tornado.ioloop
+        import tornado
         from tornado.iostream import IOStream

         async def handle_connection(connection, address):

@@ -123,8 +123,7 @@ class IOLoop(Configurable):
    and instead initialize the `asyncio` event loop and use `IOLoop.current()`.
    In some cases, such as in test frameworks when initializing an `IOLoop`
    to be run in a secondary thread, it may be appropriate to construct
-   an `IOLoop` with ``IOLoop(make_current=False)``. Constructing an `IOLoop`
-   without the ``make_current=False`` argument is deprecated since Tornado 6.2.
+   an `IOLoop` with ``IOLoop(make_current=False)``.

    In general, an `IOLoop` cannot survive a fork or be shared across processes
    in any way. When multiple processes are being used, each process should

@@ -145,12 +144,10 @@ class IOLoop(Configurable):
    cannot be used on Python 3 except to redundantly specify the `asyncio`
    event loop.

-   .. deprecated:: 6.2
-      It is deprecated to create an event loop that is "current" but not
-      running. This means it is deprecated to pass
-      ``make_current=True`` to the ``IOLoop`` constructor, or to create
-      an ``IOLoop`` while no asyncio event loop is running unless
-      ``make_current=False`` is used.
+   .. versionchanged:: 6.3
+      ``make_current=True`` is now the default when creating an IOLoop -
+      previously the default was to make the event loop current if there wasn't
+      already a current one.
    """

    # These constants were originally based on constants from the epoll module.

@@ -263,17 +260,20 @@ class IOLoop(Configurable):
        """
        try:
            loop = asyncio.get_event_loop()
-       except (RuntimeError, AssertionError):
+       except RuntimeError:
            if not instance:
                return None
-           raise
+           # Create a new asyncio event loop for this thread.
+           loop = asyncio.new_event_loop()
+           asyncio.set_event_loop(loop)

        try:
            return IOLoop._ioloop_for_asyncio[loop]
        except KeyError:
            if instance:
                from tornado.platform.asyncio import AsyncIOMainLoop

-               current = AsyncIOMainLoop(make_current=True)  # type: Optional[IOLoop]
+               current = AsyncIOMainLoop()  # type: Optional[IOLoop]
            else:
                current = None
        return current

@@ -295,12 +295,17 @@ class IOLoop(Configurable):
        This method also sets the current `asyncio` event loop.

        .. deprecated:: 6.2
-          The concept of an event loop that is "current" without
-          currently running is deprecated in asyncio since Python
-          3.10. All related functionality in Tornado is also
-          deprecated. Instead, start the event loop with `asyncio.run`
-          before interacting with it.
+          Setting and clearing the current event loop through Tornado is
+          deprecated. Use ``asyncio.set_event_loop`` instead if you need this.
        """
+       warnings.warn(
+           "make_current is deprecated; start the event loop first",
+           DeprecationWarning,
+           stacklevel=2,
+       )
+       self._make_current()
+
+   def _make_current(self) -> None:
        # The asyncio event loops override this method.
        raise NotImplementedError()

@@ -344,16 +349,9 @@ class IOLoop(Configurable):

        return AsyncIOLoop

-   def initialize(self, make_current: Optional[bool] = None) -> None:
-       if make_current is None:
-           if IOLoop.current(instance=False) is None:
-               self.make_current()
-       elif make_current:
-           current = IOLoop.current(instance=False)
-           # AsyncIO loops can already be current by this point.
-           if current is not None and current is not self:
-               raise RuntimeError("current IOLoop already exists")
-           self.make_current()
+   def initialize(self, make_current: bool = True) -> None:
+       if make_current:
+           self._make_current()

    def close(self, all_fds: bool = False) -> None:
        """Closes the `IOLoop`, freeing any resources used.
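These hunks move IOLoop further toward plain asyncio: make_current() now always warns and delegates to an internal _make_current(), and initialize() defaults to make_current=True. The pattern the docstrings steer toward is to start the asyncio loop yourself and let Tornado wrap it. A minimal sketch:

import asyncio

from tornado.ioloop import IOLoop


async def main() -> None:
    # Inside a running asyncio event loop, IOLoop.current() simply wraps that
    # loop; there is no need to call make_current() anywhere.
    io_loop = IOLoop.current()
    io_loop.call_later(0.1, lambda: print("scheduled via the wrapped loop"))
    await asyncio.sleep(0.2)


if __name__ == "__main__":
    asyncio.run(main())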
@@ -195,11 +195,9 @@ class _StreamBuffer(object):
                pos += size
                size = 0
            else:
-               # Amortized O(1) shrink for Python 2
                pos += size
-               if len(b) <= 2 * pos:
-                   del typing.cast(bytearray, b)[:pos]
-                   pos = 0
+               del typing.cast(bytearray, b)[:pos]
+               pos = 0
                size = 0

        assert size == 0

@@ -254,7 +252,6 @@ class BaseIOStream(object):
        self.max_write_buffer_size = max_write_buffer_size
        self.error = None  # type: Optional[BaseException]
        self._read_buffer = bytearray()
-       self._read_buffer_pos = 0
        self._read_buffer_size = 0
        self._user_read_buffer = False
        self._after_user_read_buffer = None  # type: Optional[bytearray]

@@ -451,21 +448,17 @@ class BaseIOStream(object):
        available_bytes = self._read_buffer_size
        n = len(buf)
        if available_bytes >= n:
-           end = self._read_buffer_pos + n
-           buf[:] = memoryview(self._read_buffer)[self._read_buffer_pos : end]
-           del self._read_buffer[:end]
+           buf[:] = memoryview(self._read_buffer)[:n]
+           del self._read_buffer[:n]
            self._after_user_read_buffer = self._read_buffer
        elif available_bytes > 0:
-           buf[:available_bytes] = memoryview(self._read_buffer)[
-               self._read_buffer_pos :
-           ]
+           buf[:available_bytes] = memoryview(self._read_buffer)[:]

        # Set up the supplied buffer as our temporary read buffer.
        # The original (if it had any data remaining) has been
        # saved for later.
        self._user_read_buffer = True
        self._read_buffer = buf
-       self._read_buffer_pos = 0
        self._read_buffer_size = available_bytes
        self._read_bytes = n
        self._read_partial = partial

@@ -818,7 +811,6 @@ class BaseIOStream(object):
        if self._user_read_buffer:
            self._read_buffer = self._after_user_read_buffer or bytearray()
            self._after_user_read_buffer = None
-           self._read_buffer_pos = 0
            self._read_buffer_size = len(self._read_buffer)
            self._user_read_buffer = False
            result = size  # type: Union[int, bytes]

@@ -931,20 +923,17 @@ class BaseIOStream(object):
        # since large merges are relatively expensive and get undone in
        # _consume().
        if self._read_buffer:
-           loc = self._read_buffer.find(
-               self._read_delimiter, self._read_buffer_pos
-           )
+           loc = self._read_buffer.find(self._read_delimiter)
            if loc != -1:
-               loc -= self._read_buffer_pos
                delimiter_len = len(self._read_delimiter)
                self._check_max_bytes(self._read_delimiter, loc + delimiter_len)
                return loc + delimiter_len
            self._check_max_bytes(self._read_delimiter, self._read_buffer_size)
        elif self._read_regex is not None:
            if self._read_buffer:
-               m = self._read_regex.search(self._read_buffer, self._read_buffer_pos)
+               m = self._read_regex.search(self._read_buffer)
                if m is not None:
-                   loc = m.end() - self._read_buffer_pos
+                   loc = m.end()
                    self._check_max_bytes(self._read_regex, loc)
                    return loc
            self._check_max_bytes(self._read_regex, self._read_buffer_size)

@@ -1001,19 +990,9 @@ class BaseIOStream(object):
            return b""
        assert loc <= self._read_buffer_size
        # Slice the bytearray buffer into bytes, without intermediate copying
-       b = (
-           memoryview(self._read_buffer)[
-               self._read_buffer_pos : self._read_buffer_pos + loc
-           ]
-       ).tobytes()
-       self._read_buffer_pos += loc
+       b = (memoryview(self._read_buffer)[:loc]).tobytes()
        self._read_buffer_size -= loc
-       # Amortized O(1) shrink
-       # (this heuristic is implemented natively in Python 3.4+
-       # but is replicated here for Python 2)
-       if self._read_buffer_pos > self._read_buffer_size:
-           del self._read_buffer[: self._read_buffer_pos]
-           self._read_buffer_pos = 0
+       del self._read_buffer[:loc]
        return b

    def _check_closed(self) -> None:

@@ -1092,9 +1071,8 @@ class IOStream(BaseIOStream):

    .. testcode::

-       import tornado.ioloop
-       import tornado.iostream
        import socket
+       import tornado

        async def main():
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
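The _read_buffer_pos bookkeeping removed above existed, per the deleted comments, to replicate an amortized O(1) front-shrink for Python 2; on Python 3 the bytearray heuristic is native, so the stream can consume bytes straight off the front. The core consume step now reduces to this, shown in isolation:

buf = bytearray(b"HTTP/1.1 200 OK\r\nrest of the stream")
loc = buf.find(b"\r\n") + len(b"\r\n")

chunk = bytes(memoryview(buf)[:loc])  # copy the consumed bytes out in one step
del buf[:loc]                         # drop them from the front of the buffer

print(chunk)  # b'HTTP/1.1 200 OK\r\n'
print(buf)    # bytearray(b'rest of the stream')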
@@ -268,7 +268,7 @@ class Locale(object):

    def __init__(self, code: str) -> None:
        self.code = code
-       self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown")
+       self.name = LOCALE_NAMES.get(code, {}).get("name", "Unknown")
        self.rtl = False
        for prefix in ["fa", "ar", "he"]:
            if self.code.startswith(prefix):

@@ -406,7 +406,7 @@ class Locale(object):
            str_time = "%d:%02d" % (local_date.hour, local_date.minute)
        elif self.code == "zh_CN":
            str_time = "%s%d:%02d" % (
-               (u"\u4e0a\u5348", u"\u4e0b\u5348")[local_date.hour >= 12],
+               ("\u4e0a\u5348", "\u4e0b\u5348")[local_date.hour >= 12],
                local_date.hour % 12 or 12,
                local_date.minute,
            )

@@ -458,7 +458,7 @@ class Locale(object):
            return ""
        if len(parts) == 1:
            return parts[0]
-       comma = u" \u0648 " if self.code.startswith("fa") else u", "
+       comma = " \u0648 " if self.code.startswith("fa") else ", "
        return _("%(commas)s and %(last)s") % {
            "commas": comma.join(parts[:-1]),
            "last": parts[len(parts) - 1],
@@ -44,10 +44,10 @@ if hasattr(ssl, "OP_NO_COMPRESSION"):
 # module-import time, the import lock is already held by the main thread,
 # leading to deadlock. Avoid it by caching the idna encoder on the main
 # thread now.
-u"foo".encode("idna")
+"foo".encode("idna")

 # For undiagnosed reasons, 'latin1' codec may also need to be preloaded.
-u"foo".encode("latin1")
+"foo".encode("latin1")

 # Default backlog used when calling sock.listen()
 _DEFAULT_BACKLOG = 128

@@ -115,7 +115,7 @@ def bind_sockets(
            sys.platform == "darwin"
            and address == "localhost"
            and af == socket.AF_INET6
-           and sockaddr[3] != 0
+           and sockaddr[3] != 0  # type: ignore
        ):
            # Mac OS X includes a link-local address fe80::1%lo0 in the
            # getaddrinfo results for 'localhost'. However, the firewall
@@ -56,7 +56,7 @@ Your ``main()`` method can parse the command line or parse a config file with
either `parse_command_line` or `parse_config_file`::

    import myapp.db, myapp.server
-   import tornado.options
+   import tornado

    if __name__ == '__main__':
        tornado.options.parse_command_line()

@@ -427,7 +427,9 @@ class OptionParser(object):
                    % (option.name, option.type.__name__)
                )

-           if type(config[name]) == str and option.type != str:
+           if type(config[name]) == str and (
+               option.type != str or option.multiple
+           ):
                option.parse(config[name])
            else:
                option.set(config[name])
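The widened condition means that when parse_config_file reads a string value for an option declared with multiple=True, the string is now run through option.parse (which splits it) rather than being stored verbatim. A hedged sketch of the effect, assuming an option named "ports" and a config file line of ports = "8000,8001":

import tempfile

from tornado.options import OptionParser

parser = OptionParser()
parser.define("ports", type=int, multiple=True)

# Write a throwaway config file containing a string value for the option.
with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
    f.write('ports = "8000,8001"\n')
    path = f.name

parser.parse_config_file(path)
print(parser.ports)  # expected with this change: [8000, 8001]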
@@ -36,10 +36,10 @@ import warnings
 from tornado.gen import convert_yielded
 from tornado.ioloop import IOLoop, _Selectable

-from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Tuple, Dict
+from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Dict

 if typing.TYPE_CHECKING:
-    from typing import Set  # noqa: F401
+    from typing import Set, Tuple  # noqa: F401
     from typing_extensions import Protocol

     class _HasFileno(Protocol):

@@ -74,20 +74,6 @@ def _atexit_callback() -> None:

 atexit.register(_atexit_callback)

-if sys.version_info >= (3, 10):
-
-    def _get_event_loop() -> asyncio.AbstractEventLoop:
-        try:
-            return asyncio.get_running_loop()
-        except RuntimeError:
-            pass
-
-        return asyncio.get_event_loop_policy().get_event_loop()
-
-
-else:
-    from asyncio import get_event_loop as _get_event_loop
-

 class BaseAsyncIOLoop(IOLoop):
     def initialize(  # type: ignore

@@ -206,15 +192,7 @@ class BaseAsyncIOLoop(IOLoop):
             handler_func(fileobj, events)

     def start(self) -> None:
-        try:
-            old_loop = _get_event_loop()
-        except (RuntimeError, AssertionError):
-            old_loop = None  # type: ignore
-        try:
-            asyncio.set_event_loop(self.asyncio_loop)
-            self.asyncio_loop.run_forever()
-        finally:
-            asyncio.set_event_loop(old_loop)
+        self.asyncio_loop.run_forever()

     def stop(self) -> None:
         self.asyncio_loop.stop()

@@ -298,7 +276,7 @@ class AsyncIOMainLoop(BaseAsyncIOLoop):
     def initialize(self, **kwargs: Any) -> None:  # type: ignore
         super().initialize(asyncio.get_event_loop(), **kwargs)

-    def make_current(self) -> None:
+    def _make_current(self) -> None:
         # AsyncIOMainLoop already refers to the current asyncio loop so
         # nothing to do here.
         pass

@@ -349,12 +327,7 @@ class AsyncIOLoop(BaseAsyncIOLoop):
         self._clear_current()
         super().close(all_fds=all_fds)

-    def make_current(self) -> None:
-        warnings.warn(
-            "make_current is deprecated; start the event loop first",
-            DeprecationWarning,
-            stacklevel=2,
-        )
+    def _make_current(self) -> None:
         if not self.is_current:
             try:
                 self.old_asyncio = asyncio.get_event_loop()

@@ -672,10 +645,18 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop):
         self._writers[fd] = functools.partial(callback, *args)
         self._wake_selector()

-    def remove_reader(self, fd: "_FileDescriptorLike") -> None:
-        del self._readers[fd]
+    def remove_reader(self, fd: "_FileDescriptorLike") -> bool:
+        try:
+            del self._readers[fd]
+        except KeyError:
+            return False
         self._wake_selector()
+        return True

-    def remove_writer(self, fd: "_FileDescriptorLike") -> None:
-        del self._writers[fd]
+    def remove_writer(self, fd: "_FileDescriptorLike") -> bool:
+        try:
+            del self._writers[fd]
+        except KeyError:
+            return False
         self._wake_selector()
+        return True
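Returning a bool from remove_reader/remove_writer matches asyncio.AbstractEventLoop, whose contract is to report whether the file descriptor was actually being watched. A small sketch of a caller relying on that, using the standard asyncio API on a selector-based loop (not Tornado-specific):

import asyncio
import socket


async def main() -> None:
    loop = asyncio.get_running_loop()
    r, w = socket.socketpair()
    loop.add_reader(r, lambda: None)
    print(loop.remove_reader(r))  # True: the fd was registered
    print(loop.remove_reader(r))  # False: nothing left to remove
    r.close()
    w.close()


asyncio.run(main())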
@@ -15,14 +15,15 @@ if typing.TYPE_CHECKING:
 class CaresResolver(Resolver):
     """Name resolver based on the c-ares library.

-    This is a non-blocking and non-threaded resolver. It may not produce
-    the same results as the system resolver, but can be used for non-blocking
+    This is a non-blocking and non-threaded resolver. It may not produce the
+    same results as the system resolver, but can be used for non-blocking
     resolution when threads cannot be used.

-    c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``,
-    so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is
-    the default for ``tornado.simple_httpclient``, but other libraries
-    may default to ``AF_UNSPEC``.
+    ``pycares`` will not return a mix of ``AF_INET`` and ``AF_INET6`` when
+    ``family`` is ``AF_UNSPEC``, so it is only recommended for use in
+    ``AF_INET`` (i.e. IPv4). This is the default for
+    ``tornado.simple_httpclient``, but other libraries may default to
+    ``AF_UNSPEC``.

     .. versionchanged:: 5.0
        The ``io_loop`` argument (deprecated since version 4.1) has been removed.
@@ -381,7 +381,7 @@ class PriorityQueue(Queue):
     def _put(self, item: _T) -> None:
         heapq.heappush(self._queue, item)

-    def _get(self) -> _T:
+    def _get(self) -> _T:  # type: ignore[type-var]
         return heapq.heappop(self._queue)


@@ -418,5 +418,5 @@ class LifoQueue(Queue):
     def _put(self, item: _T) -> None:
         self._queue.append(item)

-    def _get(self) -> _T:
+    def _get(self) -> _T:  # type: ignore[type-var]
         return self._queue.pop()
@@ -547,7 +547,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
         value: Optional[BaseException],
         tb: Optional[TracebackType],
     ) -> bool:
-        if self.final_callback:
+        if self.final_callback is not None:
             self._remove_timeout()
             if isinstance(value, StreamClosedError):
                 if value.real_error is None:
@@ -21,6 +21,7 @@ import socket
 import numbers
 import datetime
 import ssl
+import typing

 from tornado.concurrent import Future, future_add_done_callback
 from tornado.ioloop import IOLoop

@@ -29,7 +30,10 @@ from tornado import gen
 from tornado.netutil import Resolver
 from tornado.gen import TimeoutError

-from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional, Set
+from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional
+
+if typing.TYPE_CHECKING:
+    from typing import Set  # noqa(F401)

 _INITIAL_CONNECT_TIMEOUT = 0.3
@@ -246,9 +246,7 @@ class TCPServer(object):

        .. deprecated:: 6.2
           Use either ``listen()`` or ``add_sockets()`` instead of ``bind()``
-          and ``start()``. The ``bind()/start()`` pattern depends on
-          interfaces that have been deprecated in Python 3.10 and will be
-          removed in future versions of Python.
+          and ``start()``.
        """
        sockets = bind_sockets(
            port,

@@ -295,9 +293,7 @@ class TCPServer(object):

        .. deprecated:: 6.2
           Use either ``listen()`` or ``add_sockets()`` instead of ``bind()``
-          and ``start()``. The ``bind()/start()`` pattern depends on
-          interfaces that have been deprecated in Python 3.10 and will be
-          removed in future versions of Python.
+          and ``start()``.
        """
        assert not self._started
        self._started = True
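Both trimmed docstrings keep pointing users at listen() or add_sockets() rather than the bind() plus start() pattern. A minimal single-process sketch of the preferred form (the EchoServer class is illustrative, not part of this diff):

import asyncio

from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer


class EchoServer(TCPServer):
    async def handle_stream(self, stream, address):
        try:
            while True:
                data = await stream.read_until(b"\n")
                await stream.write(data)
        except StreamClosedError:
            pass


async def main() -> None:
    server = EchoServer()
    server.listen(8888)           # binds and starts accepting on the running loop
    await asyncio.Event().wait()  # serve until interrupted


if __name__ == "__main__":
    asyncio.run(main())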
@ -135,7 +135,8 @@ class AsyncTestCase(unittest.TestCase):
|
||||||
|
|
||||||
By default, a new `.IOLoop` is constructed for each test and is available
|
By default, a new `.IOLoop` is constructed for each test and is available
|
||||||
as ``self.io_loop``. If the code being tested requires a
|
as ``self.io_loop``. If the code being tested requires a
|
||||||
global `.IOLoop`, subclasses should override `get_new_ioloop` to return it.
|
reused global `.IOLoop`, subclasses should override `get_new_ioloop` to return it,
|
||||||
|
although this is deprecated as of Tornado 6.3.
|
||||||
|
|
||||||
The `.IOLoop`'s ``start`` and ``stop`` methods should not be
|
The `.IOLoop`'s ``start`` and ``stop`` methods should not be
|
||||||
called directly. Instead, use `self.stop <stop>` and `self.wait
|
called directly. Instead, use `self.stop <stop>` and `self.wait
|
||||||
|
@ -162,17 +163,6 @@ class AsyncTestCase(unittest.TestCase):
|
||||||
response = self.wait()
|
response = self.wait()
|
||||||
# Test contents of response
|
# Test contents of response
|
||||||
self.assertIn("FriendFeed", response.body)
|
self.assertIn("FriendFeed", response.body)
|
||||||
|
|
||||||
.. deprecated:: 6.2
|
|
||||||
|
|
||||||
AsyncTestCase and AsyncHTTPTestCase are deprecated due to changes
|
|
||||||
in future versions of Python (after 3.10). The interfaces used
|
|
||||||
in this class are incompatible with the deprecation and intended
|
|
||||||
removal of certain methods related to the idea of a "current"
|
|
||||||
event loop while no event loop is actually running. Use
|
|
||||||
`unittest.IsolatedAsyncioTestCase` instead. Note that this class
|
|
||||||
does not emit DeprecationWarnings until better migration guidance
|
|
||||||
can be provided.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, methodName: str = "runTest") -> None:
|
def __init__(self, methodName: str = "runTest") -> None:
|
||||||
|
@ -193,49 +183,22 @@ class AsyncTestCase(unittest.TestCase):
|
||||||
self._test_generator = None # type: Optional[Union[Generator, Coroutine]]
|
self._test_generator = None # type: Optional[Union[Generator, Coroutine]]
|
||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
setup_with_context_manager(self, warnings.catch_warnings())
|
py_ver = sys.version_info
|
||||||
warnings.filterwarnings(
|
if ((3, 10, 0) <= py_ver < (3, 10, 9)) or ((3, 11, 0) <= py_ver <= (3, 11, 1)):
|
||||||
"ignore",
|
# Early releases in the Python 3.10 and 3.1 series had deprecation
|
||||||
message="There is no current event loop",
|
# warnings that were later reverted; we must suppress them here.
|
||||||
category=DeprecationWarning,
|
setup_with_context_manager(self, warnings.catch_warnings())
|
||||||
module=r"tornado\..*",
|
warnings.filterwarnings(
|
||||||
)
|
"ignore",
|
||||||
|
message="There is no current event loop",
|
||||||
|
category=DeprecationWarning,
|
||||||
|
module=r"tornado\..*",
|
||||||
|
)
|
||||||
super().setUp()
|
super().setUp()
|
||||||
# NOTE: this code attempts to navigate deprecation warnings introduced
|
if type(self).get_new_ioloop is not AsyncTestCase.get_new_ioloop:
|
||||||
# in Python 3.10. The idea of an implicit current event loop is
|
warnings.warn("get_new_ioloop is deprecated", DeprecationWarning)
|
||||||
# deprecated in that version, with the intention that tests like this
|
self.io_loop = self.get_new_ioloop()
|
||||||
# explicitly create a new event loop and run on it. However, other
|
asyncio.set_event_loop(self.io_loop.asyncio_loop) # type: ignore[attr-defined]
|
||||||
# packages such as pytest-asyncio (as of version 0.16.0) still rely on
|
|
||||||
# the implicit current event loop and we want to be compatible with them
|
|
||||||
# (even when run on 3.10, but not, of course, on the future version of
|
|
||||||
# python that removes the get/set_event_loop methods completely).
|
|
||||||
#
|
|
||||||
# Deprecation warnings were introduced inconsistently:
|
|
||||||
# asyncio.get_event_loop warns, but
|
|
||||||
# asyncio.get_event_loop_policy().get_event_loop does not. Similarly,
|
|
||||||
# none of the set_event_loop methods warn, although comments on
|
|
||||||
# https://bugs.python.org/issue39529 indicate that they are also
|
|
||||||
# intended for future removal.
|
|
||||||
#
|
|
||||||
# Therefore, we first attempt to access the event loop with the
|
|
||||||
# (non-warning) policy method, and if it fails, fall back to creating a
|
|
||||||
# new event loop. We do not have effective test coverage of the
|
|
||||||
# new event loop case; this will have to be watched when/if
|
|
||||||
# get_event_loop is actually removed.
|
|
||||||
self.should_close_asyncio_loop = False
|
|
||||||
try:
|
|
||||||
self.asyncio_loop = asyncio.get_event_loop_policy().get_event_loop()
|
|
||||||
except Exception:
|
|
||||||
self.asyncio_loop = asyncio.new_event_loop()
|
|
||||||
self.should_close_asyncio_loop = True
|
|
||||||
|
|
||||||
async def get_loop() -> IOLoop:
|
|
||||||
return self.get_new_ioloop()
|
|
||||||
|
|
||||||
self.io_loop = self.asyncio_loop.run_until_complete(get_loop())
|
|
||||||
with warnings.catch_warnings():
|
|
||||||
warnings.simplefilter("ignore", DeprecationWarning)
|
|
||||||
self.io_loop.make_current()
|
|
||||||
|
|
||||||
def tearDown(self) -> None:
|
def tearDown(self) -> None:
|
||||||
# Native coroutines tend to produce warnings if they're not
|
# Native coroutines tend to produce warnings if they're not
|
||||||
|
@ -270,17 +233,13 @@ class AsyncTestCase(unittest.TestCase):
|
||||||
|
|
||||||
# Clean up Subprocess, so it can be used again with a new ioloop.
|
# Clean up Subprocess, so it can be used again with a new ioloop.
|
||||||
Subprocess.uninitialize()
|
Subprocess.uninitialize()
|
||||||
with warnings.catch_warnings():
|
asyncio.set_event_loop(None)
|
||||||
warnings.simplefilter("ignore", DeprecationWarning)
|
|
||||||
self.io_loop.clear_current()
|
|
||||||
if not isinstance(self.io_loop, _NON_OWNED_IOLOOPS):
|
if not isinstance(self.io_loop, _NON_OWNED_IOLOOPS):
|
||||||
# Try to clean up any file descriptors left open in the ioloop.
|
# Try to clean up any file descriptors left open in the ioloop.
|
||||||
# This avoids leaks, especially when tests are run repeatedly
|
# This avoids leaks, especially when tests are run repeatedly
|
||||||
# in the same process with autoreload (because curl does not
|
# in the same process with autoreload (because curl does not
|
||||||
# set FD_CLOEXEC on its file descriptors)
|
# set FD_CLOEXEC on its file descriptors)
|
||||||
self.io_loop.close(all_fds=True)
|
self.io_loop.close(all_fds=True)
|
||||||
if self.should_close_asyncio_loop:
|
|
||||||
self.asyncio_loop.close()
|
|
||||||
super().tearDown()
|
super().tearDown()
|
||||||
# In case an exception escaped or the StackContext caught an exception
|
# In case an exception escaped or the StackContext caught an exception
|
||||||
# when there wasn't a wait() to re-raise it, do so here.
|
# when there wasn't a wait() to re-raise it, do so here.
|
||||||
|
@ -298,6 +257,9 @@ class AsyncTestCase(unittest.TestCase):
|
||||||
singletons using the default `.IOLoop`) or if a per-test event
|
singletons using the default `.IOLoop`) or if a per-test event
|
||||||
loop is being provided by another system (such as
|
loop is being provided by another system (such as
|
||||||
``pytest-asyncio``).
|
``pytest-asyncio``).
|
||||||
|
|
||||||
|
.. deprecated:: 6.3
|
||||||
|
This method will be removed in Tornado 7.0.
|
||||||
"""
|
"""
|
||||||
return IOLoop(make_current=False)
|
return IOLoop(make_current=False)
|
||||||
|
|
||||||
|
@ -435,10 +397,6 @@ class AsyncHTTPTestCase(AsyncTestCase):
|
||||||
like ``http_client.fetch()``, into a synchronous operation. If you need
|
like ``http_client.fetch()``, into a synchronous operation. If you need
|
||||||
to do other asynchronous operations in tests, you'll probably need to use
|
to do other asynchronous operations in tests, you'll probably need to use
|
||||||
``stop()`` and ``wait()`` yourself.
|
``stop()`` and ``wait()`` yourself.
|
||||||
|
|
||||||
.. deprecated:: 6.2
|
|
||||||
`AsyncTestCase` and `AsyncHTTPTestCase` are deprecated due to changes
|
|
||||||
in Python 3.10; see comments on `AsyncTestCase` for more details.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
|
@ -672,7 +630,7 @@ def gen_test( # noqa: F811
|
||||||
if self._test_generator is not None and getattr(
|
if self._test_generator is not None and getattr(
|
||||||
self._test_generator, "cr_running", True
|
self._test_generator, "cr_running", True
|
||||||
):
|
):
|
||||||
self._test_generator.throw(type(e), e)
|
self._test_generator.throw(e)
|
||||||
# In case the test contains an overly broad except
|
# In case the test contains an overly broad except
|
||||||
# clause, we may get back here.
|
# clause, we may get back here.
|
||||||
# Coroutine was stopped or didn't raise a useful stack trace,
|
# Coroutine was stopped or didn't raise a useful stack trace,
|
||||||
@@ -724,28 +682,37 @@ class ExpectLog(logging.Filter):
     ) -> None:
         """Constructs an ExpectLog context manager.

-        :param logger: Logger object (or name of logger) to watch. Pass
-            an empty string to watch the root logger.
-        :param regex: Regular expression to match. Any log entries on
-            the specified logger that match this regex will be suppressed.
-        :param required: If true, an exception will be raised if the end of
-            the ``with`` statement is reached without matching any log entries.
+        :param logger: Logger object (or name of logger) to watch. Pass an
+            empty string to watch the root logger.
+        :param regex: Regular expression to match. Any log entries on the
+            specified logger that match this regex will be suppressed.
+        :param required: If true, an exception will be raised if the end of the
+            ``with`` statement is reached without matching any log entries.
         :param level: A constant from the ``logging`` module indicating the
            expected log level. If this parameter is provided, only log messages
            at this level will be considered to match. Additionally, the
-           supplied ``logger`` will have its level adjusted if necessary
-           (for the duration of the ``ExpectLog`` to enable the expected
-           message.
+           supplied ``logger`` will have its level adjusted if necessary (for
+           the duration of the ``ExpectLog`` to enable the expected message.

         .. versionchanged:: 6.1
            Added the ``level`` parameter.
+
+        .. deprecated:: 6.3
+           In Tornado 7.0, only ``WARNING`` and higher logging levels will be
+           matched by default. To match ``INFO`` and lower levels, the ``level``
+           argument must be used. This is changing to minimize differences
+           between ``tornado.testing.main`` (which enables ``INFO`` logs by
+           default) and most other test runners (including those in IDEs)
+           which have ``INFO`` logs disabled by default.
         """
         if isinstance(logger, basestring_type):
             logger = logging.getLogger(logger)
         self.logger = logger
         self.regex = re.compile(regex)
         self.required = required
-        self.matched = False
+        # matched and deprecated_level_matched are a counter for the respective event.
+        self.matched = 0
+        self.deprecated_level_matched = 0
         self.logged_stack = False
         self.level = level
         self.orig_level = None  # type: Optional[int]
@@ -755,13 +722,20 @@ class ExpectLog(logging.Filter):
             self.logged_stack = True
         message = record.getMessage()
         if self.regex.match(message):
+            if self.level is None and record.levelno < logging.WARNING:
+                # We're inside the logging machinery here so generating a DeprecationWarning
+                # here won't be reported cleanly (if warnings-as-errors is enabled, the error
+                # just gets swallowed by the logging module), and even if it were it would
+                # have the wrong stack trace. Just remember this fact and report it in
+                # __exit__ instead.
+                self.deprecated_level_matched += 1
             if self.level is not None and record.levelno != self.level:
                 app_log.warning(
                     "Got expected log message %r at unexpected level (%s vs %s)"
                     % (message, logging.getLevelName(self.level), record.levelname)
                 )
                 return True
-            self.matched = True
+            self.matched += 1
             return False
         return True
@@ -783,6 +757,15 @@ class ExpectLog(logging.Filter):
         self.logger.removeFilter(self)
         if not typ and self.required and not self.matched:
             raise Exception("did not get expected log message")
+        if (
+            not typ
+            and self.required
+            and (self.deprecated_level_matched >= self.matched)
+        ):
+            warnings.warn(
+                "ExpectLog matched at INFO or below without level argument",
+                DeprecationWarning,
+            )


 # From https://nedbatchelder.com/blog/201508/using_context_managers_in_test_setup.html
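The `ExpectLog` changes above matter to any test that matches INFO-level output. A minimal sketch of the forward-compatible spelling, using only arguments shown in the diff:

    import logging

    from tornado.log import app_log
    from tornado.testing import ExpectLog

    # Passing level= keeps the test working when Tornado 7.0 stops matching
    # INFO and lower by default (per the deprecation note above).
    with ExpectLog(app_log, "cache warmed", level=logging.INFO):
        app_log.info("cache warmed")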
@@ -23,7 +23,7 @@ Here is a simple "Hello, world" example app:
 .. testcode::

     import asyncio
-    import tornado.web
+    import tornado

     class MainHandler(tornado.web.RequestHandler):
         def get(self):
@@ -166,7 +166,7 @@ May be overridden by passing a ``version`` keyword argument.
 """

 DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
-"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
+"""The oldest signed value accepted by `.RequestHandler.get_signed_cookie`.

 May be overridden by passing a ``min_version`` keyword argument.

@@ -210,7 +210,7 @@ class RequestHandler(object):
         self,
         application: "Application",
         request: httputil.HTTPServerRequest,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         super().__init__()

@@ -603,21 +603,28 @@ class RequestHandler(object):
         expires: Optional[Union[float, Tuple, datetime.datetime]] = None,
         path: str = "/",
         expires_days: Optional[float] = None,
-        **kwargs: Any
+        # Keyword-only args start here for historical reasons.
+        *,
+        max_age: Optional[int] = None,
+        httponly: bool = False,
+        secure: bool = False,
+        samesite: Optional[str] = None,
     ) -> None:
         """Sets an outgoing cookie name/value with the given options.

         Newly-set cookies are not immediately visible via `get_cookie`;
         they are not present until the next request.

-        expires may be a numeric timestamp as returned by `time.time`,
-        a time tuple as returned by `time.gmtime`, or a
-        `datetime.datetime` object.
+        Most arguments are passed directly to `http.cookies.Morsel` directly.
+        See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie
+        for more information.

-        Additional keyword arguments are set on the cookies.Morsel
-        directly.
-        See https://docs.python.org/3/library/http.cookies.html#http.cookies.Morsel
-        for available attributes.
+        ``expires`` may be a numeric timestamp as returned by `time.time`,
+        a time tuple as returned by `time.gmtime`, or a
+        `datetime.datetime` object. ``expires_days`` is provided as a convenience
+        to set an expiration time in days from today (if both are set, ``expires``
+        is used).
+
         """
         # The cookie library only accepts type str, in both python 2 and 3
         name = escape.native_str(name)
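A short sketch of the new keyword-only ``set_cookie`` arguments shown above (handler and cookie values are illustrative):

    import tornado.web


    class LoginHintHandler(tornado.web.RequestHandler):
        def get(self) -> None:
            # Everything after expires_days is keyword-only in 6.3.
            self.set_cookie(
                "session_hint",   # hypothetical cookie name
                "abc123",
                max_age=3600,     # rendered as the max-age cookie attribute
                httponly=True,
                secure=True,
                samesite="Lax",
            )
            self.write("ok")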
@@ -641,56 +648,82 @@ class RequestHandler(object):
             morsel["expires"] = httputil.format_timestamp(expires)
         if path:
             morsel["path"] = path
-        for k, v in kwargs.items():
-            if k == "max_age":
-                k = "max-age"
-
-            # skip falsy values for httponly and secure flags because
-            # SimpleCookie sets them regardless
-            if k in ["httponly", "secure"] and not v:
-                continue
-
-            morsel[k] = v
-
-    def clear_cookie(
-        self, name: str, path: str = "/", domain: Optional[str] = None
-    ) -> None:
+        if max_age:
+            # Note change from _ to -.
+            morsel["max-age"] = str(max_age)
+        if httponly:
+            # Note that SimpleCookie ignores the value here. The presense of an
+            # httponly (or secure) key is treated as true.
+            morsel["httponly"] = True
+        if secure:
+            morsel["secure"] = True
+        if samesite:
+            morsel["samesite"] = samesite
+
+    def clear_cookie(self, name: str, **kwargs: Any) -> None:
         """Deletes the cookie with the given name.

-        Due to limitations of the cookie protocol, you must pass the same
-        path and domain to clear a cookie as were used when that cookie
-        was set (but there is no way to find out on the server side
-        which values were used for a given cookie).
+        This method accepts the same arguments as `set_cookie`, except for
+        ``expires`` and ``max_age``. Clearing a cookie requires the same
+        ``domain`` and ``path`` arguments as when it was set. In some cases the
+        ``samesite`` and ``secure`` arguments are also required to match. Other
+        arguments are ignored.

         Similar to `set_cookie`, the effect of this method will not be
         seen until the following request.
+
+        .. versionchanged:: 6.3
+
+           Now accepts all keyword arguments that ``set_cookie`` does.
+           The ``samesite`` and ``secure`` flags have recently become
+           required for clearing ``samesite="none"`` cookies.
         """
+        for excluded_arg in ["expires", "max_age"]:
+            if excluded_arg in kwargs:
+                raise TypeError(
+                    f"clear_cookie() got an unexpected keyword argument '{excluded_arg}'"
+                )
         expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
-        self.set_cookie(name, value="", path=path, expires=expires, domain=domain)
+        self.set_cookie(name, value="", expires=expires, **kwargs)

-    def clear_all_cookies(self, path: str = "/", domain: Optional[str] = None) -> None:
-        """Deletes all the cookies the user sent with this request.
+    def clear_all_cookies(self, **kwargs: Any) -> None:
+        """Attempt to delete all the cookies the user sent with this request.

-        See `clear_cookie` for more information on the path and domain
-        parameters.
+        See `clear_cookie` for more information on keyword arguments. Due to
+        limitations of the cookie protocol, it is impossible to determine on the
+        server side which values are necessary for the ``domain``, ``path``,
+        ``samesite``, or ``secure`` arguments, this method can only be
+        successful if you consistently use the same values for these arguments
+        when setting cookies.

-        Similar to `set_cookie`, the effect of this method will not be
-        seen until the following request.
+        Similar to `set_cookie`, the effect of this method will not be seen
+        until the following request.

         .. versionchanged:: 3.2

            Added the ``path`` and ``domain`` parameters.
+
+        .. versionchanged:: 6.3
+
+           Now accepts all keyword arguments that ``set_cookie`` does.
+
+        .. deprecated:: 6.3
+
+           The increasingly complex rules governing cookies have made it
+           impossible for a ``clear_all_cookies`` method to work reliably
+           since all we know about cookies are their names. Applications
+           should generally use ``clear_cookie`` one at a time instead.
         """
         for name in self.request.cookies:
-            self.clear_cookie(name, path=path, domain=domain)
+            self.clear_cookie(name, **kwargs)

-    def set_secure_cookie(
+    def set_signed_cookie(
         self,
         name: str,
         value: Union[str, bytes],
         expires_days: Optional[float] = 30,
         version: Optional[int] = None,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         """Signs and timestamps a cookie so it cannot be forged.

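A sketch of the new ``clear_cookie`` calling convention described in the docstring above (cookie name and attribute values are hypothetical):

    import tornado.web


    class LogoutHandler(tornado.web.RequestHandler):
        def get(self) -> None:
            # Per the updated docstring: clearing must repeat the path/domain
            # (and sometimes samesite/secure) used when the cookie was set.
            self.clear_cookie("session_hint", path="/", secure=True, samesite="None")
            # Passing expires or max_age here would raise TypeError, as enforced above.
            self.write("logged out")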
@@ -698,11 +731,11 @@ class RequestHandler(object):
         to use this method. It should be a long, random sequence of bytes
         to be used as the HMAC secret for the signature.

-        To read a cookie set with this method, use `get_secure_cookie()`.
+        To read a cookie set with this method, use `get_signed_cookie()`.

         Note that the ``expires_days`` parameter sets the lifetime of the
         cookie in the browser, but is independent of the ``max_age_days``
-        parameter to `get_secure_cookie`.
+        parameter to `get_signed_cookie`.
         A value of None limits the lifetime to the current browser session.

         Secure cookies may contain arbitrary byte values, not just unicode
@@ -715,22 +748,30 @@ class RequestHandler(object):

            Added the ``version`` argument. Introduced cookie version 2
            and made it the default.
+
+        .. versionchanged:: 6.3
+
+           Renamed from ``set_secure_cookie`` to ``set_signed_cookie`` to
+           avoid confusion with other uses of "secure" in cookie attributes
+           and prefixes. The old name remains as an alias.
         """
         self.set_cookie(
             name,
             self.create_signed_value(name, value, version=version),
             expires_days=expires_days,
-            **kwargs
+            **kwargs,
         )

+    set_secure_cookie = set_signed_cookie
+
     def create_signed_value(
         self, name: str, value: Union[str, bytes], version: Optional[int] = None
     ) -> bytes:
         """Signs and timestamps a string so it cannot be forged.

-        Normally used via set_secure_cookie, but provided as a separate
+        Normally used via set_signed_cookie, but provided as a separate
         method for non-cookie uses. To decode a value not stored
-        as a cookie use the optional value argument to get_secure_cookie.
+        as a cookie use the optional value argument to get_signed_cookie.

         .. versionchanged:: 3.2.1

@@ -749,7 +790,7 @@ class RequestHandler(object):
             secret, name, value, version=version, key_version=key_version
         )

-    def get_secure_cookie(
+    def get_signed_cookie(
         self,
         name: str,
         value: Optional[str] = None,
@@ -763,12 +804,19 @@ class RequestHandler(object):

         Similar to `get_cookie`, this method only returns cookies that
         were present in the request. It does not see outgoing cookies set by
-        `set_secure_cookie` in this handler.
+        `set_signed_cookie` in this handler.

         .. versionchanged:: 3.2.1

            Added the ``min_version`` argument. Introduced cookie version 2;
            both versions 1 and 2 are accepted by default.
+
+        .. versionchanged:: 6.3
+
+           Renamed from ``get_secure_cookie`` to ``get_signed_cookie`` to
+           avoid confusion with other uses of "secure" in cookie attributes
+           and prefixes. The old name remains as an alias.
+
         """
         self.require_setting("cookie_secret", "secure cookies")
         if value is None:
@@ -781,12 +829,22 @@ class RequestHandler(object):
             min_version=min_version,
         )

-    def get_secure_cookie_key_version(
+    get_secure_cookie = get_signed_cookie
+
+    def get_signed_cookie_key_version(
         self, name: str, value: Optional[str] = None
     ) -> Optional[int]:
         """Returns the signing key version of the secure cookie.

         The version is returned as int.
+
+        .. versionchanged:: 6.3
+
+           Renamed from ``get_secure_cookie_key_version`` to
+           ``set_signed_cookie_key_version`` to avoid confusion with other
+           uses of "secure" in cookie attributes and prefixes. The old name
+           remains as an alias.
+
         """
         self.require_setting("cookie_secret", "secure cookies")
         if value is None:
@@ -795,6 +853,8 @@ class RequestHandler(object):
             return None
         return get_signature_key_version(value)

+    get_secure_cookie_key_version = get_signed_cookie_key_version
+
     def redirect(
         self, url: str, permanent: bool = False, status: Optional[int] = None
     ) -> None:
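The rename is mechanical and the old names remain as aliases, per the hunks above. A short sketch using the new spellings (cookie name and settings are illustrative; the application must be created with a ``cookie_secret``):

    import tornado.web


    class ProfileHandler(tornado.web.RequestHandler):
        def post(self) -> None:
            # Formerly set_secure_cookie; the old name still works as an alias.
            self.set_signed_cookie("user", "42", expires_days=7)

        def get(self) -> None:
            # Formerly get_secure_cookie; returns bytes or None.
            user = self.get_signed_cookie("user", max_age_days=7)
            self.write(user or b"anonymous")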
@@ -1321,7 +1381,7 @@ class RequestHandler(object):
         and is cached for future access::

             def get_current_user(self):
-                user_cookie = self.get_secure_cookie("user")
+                user_cookie = self.get_signed_cookie("user")
                 if user_cookie:
                     return json.loads(user_cookie)
                 return None
@@ -1331,7 +1391,7 @@ class RequestHandler(object):

             @gen.coroutine
             def prepare(self):
-                user_id_cookie = self.get_secure_cookie("user_id")
+                user_id_cookie = self.get_signed_cookie("user_id")
                 if user_id_cookie:
                     self.current_user = yield load_user(user_id_cookie)

@@ -1643,7 +1703,7 @@ class RequestHandler(object):
         # Find all weak and strong etag values from If-None-Match header
         # because RFC 7232 allows multiple etag values in a single header.
         etags = re.findall(
-            br'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
+            rb'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
         )
         if not computed_etag or not etags:
             return False
@@ -1676,20 +1736,16 @@ class RequestHandler(object):
             )
         # If XSRF cookies are turned on, reject form submissions without
         # the proper cookie
-        if (
-            self.request.method
-            not in (
-                "GET",
-                "HEAD",
-                "OPTIONS",
-            )
-            and self.application.settings.get("xsrf_cookies")
-        ):
+        if self.request.method not in (
+            "GET",
+            "HEAD",
+            "OPTIONS",
+        ) and self.application.settings.get("xsrf_cookies"):
             self.check_xsrf_cookie()

         result = self.prepare()
         if result is not None:
-            result = await result
+            result = await result  # type: ignore
         if self._prepared_future is not None:
             # Tell the Application we've finished with prepare()
             # and are ready for the body to arrive.
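The etag regex change above only swaps the ``br''`` prefix for ``rb''``; both spell the same raw bytes literal, so behaviour is unchanged. For instance:

    import re

    # Both prefixes denote a raw bytes literal; the literals compare equal.
    assert br'\*|(?:W/)?"[^"]*"' == rb'\*|(?:W/)?"[^"]*"'
    etag_re = re.compile(rb'\*|(?:W/)?"[^"]*"')
    assert etag_re.match(b'W/"abc"')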
@@ -1848,7 +1904,7 @@ def stream_request_body(cls: Type[_RequestHandlerType]) -> Type[_RequestHandlerT
     * The regular HTTP method (``post``, ``put``, etc) will be called after
       the entire body has been read.

-    See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/master/demos/file_upload/>`_
+    See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/stable/demos/file_upload/>`_
     for example usage.
     """  # noqa: E501
     if not issubclass(cls, RequestHandler):
@@ -2046,7 +2102,7 @@ class Application(ReversibleRouter):
         handlers: Optional[_RuleList] = None,
         default_host: Optional[str] = None,
         transforms: Optional[List[Type["OutputTransform"]]] = None,
-        **settings: Any
+        **settings: Any,
     ) -> None:
         if transforms is None:
             self.transforms = []  # type: List[Type[OutputTransform]]
@@ -2106,7 +2162,7 @@ class Application(ReversibleRouter):
         backlog: int = tornado.netutil._DEFAULT_BACKLOG,
         flags: Optional[int] = None,
         reuse_port: bool = False,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> HTTPServer:
         """Starts an HTTP server for this application on the given port.

@@ -2393,7 +2449,7 @@ class HTTPError(Exception):
         status_code: int = 500,
         log_message: Optional[str] = None,
         *args: Any,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         self.status_code = status_code
         self.log_message = log_message
@@ -3441,7 +3497,7 @@ def create_signed_value(

 # A leading version number in decimal
 # with no leading zeros, followed by a pipe.
-_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
+_signed_value_version_re = re.compile(rb"^([1-9][0-9]*)\|(.*)$")


 def _get_version(value: bytes) -> int:
@@ -23,7 +23,6 @@ import hashlib
 import os
 import sys
 import struct
-import tornado.escape
 import tornado.web
 from urllib.parse import urlparse
 import zlib
@@ -34,6 +33,7 @@ from tornado import gen, httpclient, httputil
 from tornado.ioloop import IOLoop, PeriodicCallback
 from tornado.iostream import StreamClosedError, IOStream
 from tornado.log import gen_log, app_log
+from tornado.netutil import Resolver
 from tornado import simple_httpclient
 from tornado.queues import Queue
 from tornado.tcpclient import TCPClient
@@ -822,7 +822,7 @@ class WebSocketProtocol13(WebSocketProtocol):
         self._masked_frame = None
         self._frame_mask = None  # type: Optional[bytes]
         self._frame_length = None
-        self._fragmented_message_buffer = None  # type: Optional[bytes]
+        self._fragmented_message_buffer = None  # type: Optional[bytearray]
         self._fragmented_message_opcode = None
         self._waiting = None  # type: object
         self._compression_options = params.compression_options
@@ -1177,10 +1177,10 @@ class WebSocketProtocol13(WebSocketProtocol):
                 # nothing to continue
                 self._abort()
                 return
-            self._fragmented_message_buffer += data
+            self._fragmented_message_buffer.extend(data)
             if is_final_frame:
                 opcode = self._fragmented_message_opcode
-                data = self._fragmented_message_buffer
+                data = bytes(self._fragmented_message_buffer)
                 self._fragmented_message_buffer = None
         else:  # start of new data message
             if self._fragmented_message_buffer is not None:
@@ -1189,7 +1189,7 @@ class WebSocketProtocol13(WebSocketProtocol):
                 return
             if not is_final_frame:
                 self._fragmented_message_opcode = opcode
-                self._fragmented_message_buffer = data
+                self._fragmented_message_buffer = bytearray(data)

         if is_final_frame:
             handled_future = self._handle_message(opcode, data)
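The switch from bytes concatenation to a bytearray buffer above is an efficiency change for fragmented messages: appending to a bytearray is amortized O(1), while ``bytes +=`` copies the whole buffer each time. A standalone sketch of the same pattern:

    # Sketch of the buffering pattern, outside of any websocket code.
    chunks = [b"frame-1 ", b"frame-2 ", b"frame-3"]

    buffer = bytearray(chunks[0])   # start of a fragmented message
    for chunk in chunks[1:]:
        buffer.extend(chunk)        # cheap append per fragment
    message = bytes(buffer)         # final immutable payload

    assert message == b"frame-1 frame-2 frame-3"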
@@ -1362,6 +1362,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
         ping_timeout: Optional[float] = None,
         max_message_size: int = _default_max_message_size,
         subprotocols: Optional[List[str]] = [],
+        resolver: Optional[Resolver] = None,
     ) -> None:
         self.connect_future = Future()  # type: Future[WebSocketClientConnection]
         self.read_queue = Queue(1)  # type: Queue[Union[None, str, bytes]]
@@ -1402,7 +1403,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
         # Websocket connection is currently unable to follow redirects
         request.follow_redirects = False

-        self.tcp_client = TCPClient()
+        self.tcp_client = TCPClient(resolver=resolver)
         super().__init__(
             None,
             request,
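The new ``resolver`` argument lets a websocket client supply its own DNS resolution. Only the `WebSocketClientConnection` constructor change is shown above, so the sketch below assumes the public ``websocket_connect`` helper forwards the parameter; the endpoint URL is hypothetical:

    import asyncio

    from tornado.netutil import ThreadedResolver
    from tornado.websocket import websocket_connect


    async def main() -> None:
        # Assumption: websocket_connect passes resolver= through to
        # WebSocketClientConnection (not shown in this hunk).
        conn = await websocket_connect(
            "ws://127.0.0.1:8888/ws",
            resolver=ThreadedResolver(),
        )
        await conn.write_message("hello")
        print(await conn.read_message())


    if __name__ == "__main__":
        asyncio.run(main())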
@@ -27,12 +27,15 @@ container.

 """

-import sys
+import concurrent.futures
 from io import BytesIO
 import tornado
+import sys

+from tornado.concurrent import dummy_executor
 from tornado import escape
 from tornado import httputil
+from tornado.ioloop import IOLoop
 from tornado.log import access_log

 from typing import List, Tuple, Optional, Callable, Any, Dict, Text
@@ -54,20 +57,28 @@ def to_wsgi_str(s: bytes) -> str:


 class WSGIContainer(object):
-    r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server.
+    r"""Makes a WSGI-compatible application runnable on Tornado's HTTP server.

     .. warning::

        WSGI is a *synchronous* interface, while Tornado's concurrency model
-       is based on single-threaded asynchronous execution. This means that
-       running a WSGI app with Tornado's `WSGIContainer` is *less scalable*
-       than running the same app in a multi-threaded WSGI server like
-       ``gunicorn`` or ``uwsgi``. Use `WSGIContainer` only when there are
-       benefits to combining Tornado and WSGI in the same process that
-       outweigh the reduced scalability.
+       is based on single-threaded *asynchronous* execution. Many of Tornado's
+       distinguishing features are not available in WSGI mode, including efficient
+       long-polling and websockets. The primary purpose of `WSGIContainer` is
+       to support both WSGI applications and native Tornado ``RequestHandlers`` in
+       a single process. WSGI-only applications are likely to be better off
+       with a dedicated WSGI server such as ``gunicorn`` or ``uwsgi``.

-    Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to
-    run it. For example::
+    Wrap a WSGI application in a `WSGIContainer` to make it implement the Tornado
+    `.HTTPServer` ``request_callback`` interface. The `WSGIContainer` object can
+    then be passed to classes from the `tornado.routing` module,
+    `tornado.web.FallbackHandler`, or to `.HTTPServer` directly.
+
+    This class is intended to let other frameworks (Django, Flask, etc)
+    run on the Tornado HTTP server and I/O loop.
+
+    Realistic usage will be more complicated, but the simplest possible example uses a
+    hand-written WSGI application with `.HTTPServer`::

         def simple_app(environ, start_response):
             status = "200 OK"
@@ -83,18 +94,46 @@ class WSGIContainer(object):

         asyncio.run(main())

-    This class is intended to let other frameworks (Django, web.py, etc)
-    run on the Tornado HTTP server and I/O loop.
+    The recommended pattern is to use the `tornado.routing` module to set up routing
+    rules between your WSGI application and, typically, a `tornado.web.Application`.
+    Alternatively, `tornado.web.Application` can be used as the top-level router
+    and `tornado.web.FallbackHandler` can embed a `WSGIContainer` within it.

-    The `tornado.web.FallbackHandler` class is often useful for mixing
-    Tornado and WSGI apps in the same server. See
-    https://github.com/bdarnell/django-tornado-demo for a complete example.
+    If the ``executor`` argument is provided, the WSGI application will be executed
+    on that executor. This must be an instance of `concurrent.futures.Executor`,
+    typically a ``ThreadPoolExecutor`` (``ProcessPoolExecutor`` is not supported).
+    If no ``executor`` is given, the application will run on the event loop thread in
+    Tornado 6.3; this will change to use an internal thread pool by default in
+    Tornado 7.0.
+
+    .. warning::
+       By default, the WSGI application is executed on the event loop's thread. This
+       limits the server to one request at a time (per process), making it less scalable
+       than most other WSGI servers. It is therefore highly recommended that you pass
+       a ``ThreadPoolExecutor`` when constructing the `WSGIContainer`, after verifying
+       that your application is thread-safe. The default will change to use a
+       ``ThreadPoolExecutor`` in Tornado 7.0.
+
+    .. versionadded:: 6.3
+       The ``executor`` parameter.
+
+    .. deprecated:: 6.3
+       The default behavior of running the WSGI application on the event loop thread
+       is deprecated and will change in Tornado 7.0 to use a thread pool by default.
     """

-    def __init__(self, wsgi_application: "WSGIAppType") -> None:
+    def __init__(
+        self,
+        wsgi_application: "WSGIAppType",
+        executor: Optional[concurrent.futures.Executor] = None,
+    ) -> None:
         self.wsgi_application = wsgi_application
+        self.executor = dummy_executor if executor is None else executor

     def __call__(self, request: httputil.HTTPServerRequest) -> None:
+        IOLoop.current().spawn_callback(self.handle_request, request)
+
+    async def handle_request(self, request: httputil.HTTPServerRequest) -> None:
         data = {}  # type: Dict[str, Any]
         response = []  # type: List[bytes]

@@ -113,15 +152,33 @@ class WSGIContainer(object):
             data["headers"] = headers
             return response.append

-        app_response = self.wsgi_application(
-            WSGIContainer.environ(request), start_response
+        loop = IOLoop.current()
+        app_response = await loop.run_in_executor(
+            self.executor,
+            self.wsgi_application,
+            self.environ(request),
+            start_response,
         )
         try:
-            response.extend(app_response)
-            body = b"".join(response)
+            app_response_iter = iter(app_response)
+
+            def next_chunk() -> Optional[bytes]:
+                try:
+                    return next(app_response_iter)
+                except StopIteration:
+                    # StopIteration is special and is not allowed to pass through
+                    # coroutines normally.
+                    return None
+
+            while True:
+                chunk = await loop.run_in_executor(self.executor, next_chunk)
+                if chunk is None:
+                    break
+                response.append(chunk)
         finally:
             if hasattr(app_response, "close"):
                 app_response.close()  # type: ignore
+        body = b"".join(response)
         if not data:
             raise Exception("WSGI app did not call start_response")

@@ -147,9 +204,12 @@ class WSGIContainer(object):
         request.connection.finish()
         self._log(status_code, request)

-    @staticmethod
-    def environ(request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
-        """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment."""
+    def environ(self, request: httputil.HTTPServerRequest) -> Dict[Text, Any]:
+        """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
+
+        .. versionchanged:: 6.3
+           No longer a static method.
+        """
         hostport = request.host.split(":")
         if len(hostport) == 2:
             host = hostport[0]
@@ -172,7 +232,7 @@ class WSGIContainer(object):
             "wsgi.url_scheme": request.protocol,
             "wsgi.input": BytesIO(escape.utf8(request.body)),
             "wsgi.errors": sys.stderr,
-            "wsgi.multithread": False,
+            "wsgi.multithread": self.executor is not dummy_executor,
             "wsgi.multiprocess": True,
             "wsgi.run_once": False,
         }
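Putting the new executor support together, a sketch of serving a WSGI app on Tornado's HTTP server (the WSGI function and port are illustrative); per the docstring above, pass a ``ThreadPoolExecutor`` once the app is known to be thread-safe:

    import asyncio
    from concurrent.futures import ThreadPoolExecutor

    import tornado.httpserver
    import tornado.wsgi


    def simple_app(environ, start_response):
        # Hypothetical WSGI application standing in for Django/Flask/etc.
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"Hello from WSGI\n"]


    async def main() -> None:
        container = tornado.wsgi.WSGIContainer(
            simple_app,
            executor=ThreadPoolExecutor(max_workers=4),  # new in 6.3 per this diff
        )
        server = tornado.httpserver.HTTPServer(container)
        server.listen(8888)  # illustrative port
        await asyncio.Event().wait()


    if __name__ == "__main__":
        asyncio.run(main())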
@@ -320,7 +320,7 @@ class BaseHandler(RouteHandler):

     def get_current_user(self):
         if sickgear.WEB_USERNAME or sickgear.WEB_PASSWORD:
-            return self.get_secure_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT))
+            return self.get_signed_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT))
         return True

     def get_image(self, image):
@@ -401,7 +401,7 @@ class LoginHandler(BaseHandler):
                               httponly=True)
             if sickgear.ENABLE_HTTPS:
                 params.update(dict(secure=True))
-            self.set_secure_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT),
+            self.set_signed_cookie('sickgear-session-%s' % helpers.md5_for_text(sickgear.WEB_PORT),
                                    sickgear.COOKIE_SECRET, **params)
             self.redirect(self.get_argument('next', '/home/'))
         else: