Update Tornado Web Server 4.3.dev1 (1b6157d) to 4.4.dev1 (c2b4d05).

JackDandy 2015-12-23 04:15:36 +00:00
parent 81bd0ab4d9
commit f0b81ff846
46 changed files with 805 additions and 443 deletions

View file

@ -2,6 +2,7 @@
* Update unidecode library 0.04.11 to 0.04.18 (fd57cbf)
* Update xmltodict library 0.9.2 (579a005) to 0.9.2 (eac0031)
* Update Tornado Web Server 4.3.dev1 (1b6157d) to 4.4.dev1 (c2b4d05)
### 0.11.0 (2016-01-10 22:30:00 UTC)

View file

@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "4.3.dev1"
version_info = (4, 3, 0, -100)
version = "4.4.dev1"
version_info = (4, 4, 0, -100)

View file

@ -0,0 +1,94 @@
#!/usr/bin/env python
# coding: utf-8
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Data used by the tornado.locale module."""
from __future__ import absolute_import, division, print_function, with_statement
# NOTE: This file is supposed to contain unicode strings, which is
# exactly what you'd get with e.g. u"Español" in most python versions.
# However, Python 3.2 doesn't support the u"" syntax, so we use a u()
# function instead. tornado.util.u cannot be used because it doesn't
# support non-ascii characters on python 2.
# When we drop support for Python 3.2, we can remove the parens
# and make these plain unicode strings.
from tornado.escape import to_unicode as u
LOCALE_NAMES = {
"af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"},
"am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"},
"ar_AR": {"name_en": u"Arabic", "name": u"العربية"},
"bg_BG": {"name_en": u"Bulgarian", "name": u"Български"},
"bn_IN": {"name_en": u"Bengali", "name": u"বাংলা"},
"bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"},
"ca_ES": {"name_en": u"Catalan", "name": u"Català"},
"cs_CZ": {"name_en": u"Czech", "name": u"Čeština"},
"cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"},
"da_DK": {"name_en": u"Danish", "name": u"Dansk"},
"de_DE": {"name_en": u"German", "name": u"Deutsch"},
"el_GR": {"name_en": u"Greek", "name": u"Ελληνικά"},
"en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"},
"en_US": {"name_en": u"English (US)", "name": u"English (US)"},
"es_ES": {"name_en": u"Spanish (Spain)", "name": u"Español (España)"},
"es_LA": {"name_en": u"Spanish", "name": u"Español"},
"et_EE": {"name_en": u"Estonian", "name": u"Eesti"},
"eu_ES": {"name_en": u"Basque", "name": u"Euskara"},
"fa_IR": {"name_en": u"Persian", "name": u"فارسی"},
"fi_FI": {"name_en": u"Finnish", "name": u"Suomi"},
"fr_CA": {"name_en": u"French (Canada)", "name": u"Français (Canada)"},
"fr_FR": {"name_en": u"French", "name": u"Français"},
"ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"},
"gl_ES": {"name_en": u"Galician", "name": u"Galego"},
"he_IL": {"name_en": u"Hebrew", "name": u"עברית"},
"hi_IN": {"name_en": u"Hindi", "name": u"हिन्दी"},
"hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"},
"hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"},
"id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"},
"is_IS": {"name_en": u"Icelandic", "name": u"Íslenska"},
"it_IT": {"name_en": u"Italian", "name": u"Italiano"},
"ja_JP": {"name_en": u"Japanese", "name": u"日本語"},
"ko_KR": {"name_en": u"Korean", "name": u"한국어"},
"lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvių"},
"lv_LV": {"name_en": u"Latvian", "name": u"Latviešu"},
"mk_MK": {"name_en": u"Macedonian", "name": u"Македонски"},
"ml_IN": {"name_en": u"Malayalam", "name": u"മലയാളം"},
"ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"},
"nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokmål)"},
"nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"},
"nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"},
"pa_IN": {"name_en": u"Punjabi", "name": u"ਪੰਜਾਬੀ"},
"pl_PL": {"name_en": u"Polish", "name": u"Polski"},
"pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Português (Brasil)"},
"pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Português (Portugal)"},
"ro_RO": {"name_en": u"Romanian", "name": u"Română"},
"ru_RU": {"name_en": u"Russian", "name": u"Русский"},
"sk_SK": {"name_en": u"Slovak", "name": u"Slovenčina"},
"sl_SI": {"name_en": u"Slovenian", "name": u"Slovenščina"},
"sq_AL": {"name_en": u"Albanian", "name": u"Shqip"},
"sr_RS": {"name_en": u"Serbian", "name": u"Српски"},
"sv_SE": {"name_en": u"Swedish", "name": u"Svenska"},
"sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"},
"ta_IN": {"name_en": u"Tamil", "name": u"தமிழ்"},
"te_IN": {"name_en": u"Telugu", "name": u"తెలుగు"},
"th_TH": {"name_en": u"Thai", "name": u"ภาษาไทย"},
"tl_PH": {"name_en": u"Filipino", "name": u"Filipino"},
"tr_TR": {"name_en": u"Turkish", "name": u"Türkçe"},
"uk_UA": {"name_en": u"Ukraini ", "name": u"Українська"},
"vi_VN": {"name_en": u"Vietnamese", "name": u"Tiếng Việt"},
"zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"中文(简体)"},
"zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"中文(繁體)"},
}

View file

@ -75,14 +75,14 @@ import hmac
import time
import uuid
from tornado.concurrent import TracebackFuture, return_future
from tornado.concurrent import TracebackFuture, return_future, chain_future
from tornado import gen
from tornado import httpclient
from tornado import escape
from tornado.httputil import url_concat
from tornado.log import gen_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import u, unicode_type, ArgReplacer
from tornado.util import unicode_type, ArgReplacer
try:
import urlparse # py2
@ -188,7 +188,7 @@ class OpenIdMixin(object):
"""
# Verify the OpenID response via direct request to the OP
args = dict((k, v[-1]) for k, v in self.request.arguments.items())
args["openid.mode"] = u("check_authentication")
args["openid.mode"] = u"check_authentication"
url = self._OPENID_ENDPOINT
if http_client is None:
http_client = self.get_auth_http_client()
@ -255,13 +255,13 @@ class OpenIdMixin(object):
ax_ns = None
for name in self.request.arguments:
if name.startswith("openid.ns.") and \
self.get_argument(name) == u("http://openid.net/srv/ax/1.0"):
self.get_argument(name) == u"http://openid.net/srv/ax/1.0":
ax_ns = name[10:]
break
def get_ax_arg(uri):
if not ax_ns:
return u("")
return u""
prefix = "openid." + ax_ns + ".type."
ax_name = None
for name in self.request.arguments.keys():
@ -270,8 +270,8 @@ class OpenIdMixin(object):
ax_name = "openid." + ax_ns + ".value." + part
break
if not ax_name:
return u("")
return self.get_argument(ax_name, u(""))
return u""
return self.get_argument(ax_name, u"")
email = get_ax_arg("http://axschema.org/contact/email")
name = get_ax_arg("http://axschema.org/namePerson")
@ -985,7 +985,7 @@ class FacebookGraphMixin(OAuth2Mixin):
future.set_exception(AuthError('Facebook auth error: %s' % str(response)))
return
args = escape.parse_qs_bytes(escape.native_str(response.body))
args = urlparse.parse_qs(escape.native_str(response.body))
session = {
"access_token": args["access_token"][-1],
"expires": args.get("expires")
@ -1062,8 +1062,13 @@ class FacebookGraphMixin(OAuth2Mixin):
Added the ability to override ``self._FACEBOOK_BASE_URL``.
"""
url = self._FACEBOOK_BASE_URL + path
return self.oauth2_request(url, callback, access_token,
post_args, **args)
# Thanks to the _auth_return_future decorator, our "callback"
# argument is a Future, which we cannot pass as a callback to
# oauth2_request. Instead, have oauth2_request return a
# future and chain them together.
oauth_future = self.oauth2_request(url, access_token=access_token,
post_args=post_args, **args)
chain_future(oauth_future, callback)
def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
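
The comment above explains why the decorated callback future is now chained to the future returned by oauth2_request rather than passed in as a callback. A minimal sketch of what chain_future does, assuming only tornado.concurrent (the values are illustrative):

from tornado.concurrent import Future, chain_future

source = Future()
target = Future()
# chain_future copies the result (or exception) of ``source`` onto
# ``target`` once ``source`` completes; facebook_request uses this to
# feed oauth2_request's result into its own returned future.
chain_future(source, target)
source.set_result({"id": "1234"})
assert target.result() == {"id": "1234"}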

View file

@ -177,6 +177,15 @@ class Future(object):
def __await__(self):
return (yield self)
"""))
else:
# Py2-compatible version for use with cython.
def __await__(self):
result = yield self
# StopIteration doesn't take args before py33,
# but Cython recognizes the args tuple.
e = StopIteration()
e.args = (result,)
raise e
def cancel(self):
"""Cancel the operation, if possible.
@ -373,6 +382,7 @@ def run_on_executor(*args, **kwargs):
def run_on_executor_decorator(fn):
executor = kwargs.get("executor", "executor")
io_loop = kwargs.get("io_loop", "io_loop")
@functools.wraps(fn)
def wrapper(self, *args, **kwargs):
callback = kwargs.pop("callback", None)
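
The hunk above also adds functools.wraps to the run_on_executor wrapper. A hedged sketch of typical run_on_executor usage (the class and method names are illustrative, not from the commit), showing what the added wraps preserves:

from concurrent.futures import ThreadPoolExecutor
from tornado.concurrent import run_on_executor
from tornado.ioloop import IOLoop

class Worker(object):
    def __init__(self):
        # run_on_executor looks for these attributes by default.
        self.executor = ThreadPoolExecutor(max_workers=2)
        self.io_loop = IOLoop.current()

    @run_on_executor
    def compute(self, x):
        """Blocking work that runs on the executor, off the IOLoop thread."""
        return x * x

# With the added functools.wraps, the wrapped method keeps its
# original metadata instead of showing up as "wrapper".
assert Worker.compute.__name__ == "compute"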

View file

@ -221,6 +221,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
# _process_queue() is called from
# _finish_pending_requests the exceptions have
# nowhere to go.
self._free_list.append(curl)
callback(HTTPResponse(
request=request,
code=599,
@ -387,17 +388,28 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
else:
raise KeyError('unknown method ' + request.method)
# Handle curl's cryptic options for every individual HTTP method
if request.method == "GET":
if request.body is not None:
raise ValueError('Body must be None for GET request')
elif request.method in ("POST", "PUT") or request.body:
if request.body is None:
body_expected = request.method in ("POST", "PATCH", "PUT")
body_present = request.body is not None
if not request.allow_nonstandard_methods:
# Some HTTP methods nearly always have bodies while others
# almost never do. Fail in this case unless the user has
# opted out of sanity checks with allow_nonstandard_methods.
if ((body_expected and not body_present) or
(body_present and not body_expected)):
raise ValueError(
'Body must not be None for "%s" request'
% request.method)
'Body must %sbe None for method %s (unless '
'allow_nonstandard_methods is true)' %
('not ' if body_expected else '', request.method))
request_buffer = BytesIO(utf8(request.body))
if body_expected or body_present:
if request.method == "GET":
# Even with `allow_nonstandard_methods` we disallow
# GET with a body (because libcurl doesn't allow it
# unless we use CUSTOMREQUEST). While the spec doesn't
# forbid clients from sending a body, it arguably
# disallows the server from doing anything with them.
raise ValueError('Body must be None for GET request')
request_buffer = BytesIO(utf8(request.body or ''))
def ioctl(cmd):
if cmd == curl.IOCMD_RESTARTREAD:
@ -405,10 +417,10 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
curl.setopt(pycurl.READFUNCTION, request_buffer.read)
curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
if request.method == "POST":
curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ''))
else:
curl.setopt(pycurl.UPLOAD, True)
curl.setopt(pycurl.INFILESIZE, len(request.body))
curl.setopt(pycurl.INFILESIZE, len(request.body or ''))
if request.auth_username is not None:
userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
@ -454,7 +466,8 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
if header_callback is not None:
self.io_loop.add_callback(header_callback, header_line)
# header_line as returned by curl includes the end-of-line characters.
header_line = header_line.strip()
# whitespace at the start should be preserved to allow multi-line headers
header_line = header_line.rstrip()
if header_line.startswith("HTTP/"):
headers.clear()
try:
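
For readability, the body checks added above can be paraphrased as a small standalone function; this is a sketch of the same decision logic, not code from the commit:

def check_body(method, body, allow_nonstandard_methods=False):
    # POST/PATCH/PUT normally carry a body and other methods normally
    # do not; a mismatch is rejected unless the caller opts out with
    # allow_nonstandard_methods.
    body_expected = method in ("POST", "PATCH", "PUT")
    body_present = body is not None
    if not allow_nonstandard_methods and body_expected != body_present:
        raise ValueError('Body must %sbe None for method %s (unless '
                         'allow_nonstandard_methods is true)' %
                         ('not ' if body_expected else '', method))
    if body_expected or body_present:
        # GET with a body is always rejected because libcurl cannot
        # send it without CUSTOMREQUEST.
        if method == "GET" and body_present:
            raise ValueError('Body must be None for GET request')

check_body("POST", "payload")                                     # accepted
check_body("DELETE", None)                                        # accepted
check_body("DELETE", "payload", allow_nonstandard_methods=True)   # accepted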

View file

@ -25,7 +25,7 @@ from __future__ import absolute_import, division, print_function, with_statement
import re
import sys
from tornado.util import unicode_type, basestring_type, u
from tornado.util import unicode_type, basestring_type
try:
from urllib.parse import parse_qs as _parse_qs # py3
@ -366,7 +366,7 @@ def linkify(text, shorten=False, extra_params="",
# have a status bar, such as Safari by default)
params += ' title="%s"' % href
return u('<a href="%s"%s>%s</a>') % (href, params, url)
return u'<a href="%s"%s>%s</a>' % (href, params, url)
# First HTML-escape so that our strings are all safe.
# The regex is modified to avoid character entities other than &amp; so

View file

@ -79,6 +79,7 @@ from __future__ import absolute_import, division, print_function, with_statement
import collections
import functools
import itertools
import os
import sys
import textwrap
import types
@ -90,23 +91,38 @@ from tornado import stack_context
from tornado.util import raise_exc_info
try:
from functools import singledispatch # py34+
except ImportError as e:
try:
from singledispatch import singledispatch # backport
from functools import singledispatch # py34+
except ImportError:
from singledispatch import singledispatch # backport
except ImportError:
# In most cases, singledispatch is required (to avoid
# difficult-to-diagnose problems in which the functionality
# available differs depending on which invisible packages are
# installed). However, in Google App Engine third-party
# dependencies are more trouble so we allow this module to be
# imported without it.
if 'APPENGINE_RUNTIME' not in os.environ:
raise
singledispatch = None
try:
try:
from collections.abc import Generator as GeneratorType # py35+
except ImportError:
from backports_abc import Generator as GeneratorType
try:
from inspect import isawaitable # py35+
except ImportError:
from backports_abc import isawaitable
except ImportError:
if 'APPENGINE_RUNTIME' not in os.environ:
raise
from types import GeneratorType
try:
from inspect import isawaitable # py35+
except ImportError:
def isawaitable(x): return False
def isawaitable(x):
return False
try:
import builtins # py3
@ -138,6 +154,21 @@ class TimeoutError(Exception):
"""Exception raised by ``with_timeout``."""
def _value_from_stopiteration(e):
try:
# StopIteration has a value attribute beginning in py33.
# So does our Return class.
return e.value
except AttributeError:
pass
try:
# Cython backports coroutine functionality by putting the value in
# e.args[0].
return e.args[0]
except (AttributeError, IndexError):
return None
def engine(func):
"""Callback-oriented decorator for asynchronous generators.
@ -222,6 +253,7 @@ def _make_coroutine_wrapper(func, replace_callback):
# to be used with 'await'.
if hasattr(types, 'coroutine'):
func = types.coroutine(func)
@functools.wraps(func)
def wrapper(*args, **kwargs):
future = TracebackFuture()
@ -234,7 +266,7 @@ def _make_coroutine_wrapper(func, replace_callback):
try:
result = func(*args, **kwargs)
except (Return, StopIteration) as e:
result = getattr(e, 'value', None)
result = _value_from_stopiteration(e)
except Exception:
future.set_exc_info(sys.exc_info())
return future
@ -255,7 +287,7 @@ def _make_coroutine_wrapper(func, replace_callback):
'stack_context inconsistency (probably caused '
'by yield within a "with StackContext" block)'))
except (StopIteration, Return) as e:
future.set_result(getattr(e, 'value', None))
future.set_result(_value_from_stopiteration(e))
except Exception:
future.set_exc_info(sys.exc_info())
else:
@ -300,6 +332,8 @@ class Return(Exception):
def __init__(self, value=None):
super(Return, self).__init__()
self.value = value
# Cython recognizes subclasses of StopIteration with a .args tuple.
self.args = (value,)
class WaitIterator(object):
@ -584,27 +618,91 @@ class YieldFuture(YieldPoint):
return self.result_fn()
class Multi(YieldPoint):
def _contains_yieldpoint(children):
"""Returns True if ``children`` contains any YieldPoints.
``children`` may be a dict or a list, as used by `MultiYieldPoint`
and `multi_future`.
"""
if isinstance(children, dict):
return any(isinstance(i, YieldPoint) for i in children.values())
if isinstance(children, list):
return any(isinstance(i, YieldPoint) for i in children)
return False
def multi(children, quiet_exceptions=()):
"""Runs multiple asynchronous operations in parallel.
Takes a list of ``YieldPoints`` or ``Futures`` and returns a list of
their responses. It is not necessary to call `Multi` explicitly,
since the engine will do so automatically when the generator yields
a list of ``YieldPoints`` or a mixture of ``YieldPoints`` and ``Futures``.
``children`` may either be a list or a dict whose values are
yieldable objects. ``multi()`` returns a new yieldable
object that resolves to a parallel structure containing their
results. If ``children`` is a list, the result is a list of
results in the same order; if it is a dict, the result is a dict
with the same keys.
Instead of a list, the argument may also be a dictionary whose values are
Futures, in which case a parallel dictionary is returned mapping the same
keys to their results.
That is, ``results = yield multi(list_of_futures)`` is equivalent
to::
It is not normally necessary to call this class directly, as it
will be created automatically as needed. However, calling it directly
allows you to use the ``quiet_exceptions`` argument to control
the logging of multiple exceptions.
results = []
for future in list_of_futures:
results.append(yield future)
If any children raise exceptions, ``multi()`` will raise the first
one. All others will be logged, unless they are of types
contained in the ``quiet_exceptions`` argument.
If any of the inputs are `YieldPoints <YieldPoint>`, the returned
yieldable object is a `YieldPoint`. Otherwise, returns a `.Future`.
This means that the result of `multi` can be used in a native
coroutine if and only if all of its children can be.
In a ``yield``-based coroutine, it is not normally necessary to
call this function directly, since the coroutine runner will
do it automatically when a list or dict is yielded. However,
it is necessary in ``await``-based coroutines, or to pass
the ``quiet_exceptions`` argument.
This function is available under the names ``multi()`` and ``Multi()``
for historical reasons.
.. versionchanged:: 4.2
If multiple yieldables fail, any exceptions after the first
(which is raised) will be logged. Added the ``quiet_exceptions``
argument to suppress this logging for selected exception types.
.. versionchanged:: 4.3
Replaced the class ``Multi`` and the function ``multi_future``
with a unified function ``multi``. Added support for yieldables
other than `YieldPoint` and `.Future`.
"""
if _contains_yieldpoint(children):
return MultiYieldPoint(children, quiet_exceptions=quiet_exceptions)
else:
return multi_future(children, quiet_exceptions=quiet_exceptions)
Multi = multi
class MultiYieldPoint(YieldPoint):
"""Runs multiple asynchronous operations in parallel.
This class is similar to `multi`, but it always creates a stack
context even when no children require it. It is not compatible with
native coroutines.
.. versionchanged:: 4.2
If multiple ``YieldPoints`` fail, any exceptions after the first
(which is raised) will be logged. Added the ``quiet_exceptions``
argument to suppress this logging for selected exception types.
.. versionchanged:: 4.3
Renamed from ``Multi`` to ``MultiYieldPoint``. The name ``Multi``
remains as an alias for the equivalent `multi` function.
.. deprecated:: 4.3
Use `multi` instead.
"""
def __init__(self, children, quiet_exceptions=()):
self.keys = None
@ -613,6 +711,8 @@ class Multi(YieldPoint):
children = children.values()
self.children = []
for i in children:
if not isinstance(i, YieldPoint):
i = convert_yielded(i)
if is_future(i):
i = YieldFuture(i)
self.children.append(i)
@ -654,25 +754,8 @@ class Multi(YieldPoint):
def multi_future(children, quiet_exceptions=()):
"""Wait for multiple asynchronous futures in parallel.
Takes a list of ``Futures`` or other yieldable objects (with the
exception of the legacy `.YieldPoint` interfaces) and returns a
new Future that resolves when all the other Futures are done. If
all the ``Futures`` succeeded, the returned Future's result is a
list of their results. If any failed, the returned Future raises
the exception of the first one to fail.
Instead of a list, the argument may also be a dictionary whose values are
Futures, in which case a parallel dictionary is returned mapping the same
keys to their results.
It is not normally necessary to call `multi_future` explcitly,
since the engine will do so automatically when the generator
yields a list of ``Futures``. However, calling it directly
allows you to use the ``quiet_exceptions`` argument to control
the logging of multiple exceptions.
This function is faster than the `Multi` `YieldPoint` because it
does not require the creation of a stack context.
This function is similar to `multi`, but does not support
`YieldPoints <YieldPoint>`.
.. versionadded:: 4.0
@ -681,8 +764,8 @@ def multi_future(children, quiet_exceptions=()):
raised) will be logged. Added the ``quiet_exceptions``
argument to suppress this logging for selected exception types.
.. versionchanged:: 4.3
Added support for other yieldable objects.
.. deprecated:: 4.3
Use `multi` instead.
"""
if isinstance(children, dict):
keys = list(children.keys())
@ -732,6 +815,11 @@ def maybe_future(x):
it is wrapped in a new `.Future`. This is suitable for use as
``result = yield gen.maybe_future(f())`` when you don't know whether
``f()`` returns a `.Future` or not.
.. deprecated:: 4.3
This function only handles ``Futures``, not other yieldable objects.
Instead of `maybe_future`, check for the non-future result types
you expect (often just ``None``), and ``yield`` anything unknown.
"""
if is_future(x):
return x
@ -944,7 +1032,7 @@ class Runner(object):
raise LeakedCallbackError(
"finished without waiting for callbacks %r" %
self.pending_callbacks)
self.result_future.set_result(getattr(e, 'value', None))
self.result_future.set_result(_value_from_stopiteration(e))
self.result_future = None
self._deactivate_stack_context()
return
@ -962,13 +1050,9 @@ class Runner(object):
def handle_yield(self, yielded):
# Lists containing YieldPoints require stack contexts;
# other lists are handled via multi_future in convert_yielded.
if (isinstance(yielded, list) and
any(isinstance(f, YieldPoint) for f in yielded)):
yielded = Multi(yielded)
elif (isinstance(yielded, dict) and
any(isinstance(f, YieldPoint) for f in yielded.values())):
yielded = Multi(yielded)
# other lists are handled in convert_yielded.
if _contains_yieldpoint(yielded):
yielded = multi(yielded)
if isinstance(yielded, YieldPoint):
# YieldPoints are too closely coupled to the Runner to go
@ -1051,15 +1135,66 @@ def _argument_adapter(callback):
callback(None)
return wrapper
# Convert Awaitables into Futures. It is unfortunately possible
# to have infinite recursion here if those Awaitables assume that
# we're using a different coroutine runner and yield objects
# we don't understand. If that happens, the solution is to
# register that runner's yieldable objects with convert_yielded.
if sys.version_info >= (3, 3):
exec(textwrap.dedent("""
@coroutine
def _wrap_awaitable(x):
if hasattr(x, '__await__'):
x = x.__await__()
return (yield from x)
"""))
else:
# Py2-compatible version for use with Cython.
# Copied from PEP 380.
@coroutine
def _wrap_awaitable(x):
raise NotImplementedError()
if hasattr(x, '__await__'):
_i = x.__await__()
else:
_i = iter(x)
try:
_y = next(_i)
except StopIteration as _e:
_r = _value_from_stopiteration(_e)
else:
while 1:
try:
_s = yield _y
except GeneratorExit as _e:
try:
_m = _i.close
except AttributeError:
pass
else:
_m()
raise _e
except BaseException as _e:
_x = sys.exc_info()
try:
_m = _i.throw
except AttributeError:
raise _e
else:
try:
_y = _m(*_x)
except StopIteration as _e:
_r = _value_from_stopiteration(_e)
break
else:
try:
if _s is None:
_y = next(_i)
else:
_y = _i.send(_s)
except StopIteration as _e:
_r = _value_from_stopiteration(_e)
break
raise Return(_r)
def convert_yielded(yielded):
@ -1076,10 +1211,9 @@ def convert_yielded(yielded):
.. versionadded:: 4.1
"""
# Lists and dicts containing YieldPoints were handled separately
# via Multi().
# Lists and dicts containing YieldPoints were handled earlier.
if isinstance(yielded, (list, dict)):
return multi_future(yielded)
return multi(yielded)
elif is_future(yielded):
return yielded
elif isawaitable(yielded):
@ -1089,3 +1223,19 @@ def convert_yielded(yielded):
if singledispatch is not None:
convert_yielded = singledispatch(convert_yielded)
try:
# If we can import t.p.asyncio, do it for its side effect
# (registering asyncio.Future with convert_yielded).
# It's ugly to do this here, but it prevents a cryptic
# infinite recursion in _wrap_awaitable.
# Note that even with this, asyncio integration is unlikely
# to work unless the application also configures AsyncIOLoop,
# but at least the error messages in that case are more
# comprehensible than a stack overflow.
import tornado.platform.asyncio
except ImportError:
pass
else:
# Reference the imported module to make pyflakes happy.
tornado
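
The new multi() docstring above is the main user-facing change in this file; a short hedged usage sketch (the sleeps stand in for arbitrary asynchronous operations):

from tornado import gen
from tornado.ioloop import IOLoop

@gen.coroutine
def fetch_all():
    # Yielding a list waits for every entry and returns the results
    # in the same order; a dict resolves to a dict with the same keys.
    as_list = yield gen.multi([gen.sleep(0.01), gen.sleep(0.02)])
    as_dict = yield gen.multi({"a": gen.sleep(0.01), "b": gen.sleep(0.02)})
    raise gen.Return((as_list, as_dict))

IOLoop.current().run_sync(fetch_all)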

View file

@ -342,7 +342,7 @@ class HTTP1Connection(httputil.HTTPConnection):
'Transfer-Encoding' not in headers)
else:
self._response_start_line = start_line
lines.append(utf8('HTTP/1.1 %s %s' % (start_line[1], start_line[2])))
lines.append(utf8('HTTP/1.1 %d %s' % (start_line[1], start_line[2])))
self._chunking_output = (
# TODO: should this use
# self._request_start_line.version or
@ -515,6 +515,12 @@ class HTTP1Connection(httputil.HTTPConnection):
def _read_body(self, code, headers, delegate):
if "Content-Length" in headers:
if "Transfer-Encoding" in headers:
# Response cannot contain both Content-Length and
# Transfer-Encoding headers.
# http://tools.ietf.org/html/rfc7230#section-3.3.3
raise httputil.HTTPInputError(
"Response with both Transfer-Encoding and Content-Length")
if "," in headers["Content-Length"]:
# Proxies sometimes cause Content-Length headers to get
# duplicated. If all the values are identical then we can
@ -558,7 +564,9 @@ class HTTP1Connection(httputil.HTTPConnection):
content_length -= len(body)
if not self._write_finished or self.is_client:
with _ExceptionLoggingContext(app_log):
yield gen.maybe_future(delegate.data_received(body))
ret = delegate.data_received(body)
if ret is not None:
yield ret
@gen.coroutine
def _read_chunked_body(self, delegate):
@ -579,7 +587,9 @@ class HTTP1Connection(httputil.HTTPConnection):
bytes_to_read -= len(chunk)
if not self._write_finished or self.is_client:
with _ExceptionLoggingContext(app_log):
yield gen.maybe_future(delegate.data_received(chunk))
ret = delegate.data_received(chunk)
if ret is not None:
yield ret
# chunk ends with \r\n
crlf = yield self.stream.read_bytes(2)
assert crlf == b"\r\n"
@ -619,11 +629,14 @@ class _GzipMessageDelegate(httputil.HTTPMessageDelegate):
decompressed = self._decompressor.decompress(
compressed_data, self._chunk_size)
if decompressed:
yield gen.maybe_future(
self._delegate.data_received(decompressed))
ret = self._delegate.data_received(decompressed)
if ret is not None:
yield ret
compressed_data = self._decompressor.unconsumed_tail
else:
yield gen.maybe_future(self._delegate.data_received(chunk))
ret = self._delegate.data_received(chunk)
if ret is not None:
yield ret
def finish(self):
if self._decompressor is not None:
@ -698,8 +711,7 @@ class HTTP1ServerConnection(object):
# This exception was already logged.
conn.close()
return
except Exception as e:
if 1 != e.errno:
except Exception:
gen_log.error("Uncaught exception", exc_info=True)
conn.close()
return
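
The data_received changes above yield the delegate's return value only when it is not None, instead of wrapping everything in the now-deprecated gen.maybe_future. A hedged sketch of a delegate that exercises both paths (the class name and the size threshold are illustrative):

from tornado import gen, httputil

class CollectingDelegate(httputil.HTTPMessageDelegate):
    def __init__(self):
        self.chunks = []

    def data_received(self, chunk):
        self.chunks.append(chunk)
        # Returning None means "handled synchronously"; returning a
        # Future (here via gen.sleep) asks the connection to wait
        # before reading more data.
        return gen.sleep(0) if len(chunk) > 4096 else None

    def finish(self):
        pass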

View file

@ -211,10 +211,12 @@ class AsyncHTTPClient(Configurable):
kwargs: ``HTTPRequest(request, **kwargs)``
This method returns a `.Future` whose result is an
`HTTPResponse`. By default, the ``Future`` will raise an `HTTPError`
if the request returned a non-200 response code. Instead, if
``raise_error`` is set to False, the response will always be
returned regardless of the response code.
`HTTPResponse`. By default, the ``Future`` will raise an
`HTTPError` if the request returned a non-200 response code
(other errors may also be raised if the server could not be
contacted). Instead, if ``raise_error`` is set to False, the
response will always be returned regardless of the response
code.
If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
In the callback interface, `HTTPError` is not automatically raised.
@ -603,9 +605,12 @@ class HTTPError(Exception):
"""
def __init__(self, code, message=None, response=None):
self.code = code
message = message or httputil.responses.get(code, "Unknown")
self.message = message or httputil.responses.get(code, "Unknown")
self.response = response
Exception.__init__(self, "HTTP %d: %s" % (self.code, message))
super(HTTPError, self).__init__(code, message, response)
def __str__(self):
return "HTTP %d: %s" % (self.code, self.message)
class _RequestProxy(object):
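
A hedged sketch of the behaviour described above, using the blocking HTTPClient against a placeholder URL:

from tornado.httpclient import HTTPClient, HTTPError

client = HTTPClient()
try:
    client.fetch("http://example.com/missing")      # placeholder URL
except HTTPError as e:
    # __str__ is now defined explicitly, so str(e) reads "HTTP <code>: <message>".
    print(e.code, e.message)

# With raise_error=False the response is returned regardless of status.
response = client.fetch("http://example.com/missing", raise_error=False)
print(response.code)
client.close()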

View file

@ -98,7 +98,7 @@ class _NormalizedHeaderCache(dict):
_normalized_headers = _NormalizedHeaderCache(1000)
class HTTPHeaders(dict):
class HTTPHeaders(collections.MutableMapping):
"""A dictionary that maintains ``Http-Header-Case`` for all keys.
Supports multiple values per key via a pair of new methods,
@ -127,9 +127,7 @@ class HTTPHeaders(dict):
Set-Cookie: C=D
"""
def __init__(self, *args, **kwargs):
# Don't pass args or kwargs to dict.__init__, as it will bypass
# our __setitem__
dict.__init__(self)
self._dict = {}
self._as_list = {}
self._last_key = None
if (len(args) == 1 and len(kwargs) == 0 and
@ -148,9 +146,7 @@ class HTTPHeaders(dict):
norm_name = _normalized_headers[name]
self._last_key = norm_name
if norm_name in self:
# bypass our override of __setitem__ since it modifies _as_list
dict.__setitem__(self, norm_name,
native_str(self[norm_name]) + ',' +
self._dict[norm_name] = (native_str(self[norm_name]) + ',' +
native_str(value))
self._as_list[norm_name].append(value)
else:
@ -183,8 +179,7 @@ class HTTPHeaders(dict):
# continuation of a multi-line header
new_part = ' ' + line.lstrip()
self._as_list[self._last_key][-1] += new_part
dict.__setitem__(self, self._last_key,
self[self._last_key] + new_part)
self._dict[self._last_key] += new_part
else:
name, value = line.split(":", 1)
self.add(name, value.strip())
@ -203,54 +198,36 @@ class HTTPHeaders(dict):
h.parse_line(line)
return h
# dict implementation overrides
# MutableMapping abstract method implementations.
def __setitem__(self, name, value):
norm_name = _normalized_headers[name]
dict.__setitem__(self, norm_name, value)
self._dict[norm_name] = value
self._as_list[norm_name] = [value]
def __getitem__(self, name):
return dict.__getitem__(self, _normalized_headers[name])
return self._dict[_normalized_headers[name]]
def __delitem__(self, name):
norm_name = _normalized_headers[name]
dict.__delitem__(self, norm_name)
del self._dict[norm_name]
del self._as_list[norm_name]
def __contains__(self, name):
norm_name = _normalized_headers[name]
return dict.__contains__(self, norm_name)
def __len__(self):
return len(self._dict)
def get(self, name, default=None):
return dict.get(self, _normalized_headers[name], default)
def update(self, *args, **kwargs):
# dict.update bypasses our __setitem__
for k, v in dict(*args, **kwargs).items():
self[k] = v
def __iter__(self):
return iter(self._dict)
def copy(self):
# default implementation returns dict(self), not the subclass
# defined in dict but not in MutableMapping.
return HTTPHeaders(self)
# Use our overridden copy method for the copy.copy module.
# This makes shallow copies one level deeper, but preserves
# the appearance that HTTPHeaders is a single container.
__copy__ = copy
def __deepcopy__(self, memo_dict):
# Our values are immutable strings, so our standard copy is
# effectively a deep copy.
return self.copy()
def __reduce_ex__(self, v):
# We must override dict.__reduce_ex__ to pickle ourselves
# correctly.
return HTTPHeaders, (), list(self.get_all())
def __setstate__(self, state):
for k, v in state:
self.add(k, v)
class HTTPServerRequest(object):
"""A single HTTP request.

View file

@ -400,10 +400,12 @@ class IOLoop(Configurable):
def run_sync(self, func, timeout=None):
"""Starts the `IOLoop`, runs the given function, and stops the loop.
If the function returns a `.Future`, the `IOLoop` will run
until the future is resolved. If it raises an exception, the
`IOLoop` will stop and the exception will be re-raised to the
caller.
The function must return either a yieldable object or
``None``. If the function returns a yieldable object, the
`IOLoop` will run until the yieldable is resolved (and
`run_sync()` will return the yieldable's result). If it raises
an exception, the `IOLoop` will stop and the exception will be
re-raised to the caller.
The keyword-only argument ``timeout`` may be used to set
a maximum duration for the function. If the timeout expires,
@ -418,12 +420,18 @@ class IOLoop(Configurable):
if __name__ == '__main__':
IOLoop.current().run_sync(main)
.. versionchanged:: 4.3
Returning a non-``None``, non-yieldable value is now an error.
"""
future_cell = [None]
def run():
try:
result = func()
if result is not None:
from tornado.gen import convert_yielded
result = convert_yielded(result)
except Exception:
future_cell[0] = TracebackFuture()
future_cell[0].set_exc_info(sys.exc_info())
@ -590,11 +598,20 @@ class IOLoop(Configurable):
"""
try:
ret = callback()
if ret is not None and is_future(ret):
if ret is not None:
from tornado import gen
# Functions that return Futures typically swallow all
# exceptions and store them in the Future. If a Future
# makes it out to the IOLoop, ensure its exception (if any)
# gets logged too.
try:
ret = gen.convert_yielded(ret)
except gen.BadYieldError:
# It's not unusual for add_callback to be used with
# methods returning a non-None and non-yieldable
# result, which should just be ignored.
pass
else:
self.add_future(ret, lambda f: f.result())
except Exception:
self.handle_callback_exception(callback)
@ -909,38 +926,40 @@ class PollIOLoop(IOLoop):
self._cancellations += 1
def add_callback(self, callback, *args, **kwargs):
if thread.get_ident() != self._thread_ident:
# If we're not on the IOLoop's thread, we need to synchronize
# with other threads, or waking logic will induce a race.
with self._callback_lock:
if self._closing:
raise RuntimeError("IOLoop is closing")
return
list_empty = not self._callbacks
self._callbacks.append(functools.partial(
stack_context.wrap(callback), *args, **kwargs))
if list_empty and thread.get_ident() != self._thread_ident:
# If we're in the IOLoop's thread, we know it's not currently
# polling. If we're not, and we added the first callback to an
# empty list, we may need to wake it up (it may wake up on its
# own, but an occasional extra wake is harmless). Waking
# up a polling IOLoop is relatively expensive, so we try to
# avoid it when we can.
if list_empty:
# If we're not in the IOLoop's thread, and we added the
# first callback to an empty list, we may need to wake it
# up (it may wake up on its own, but an occasional extra
# wake is harmless). Waking up a polling IOLoop is
# relatively expensive, so we try to avoid it when we can.
self._waker.wake()
else:
if self._closing:
return
# If we're on the IOLoop's thread, we don't need the lock,
# since we don't need to wake anyone, just add the
# callback. Blindly insert into self._callbacks. This is
# safe even from signal handlers because the GIL makes
# list.append atomic. One subtlety is that if the signal
# is interrupting another thread holding the
# _callback_lock block in IOLoop.start, we may modify
# either the old or new version of self._callbacks, but
# either way will work.
self._callbacks.append(functools.partial(
stack_context.wrap(callback), *args, **kwargs))
def add_callback_from_signal(self, callback, *args, **kwargs):
with stack_context.NullContext():
if thread.get_ident() != self._thread_ident:
# if the signal is handled on another thread, we can add
# it normally (modulo the NullContext)
self.add_callback(callback, *args, **kwargs)
else:
# If we're on the IOLoop's thread, we cannot use
# the regular add_callback because it may deadlock on
# _callback_lock. Blindly insert into self._callbacks.
# This is safe because the GIL makes list.append atomic.
# One subtlety is that if the signal interrupted the
# _callback_lock block in IOLoop.start, we may modify
# either the old or new version of self._callbacks,
# but either way will work.
self._callbacks.append(functools.partial(
stack_context.wrap(callback), *args, **kwargs))
class _Timeout(object):
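
A hedged sketch of run_sync under the new contract (the function passed in must return a yieldable or None):

from tornado import gen
from tornado.ioloop import IOLoop

@gen.coroutine
def main():
    yield gen.sleep(0.01)
    raise gen.Return("done")

# run_sync starts the loop, waits for the yieldable, and returns its result.
assert IOLoop.current().run_sync(main) == "done"

# As of 4.3, returning a non-None, non-yieldable value from the
# function passed to run_sync raises an error instead of being ignored.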

View file

@ -648,7 +648,6 @@ class BaseIOStream(object):
except UnsatisfiableReadError:
raise
except Exception as e:
if 1 != e.errno:
gen_log.warning("error on read: %s" % e)
self.close(exc_info=True)
return
@ -726,9 +725,12 @@ class BaseIOStream(object):
to read (i.e. the read returns EWOULDBLOCK or equivalent). On
error closes the socket and raises an exception.
"""
while True:
try:
chunk = self.read_from_fd()
except (socket.error, IOError, OSError) as e:
if errno_from_exception(e) == errno.EINTR:
continue
# ssl.SSLError is a subclass of socket.error
if self._is_connreset(e):
# Treat ECONNRESET as a connection close rather than
@ -738,6 +740,7 @@ class BaseIOStream(object):
return
self.close(exc_info=True)
raise
break
if chunk is None:
return 0
self._read_buffer.append(chunk)
@ -1275,10 +1278,11 @@ class SSLIOStream(IOStream):
raise
except socket.error as err:
# Some port scans (e.g. nmap in -sT mode) have been known
# to cause do_handshake to raise EBADF, so make that error
# quiet as well.
# to cause do_handshake to raise EBADF and ENOTCONN, so make
# those errors quiet as well.
# https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0
if self._is_connreset(err) or err.args[0] == errno.EBADF:
if (self._is_connreset(err) or
err.args[0] in (errno.EBADF, errno.ENOTCONN)):
return self.close(exc_info=True)
raise
except AttributeError:

View file

@ -51,7 +51,6 @@ import re
from tornado import escape
from tornado.log import gen_log
from tornado.util import u
from tornado._locale_data import LOCALE_NAMES
@ -61,6 +60,7 @@ _supported_locales = frozenset([_default_locale])
_use_gettext = False
CONTEXT_SEPARATOR = "\x04"
def get(*locale_codes):
"""Returns the closest match for the given locale codes.
@ -273,7 +273,7 @@ class Locale(object):
def __init__(self, code, translations):
self.code = code
self.name = LOCALE_NAMES.get(code, {}).get("name", u("Unknown"))
self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown")
self.rtl = False
for prefix in ["fa", "ar", "he"]:
if self.code.startswith(prefix):
@ -375,7 +375,7 @@ class Locale(object):
str_time = "%d:%02d" % (local_date.hour, local_date.minute)
elif self.code == "zh_CN":
str_time = "%s%d:%02d" % (
(u('\u4e0a\u5348'), u('\u4e0b\u5348'))[local_date.hour >= 12],
(u'\u4e0a\u5348', u'\u4e0b\u5348')[local_date.hour >= 12],
local_date.hour % 12 or 12, local_date.minute)
else:
str_time = "%d:%02d %s" % (
@ -421,7 +421,7 @@ class Locale(object):
return ""
if len(parts) == 1:
return parts[0]
comma = u(' \u0648 ') if self.code.startswith("fa") else u(", ")
comma = u' \u0648 ' if self.code.startswith("fa") else u", "
return _("%(commas)s and %(last)s") % {
"commas": comma.join(parts[:-1]),
"last": parts[len(parts) - 1],

View file

@ -465,7 +465,7 @@ class Lock(object):
...
... # Now the lock is released.
.. versionchanged:: 3.5
.. versionchanged:: 4.3
Added ``async with`` support in Python 3.5.
"""

View file

@ -190,10 +190,22 @@ def enable_pretty_logging(options=None, logger=None):
logger = logging.getLogger()
logger.setLevel(getattr(logging, options.logging.upper()))
if options.log_file_prefix:
rotate_mode = options.log_rotate_mode
if rotate_mode == 'size':
channel = logging.handlers.RotatingFileHandler(
filename=options.log_file_prefix,
maxBytes=options.log_file_max_size,
backupCount=options.log_file_num_backups)
elif rotate_mode == 'time':
channel = logging.handlers.TimedRotatingFileHandler(
filename=options.log_file_prefix,
when=options.log_rotate_when,
interval=options.log_rotate_interval,
backupCount=options.log_file_num_backups)
else:
error_message = 'The value of log_rotate_mode option should be ' +\
'"size" or "time", not "%s".' % rotate_mode
raise ValueError(error_message)
channel.setFormatter(LogFormatter(color=False))
logger.addHandler(channel)
@ -235,4 +247,13 @@ def define_logging_options(options=None):
options.define("log_file_num_backups", type=int, default=10,
help="number of log files to keep")
options.define("log_rotate_when", type=str, default='midnight',
help=("specify the type of TimedRotatingFileHandler interval "
"other options:('S', 'M', 'H', 'D', 'W0'-'W6')"))
options.define("log_rotate_interval", type=int, default=1,
help="The interval value of timed rotating")
options.define("log_rotate_mode", type=str, default='size',
help="The mode of rotating files(time or size)")
options.add_parse_callback(lambda: enable_pretty_logging(options))
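
A hedged sketch of selecting the new time-based rotation from code; the log path is a placeholder, and the same options can also be set on the command line (e.g. --log_rotate_mode=time):

from tornado.log import enable_pretty_logging
from tornado.options import options

options.log_file_prefix = "/tmp/myapp.log"   # placeholder path
options.log_rotate_mode = "time"             # "size" (default) or "time"
options.log_rotate_when = "midnight"         # passed to TimedRotatingFileHandler
options.log_rotate_interval = 1
enable_pretty_logging(options)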

View file

@ -27,7 +27,7 @@ import stat
from tornado.concurrent import dummy_executor, run_on_executor
from tornado.ioloop import IOLoop
from tornado.platform.auto import set_close_exec
from tornado.util import u, Configurable, errno_from_exception
from tornado.util import Configurable, errno_from_exception
try:
import ssl
@ -96,7 +96,7 @@ else:
# module-import time, the import lock is already held by the main thread,
# leading to deadlock. Avoid it by caching the idna encoder on the main
# thread now.
u('foo').encode('idna')
u'foo'.encode('idna')
# These errnos indicate that a non-blocking operation must be retried
# at a later time. On most platforms they're the same value, but on
@ -111,7 +111,7 @@ _DEFAULT_BACKLOG = 128
def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
backlog=_DEFAULT_BACKLOG, flags=None):
backlog=_DEFAULT_BACKLOG, flags=None, reuse_port=False):
"""Creates listening sockets bound to the given port and address.
Returns a list of socket objects (multiple sockets are returned if
@ -130,7 +130,14 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like
``socket.AI_PASSIVE | socket.AI_NUMERICHOST``.
``reuse_port`` option sets ``SO_REUSEPORT`` option for every socket
in the list. If your platform doesn't support this option, ValueError will
be raised.
"""
if reuse_port and not hasattr(socket, "SO_REUSEPORT"):
raise ValueError("the platform doesn't support SO_REUSEPORT")
sockets = []
if address == "":
address = None
@ -165,6 +172,8 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
set_close_exec(sock.fileno())
if os.name != 'nt':
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if reuse_port:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if af == socket.AF_INET6:
# On linux, ipv6 sockets accept ipv4 too by default,
# but this makes it impossible to bind to both
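
A hedged sketch of the new reuse_port flag (the port number is arbitrary; on platforms without SO_REUSEPORT the call raises ValueError):

import socket
from tornado.netutil import bind_sockets

if hasattr(socket, "SO_REUSEPORT"):
    # Two listeners can bind the same port; the kernel distributes
    # incoming connections between them.
    first = bind_sockets(8888, address="127.0.0.1", reuse_port=True)
    second = bind_sockets(8888, address="127.0.0.1", reuse_port=True)
else:
    first = bind_sockets(8888, address="127.0.0.1")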

View file

@ -41,6 +41,12 @@ either::
# or
tornado.options.parse_config_file("/etc/server.conf")
.. note:
When using tornado.options.parse_command_line or
tornado.options.parse_config_file, the only options that are set are
ones that were previously defined with tornado.options.define.
Command line formats are what you would expect (``--myoption=myvalue``).
Config files are just Python files. Global names become options, e.g.::
@ -132,8 +138,10 @@ class OptionParser(object):
return name in self._options
def __getitem__(self, name):
name = self._normalize_name(name)
return self._options[name].value()
return self.__getattr__(name)
def __setitem__(self, name, value):
return self.__setattr__(name, value)
def items(self):
"""A sequence of (name, value) pairs.
@ -487,19 +495,17 @@ class _Option(object):
pass
raise Error('Unrecognized date/time format: %r' % value)
_TIMEDELTA_ABBREVS = [
('hours', ['h']),
('minutes', ['m', 'min']),
('seconds', ['s', 'sec']),
('milliseconds', ['ms']),
('microseconds', ['us']),
('days', ['d']),
('weeks', ['w']),
]
_TIMEDELTA_ABBREV_DICT = dict(
(abbrev, full) for full, abbrevs in _TIMEDELTA_ABBREVS
for abbrev in abbrevs)
_TIMEDELTA_ABBREV_DICT = {
'h': 'hours',
'm': 'minutes',
'min': 'minutes',
's': 'seconds',
'sec': 'seconds',
'ms': 'milliseconds',
'us': 'microseconds',
'd': 'days',
'w': 'weeks',
}
_FLOAT_PATTERN = r'[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?'
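
With __getitem__ now delegating to __getattr__ and the new __setitem__, item access and attribute access behave identically; a small sketch (the option name is illustrative):

from tornado.options import define, options

define("port", default=8000, type=int)
options["port"] = 8080     # routed through the same path as options.port = 8080
assert options.port == options["port"] == 8080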

View file

@ -1,12 +1,22 @@
"""Bridges between the `asyncio` module and Tornado IOLoop.
This is a work in progress and interfaces are subject to change.
.. versionadded:: 3.2
To test:
python3.4 -m tornado.test.runtests --ioloop=tornado.platform.asyncio.AsyncIOLoop
python3.4 -m tornado.test.runtests --ioloop=tornado.platform.asyncio.AsyncIOMainLoop
(the tests log a few warnings with AsyncIOMainLoop because they leave some
unfinished callbacks on the event loop that fail when it resumes)
This module integrates Tornado with the ``asyncio`` module introduced
in Python 3.4 (and available `as a separate download
<https://pypi.python.org/pypi/asyncio>`_ for Python 3.3). This makes
it possible to combine the two libraries on the same event loop.
Most applications should use `AsyncIOMainLoop` to run Tornado on the
default ``asyncio`` event loop. Applications that need to run event
loops on multiple threads may use `AsyncIOLoop` to create multiple
loops.
.. note::
Tornado requires the `~asyncio.BaseEventLoop.add_reader` family of methods,
so it is not compatible with the `~asyncio.ProactorEventLoop` on Windows.
Use the `~asyncio.SelectorEventLoop` instead.
"""
from __future__ import absolute_import, division, print_function, with_statement
@ -140,12 +150,33 @@ class BaseAsyncIOLoop(IOLoop):
class AsyncIOMainLoop(BaseAsyncIOLoop):
"""``AsyncIOMainLoop`` creates an `.IOLoop` that corresponds to the
current ``asyncio`` event loop (i.e. the one returned by
``asyncio.get_event_loop()``). Recommended usage::
from tornado.platform.asyncio import AsyncIOMainLoop
import asyncio
AsyncIOMainLoop().install()
asyncio.get_event_loop().run_forever()
"""
def initialize(self, **kwargs):
super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(),
close_loop=False, **kwargs)
class AsyncIOLoop(BaseAsyncIOLoop):
"""``AsyncIOLoop`` is an `.IOLoop` that runs on an ``asyncio`` event loop.
This class follows the usual Tornado semantics for creating new
``IOLoops``; these loops are not necessarily related to the
``asyncio`` default event loop. Recommended usage::
from tornado.ioloop import IOLoop
IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop')
IOLoop.current().start()
Each ``AsyncIOLoop`` creates a new ``asyncio.EventLoop``; this object
can be accessed with the ``asyncio_loop`` attribute.
"""
def initialize(self, **kwargs):
loop = asyncio.new_event_loop()
try:
@ -158,14 +189,25 @@ class AsyncIOLoop(BaseAsyncIOLoop):
def to_tornado_future(asyncio_future):
"""Convert an ``asyncio.Future`` to a `tornado.concurrent.Future`."""
"""Convert an `asyncio.Future` to a `tornado.concurrent.Future`.
.. versionadded:: 4.1
"""
tf = tornado.concurrent.Future()
tornado.concurrent.chain_future(asyncio_future, tf)
return tf
def to_asyncio_future(tornado_future):
"""Convert a `tornado.concurrent.Future` to an ``asyncio.Future``."""
"""Convert a Tornado yieldable object to an `asyncio.Future`.
.. versionadded:: 4.1
.. versionchanged:: 4.3
Now accepts any yieldable object, not just
`tornado.concurrent.Future`.
"""
tornado_future = convert_yielded(tornado_future)
af = asyncio.Future()
tornado.concurrent.chain_future(tornado_future, af)
return af
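
A hedged end-to-end sketch combining the pieces documented above (Python 3.3+ with asyncio; the coroutine is illustrative):

import asyncio
from tornado import gen
from tornado.platform.asyncio import AsyncIOMainLoop, to_asyncio_future

AsyncIOMainLoop().install()          # run Tornado on the default asyncio loop

@gen.coroutine
def tornado_task():
    yield gen.sleep(0.01)
    raise gen.Return(42)

loop = asyncio.get_event_loop()
# to_asyncio_future now accepts any yieldable, not just tornado Futures.
assert loop.run_until_complete(to_asyncio_future(tornado_task())) == 42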

View file

@ -47,8 +47,13 @@ try:
except ImportError:
pass
try:
from time import monotonic as monotonic_time
# monotonic can provide a monotonic function in versions of python before
# 3.3, too.
from monotonic import monotonic as monotonic_time
except ImportError:
try:
from time import monotonic as monotonic_time
except ImportError:
monotonic_time = None
__all__ = ['Waker', 'set_close_exec', 'monotonic_time']

View file

@ -12,10 +12,6 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Note: This module's docs are not currently extracted automatically,
# so changes must be made manually to twisted.rst
# TODO: refactor doc build process to use an appropriate virtualenv
"""Bridges between the Twisted reactor and Tornado IOLoop.
This module lets you run applications and libraries written for
@ -23,45 +19,6 @@ Twisted in a Tornado application. It can be used in two modes,
depending on which library's underlying event loop you want to use.
This module has been tested with Twisted versions 11.0.0 and newer.
Twisted on Tornado
------------------
`TornadoReactor` implements the Twisted reactor interface on top of
the Tornado IOLoop. To use it, simply call `install` at the beginning
of the application::
import tornado.platform.twisted
tornado.platform.twisted.install()
from twisted.internet import reactor
When the app is ready to start, call `IOLoop.current().start()`
instead of `reactor.run()`.
It is also possible to create a non-global reactor by calling
`tornado.platform.twisted.TornadoReactor(io_loop)`. However, if
the `IOLoop` and reactor are to be short-lived (such as those used in
unit tests), additional cleanup may be required. Specifically, it is
recommended to call::
reactor.fireSystemEvent('shutdown')
reactor.disconnectAll()
before closing the `IOLoop`.
Tornado on Twisted
------------------
`TwistedIOLoop` implements the Tornado IOLoop interface on top of the Twisted
reactor. Recommended usage::
from tornado.platform.twisted import TwistedIOLoop
from twisted.internet import reactor
TwistedIOLoop().install()
# Set up your tornado application as usual using `IOLoop.instance`
reactor.run()
`TwistedIOLoop` always uses the global Twisted reactor.
"""
from __future__ import absolute_import, division, print_function, with_statement
@ -144,12 +101,27 @@ class TornadoDelayedCall(object):
class TornadoReactor(PosixReactorBase):
"""Twisted reactor built on the Tornado IOLoop.
Since it is intended to be used in applications where the top-level
event loop is ``io_loop.start()`` rather than ``reactor.run()``,
it is implemented a little differently than other Twisted reactors.
We override `mainLoop` instead of `doIteration` and must implement
timed call functionality on top of `IOLoop.add_timeout` rather than
using the implementation in `PosixReactorBase`.
`TornadoReactor` implements the Twisted reactor interface on top of
the Tornado IOLoop. To use it, simply call `install` at the beginning
of the application::
import tornado.platform.twisted
tornado.platform.twisted.install()
from twisted.internet import reactor
When the app is ready to start, call ``IOLoop.current().start()``
instead of ``reactor.run()``.
It is also possible to create a non-global reactor by calling
``tornado.platform.twisted.TornadoReactor(io_loop)``. However, if
the `.IOLoop` and reactor are to be short-lived (such as those used in
unit tests), additional cleanup may be required. Specifically, it is
recommended to call::
reactor.fireSystemEvent('shutdown')
reactor.disconnectAll()
before closing the `.IOLoop`.
.. versionchanged:: 4.1
The ``io_loop`` argument is deprecated.
@ -191,7 +163,6 @@ class TornadoReactor(PosixReactorBase):
# IReactorThreads
def callFromThread(self, f, *args, **kw):
"""See `twisted.internet.interfaces.IReactorThreads.callFromThread`"""
assert callable(f), "%s is not callable" % f
with NullContext():
# This NullContext is mainly for an edge case when running
@ -237,7 +208,6 @@ class TornadoReactor(PosixReactorBase):
writer.writeConnectionLost(failure.Failure(err))
def addReader(self, reader):
"""Add a FileDescriptor for notification of data available to read."""
if reader in self._readers:
# Don't add the reader if it's already there
return
@ -257,7 +227,6 @@ class TornadoReactor(PosixReactorBase):
IOLoop.READ)
def addWriter(self, writer):
"""Add a FileDescriptor for notification of data available to write."""
if writer in self._writers:
return
fd = writer.fileno()
@ -276,7 +245,6 @@ class TornadoReactor(PosixReactorBase):
IOLoop.WRITE)
def removeReader(self, reader):
"""Remove a Selectable for notification of data available to read."""
if reader in self._readers:
fd = self._readers.pop(reader)
(_, writer) = self._fds[fd]
@ -293,7 +261,6 @@ class TornadoReactor(PosixReactorBase):
self._io_loop.remove_handler(fd)
def removeWriter(self, writer):
"""Remove a Selectable for notification of data available to write."""
if writer in self._writers:
fd = self._writers.pop(writer)
(reader, _) = self._fds[fd]
@ -334,6 +301,14 @@ class TornadoReactor(PosixReactorBase):
raise NotImplementedError("doIteration")
def mainLoop(self):
# Since this class is intended to be used in applications
# where the top-level event loop is ``io_loop.start()`` rather
# than ``reactor.run()``, it is implemented a little
# differently than other Twisted reactors. We override
# ``mainLoop`` instead of ``doIteration`` and must implement
# timed call functionality on top of `.IOLoop.add_timeout`
# rather than using the implementation in
# ``PosixReactorBase``.
self._io_loop.start()
@ -364,8 +339,17 @@ class _TestReactor(TornadoReactor):
def install(io_loop=None):
"""Install this package as the default Twisted reactor.
``install()`` must be called very early in the startup process,
before most other twisted-related imports. Conversely, because it
initializes the `.IOLoop`, it cannot be called before
`.fork_processes` or multi-process `~.TCPServer.start`. These
conflicting requirements make it difficult to use `.TornadoReactor`
in multi-process mode, and an external process manager such as
``supervisord`` is recommended instead.
.. versionchanged:: 4.1
The ``io_loop`` argument is deprecated.
"""
if not io_loop:
io_loop = tornado.ioloop.IOLoop.current()
@ -408,8 +392,17 @@ class _FD(object):
class TwistedIOLoop(tornado.ioloop.IOLoop):
"""IOLoop implementation that runs on Twisted.
`TwistedIOLoop` implements the Tornado IOLoop interface on top of
the Twisted reactor. Recommended usage::
from tornado.platform.twisted import TwistedIOLoop
from twisted.internet import reactor
TwistedIOLoop().install()
# Set up your tornado application as usual using `IOLoop.instance`
reactor.run()
Uses the global Twisted reactor by default. To create multiple
`TwistedIOLoops` in the same process, you must pass a unique reactor
``TwistedIOLoops`` in the same process, you must pass a unique reactor
when constructing each one.
Not compatible with `tornado.process.Subprocess.set_exit_callback`

View file

@ -50,7 +50,14 @@ except NameError:
# Re-export this exception for convenience.
CalledProcessError = subprocess.CalledProcessError
try:
CalledProcessError = subprocess.CalledProcessError
except AttributeError:
# The subprocess module exists in Google App Engine, but is empty.
# This module isn't very useful in that case, but it should
# at least be importable.
if 'APPENGINE_RUNTIME' not in os.environ:
raise
def cpu_count():

View file

@ -1,8 +1,8 @@
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
from tornado.concurrent import is_future
from tornado.escape import utf8, _unicode
from tornado import gen
from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy
from tornado import httputil
from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters
@ -391,7 +391,9 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
self.connection.write(self.request.body)
elif self.request.body_producer is not None:
fut = self.request.body_producer(self.connection.write)
if is_future(fut):
if fut is not None:
fut = gen.convert_yielded(fut)
def on_body_written(fut):
fut.result()
self.connection.finish()
@ -462,9 +464,12 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
if self.request.expect_100_continue and first_line.code == 100:
self._write_body(False)
return
self.headers = headers
self.code = first_line.code
self.reason = first_line.reason
self.headers = headers
if self._should_follow_redirect():
return
if self.request.header_callback is not None:
# Reassemble the start line.
@ -473,14 +478,17 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
self.request.header_callback("%s: %s\r\n" % (k, v))
self.request.header_callback('\r\n')
def _should_follow_redirect(self):
return (self.request.follow_redirects and
self.request.max_redirects > 0 and
self.code in (301, 302, 303, 307))
def finish(self):
data = b''.join(self.chunks)
self._remove_timeout()
original_request = getattr(self.request, "original_request",
self.request)
if (self.request.follow_redirects and
self.request.max_redirects > 0 and
self.code in (301, 302, 303, 307)):
if self._should_follow_redirect():
assert isinstance(self.request, _RequestProxy)
new_request = copy.copy(self.request.request)
new_request.url = urlparse.urljoin(self.request.url,
@ -527,6 +535,9 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
self.stream.close()
def data_received(self, chunk):
if self._should_follow_redirect():
# We're going to follow a redirect so just discard the body.
return
if self.request.streaming_callback is not None:
self.request.streaming_callback(chunk)
else:

View file

@@ -41,12 +41,12 @@ static struct PyModuleDef speedupsmodule = {
};
PyMODINIT_FUNC
PyInit_speedups() {
PyInit_speedups(void) {
return PyModule_Create(&speedupsmodule);
}
#else // Python 2.x
PyMODINIT_FUNC
initspeedups() {
initspeedups(void) {
Py_InitModule("tornado.speedups", methods);
}
#endif

View file

@@ -147,7 +147,7 @@ class TCPServer(object):
"""Singular version of `add_sockets`. Takes a single socket object."""
self.add_sockets([socket])
def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128):
def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128, reuse_port=False):
"""Binds this server to the given port on the given address.
To start the server, call `start`. If you want to run this server
@@ -168,7 +168,7 @@ class TCPServer(object):
on multiple ports or interfaces.
"""
sockets = bind_sockets(port, address=address, family=family,
backlog=backlog)
backlog=backlog, reuse_port=reuse_port)
if self._started:
self.add_sockets(sockets)
else:

View file

@@ -271,7 +271,7 @@ class Template(object):
.. versionchanged:: 4.3
Added ``whitespace`` parameter; deprecated ``compress_whitespace``.
"""
self.name = name
self.name = escape.native_str(name)
if compress_whitespace is not _UNSET:
# Convert deprecated compress_whitespace (bool) to whitespace (str).

View file

@@ -34,13 +34,13 @@ from tornado.log import gen_log, app_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import raise_exc_info, basestring_type
import functools
import inspect
import logging
import os
import re
import signal
import socket
import sys
import types
try:
from cStringIO import StringIO # py2
@@ -52,6 +52,12 @@ try:
except ImportError:
from types import GeneratorType
if sys.version_info >= (3, 5):
iscoroutine = inspect.iscoroutine
iscoroutinefunction = inspect.iscoroutinefunction
else:
iscoroutine = iscoroutinefunction = lambda f: False
# Tornado's own test suite requires the updated unittest module
# (either py27+ or unittest2) so tornado.test.util enforces
# this requirement, but for other users of tornado.testing we want
@@ -86,12 +92,13 @@ def get_unused_port():
return port
def bind_unused_port():
def bind_unused_port(reuse_port=False):
"""Binds a server socket to an available port on localhost.
Returns a tuple (socket, port).
"""
[sock] = netutil.bind_sockets(None, 'localhost', family=socket.AF_INET)
sock = netutil.bind_sockets(None, '127.0.0.1', family=socket.AF_INET,
reuse_port=reuse_port)[0]
port = sock.getsockname()[1]
return sock, port
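Both ``TCPServer.bind`` (earlier in this commit) and ``bind_unused_port`` now forward a ``reuse_port`` flag to ``netutil.bind_sockets``, which sets ``SO_REUSEPORT`` so several processes can accept on the same port. A rough server-side sketch, assuming a platform that supports the option (port and handler are invented)::

    from tornado import gen
    from tornado.ioloop import IOLoop
    from tornado.tcpserver import TCPServer

    class EchoServer(TCPServer):
        @gen.coroutine
        def handle_stream(self, stream, address):
            data = yield stream.read_until(b"\n")
            yield stream.write(data)

    server = EchoServer()
    server.bind(8888, reuse_port=True)  # bind_sockets raises ValueError without SO_REUSEPORT
    server.start()
    IOLoop.current().start()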
@@ -123,9 +130,9 @@ class _TestMethodWrapper(object):
def __call__(self, *args, **kwargs):
result = self.orig_method(*args, **kwargs)
if isinstance(result, GeneratorType):
raise TypeError("Generator test methods should be decorated with "
"tornado.testing.gen_test")
if isinstance(result, GeneratorType) or iscoroutine(result):
raise TypeError("Generator and coroutine test methods should be"
" decorated with tornado.testing.gen_test")
elif result is not None:
raise ValueError("Return value from test method ignored: %r" %
result)
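The wrapper now catches undecorated native coroutines as well as generators, and the updated ``gen_test`` in the next hunk accepts ``async def`` test methods directly on Python 3.5+. A sketch of such a test (handler and app are invented)::

    from tornado.testing import AsyncHTTPTestCase, gen_test
    from tornado.web import Application, RequestHandler

    class HelloHandler(RequestHandler):
        def get(self):
            self.write("hello")

    class HelloTest(AsyncHTTPTestCase):
        def get_app(self):
            return Application([(r"/", HelloHandler)])

        @gen_test
        async def test_hello(self):  # native coroutine; requires Python 3.5+
            response = await self.http_client.fetch(self.get_url("/"))
            self.assertEqual(response.body, b"hello")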
@@ -499,12 +506,15 @@ def gen_test(func=None, timeout=None):
@functools.wraps(f)
def pre_coroutine(self, *args, **kwargs):
result = f(self, *args, **kwargs)
if isinstance(result, GeneratorType):
if isinstance(result, GeneratorType) or iscoroutine(result):
self._test_generator = result
else:
self._test_generator = None
return result
if iscoroutinefunction(f):
coro = pre_coroutine
else:
coro = gen.coroutine(pre_coroutine)
@functools.wraps(coro)
@@ -515,8 +525,8 @@ def gen_test(func=None, timeout=None):
timeout=timeout)
except TimeoutError as e:
# run_sync raises an error with an unhelpful traceback.
# If we throw it back into the generator the stack trace
# will be replaced by the point where the test is stopped.
# Throw it back into the generator or coroutine so the stack
# trace is replaced by the point where the test is stopped.
self._test_generator.throw(e)
# In case the test contains an overly broad except clause,
# we may get back here. In this case re-raise the original

View file

@@ -84,19 +84,10 @@ class GzipDecompressor(object):
return self.decompressobj.flush()
# Fake unicode literal support: Python 3.2 doesn't have the u'' marker for
# literal strings, and alternative solutions like "from __future__ import
# unicode_literals" have other problems (see PEP 414). u() can be applied
# to ascii strings that include \u escapes (but they must not contain
# literal non-ascii characters).
if not isinstance(b'', type('')):
def u(s):
return s
unicode_type = str
basestring_type = str
else:
def u(s):
return s.decode('unicode_escape')
# These names don't exist in py3, so use noqa comments to disable
# warnings in flake8.
unicode_type = unicode # noqa
@@ -290,11 +281,26 @@ class ArgReplacer(object):
def __init__(self, func, name):
self.name = name
try:
self.arg_pos = getargspec(func).args.index(self.name)
self.arg_pos = self._getargnames(func).index(name)
except ValueError:
# Not a positional parameter
self.arg_pos = None
def _getargnames(self, func):
try:
return getargspec(func).args
except TypeError:
if hasattr(func, 'func_code'):
# Cython-generated code has all the attributes needed
# by inspect.getargspec, but the inspect module only
# works with ordinary functions. Inline the portion of
# getargspec that we need here. Note that for static
# functions the @cython.binding(True) decorator must
# be used (for methods it works out of the box).
code = func.func_code
return code.co_varnames[:code.co_argcount]
raise
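The inlined fallback depends only on the function's code object, which is what makes it work for Cython-compiled functions; for an ordinary function it matches ``getargspec`` (a rough illustration, not part of the library)::

    import inspect

    def example(a, b, c=3):
        pass

    code = example.__code__  # func_code on Python 2
    positional = code.co_varnames[:code.co_argcount]
    assert positional == ('a', 'b', 'c')
    # getargspec is deprecated on Python 3; shown only for comparison.
    assert tuple(inspect.getargspec(example).args) == positional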
def get_old_value(self, args, kwargs, default=None):
"""Returns the old value of the named argument without replacing it.

View file

@@ -56,9 +56,7 @@ request.
"""
from __future__ import (absolute_import, division,
print_function, with_statement)
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
@@ -81,7 +79,7 @@ import traceback
import types
from io import BytesIO
from tornado.concurrent import Future, is_future
from tornado.concurrent import Future
from tornado import escape
from tornado import gen
from tornado import httputil
@@ -185,7 +183,7 @@ class RequestHandler(object):
self.initialize(**kwargs)
def initialize(self):
"""Hook for subclass initialization.
"""Hook for subclass initialization. Called for each request.
A dictionary passed as the third argument of a url spec will be
supplied as keyword arguments to initialize().
@@ -649,7 +647,6 @@ class RequestHandler(object):
value = self.get_cookie(name)
return get_signature_key_version(value)
def redirect(self, url, permanent=False, status=None):
"""Sends a redirect to the given (optionally relative) URL.
@@ -692,9 +689,6 @@ class RequestHandler(object):
message += ". Lists not accepted for security reasons; see http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.write"
raise TypeError(message)
if isinstance(chunk, dict):
if 'unwrap_json' in chunk:
chunk = chunk['unwrap_json']
else:
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = utf8(chunk)
@@ -1069,12 +1063,33 @@ class RequestHandler(object):
def current_user(self):
"""The authenticated user for this request.
This is a cached version of `get_current_user`, which you can
override to set the user based on, e.g., a cookie. If that
method is not overridden, this method always returns None.
This is set in one of two ways:
We lazy-load the current user the first time this method is called
and cache the result after that.
* A subclass may override `get_current_user()`, which will be called
automatically the first time ``self.current_user`` is accessed.
`get_current_user()` will only be called once per request,
and is cached for future access::
def get_current_user(self):
user_cookie = self.get_secure_cookie("user")
if user_cookie:
return json.loads(user_cookie)
return None
* It may be set as a normal variable, typically from an overridden
`prepare()`::
@gen.coroutine
def prepare(self):
user_id_cookie = self.get_secure_cookie("user_id")
if user_id_cookie:
self.current_user = yield load_user(user_id_cookie)
Note that `prepare()` may be a coroutine while `get_current_user()`
may not, so the latter form is necessary if loading the user requires
asynchronous operations.
        The user object may be any type of the application's choosing.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
@@ -1085,7 +1100,10 @@ class RequestHandler(object):
self._current_user = value
def get_current_user(self):
"""Override to determine the current user from, e.g., a cookie."""
"""Override to determine the current user from, e.g., a cookie.
This method may not be a coroutine.
"""
return None
def get_login_url(self):
@@ -1123,10 +1141,19 @@ class RequestHandler(object):
cookies will be converted to version 2 when this method is called
unless the ``xsrf_cookie_version`` `Application` setting is
set to 1.
.. versionchanged:: 4.3
The ``xsrf_cookie_kwargs`` `Application` setting may be
used to supply additional cookie options (which will be
passed directly to `set_cookie`). For example,
``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
will set the ``secure`` and ``httponly`` flags on the
``_xsrf`` cookie.
"""
if not hasattr(self, "_xsrf_token"):
version, token, timestamp = self._get_raw_xsrf_token()
output_version = self.settings.get("xsrf_cookie_version", 2)
cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
if output_version == 1:
self._xsrf_token = binascii.b2a_hex(token)
elif output_version == 2:
@@ -1142,7 +1169,8 @@ class RequestHandler(object):
if version is None:
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", self._xsrf_token,
expires_days=expires_days)
expires_days=expires_days,
**cookie_kwargs)
return self._xsrf_token
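A sketch of how the new setting is supplied; the extra keyword arguments travel straight through to ``set_cookie`` when the ``_xsrf`` cookie is written (handler and route are invented)::

    from tornado.web import Application, RequestHandler

    class FormHandler(RequestHandler):
        def get(self):
            self.write('<form method="post">%s</form>' % self.xsrf_form_html())

    app = Application(
        [(r"/form", FormHandler)],
        xsrf_cookies=True,
        # Passed on to set_cookie() for the _xsrf cookie (new in 4.3).
        xsrf_cookie_kwargs=dict(httponly=True, secure=True),
    )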
def _get_raw_xsrf_token(self):
@@ -1453,7 +1481,7 @@ class RequestHandler(object):
if isinstance(e, Finish):
# Not an error; just finish the request without logging.
if not self._finished:
self.finish()
self.finish(*e.args)
return
try:
self.log_exception(*sys.exc_info())
@@ -1557,9 +1585,12 @@ def asynchronous(method):
.. testoutput::
:hide:
.. versionadded:: 3.1
.. versionchanged:: 3.1
The ability to use ``@gen.coroutine`` without ``@asynchronous``.
.. versionchanged:: 4.3 Returning anything but ``None`` or a
yieldable object from a method decorated with ``@asynchronous``
is an error. Such return values were previously ignored silently.
"""
# Delay the IOLoop import because it's not available on app engine.
from tornado.ioloop import IOLoop
@@ -1570,7 +1601,8 @@ def asynchronous(method):
with stack_context.ExceptionStackContext(
self._stack_context_handle_exception):
result = method(self, *args, **kwargs)
if is_future(result):
if result is not None:
result = gen.convert_yielded(result)
# If @asynchronous is used with @gen.coroutine, (but
# not @gen.engine), we can automatically finish the
# request when the future resolves. Additionally,
@@ -1691,7 +1723,7 @@ class Application(httputil.HTTPServerConnectionDelegate):
(fully-qualified) name.
Each tuple can contain additional elements, which correspond to the
arguments to the `URLSpec` constructor. (Prior to Tornado 3.2, this
arguments to the `URLSpec` constructor. (Prior to Tornado 3.2,
only tuples of two or three elements were allowed).
A dictionary may be passed as the third element of the tuple,
@@ -1780,12 +1812,18 @@ class Application(httputil.HTTPServerConnectionDelegate):
Note that after calling this method you still need to call
``IOLoop.current().start()`` to start the server.
Returns the `.HTTPServer` object.
.. versionchanged:: 4.3
Now returns the `.HTTPServer` object.
"""
# import is here rather than top level because HTTPServer
# is not importable on appengine
from tornado.httpserver import HTTPServer
server = HTTPServer(self, **kwargs)
server.listen(port, address)
return server
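Since ``listen`` now hands back the `.HTTPServer`, the caller can keep it around for an orderly shutdown or for adding more sockets later; a rough sketch (port and handler are invented)::

    from tornado.ioloop import IOLoop
    from tornado.web import Application, RequestHandler

    class PingHandler(RequestHandler):
        def get(self):
            self.write("pong")

    server = Application([(r"/ping", PingHandler)]).listen(8888)
    # `server` is the HTTPServer; e.g. call server.stop() from a shutdown hook.
    IOLoop.current().start()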
def add_handlers(self, host_pattern, host_handlers):
"""Appends the given handlers to our handler list.
@@ -2013,7 +2051,7 @@ class _RequestDispatcher(httputil.HTTPMessageDelegate):
# except handler, and we cannot easily access the IOLoop here to
# call add_future (because of the requirement to remain compatible
# with WSGI)
f = self.handler._execute(transforms, *self.path_args,
self.handler._execute(transforms, *self.path_args,
**self.path_kwargs)
# If we are streaming the request body, then execute() is finished
# when the handler has prepared to receive the body. If not,
@@ -2043,7 +2081,7 @@ class HTTPError(Exception):
determined automatically from ``status_code``, but can be used
to use a non-standard numeric code.
"""
def __init__(self, status_code, log_message=None, *args, **kwargs):
def __init__(self, status_code=500, log_message=None, *args, **kwargs):
self.status_code = status_code
self.log_message = log_message
self.args = args
@@ -2064,10 +2102,14 @@ class HTTPError(Exception):
class Finish(Exception):
"""An exception that ends the request without producing an error response.
When `Finish` is raised in a `RequestHandler`, the request will end
(calling `RequestHandler.finish` if it hasn't already been called),
but the outgoing response will not be modified and the error-handling
methods (including `RequestHandler.write_error`) will not be called.
When `Finish` is raised in a `RequestHandler`, the request will
end (calling `RequestHandler.finish` if it hasn't already been
called), but the error-handling methods (including
`RequestHandler.write_error`) will not be called.
If `Finish()` was created with no arguments, the pending response
will be sent as-is. If `Finish()` was given an argument, that
argument will be passed to `RequestHandler.finish()`.
This can be a more convenient way to implement custom error pages
than overriding ``write_error`` (especially in library code)::
@@ -2076,6 +2118,10 @@ class Finish(Exception):
self.set_status(401)
self.set_header('WWW-Authenticate', 'Basic realm="something"')
raise Finish()
.. versionchanged:: 4.3
Arguments passed to ``Finish()`` will be passed on to
`RequestHandler.finish`.
"""
pass
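With the new behaviour, an argument passed to ``Finish`` becomes the response body; a sketch extending the example above (header and payload are invented)::

    from tornado.web import Finish, RequestHandler

    class ApiHandler(RequestHandler):
        def prepare(self):
            if not self.request.headers.get("Authorization"):
                self.set_status(401)
                self.set_header('WWW-Authenticate', 'Basic realm="something"')
                # The dict is forwarded to finish(), which writes it as JSON.
                raise Finish({"error": "authentication required"})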
@@ -2384,7 +2430,14 @@ class StaticFileHandler(RequestHandler):
# We must add it back to `root` so that we only match files
# in a directory named `root` instead of files starting with
# that prefix.
root = os.path.abspath(root) + os.path.sep
root = os.path.abspath(root)
if not root.endswith(os.path.sep):
# abspath always removes a trailing slash, except when
# root is '/'. This is an unusual case, but several projects
# have independently discovered this technique to disable
# Tornado's path validation and (hopefully) do their own,
# so we need to support it.
root += os.path.sep
# The trailing slash also needs to be temporarily added back
# the requested path so a request to root/ will match.
if not (absolute_path + os.path.sep).startswith(root):
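The comparison above works on absolute paths with the separator re-attached; roughly (paths are illustrative, not from the library)::

    import os.path

    root = os.path.abspath("static") + os.path.sep        # e.g. '/app/static/'
    candidate = os.path.abspath("static/../secret.txt")   # e.g. '/app/secret.txt'
    # The traversal attempt does not share the root prefix, so it is rejected.
    assert not (candidate + os.path.sep).startswith(root)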

View file

@@ -16,8 +16,7 @@ the protocol (known as "draft 76") and are not compatible with this module.
Removed support for the draft 76 protocol version.
"""
from __future__ import (absolute_import, division,
print_function, with_statement)
from __future__ import absolute_import, division, print_function, with_statement
# Author: Jacob Kristhammar, 2010
import base64
@@ -129,8 +128,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
to accept it before the websocket connection will succeed.
"""
def __init__(self, application, request, **kwargs):
tornado.web.RequestHandler.__init__(self, application, request,
**kwargs)
super(WebSocketHandler, self).__init__(application, request, **kwargs)
self.ws_connection = None
self.close_code = None
self.close_reason = None
@@ -208,12 +206,15 @@ class WebSocketHandler(tornado.web.RequestHandler):
.. versionchanged:: 3.2
`WebSocketClosedError` was added (previously a closed connection
would raise an `AttributeError`)
.. versionchanged:: 4.3
Returns a `.Future` which can be used for flow control.
"""
if self.ws_connection is None:
raise WebSocketClosedError()
if isinstance(message, dict):
message = tornado.escape.json_encode(message)
self.ws_connection.write_message(message, binary=binary)
return self.ws_connection.write_message(message, binary=binary)
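Because ``write_message`` now returns a `.Future`, a coroutine can yield it for flow control so a slow client does not pile up buffered frames; a minimal sketch (handler name and payload are invented)::

    from tornado import gen, websocket

    class FeedHandler(websocket.WebSocketHandler):
        def open(self):
            self.send_feed()  # fire-and-forget; the coroutine runs on the IOLoop

        @gen.coroutine
        def send_feed(self):
            for i in range(100):
                # Yielding waits until the frame has been flushed to the stream.
                yield self.write_message("item %d" % i)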
def select_subprotocol(self, subprotocols):
"""Invoked when a new WebSocket requests specific subprotocols.
@@ -671,7 +672,7 @@ class WebSocketProtocol13(WebSocketProtocol):
frame += data
self._wire_bytes_out += len(frame)
try:
self.stream.write(frame)
return self.stream.write(frame)
except StreamClosedError:
self._abort()
@@ -688,7 +689,7 @@ class WebSocketProtocol13(WebSocketProtocol):
if self._compressor:
message = self._compressor.compress(message)
flags |= self.RSV1
self._write_frame(True, opcode, message, flags=flags)
return self._write_frame(True, opcode, message, flags=flags)
def write_ping(self, data):
"""Send ping frame."""
@@ -708,7 +709,7 @@ class WebSocketProtocol13(WebSocketProtocol):
reserved_bits = header & self.RSV_MASK
self._frame_opcode = header & self.OPCODE_MASK
self._frame_opcode_is_control = self._frame_opcode & 0x8
if self._decompressor is not None:
if self._decompressor is not None and self._frame_opcode != 0:
self._frame_compressed = bool(reserved_bits & self.RSV1)
reserved_bits &= ~self.RSV1
if reserved_bits:
@@ -970,7 +971,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
def write_message(self, message, binary=False):
"""Sends a message to the WebSocket server."""
self.protocol.write_message(message, binary)
return self.protocol.write_message(message, binary)
def read_message(self, callback=None):
"""Reads a message from the WebSocket server.
@@ -1024,7 +1025,7 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
style, the application typically calls
`~.WebSocketClientConnection.read_message` in a loop::
conn = yield websocket_connection(loop)
conn = yield websocket_connect(url)
while True:
msg = yield conn.read_message()
if msg is None: break

View file

@@ -1,92 +0,0 @@
#!/usr/bin/env python
# coding: utf-8
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Data used by the tornado.locale module."""
# NOTE: This file is supposed to contain unicode strings, which is
# exactly what you'd get with e.g. u"Español" in most python versions.
# However, Python 3.2 doesn't support the u"" syntax, so we use a u()
# function instead. tornado.util.u cannot be used because it doesn't
# support non-ascii characters on python 2.
# When we drop support for Python 3.2, we can remove the parens
# and make these plain unicode strings.
from tornado.escape import to_unicode as u
LOCALE_NAMES = {
"af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")},
"am_ET": {"name_en": u("Amharic"), "name": u("አማርኛ")},
"ar_AR": {"name_en": u("Arabic"), "name": u("العربية")},
"bg_BG": {"name_en": u("Bulgarian"), "name": u("Български")},
"bn_IN": {"name_en": u("Bengali"), "name": u("বাংলা")},
"bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")},
"ca_ES": {"name_en": u("Catalan"), "name": u("Català")},
"cs_CZ": {"name_en": u("Czech"), "name": u("Čeština")},
"cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")},
"da_DK": {"name_en": u("Danish"), "name": u("Dansk")},
"de_DE": {"name_en": u("German"), "name": u("Deutsch")},
"el_GR": {"name_en": u("Greek"), "name": u("Ελληνικά")},
"en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")},
"en_US": {"name_en": u("English (US)"), "name": u("English (US)")},
"es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Español (España)")},
"es_LA": {"name_en": u("Spanish"), "name": u("Español")},
"et_EE": {"name_en": u("Estonian"), "name": u("Eesti")},
"eu_ES": {"name_en": u("Basque"), "name": u("Euskara")},
"fa_IR": {"name_en": u("Persian"), "name": u("فارسی")},
"fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")},
"fr_CA": {"name_en": u("French (Canada)"), "name": u("Français (Canada)")},
"fr_FR": {"name_en": u("French"), "name": u("Français")},
"ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")},
"gl_ES": {"name_en": u("Galician"), "name": u("Galego")},
"he_IL": {"name_en": u("Hebrew"), "name": u("עברית")},
"hi_IN": {"name_en": u("Hindi"), "name": u("हिन्दी")},
"hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")},
"hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")},
"id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")},
"is_IS": {"name_en": u("Icelandic"), "name": u("Íslenska")},
"it_IT": {"name_en": u("Italian"), "name": u("Italiano")},
"ja_JP": {"name_en": u("Japanese"), "name": u("日本語")},
"ko_KR": {"name_en": u("Korean"), "name": u("한국어")},
"lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvių")},
"lv_LV": {"name_en": u("Latvian"), "name": u("Latviešu")},
"mk_MK": {"name_en": u("Macedonian"), "name": u("Македонски")},
"ml_IN": {"name_en": u("Malayalam"), "name": u("മലയാളം")},
"ms_MY": {"name_en": u("Malay"), "name": u("Bahasa Melayu")},
"nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokmål)")},
"nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")},
"nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")},
"pa_IN": {"name_en": u("Punjabi"), "name": u("ਪੰਜਾਬੀ")},
"pl_PL": {"name_en": u("Polish"), "name": u("Polski")},
"pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Português (Brasil)")},
"pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Português (Portugal)")},
"ro_RO": {"name_en": u("Romanian"), "name": u("Română")},
"ru_RU": {"name_en": u("Russian"), "name": u("Русский")},
"sk_SK": {"name_en": u("Slovak"), "name": u("Slovenčina")},
"sl_SI": {"name_en": u("Slovenian"), "name": u("Slovenščina")},
"sq_AL": {"name_en": u("Albanian"), "name": u("Shqip")},
"sr_RS": {"name_en": u("Serbian"), "name": u("Српски")},
"sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")},
"sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")},
"ta_IN": {"name_en": u("Tamil"), "name": u("தமிழ்")},
"te_IN": {"name_en": u("Telugu"), "name": u("తెలుగు")},
"th_TH": {"name_en": u("Thai"), "name": u("ภาษาไทย")},
"tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")},
"tr_TR": {"name_en": u("Turkish"), "name": u("Türkçe")},
"uk_UA": {"name_en": u("Ukraini "), "name": u("Українська")},
"vi_VN": {"name_en": u("Vietnamese"), "name": u("Tiếng Việt")},
"zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("中文(简体)")},
"zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("中文(繁體)")},
}