From 68446ea5d6b3a07e89df4af3fa573d4e0327870b Mon Sep 17 00:00:00 2001
From: JackDandy
Date: Fri, 30 Sep 2016 23:40:12 +0100
Subject: [PATCH] Update Tornado Web Server 4.4.dev1 (c2b4d05) to 4.5.dev1 (92f29b8).

---
 CHANGES.md                            |   3 +-
 lib/tornado/__init__.py               |   4 +-
 lib/tornado/_locale_data.py           |   9 --
 lib/tornado/auth.py                   |  28 ++---
 lib/tornado/autoreload.py             |   4 +-
 lib/tornado/concurrent.py             |  12 +-
 lib/tornado/curl_httpclient.py        |  16 ++-
 lib/tornado/escape.py                 |  43 +++----
 lib/tornado/gen.py                    |  52 ++++++--
 lib/tornado/http1connection.py        |  40 +++++--
 lib/tornado/httpclient.py             |  45 ++++---
 lib/tornado/httputil.py               |  50 +++++---
 lib/tornado/ioloop.py                 |  41 ++++---
 lib/tornado/iostream.py               |  11 +-
 lib/tornado/locale.py                 |  11 +-
 lib/tornado/locks.py                  |   4 +-
 lib/tornado/log.py                    |  12 +-
 lib/tornado/netutil.py                |  27 +++--
 lib/tornado/options.py                |  10 +-
 lib/tornado/platform/asyncio.py       |  17 ++-
 lib/tornado/platform/caresresolver.py |  10 +-
 lib/tornado/platform/interface.py     |   3 +
 lib/tornado/platform/twisted.py       |  37 +++---
 lib/tornado/platform/windows.py       |   6 +-
 lib/tornado/process.py                |  18 +--
 lib/tornado/queues.py                 |   4 +-
 lib/tornado/simple_httpclient.py      |  42 +++++--
 lib/tornado/tcpclient.py              |  14 ++-
 lib/tornado/tcpserver.py              |  25 +++-
 lib/tornado/template.py               |  23 ++--
 lib/tornado/testing.py                |  56 +++++----
 lib/tornado/util.py                   | 165 +++++++++++++++++++-------
 lib/tornado/web.py                    | 122 ++++++++++++-------
 lib/tornado/websocket.py              |  26 ++--
 lib/tornado/wsgi.py                   |   6 +-
 35 files changed, 633 insertions(+), 363 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index f747457d..bbac6b35 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -3,7 +3,7 @@
 * Add strict Python version check (equal to, or higher than 2.7.9 and less than 3.0), **exit** if incorrect version
 * Update unidecode library 0.04.11 to 0.04.18 (fd57cbf)
 * Update xmltodict library 0.9.2 (579a005) to 0.9.2 (eac0031)
-* Update Tornado Web Server 4.3.dev1 (1b6157d) to 4.4.dev1 (c2b4d05)
+* Update Tornado Web Server 4.3.dev1 (1b6157d) to 4.5.dev1 (92f29b8)
 * Update change to suppress reporting of Tornado exception error 1 to updated package (ref:hacks.txt)
 * Change API response header for JSON content type and the return of JSONP data
 * Remove redundant MultipartPostHandler
@@ -184,6 +184,7 @@
 * Fix join clean up
 * Fix add custom torrent RSS
 * Remove ILT torrent provider
+* Update Tornado Web Server 4.3.dev1 (1b6157d) to 4.4.dev1 (c2b4d05)
 
 
 ### 0.11.15 (2016-09-13 19:50:00 UTC)
diff --git a/lib/tornado/__init__.py b/lib/tornado/__init__.py
index 18b17198..e856a5fe 100644
--- a/lib/tornado/__init__.py
+++ b/lib/tornado/__init__.py
@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement
 # is zero for an official release, positive for a development branch,
 # or negative for a release candidate or beta (after the base version
 # number has been incremented)
-version = "4.4.dev1"
-version_info = (4, 4, 0, -100)
+version = "4.5.dev1"
+version_info = (4, 5, 0, -100)
diff --git a/lib/tornado/_locale_data.py b/lib/tornado/_locale_data.py
index 26531282..e073afe5 100644
--- a/lib/tornado/_locale_data.py
+++ b/lib/tornado/_locale_data.py
@@ -19,15 +19,6 @@ from __future__ import absolute_import, division, print_function, with_statement
-# NOTE: This file is supposed to contain unicode strings, which is
-# exactly what you'd get with e.g. u"Español" in most python versions.
-# However, Python 3.2 doesn't support the u"" syntax, so we use a u()
-# function instead.
tornado.util.u cannot be used because it doesn't -# support non-ascii characters on python 2. -# When we drop support for Python 3.2, we can remove the parens -# and make these plain unicode strings. -from tornado.escape import to_unicode as u - LOCALE_NAMES = { "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"}, "am_ET": {"name_en": u"Amharic", "name": u"አማርኛ"}, diff --git a/lib/tornado/auth.py b/lib/tornado/auth.py index 05ac3d1e..44144061 100644 --- a/lib/tornado/auth.py +++ b/lib/tornado/auth.py @@ -82,22 +82,15 @@ from tornado import escape from tornado.httputil import url_concat from tornado.log import gen_log from tornado.stack_context import ExceptionStackContext -from tornado.util import unicode_type, ArgReplacer +from tornado.util import unicode_type, ArgReplacer, PY3 -try: - import urlparse # py2 -except ImportError: - import urllib.parse as urlparse # py3 - -try: - import urllib.parse as urllib_parse # py3 -except ImportError: - import urllib as urllib_parse # py2 - -try: - long # py2 -except NameError: - long = int # py3 +if PY3: + import urllib.parse as urlparse + import urllib.parse as urllib_parse + long = int +else: + import urlparse + import urllib as urllib_parse class AuthError(Exception): @@ -290,7 +283,7 @@ class OpenIdMixin(object): if name: user["name"] = name elif name_parts: - user["name"] = u(" ").join(name_parts) + user["name"] = u" ".join(name_parts) elif email: user["name"] = email.split("@")[0] if email: @@ -996,6 +989,9 @@ class FacebookGraphMixin(OAuth2Mixin): callback=functools.partial( self._on_get_user_info, future, session, fields), access_token=session["access_token"], + appsecret_proof=hmac.new(key=client_secret.encode('utf8'), + msg=session["access_token"].encode('utf8'), + digestmod=hashlib.sha256).hexdigest(), fields=",".join(fields) ) diff --git a/lib/tornado/autoreload.py b/lib/tornado/autoreload.py index 1cbf26c6..5e0d00d1 100644 --- a/lib/tornado/autoreload.py +++ b/lib/tornado/autoreload.py @@ -83,7 +83,7 @@ if __name__ == "__main__": import functools import logging import os -import pkgutil +import pkgutil # type: ignore import sys import traceback import types @@ -112,7 +112,7 @@ _has_execv = sys.platform != 'win32' _watched_files = set() _reload_hooks = [] _reload_attempted = False -_io_loops = weakref.WeakKeyDictionary() +_io_loops = weakref.WeakKeyDictionary() # type: ignore def start(io_loop=None, check_time=500): diff --git a/lib/tornado/concurrent.py b/lib/tornado/concurrent.py index 5f8cdc41..05205f73 100644 --- a/lib/tornado/concurrent.py +++ b/lib/tornado/concurrent.py @@ -38,6 +38,11 @@ try: except ImportError: futures = None +try: + import typing +except ImportError: + typing = None + # Can the garbage collector handle cycles that include __del__ methods? # This is true in cpython beginning with version 3.4 (PEP 442). 
@@ -338,7 +343,7 @@ class Future(object): TracebackFuture = Future if futures is None: - FUTURES = Future + FUTURES = Future # type: typing.Union[type, typing.Tuple[type, ...]] else: FUTURES = (futures.Future, Future) @@ -500,8 +505,9 @@ def chain_future(a, b): assert future is a if b.done(): return - if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture) - and a.exc_info() is not None): + if (isinstance(a, TracebackFuture) and + isinstance(b, TracebackFuture) and + a.exc_info() is not None): b.set_exc_info(a.exc_info()) elif a.exception() is not None: b.set_exception(a.exception()) diff --git a/lib/tornado/curl_httpclient.py b/lib/tornado/curl_httpclient.py index 6dadedd9..bef78419 100644 --- a/lib/tornado/curl_httpclient.py +++ b/lib/tornado/curl_httpclient.py @@ -21,7 +21,7 @@ from __future__ import absolute_import, division, print_function, with_statement import collections import functools import logging -import pycurl +import pycurl # type: ignore import threading import time from io import BytesIO @@ -278,6 +278,9 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): if curl_log.isEnabledFor(logging.DEBUG): curl.setopt(pycurl.VERBOSE, 1) curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug) + if hasattr(pycurl,'PROTOCOLS'): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12) + curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP|pycurl.PROTO_HTTPS) + curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP|pycurl.PROTO_HTTPS) return curl def _curl_setup_request(self, curl, request, buffer, headers): @@ -342,6 +345,15 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): credentials = '%s:%s' % (request.proxy_username, request.proxy_password) curl.setopt(pycurl.PROXYUSERPWD, credentials) + + if (request.proxy_auth_mode is None or + request.proxy_auth_mode == "basic"): + curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC) + elif request.proxy_auth_mode == "digest": + curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST) + else: + raise ValueError( + "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode) else: curl.setopt(pycurl.PROXY, '') curl.unsetopt(pycurl.PROXYUSERPWD) @@ -462,7 +474,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): request.prepare_curl_callback(curl) def _curl_header_callback(self, headers, header_callback, header_line): - header_line = native_str(header_line) + header_line = native_str(header_line.decode('latin1')) if header_callback is not None: self.io_loop.add_callback(header_callback, header_line) # header_line as returned by curl includes the end-of-line characters. diff --git a/lib/tornado/escape.py b/lib/tornado/escape.py index 23cc9cde..7a3b0e03 100644 --- a/lib/tornado/escape.py +++ b/lib/tornado/escape.py @@ -22,32 +22,26 @@ have crept in over time. 
from __future__ import absolute_import, division, print_function, with_statement -import re -import sys - -from tornado.util import unicode_type, basestring_type - -try: - from urllib.parse import parse_qs as _parse_qs # py3 -except ImportError: - from urlparse import parse_qs as _parse_qs # Python 2.6+ - -try: - import htmlentitydefs # py2 -except ImportError: - import html.entities as htmlentitydefs # py3 - -try: - import urllib.parse as urllib_parse # py3 -except ImportError: - import urllib as urllib_parse # py2 - import json +import re + +from tornado.util import PY3, unicode_type, basestring_type + +if PY3: + from urllib.parse import parse_qs as _parse_qs + import html.entities as htmlentitydefs + import urllib.parse as urllib_parse + unichr = chr +else: + from urlparse import parse_qs as _parse_qs + import htmlentitydefs + import urllib as urllib_parse try: - unichr -except NameError: - unichr = chr + import typing # noqa +except ImportError: + pass + _XHTML_ESCAPE_RE = re.compile('[&<>"\']') _XHTML_ESCAPE_DICT = {'&': '&', '<': '<', '>': '>', '"': '"', @@ -116,7 +110,7 @@ def url_escape(value, plus=True): # python 3 changed things around enough that we need two separate # implementations of url_unescape. We also need our own implementation # of parse_qs since python 3's version insists on decoding everything. -if sys.version_info[0] < 3: +if not PY3: def url_unescape(value, encoding='utf-8', plus=True): """Decodes the given value from a URL. @@ -191,6 +185,7 @@ _UTF8_TYPES = (bytes, type(None)) def utf8(value): + # type: (typing.Union[bytes,unicode_type,None])->typing.Union[bytes,None] """Converts a string argument to a byte string. If the argument is already a byte string or None, it is returned unchanged. diff --git a/lib/tornado/gen.py b/lib/tornado/gen.py index bf184e54..73f9ba10 100644 --- a/lib/tornado/gen.py +++ b/lib/tornado/gen.py @@ -83,16 +83,18 @@ import os import sys import textwrap import types +import weakref from tornado.concurrent import Future, TracebackFuture, is_future, chain_future from tornado.ioloop import IOLoop from tornado.log import app_log from tornado import stack_context -from tornado.util import raise_exc_info +from tornado.util import PY3, raise_exc_info try: try: - from functools import singledispatch # py34+ + # py34+ + from functools import singledispatch # type: ignore except ImportError: from singledispatch import singledispatch # backport except ImportError: @@ -108,12 +110,14 @@ except ImportError: try: try: - from collections.abc import Generator as GeneratorType # py35+ + # py35+ + from collections.abc import Generator as GeneratorType # type: ignore except ImportError: - from backports_abc import Generator as GeneratorType + from backports_abc import Generator as GeneratorType # type: ignore try: - from inspect import isawaitable # py35+ + # py35+ + from inspect import isawaitable # type: ignore except ImportError: from backports_abc import isawaitable except ImportError: @@ -121,12 +125,12 @@ except ImportError: raise from types import GeneratorType - def isawaitable(x): + def isawaitable(x): # type: ignore return False -try: - import builtins # py3 -except ImportError: +if PY3: + import builtins +else: import __builtin__ as builtins @@ -241,6 +245,24 @@ def coroutine(func, replace_callback=True): """ return _make_coroutine_wrapper(func, replace_callback=True) +# Ties lifetime of runners to their result futures. 
Github Issue #1769 +# Generators, like any object in Python, must be strong referenced +# in order to not be cleaned up by the garbage collector. When using +# coroutines, the Runner object is what strong-refs the inner +# generator. However, the only item that strong-reffed the Runner +# was the last Future that the inner generator yielded (via the +# Future's internal done_callback list). Usually this is enough, but +# it is also possible for this Future to not have any strong references +# other than other objects referenced by the Runner object (usually +# when using other callback patterns and/or weakrefs). In this +# situation, if a garbage collection ran, a cycle would be detected and +# Runner objects could be destroyed along with their inner generators +# and everything in their local scope. +# This map provides strong references to Runner objects as long as +# their result future objects also have strong references (typically +# from the parent coroutine's Runner). This keeps the coroutine's +# Runner alive. +_futures_to_runners = weakref.WeakKeyDictionary() def _make_coroutine_wrapper(func, replace_callback): """The inner workings of ``@gen.coroutine`` and ``@gen.engine``. @@ -291,7 +313,7 @@ def _make_coroutine_wrapper(func, replace_callback): except Exception: future.set_exc_info(sys.exc_info()) else: - Runner(result, future, yielded) + _futures_to_runners[future] = Runner(result, future, yielded) try: return future finally: @@ -830,7 +852,7 @@ def maybe_future(x): def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()): - """Wraps a `.Future` in a timeout. + """Wraps a `.Future` (or other yieldable object) in a timeout. Raises `TimeoutError` if the input future does not complete before ``timeout``, which may be specified in any form allowed by @@ -841,15 +863,18 @@ def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()): will be logged unless it is of a type contained in ``quiet_exceptions`` (which may be an exception type or a sequence of types). - Currently only supports Futures, not other `YieldPoint` classes. + Does not support `YieldPoint` subclasses. .. versionadded:: 4.0 .. versionchanged:: 4.1 Added the ``quiet_exceptions`` argument and the logging of unhandled exceptions. + + .. versionchanged:: 4.4 + Added support for yieldable objects other than `.Future`. """ - # TODO: allow yield points in addition to futures? + # TODO: allow YieldPoints in addition to other yieldables? # Tricky to do with stack_context semantics. # # It's tempting to optimize this by cancelling the input future on timeout @@ -857,6 +882,7 @@ def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()): # one waiting on the input future, so cancelling it might disrupt other # callers and B) concurrent futures can only be cancelled while they are # in the queue, so cancellation cannot reliably bound our waiting time. 
+ future = convert_yielded(future) result = Future() chain_future(future, result) if io_loop is None: diff --git a/lib/tornado/http1connection.py b/lib/tornado/http1connection.py index 29c6cd9a..7ee83161 100644 --- a/lib/tornado/http1connection.py +++ b/lib/tornado/http1connection.py @@ -30,7 +30,7 @@ from tornado import httputil from tornado import iostream from tornado.log import gen_log, app_log from tornado import stack_context -from tornado.util import GzipDecompressor +from tornado.util import GzipDecompressor, PY3 class _QuietException(Exception): @@ -351,7 +351,7 @@ class HTTP1Connection(httputil.HTTPConnection): # 304 responses have no body (not even a zero-length body), and so # should not have either Content-Length or Transfer-Encoding. # headers. - start_line.code != 304 and + start_line.code not in (204, 304) and # No need to chunk the output if a Content-Length is specified. 'Content-Length' not in headers and # Applications are discouraged from touching Transfer-Encoding, @@ -359,8 +359,8 @@ class HTTP1Connection(httputil.HTTPConnection): 'Transfer-Encoding' not in headers) # If a 1.0 client asked for keep-alive, add the header. if (self._request_start_line.version == 'HTTP/1.0' and - (self._request_headers.get('Connection', '').lower() - == 'keep-alive')): + (self._request_headers.get('Connection', '').lower() == + 'keep-alive')): headers['Connection'] = 'Keep-Alive' if self._chunking_output: headers['Transfer-Encoding'] = 'chunked' @@ -372,7 +372,14 @@ class HTTP1Connection(httputil.HTTPConnection): self._expected_content_remaining = int(headers['Content-Length']) else: self._expected_content_remaining = None - lines.extend([utf8(n) + b": " + utf8(v) for n, v in headers.get_all()]) + # TODO: headers are supposed to be of type str, but we still have some + # cases that let bytes slip through. Remove these native_str calls when those + # are fixed. + header_lines = (native_str(n) + ": " + native_str(v) for n, v in headers.get_all()) + if PY3: + lines.extend(l.encode('latin1') for l in header_lines) + else: + lines.extend(header_lines) for line in lines: if b'\n' in line: raise ValueError('Newline in header: ' + repr(line)) @@ -479,9 +486,11 @@ class HTTP1Connection(httputil.HTTPConnection): connection_header = connection_header.lower() if start_line.version == "HTTP/1.1": return connection_header != "close" - elif ("Content-Length" in headers - or headers.get("Transfer-Encoding", "").lower() == "chunked" - or start_line.method in ("HEAD", "GET")): + elif ("Content-Length" in headers or + headers.get("Transfer-Encoding", "").lower() == "chunked" or + getattr(start_line, 'method', None) in ("HEAD", "GET")): + # start_line may be a request or reponse start line; only + # the former has a method attribute. return connection_header == "keep-alive" return False @@ -531,7 +540,13 @@ class HTTP1Connection(httputil.HTTPConnection): "Multiple unequal Content-Lengths: %r" % headers["Content-Length"]) headers["Content-Length"] = pieces[0] - content_length = int(headers["Content-Length"]) + + try: + content_length = int(headers["Content-Length"]) + except ValueError: + # Handles non-integer Content-Length value. 
+ raise httputil.HTTPInputError( + "Only integer Content-Length is allowed: %s" % headers["Content-Length"]) if content_length > self._max_body_size: raise httputil.HTTPInputError("Content-Length too long") @@ -550,7 +565,7 @@ class HTTP1Connection(httputil.HTTPConnection): if content_length is not None: return self._read_fixed_body(content_length, delegate) - if headers.get("Transfer-Encoding") == "chunked": + if headers.get("Transfer-Encoding", "").lower() == "chunked": return self._read_chunked_body(delegate) if self.is_client: return self._read_body_until_close(delegate) @@ -711,9 +726,8 @@ class HTTP1ServerConnection(object): # This exception was already logged. conn.close() return - except Exception as e: - if 1 != e.errno: - gen_log.error("Uncaught exception", exc_info=True) + except Exception: + gen_log.error("Uncaught exception", exc_info=True) conn.close() return if not ret: diff --git a/lib/tornado/httpclient.py b/lib/tornado/httpclient.py index 25b17d03..13f81e2f 100644 --- a/lib/tornado/httpclient.py +++ b/lib/tornado/httpclient.py @@ -25,7 +25,7 @@ to switch to ``curl_httpclient`` for reasons such as the following: Note that if you are using ``curl_httpclient``, it is highly recommended that you use a recent version of ``libcurl`` and ``pycurl``. Currently the minimum supported version of libcurl is -7.21.1, and the minimum version of pycurl is 7.18.2. It is highly +7.22.0, and the minimum version of pycurl is 7.18.2. It is highly recommended that your ``libcurl`` installation is built with asynchronous DNS resolver (threaded or c-ares), otherwise you may encounter various problems with request timeouts (for more @@ -61,7 +61,7 @@ class HTTPClient(object): http_client = httpclient.HTTPClient() try: response = http_client.fetch("http://www.google.com/") - print response.body + print(response.body) except httpclient.HTTPError as e: # HTTPError is raised for non-200 responses; the response # can be found in e.response. @@ -108,14 +108,14 @@ class AsyncHTTPClient(Configurable): Example usage:: - def handle_request(response): + def handle_response(response): if response.error: - print "Error:", response.error + print("Error: %s" % response.error) else: - print response.body + print(response.body) http_client = AsyncHTTPClient() - http_client.fetch("http://www.google.com/", handle_request) + http_client.fetch("http://www.google.com/", handle_response) The constructor for this class is magic in several respects: It actually creates an instance of an implementation-specific @@ -227,6 +227,9 @@ class AsyncHTTPClient(Configurable): raise RuntimeError("fetch() called on closed AsyncHTTPClient") if not isinstance(request, HTTPRequest): request = HTTPRequest(url=request, **kwargs) + else: + if kwargs: + raise ValueError("kwargs can't be used if request is an HTTPRequest object") # We may modify this (to add Host, Accept-Encoding, etc), # so make sure we don't modify the caller's object. This is also # where normal dicts get converted to HTTPHeaders objects. 
@@ -307,10 +310,10 @@ class HTTPRequest(object): network_interface=None, streaming_callback=None, header_callback=None, prepare_curl_callback=None, proxy_host=None, proxy_port=None, proxy_username=None, - proxy_password=None, allow_nonstandard_methods=None, - validate_cert=None, ca_certs=None, - allow_ipv6=None, - client_key=None, client_cert=None, body_producer=None, + proxy_password=None, proxy_auth_mode=None, + allow_nonstandard_methods=None, validate_cert=None, + ca_certs=None, allow_ipv6=None, client_key=None, + client_cert=None, body_producer=None, expect_100_continue=False, decompress_response=None, ssl_options=None): r"""All parameters except ``url`` are optional. @@ -369,12 +372,14 @@ class HTTPRequest(object): a ``pycurl.Curl`` object to allow the application to make additional ``setopt`` calls. :arg string proxy_host: HTTP proxy hostname. To use proxies, - ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username`` and - ``proxy_pass`` are optional. Proxies are currently only supported - with ``curl_httpclient``. + ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username``, + ``proxy_pass`` and ``proxy_auth_mode`` are optional. Proxies are + currently only supported with ``curl_httpclient``. :arg int proxy_port: HTTP proxy port :arg string proxy_username: HTTP proxy username :arg string proxy_password: HTTP proxy password + :arg string proxy_auth_mode: HTTP proxy Authentication mode; + default is "basic". supports "basic" and "digest" :arg bool allow_nonstandard_methods: Allow unknown values for ``method`` argument? :arg bool validate_cert: For HTTPS requests, validate the server's @@ -427,6 +432,7 @@ class HTTPRequest(object): self.proxy_port = proxy_port self.proxy_username = proxy_username self.proxy_password = proxy_password + self.proxy_auth_mode = proxy_auth_mode self.url = url self.method = method self.body = body @@ -527,7 +533,7 @@ class HTTPResponse(object): * buffer: ``cStringIO`` object for response body - * body: response body as string (created on demand from ``self.buffer``) + * body: response body as bytes (created on demand from ``self.buffer``) * error: Exception object, if any @@ -569,7 +575,8 @@ class HTTPResponse(object): self.request_time = request_time self.time_info = time_info or {} - def _get_body(self): + @property + def body(self): if self.buffer is None: return None elif self._body is None: @@ -577,8 +584,6 @@ class HTTPResponse(object): return self._body - body = property(_get_body) - def rethrow(self): """If there was an error on the request, raise an `HTTPError`.""" if self.error: @@ -612,6 +617,12 @@ class HTTPError(Exception): def __str__(self): return "HTTP %d: %s" % (self.code, self.message) + # There is a cyclic reference between self and self.response, + # which breaks the default __repr__ implementation. + # (especially on pypy, which doesn't have the same recursion + # detection as cpython). + __repr__ = __str__ + class _RequestProxy(object): """Combines an object with a dictionary of defaults. 
diff --git a/lib/tornado/httputil.py b/lib/tornado/httputil.py index 471df54f..9ca840db 100644 --- a/lib/tornado/httputil.py +++ b/lib/tornado/httputil.py @@ -33,33 +33,36 @@ import time from tornado.escape import native_str, parse_qs_bytes, utf8 from tornado.log import gen_log -from tornado.util import ObjectDict +from tornado.util import ObjectDict, PY3 -try: - import Cookie # py2 -except ImportError: - import http.cookies as Cookie # py3 +if PY3: + import http.cookies as Cookie + from http.client import responses + from urllib.parse import urlencode +else: + import Cookie + from httplib import responses + from urllib import urlencode -try: - from httplib import responses # py2 -except ImportError: - from http.client import responses # py3 # responses is unused in this file, but we re-export it to other files. # Reference it so pyflakes doesn't complain. responses -try: - from urllib import urlencode # py2 -except ImportError: - from urllib.parse import urlencode # py3 - try: from ssl import SSLError except ImportError: # ssl is unavailable on app engine. - class SSLError(Exception): + class _SSLError(Exception): pass + # Hack around a mypy limitation. We can't simply put "type: ignore" + # on the class definition itself; must go through an assignment. + SSLError = _SSLError # type: ignore + +try: + import typing +except ImportError: + pass # RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line @@ -127,8 +130,8 @@ class HTTPHeaders(collections.MutableMapping): Set-Cookie: C=D """ def __init__(self, *args, **kwargs): - self._dict = {} - self._as_list = {} + self._dict = {} # type: typing.Dict[str, str] + self._as_list = {} # type: typing.Dict[str, typing.List[str]] self._last_key = None if (len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], HTTPHeaders)): @@ -142,6 +145,7 @@ class HTTPHeaders(collections.MutableMapping): # new public methods def add(self, name, value): + # type: (str, str) -> None """Adds a new value for the given key.""" norm_name = _normalized_headers[name] self._last_key = norm_name @@ -158,6 +162,7 @@ class HTTPHeaders(collections.MutableMapping): return self._as_list.get(norm_name, []) def get_all(self): + # type: () -> typing.Iterable[typing.Tuple[str, str]] """Returns an iterable of all (name, value) pairs. If a header has multiple values, multiple pairs will be @@ -206,6 +211,7 @@ class HTTPHeaders(collections.MutableMapping): self._as_list[norm_name] = [value] def __getitem__(self, name): + # type: (str) -> str return self._dict[_normalized_headers[name]] def __delitem__(self, name): @@ -228,6 +234,14 @@ class HTTPHeaders(collections.MutableMapping): # the appearance that HTTPHeaders is a single container. __copy__ = copy + def __str__(self): + lines = [] + for name, value in self.get_all(): + lines.append("%s: %s\n" % (name, value)) + return "".join(lines) + + __unicode__ = __str__ + class HTTPServerRequest(object): """A single HTTP request. 
@@ -743,7 +757,7 @@ def parse_multipart_form_data(boundary, data, arguments, files): name = disp_params["name"] if disp_params.get("filename"): ctype = headers.get("Content-Type", "application/unknown") - files.setdefault(name, []).append(HTTPFile( + files.setdefault(name, []).append(HTTPFile( # type: ignore filename=disp_params["filename"], body=value, content_type=ctype)) else: diff --git a/lib/tornado/ioloop.py b/lib/tornado/ioloop.py index c23cb33e..d6183176 100644 --- a/lib/tornado/ioloop.py +++ b/lib/tornado/ioloop.py @@ -45,20 +45,20 @@ import math from tornado.concurrent import TracebackFuture, is_future from tornado.log import app_log, gen_log +from tornado.platform.auto import set_close_exec, Waker from tornado import stack_context -from tornado.util import Configurable, errno_from_exception, timedelta_to_seconds +from tornado.util import PY3, Configurable, errno_from_exception, timedelta_to_seconds try: import signal except ImportError: signal = None -try: - import thread # py2 -except ImportError: - import _thread as thread # py3 -from tornado.platform.auto import set_close_exec, Waker +if PY3: + import _thread as thread +else: + import thread _POLL_TIMEOUT = 3600.0 @@ -172,6 +172,10 @@ class IOLoop(Configurable): This is normally not necessary as `instance()` will create an `IOLoop` on demand, but you may want to call `install` to use a custom subclass of `IOLoop`. + + When using an `IOLoop` subclass, `install` must be called prior + to creating any objects that implicitly create their own + `IOLoop` (e.g., :class:`tornado.httpclient.AsyncHTTPClient`). """ assert not IOLoop.initialized() IOLoop._instance = self @@ -612,10 +616,14 @@ class IOLoop(Configurable): # result, which should just be ignored. pass else: - self.add_future(ret, lambda f: f.result()) + self.add_future(ret, self._discard_future_result) except Exception: self.handle_callback_exception(callback) + def _discard_future_result(self, future): + """Avoid unhandled-exception warnings from spawned coroutines.""" + future.result() + def handle_callback_exception(self, callback): """This method is called whenever a callback run by the `IOLoop` throws an exception. @@ -814,8 +822,8 @@ class PollIOLoop(IOLoop): due_timeouts.append(heapq.heappop(self._timeouts)) else: break - if (self._cancellations > 512 - and self._cancellations > (len(self._timeouts) >> 1)): + if (self._cancellations > 512 and + self._cancellations > (len(self._timeouts) >> 1)): # Clean up the timeout queue when it gets large and it's # more than half cancellations. self._cancellations = 0 @@ -874,7 +882,7 @@ class PollIOLoop(IOLoop): # Pop one fd at a time from the set of pending fds and run # its handler. 
Since that handler may perform actions on # other file descriptors, there may be reentrant calls to - # this IOLoop that update self._events + # this IOLoop that modify self._events self._events.update(event_pairs) while self._events: fd, events = self._events.popitem() @@ -966,26 +974,24 @@ class _Timeout(object): """An IOLoop timeout, a UNIX timestamp and a callback""" # Reduce memory overhead when there are lots of pending callbacks - __slots__ = ['deadline', 'callback', 'tiebreaker'] + __slots__ = ['deadline', 'callback', 'tdeadline'] def __init__(self, deadline, callback, io_loop): if not isinstance(deadline, numbers.Real): raise TypeError("Unsupported deadline %r" % deadline) self.deadline = deadline self.callback = callback - self.tiebreaker = next(io_loop._timeout_counter) + self.tdeadline = (deadline, next(io_loop._timeout_counter)) # Comparison methods to sort by deadline, with object id as a tiebreaker # to guarantee a consistent ordering. The heapq module uses __le__ # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons # use __lt__). def __lt__(self, other): - return ((self.deadline, self.tiebreaker) < - (other.deadline, other.tiebreaker)) + return self.tdeadline < other.tdeadline def __le__(self, other): - return ((self.deadline, self.tiebreaker) <= - (other.deadline, other.tiebreaker)) + return self.tdeadline <= other.tdeadline class PeriodicCallback(object): @@ -1048,6 +1054,7 @@ class PeriodicCallback(object): if self._next_timeout <= current_time: callback_time_sec = self.callback_time / 1000.0 - self._next_timeout += (math.floor((current_time - self._next_timeout) / callback_time_sec) + 1) * callback_time_sec + self._next_timeout += (math.floor((current_time - self._next_timeout) / + callback_time_sec) + 1) * callback_time_sec self._timeout = self.io_loop.add_timeout(self._next_timeout, self._run) diff --git a/lib/tornado/iostream.py b/lib/tornado/iostream.py index 7f071421..bcf44414 100644 --- a/lib/tornado/iostream.py +++ b/lib/tornado/iostream.py @@ -58,7 +58,7 @@ except ImportError: _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) if hasattr(errno, "WSAEWOULDBLOCK"): - _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore # These errnos indicate that a connection has been abruptly terminated. # They should be caught and handled less noisily than other errors. @@ -66,7 +66,7 @@ _ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, errno.ETIMEDOUT) if hasattr(errno, "WSAECONNRESET"): - _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT) + _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT) # type: ignore if sys.platform == 'darwin': # OSX appears to have a race condition that causes send(2) to return @@ -74,13 +74,13 @@ if sys.platform == 'darwin': # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ # Since the socket is being closed anyway, treat this as an ECONNRESET # instead of an unexpected error. 
- _ERRNO_CONNRESET += (errno.EPROTOTYPE,) + _ERRNO_CONNRESET += (errno.EPROTOTYPE,) # type: ignore # More non-portable errnos: _ERRNO_INPROGRESS = (errno.EINPROGRESS,) if hasattr(errno, "WSAEINPROGRESS"): - _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) + _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) # type: ignore class StreamClosedError(IOError): @@ -648,8 +648,7 @@ class BaseIOStream(object): except UnsatisfiableReadError: raise except Exception as e: - if 1 != e.errno: - gen_log.warning("error on read: %s" % e) + gen_log.warning("error on read: %s" % e) self.close(exc_info=True) return if pos is not None: diff --git a/lib/tornado/locale.py b/lib/tornado/locale.py index 0a1b0770..c1cb6792 100644 --- a/lib/tornado/locale.py +++ b/lib/tornado/locale.py @@ -19,7 +19,7 @@ To load a locale and generate a translated string:: user_locale = tornado.locale.get("es_LA") - print user_locale.translate("Sign out") + print(user_locale.translate("Sign out")) `tornado.locale.get()` returns the closest matching locale, not necessarily the specific locale you requested. You can support pluralization with @@ -28,7 +28,7 @@ additional arguments to `~Locale.translate()`, e.g.:: people = [...] message = user_locale.translate( "%(list)s is online", "%(list)s are online", len(people)) - print message % {"list": user_locale.list(people)} + print(message % {"list": user_locale.list(people)}) The first string is chosen if ``len(people) == 1``, otherwise the second string is chosen. @@ -51,11 +51,12 @@ import re from tornado import escape from tornado.log import gen_log +from tornado.util import PY3 from tornado._locale_data import LOCALE_NAMES _default_locale = "en_US" -_translations = {} +_translations = {} # type: dict _supported_locales = frozenset([_default_locale]) _use_gettext = False CONTEXT_SEPARATOR = "\x04" @@ -147,11 +148,11 @@ def load_translations(directory, encoding=None): # in most cases but is common with CSV files because Excel # cannot read utf-8 files without a BOM. encoding = 'utf-8-sig' - try: + if PY3: # python 3: csv.reader requires a file open in text mode. # Force utf8 to avoid dependence on $LANG environment variable. f = open(full_path, "r", encoding=encoding) - except TypeError: + else: # python 2: csv can only handle byte strings (in ascii-compatible # encodings), which we decode below. Transcode everything into # utf8 before passing it to csv.reader. diff --git a/lib/tornado/locks.py b/lib/tornado/locks.py index abf5eade..d84a9a87 100644 --- a/lib/tornado/locks.py +++ b/lib/tornado/locks.py @@ -14,13 +14,13 @@ from __future__ import absolute_import, division, print_function, with_statement -__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] - import collections from tornado import gen, ioloop from tornado.concurrent import Future +__all__ = ['Condition', 'Event', 'Semaphore', 'BoundedSemaphore', 'Lock'] + class _TimeoutGarbageCollector(object): """Base class for objects that periodically clean up timed-out waiters. diff --git a/lib/tornado/log.py b/lib/tornado/log.py index 040889a9..ac1bb95e 100644 --- a/lib/tornado/log.py +++ b/lib/tornado/log.py @@ -38,7 +38,7 @@ from tornado.escape import _unicode from tornado.util import unicode_type, basestring_type try: - import curses + import curses # type: ignore except ImportError: curses = None @@ -77,8 +77,8 @@ class LogFormatter(logging.Formatter): * Robust against str/bytes encoding problems. This formatter is enabled automatically by - `tornado.options.parse_command_line` (unless ``--logging=none`` is - used). 
+ `tornado.options.parse_command_line` or `tornado.options.parse_config_file` + (unless ``--logging=none`` is used). """ DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s' DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S' @@ -183,7 +183,8 @@ def enable_pretty_logging(options=None, logger=None): and `tornado.options.parse_config_file`. """ if options is None: - from tornado.options import options + import tornado.options + options = tornado.options.options if options.logging is None or options.logging.lower() == 'none': return if logger is None: @@ -228,7 +229,8 @@ def define_logging_options(options=None): """ if options is None: # late import to prevent cycle - from tornado.options import options + import tornado.options + options = tornado.options.options options.define("logging", default="info", help=("Set the Python log level. If 'none', tornado won't touch the " "logging configuration."), diff --git a/lib/tornado/netutil.py b/lib/tornado/netutil.py index 3c2b6164..7bf93213 100644 --- a/lib/tornado/netutil.py +++ b/lib/tornado/netutil.py @@ -27,7 +27,7 @@ import stat from tornado.concurrent import dummy_executor, run_on_executor from tornado.ioloop import IOLoop from tornado.platform.auto import set_close_exec -from tornado.util import Configurable, errno_from_exception +from tornado.util import PY3, Configurable, errno_from_exception try: import ssl @@ -44,20 +44,18 @@ except ImportError: else: raise -try: - xrange # py2 -except NameError: - xrange = range # py3 +if PY3: + xrange = range if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+ ssl_match_hostname = ssl.match_hostname SSLCertificateError = ssl.CertificateError elif ssl is None: - ssl_match_hostname = SSLCertificateError = None + ssl_match_hostname = SSLCertificateError = None # type: ignore else: import backports.ssl_match_hostname ssl_match_hostname = backports.ssl_match_hostname.match_hostname - SSLCertificateError = backports.ssl_match_hostname.CertificateError + SSLCertificateError = backports.ssl_match_hostname.CertificateError # type: ignore if hasattr(ssl, 'SSLContext'): if hasattr(ssl, 'create_default_context'): @@ -104,7 +102,7 @@ u'foo'.encode('idna') _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) if hasattr(errno, "WSAEWOULDBLOCK"): - _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) + _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore # Default backlog used when calling sock.listen() _DEFAULT_BACKLOG = 128 @@ -131,7 +129,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, ``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like ``socket.AI_PASSIVE | socket.AI_NUMERICHOST``. - ``resuse_port`` option sets ``SO_REUSEPORT`` option for every socket + ``reuse_port`` option sets ``SO_REUSEPORT`` option for every socket in the list. If your platform doesn't support this option ValueError will be raised. """ @@ -334,6 +332,11 @@ class Resolver(Configurable): port)`` pair for IPv4; additional fields may be present for IPv6). If a ``callback`` is passed, it will be run with the result as an argument when it is complete. + + :raises IOError: if the address cannot be resolved. + + .. versionchanged:: 4.4 + Standardized all implementations to raise `IOError`. """ raise NotImplementedError() @@ -413,8 +416,8 @@ class ThreadedResolver(ExecutorResolver): All ``ThreadedResolvers`` share a single thread pool, whose size is set by the first one to be created. 
""" - _threadpool = None - _threadpool_pid = None + _threadpool = None # type: ignore + _threadpool_pid = None # type: int def initialize(self, io_loop=None, num_threads=10): threadpool = ThreadedResolver._create_threadpool(num_threads) @@ -518,4 +521,4 @@ def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs): else: return context.wrap_socket(socket, **kwargs) else: - return ssl.wrap_socket(socket, **dict(context, **kwargs)) + return ssl.wrap_socket(socket, **dict(context, **kwargs)) # type: ignore diff --git a/lib/tornado/options.py b/lib/tornado/options.py index 40169fb8..2fbb32ad 100644 --- a/lib/tornado/options.py +++ b/lib/tornado/options.py @@ -43,8 +43,8 @@ either:: .. note: - When using tornado.options.parse_command_line or - tornado.options.parse_config_file, the only options that are set are + When using tornado.options.parse_command_line or + tornado.options.parse_config_file, the only options that are set are ones that were previously defined with tornado.options.define. Command line formats are what you would expect (``--myoption=myvalue``). @@ -308,8 +308,12 @@ class OptionParser(object): .. versionchanged:: 4.1 Config files are now always interpreted as utf-8 instead of the system default encoding. + + .. versionchanged:: 4.4 + The special variable ``__file__`` is available inside config + files, specifying the absolute path to the config file itself. """ - config = {} + config = {'__file__': os.path.abspath(path)} with open(path, 'rb') as f: exec_in(native_str(f.read()), config, config) for name in config: diff --git a/lib/tornado/platform/asyncio.py b/lib/tornado/platform/asyncio.py index bf0428ec..9556da61 100644 --- a/lib/tornado/platform/asyncio.py +++ b/lib/tornado/platform/asyncio.py @@ -14,9 +14,9 @@ loops. .. note:: - Tornado requires the `~asyncio.BaseEventLoop.add_reader` family of methods, - so it is not compatible with the `~asyncio.ProactorEventLoop` on Windows. - Use the `~asyncio.SelectorEventLoop` instead. + Tornado requires the `~asyncio.AbstractEventLoop.add_reader` family of + methods, so it is not compatible with the `~asyncio.ProactorEventLoop` on + Windows. Use the `~asyncio.SelectorEventLoop` instead. """ from __future__ import absolute_import, division, print_function, with_statement @@ -30,11 +30,11 @@ from tornado import stack_context try: # Import the real asyncio module for py33+ first. Older versions of the # trollius backport also use this name. - import asyncio + import asyncio # type: ignore except ImportError as e: # Asyncio itself isn't available; see if trollius is (backport to py26+). try: - import trollius as asyncio + import trollius as asyncio # type: ignore except ImportError: # Re-raise the original asyncio error, not the trollius one. raise e @@ -141,6 +141,8 @@ class BaseAsyncIOLoop(IOLoop): def add_callback(self, callback, *args, **kwargs): if self.closing: + # TODO: this is racy; we need a lock to ensure that the + # loop isn't closed during call_soon_threadsafe. raise RuntimeError("IOLoop is closing") self.asyncio_loop.call_soon_threadsafe( self._run_callback, @@ -158,6 +160,9 @@ class AsyncIOMainLoop(BaseAsyncIOLoop): import asyncio AsyncIOMainLoop().install() asyncio.get_event_loop().run_forever() + + See also :meth:`tornado.ioloop.IOLoop.install` for general notes on + installing alternative IOLoops. 
""" def initialize(self, **kwargs): super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), @@ -213,4 +218,4 @@ def to_asyncio_future(tornado_future): return af if hasattr(convert_yielded, 'register'): - convert_yielded.register(asyncio.Future, to_tornado_future) + convert_yielded.register(asyncio.Future, to_tornado_future) # type: ignore diff --git a/lib/tornado/platform/caresresolver.py b/lib/tornado/platform/caresresolver.py index 5559614f..4205de30 100644 --- a/lib/tornado/platform/caresresolver.py +++ b/lib/tornado/platform/caresresolver.py @@ -1,5 +1,5 @@ from __future__ import absolute_import, division, print_function, with_statement -import pycares +import pycares # type: ignore import socket from tornado import gen @@ -61,8 +61,8 @@ class CaresResolver(Resolver): assert not callback_args.kwargs result, error = callback_args.args if error: - raise Exception('C-Ares returned error %s: %s while resolving %s' % - (error, pycares.errno.strerror(error), host)) + raise IOError('C-Ares returned error %s: %s while resolving %s' % + (error, pycares.errno.strerror(error), host)) addresses = result.addresses addrinfo = [] for address in addresses: @@ -73,7 +73,7 @@ class CaresResolver(Resolver): else: address_family = socket.AF_UNSPEC if family != socket.AF_UNSPEC and family != address_family: - raise Exception('Requested socket family %d but got %d' % - (family, address_family)) + raise IOError('Requested socket family %d but got %d' % + (family, address_family)) addrinfo.append((address_family, (address, port))) raise gen.Return(addrinfo) diff --git a/lib/tornado/platform/interface.py b/lib/tornado/platform/interface.py index 07da6bab..cc062391 100644 --- a/lib/tornado/platform/interface.py +++ b/lib/tornado/platform/interface.py @@ -61,3 +61,6 @@ class Waker(object): def close(self): """Closes the waker's file descriptor(s).""" raise NotImplementedError() + +def monotonic_time(): + raise NotImplementedError() diff --git a/lib/tornado/platform/twisted.py b/lib/tornado/platform/twisted.py index d3a4e75d..92157c7c 100644 --- a/lib/tornado/platform/twisted.py +++ b/lib/tornado/platform/twisted.py @@ -29,19 +29,18 @@ import numbers import socket import sys -import twisted.internet.abstract -from twisted.internet.defer import Deferred -from twisted.internet.posixbase import PosixReactorBase -from twisted.internet.interfaces import \ - IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor -from twisted.python import failure, log -from twisted.internet import error -import twisted.names.cache -import twisted.names.client -import twisted.names.hosts -import twisted.names.resolve +import twisted.internet.abstract # type: ignore +from twisted.internet.defer import Deferred # type: ignore +from twisted.internet.posixbase import PosixReactorBase # type: ignore +from twisted.internet.interfaces import IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor # type: ignore +from twisted.python import failure, log # type: ignore +from twisted.internet import error # type: ignore +import twisted.names.cache # type: ignore +import twisted.names.client # type: ignore +import twisted.names.hosts # type: ignore +import twisted.names.resolve # type: ignore -from zope.interface import implementer +from zope.interface import implementer # type: ignore from tornado.concurrent import Future from tornado.escape import utf8 @@ -354,7 +353,7 @@ def install(io_loop=None): if not io_loop: io_loop = tornado.ioloop.IOLoop.current() reactor = TornadoReactor(io_loop) - 
from twisted.internet.main import installReactor + from twisted.internet.main import installReactor # type: ignore installReactor(reactor) return reactor @@ -408,11 +407,14 @@ class TwistedIOLoop(tornado.ioloop.IOLoop): Not compatible with `tornado.process.Subprocess.set_exit_callback` because the ``SIGCHLD`` handlers used by Tornado and Twisted conflict with each other. + + See also :meth:`tornado.ioloop.IOLoop.install` for general notes on + installing alternative IOLoops. """ def initialize(self, reactor=None, **kwargs): super(TwistedIOLoop, self).initialize(**kwargs) if reactor is None: - import twisted.internet.reactor + import twisted.internet.reactor # type: ignore reactor = twisted.internet.reactor self.reactor = reactor self.fds = {} @@ -554,7 +556,10 @@ class TwistedResolver(Resolver): deferred = self.resolver.getHostByName(utf8(host)) resolved = yield gen.Task(deferred.addBoth) if isinstance(resolved, failure.Failure): - resolved.raiseException() + try: + resolved.raiseException() + except twisted.names.error.DomainError as e: + raise IOError(e) elif twisted.internet.abstract.isIPAddress(resolved): resolved_family = socket.AF_INET elif twisted.internet.abstract.isIPv6Address(resolved): @@ -570,7 +575,7 @@ class TwistedResolver(Resolver): raise gen.Return(result) if hasattr(gen.convert_yielded, 'register'): - @gen.convert_yielded.register(Deferred) + @gen.convert_yielded.register(Deferred) # type: ignore def _(d): f = Future() diff --git a/lib/tornado/platform/windows.py b/lib/tornado/platform/windows.py index 817bdca1..9a319f27 100644 --- a/lib/tornado/platform/windows.py +++ b/lib/tornado/platform/windows.py @@ -3,8 +3,8 @@ from __future__ import absolute_import, division, print_function, with_statement -import ctypes -import ctypes.wintypes +import ctypes # type: ignore +import ctypes.wintypes # type: ignore # See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation @@ -17,4 +17,4 @@ HANDLE_FLAG_INHERIT = 0x00000001 def set_close_exec(fd): success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0) if not success: - raise ctypes.GetLastError() + raise ctypes.WinError() diff --git a/lib/tornado/process.py b/lib/tornado/process.py index daa9677b..df61eba6 100644 --- a/lib/tornado/process.py +++ b/lib/tornado/process.py @@ -35,7 +35,7 @@ from tornado.iostream import PipeIOStream from tornado.log import gen_log from tornado.platform.auto import set_close_exec from tornado import stack_context -from tornado.util import errno_from_exception +from tornado.util import errno_from_exception, PY3 try: import multiprocessing @@ -43,11 +43,8 @@ except ImportError: # Multiprocessing is not available on Google App Engine. multiprocessing = None -try: - long # py2 -except NameError: - long = int # py3 - +if PY3: + long = int # Re-export this exception for convenience. try: @@ -147,6 +144,7 @@ def fork_processes(num_processes, max_restarts=100): else: children[pid] = i return None + for i in range(num_processes): id = start_child(i) if id is not None: @@ -204,13 +202,19 @@ class Subprocess(object): attribute of the resulting Subprocess a `.PipeIOStream`. * A new keyword argument ``io_loop`` may be used to pass in an IOLoop. + The ``Subprocess.STREAM`` option and the ``set_exit_callback`` and + ``wait_for_exit`` methods do not work on Windows. There is + therefore no reason to use this class instead of + ``subprocess.Popen`` on that platform. + .. versionchanged:: 4.1 The ``io_loop`` argument is deprecated. 
+ """ STREAM = object() _initialized = False - _waiting = {} + _waiting = {} # type: ignore def __init__(self, *args, **kwargs): self.io_loop = kwargs.pop('io_loop', None) or ioloop.IOLoop.current() diff --git a/lib/tornado/queues.py b/lib/tornado/queues.py index 129b204e..b8e9b569 100644 --- a/lib/tornado/queues.py +++ b/lib/tornado/queues.py @@ -14,8 +14,6 @@ from __future__ import absolute_import, division, print_function, with_statement -__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] - import collections import heapq @@ -23,6 +21,8 @@ from tornado import gen, ioloop from tornado.concurrent import Future from tornado.locks import Event +__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] + class QueueEmpty(Exception): """Raised by `.Queue.get_nowait` when the queue has no items.""" diff --git a/lib/tornado/simple_httpclient.py b/lib/tornado/simple_httpclient.py index 37b0bc27..adcf38b2 100644 --- a/lib/tornado/simple_httpclient.py +++ b/lib/tornado/simple_httpclient.py @@ -11,6 +11,7 @@ from tornado.netutil import Resolver, OverrideResolver, _client_ssl_defaults from tornado.log import gen_log from tornado import stack_context from tornado.tcpclient import TCPClient +from tornado.util import PY3 import base64 import collections @@ -22,10 +23,10 @@ import sys from io import BytesIO -try: - import urlparse # py2 -except ImportError: - import urllib.parse as urlparse # py3 +if PY3: + import urllib.parse as urlparse +else: + import urlparse try: import ssl @@ -126,7 +127,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): timeout_handle = self.io_loop.add_timeout( self.io_loop.time() + min(request.connect_timeout, request.request_timeout), - functools.partial(self._on_timeout, key)) + functools.partial(self._on_timeout, key, "in request queue")) else: timeout_handle = None self.waiting[key] = (request, callback, timeout_handle) @@ -167,11 +168,20 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): self.io_loop.remove_timeout(timeout_handle) del self.waiting[key] - def _on_timeout(self, key): + def _on_timeout(self, key, info=None): + """Timeout callback of request. + + Construct a timeout HTTPResponse when a timeout occurs. + + :arg object key: A simple object to mark the request. + :info string key: More detailed timeout information. + """ request, callback, timeout_handle = self.waiting[key] self.queue.remove((key, request, callback)) + + error_message = "Timeout {0}".format(info) if info else "Timeout" timeout_response = HTTPResponse( - request, 599, error=HTTPError(599, "Timeout"), + request, 599, error=HTTPError(599, error_message), request_time=self.io_loop.time() - request.start_time) self.io_loop.add_callback(callback, timeout_response) del self.waiting[key] @@ -229,7 +239,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): if timeout: self._timeout = self.io_loop.add_timeout( self.start_time + timeout, - stack_context.wrap(self._on_timeout)) + stack_context.wrap(functools.partial(self._on_timeout, "while connecting"))) self.tcp_client.connect(host, port, af=af, ssl_options=ssl_options, max_buffer_size=self.max_buffer_size, @@ -284,10 +294,17 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): return ssl_options return None - def _on_timeout(self): + def _on_timeout(self, info=None): + """Timeout callback of _HTTPConnection instance. + + Raise a timeout HTTPError when a timeout occurs. + + :info string key: More detailed timeout information. 
+ """ self._timeout = None + error_message = "Timeout {0}".format(info) if info else "Timeout" if self.final_callback is not None: - raise HTTPError(599, "Timeout") + raise HTTPError(599, error_message) def _remove_timeout(self): if self._timeout is not None: @@ -307,13 +324,14 @@ class _HTTPConnection(httputil.HTTPMessageDelegate): if self.request.request_timeout: self._timeout = self.io_loop.add_timeout( self.start_time + self.request.request_timeout, - stack_context.wrap(self._on_timeout)) + stack_context.wrap(functools.partial(self._on_timeout, "during request"))) if (self.request.method not in self._SUPPORTED_METHODS and not self.request.allow_nonstandard_methods): raise KeyError("unknown method %s" % self.request.method) for key in ('network_interface', 'proxy_host', 'proxy_port', - 'proxy_username', 'proxy_password'): + 'proxy_username', 'proxy_password', + 'proxy_auth_mode'): if getattr(self.request, key, None): raise NotImplementedError('%s not supported' % key) if "Connection" not in self.request.headers: diff --git a/lib/tornado/tcpclient.py b/lib/tornado/tcpclient.py index f594d91b..11146860 100644 --- a/lib/tornado/tcpclient.py +++ b/lib/tornado/tcpclient.py @@ -177,7 +177,13 @@ class TCPClient(object): def _create_stream(self, max_buffer_size, af, addr): # Always connect in plaintext; we'll convert to ssl if necessary # after one connection has completed. - stream = IOStream(socket.socket(af), - io_loop=self.io_loop, - max_buffer_size=max_buffer_size) - return stream.connect(addr) + try: + stream = IOStream(socket.socket(af), + io_loop=self.io_loop, + max_buffer_size=max_buffer_size) + except socket.error as e: + fu = Future() + fu.set_exception(e) + return fu + else: + return stream.connect(addr) diff --git a/lib/tornado/tcpserver.py b/lib/tornado/tcpserver.py index 2fe4cc9c..54837f7a 100644 --- a/lib/tornado/tcpserver.py +++ b/lib/tornado/tcpserver.py @@ -39,7 +39,21 @@ class TCPServer(object): r"""A non-blocking, single-threaded TCP server. To use `TCPServer`, define a subclass which overrides the `handle_stream` - method. + method. For example, a simple echo server could be defined like this:: + + from tornado.tcpserver import TCPServer + from tornado.iostream import StreamClosedError + from tornado import gen + + class EchoServer(TCPServer): + @gen.coroutine + def handle_stream(self, stream, address): + while True: + try: + data = yield stream.read_until(b"\n") + yield stream.write(data) + except StreamClosedError: + break To make this server serve SSL traffic, send the ``ssl_options`` keyword argument with an `ssl.SSLContext` object. For compatibility with older @@ -147,7 +161,8 @@ class TCPServer(object): """Singular version of `add_sockets`. Takes a single socket object.""" self.add_sockets([socket]) - def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128, reuse_port=False): + def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128, + reuse_port=False): """Binds this server to the given port on the given address. To start the server, call `start`. If you want to run this server @@ -162,10 +177,14 @@ class TCPServer(object): both will be used if available. The ``backlog`` argument has the same meaning as for - `socket.listen `. + `socket.listen `. The ``reuse_port`` argument + has the same meaning as for `.bind_sockets`. This method may be called multiple times prior to `start` to listen on multiple ports or interfaces. + + .. versionchanged:: 4.4 + Added the ``reuse_port`` argument. 
""" sockets = bind_sockets(port, address=address, family=family, backlog=backlog, reuse_port=reuse_port) diff --git a/lib/tornado/template.py b/lib/tornado/template.py index fa588991..67c61e6b 100644 --- a/lib/tornado/template.py +++ b/lib/tornado/template.py @@ -19,13 +19,13 @@ Basic usage looks like:: t = template.Template("{{ myvalue }}") - print t.generate(myvalue="XXX") + print(t.generate(myvalue="XXX")) `Loader` is a class that loads templates from a root directory and caches the compiled templates:: loader = template.Loader("/home/btaylor") - print loader.load("test.html").generate(myvalue="XXX") + print(loader.load("test.html").generate(myvalue="XXX")) We compile all templates to raw Python. Error-reporting is currently... uh, interesting. Syntax for the templates:: @@ -94,12 +94,15 @@ Syntax Reference Template expressions are surrounded by double curly braces: ``{{ ... }}``. The contents may be any python expression, which will be escaped according to the current autoescape setting and inserted into the output. Other -template directives use ``{% %}``. These tags may be escaped as ``{{!`` -and ``{%!`` if you need to include a literal ``{{`` or ``{%`` in the output. +template directives use ``{% %}``. To comment out a section so that it is omitted from the output, surround it with ``{# ... #}``. +These tags may be escaped as ``{{!``, ``{%!``, and ``{#!`` +if you need to include a literal ``{{``, ``{%``, or ``{#`` in the output. + + ``{% apply *function* %}...{% end %}`` Applies a function to the output of all template code between ``apply`` and ``end``:: @@ -204,12 +207,12 @@ import threading from tornado import escape from tornado.log import app_log -from tornado.util import ObjectDict, exec_in, unicode_type +from tornado.util import ObjectDict, exec_in, unicode_type, PY3 -try: - from cStringIO import StringIO # py2 -except ImportError: - from io import StringIO # py3 +if PY3: + from io import StringIO +else: + from cStringIO import StringIO _DEFAULT_AUTOESCAPE = "xhtml_escape" _UNSET = object() @@ -665,7 +668,7 @@ class ParseError(Exception): .. versionchanged:: 4.3 Added ``filename`` and ``lineno`` attributes. """ - def __init__(self, message, filename, lineno): + def __init__(self, message, filename=None, lineno=0): self.message = message # The names "filename" and "lineno" are chosen for consistency # with python SyntaxError. diff --git a/lib/tornado/testing.py b/lib/tornado/testing.py index 119234d0..902bfdfc 100644 --- a/lib/tornado/testing.py +++ b/lib/tornado/testing.py @@ -2,7 +2,7 @@ """Support classes for automated testing. * `AsyncTestCase` and `AsyncHTTPTestCase`: Subclasses of unittest.TestCase - with additional support for testing asynchronous (`.IOLoop` based) code. + with additional support for testing asynchronous (`.IOLoop`-based) code. * `ExpectLog` and `LogTrapTestCase`: Make test logs less spammy. @@ -23,16 +23,16 @@ try: except ImportError: # These modules are not importable on app engine. Parts of this module # won't work, but e.g. LogTrapTestCase and main() will. 
- AsyncHTTPClient = None - gen = None - HTTPServer = None - IOLoop = None - netutil = None - SimpleAsyncHTTPClient = None - Subprocess = None + AsyncHTTPClient = None # type: ignore + gen = None # type: ignore + HTTPServer = None # type: ignore + IOLoop = None # type: ignore + netutil = None # type: ignore + SimpleAsyncHTTPClient = None # type: ignore + Subprocess = None # type: ignore from tornado.log import gen_log, app_log from tornado.stack_context import ExceptionStackContext -from tornado.util import raise_exc_info, basestring_type +from tornado.util import raise_exc_info, basestring_type, PY3 import functools import inspect import logging @@ -42,19 +42,19 @@ import signal import socket import sys -try: - from cStringIO import StringIO # py2 -except ImportError: - from io import StringIO # py3 +if PY3: + from io import StringIO +else: + from cStringIO import StringIO try: - from collections.abc import Generator as GeneratorType # py35+ + from collections.abc import Generator as GeneratorType # type: ignore except ImportError: - from types import GeneratorType + from types import GeneratorType # type: ignore if sys.version_info >= (3, 5): - iscoroutine = inspect.iscoroutine - iscoroutinefunction = inspect.iscoroutinefunction + iscoroutine = inspect.iscoroutine # type: ignore + iscoroutinefunction = inspect.iscoroutinefunction # type: ignore else: iscoroutine = iscoroutinefunction = lambda f: False @@ -62,16 +62,16 @@ else: # (either py27+ or unittest2) so tornado.test.util enforces # this requirement, but for other users of tornado.testing we want # to allow the older version if unitest2 is not available. -if sys.version_info >= (3,): +if PY3: # On python 3, mixing unittest2 and unittest (including doctest) # doesn't seem to work, so always use unittest. import unittest else: # On python 2, prefer unittest2 when available. try: - import unittest2 as unittest + import unittest2 as unittest # type: ignore except ImportError: - import unittest + import unittest # type: ignore _next_port = 10000 @@ -96,9 +96,13 @@ def bind_unused_port(reuse_port=False): """Binds a server socket to an available port on localhost. Returns a tuple (socket, port). + + .. versionchanged:: 4.4 + Always binds to ``127.0.0.1`` without resolving the name + ``localhost``. """ sock = netutil.bind_sockets(None, '127.0.0.1', family=socket.AF_INET, - reuse_port=reuse_port)[0] + reuse_port=reuse_port)[0] port = sock.getsockname()[1] return sock, port @@ -123,7 +127,7 @@ class _TestMethodWrapper(object): method yields it must use a decorator to consume the generator), but will also detect other kinds of return values (these are not necessarily errors, but we alert anyway since there is no good - reason to return a value from a test. + reason to return a value from a test). """ def __init__(self, orig_method): self.orig_method = orig_method @@ -208,8 +212,8 @@ class AsyncTestCase(unittest.TestCase): self.assertIn("FriendFeed", response.body) self.stop() """ - def __init__(self, methodName='runTest', **kwargs): - super(AsyncTestCase, self).__init__(methodName, **kwargs) + def __init__(self, methodName='runTest'): + super(AsyncTestCase, self).__init__(methodName) self.__stopped = False self.__running = False self.__failure = None @@ -547,7 +551,7 @@ def gen_test(func=None, timeout=None): # Without this attribute, nosetests will try to run gen_test as a test # anywhere it is imported. 
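
For the testing changes above, a sketch of the typical usage they support: an ``AsyncHTTPTestCase`` whose test method is a coroutine driven by ``@gen_test`` (the handler, URL and timeout value are invented for illustration)::

    from tornado.testing import AsyncHTTPTestCase, gen_test
    from tornado.web import Application, RequestHandler

    class HelloHandler(RequestHandler):
        def get(self):
            self.write("hello")

    class HelloTest(AsyncHTTPTestCase):
        def get_app(self):
            return Application([(r"/", HelloHandler)])

        @gen_test(timeout=10)
        def test_hello(self):
            response = yield self.http_client.fetch(self.get_url("/"))
            self.assertEqual(response.body, b"hello")
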
-gen_test.__test__ = False +gen_test.__test__ = False # type: ignore class LogTrapTestCase(unittest.TestCase): @@ -617,7 +621,7 @@ class ExpectLog(logging.Filter): an empty string to watch the root logger. :param regex: Regular expression to match. Any log entries on the specified logger that match this regex will be suppressed. - :param required: If true, an exeption will be raised if the end of + :param required: If true, an exception will be raised if the end of the ``with`` statement is reached without matching any log entries. """ if isinstance(logger, basestring_type): diff --git a/lib/tornado/util.py b/lib/tornado/util.py index 5e083961..28e74e7d 100644 --- a/lib/tornado/util.py +++ b/lib/tornado/util.py @@ -14,33 +14,70 @@ from __future__ import absolute_import, division, print_function, with_statement import array import os +import re import sys import zlib +PY3 = sys.version_info >= (3,) -try: - xrange # py2 -except NameError: - xrange = range # py3 +if PY3: + xrange = range # inspect.getargspec() raises DeprecationWarnings in Python 3.5. # The two functions have compatible interfaces for the parts we need. +if PY3: + from inspect import getfullargspec as getargspec +else: + from inspect import getargspec + +# Aliases for types that are spelled differently in different Python +# versions. bytes_type is deprecated and no longer used in Tornado +# itself but is left in case anyone outside Tornado is using it. +bytes_type = bytes +if PY3: + unicode_type = str + basestring_type = str +else: + # The names unicode and basestring don't exist in py3 so silence flake8. + unicode_type = unicode # noqa + basestring_type = basestring # noqa + + try: - from inspect import getfullargspec as getargspec # py3 + import typing # noqa + from typing import cast + + _ObjectDictBase = typing.Dict[str, typing.Any] except ImportError: - from inspect import getargspec # py2 + _ObjectDictBase = dict + + def cast(typ, x): + return x +else: + # More imports that are only needed in type comments. + import datetime # noqa + import types # noqa + from typing import Any, AnyStr, Union, Optional, Dict, Mapping # noqa + from typing import Tuple, Match, Callable # noqa + + if PY3: + _BaseString = str + else: + _BaseString = Union[bytes, unicode_type] -class ObjectDict(dict): +class ObjectDict(_ObjectDictBase): """Makes a dictionary behave like an object, with attribute-style access. """ def __getattr__(self, name): + # type: (str) -> Any try: return self[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, value): + # type: (str, Any) -> None self[name] = value @@ -57,6 +94,7 @@ class GzipDecompressor(object): self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS) def decompress(self, value, max_length=None): + # type: (bytes, Optional[int]) -> bytes """Decompress a chunk, returning newly-available data. Some data may be buffered for later processing; `flush` must @@ -71,11 +109,13 @@ class GzipDecompressor(object): @property def unconsumed_tail(self): + # type: () -> bytes """Returns the unconsumed portion left over """ return self.decompressobj.unconsumed_tail def flush(self): + # type: () -> bytes """Return any remaining buffered data not yet returned by decompress. Also checks for errors such as truncated input. @@ -84,17 +124,8 @@ class GzipDecompressor(object): return self.decompressobj.flush() -if not isinstance(b'', type('')): - unicode_type = str - basestring_type = str -else: - # These names don't exist in py3, so use noqa comments to disable - # warnings in flake8. 
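
``ObjectDict``, now declared above as a ``Dict[str, Any]`` subclass, still behaves as before; a quick sketch of the attribute-style access it provides (the keys are arbitrary examples)::

    from tornado.util import ObjectDict

    conf = ObjectDict(host="localhost", port=8888)
    conf.debug = True                 # attribute writes store plain dict items
    assert conf["debug"] is True
    assert conf.port == 8888
    # Missing keys raise AttributeError (not KeyError):
    #   conf.missing  ->  AttributeError: missing
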
- unicode_type = unicode # noqa - basestring_type = basestring # noqa - - def import_object(name): + # type: (_BaseString) -> Any """Imports an object by name. import_object('x') is equivalent to 'import x'. @@ -112,8 +143,8 @@ def import_object(name): ... ImportError: No module named missing_module """ - if isinstance(name, unicode_type) and str is not unicode_type: - # On python 2 a byte string is required. + if not isinstance(name, str): + # on python 2 a byte string is required. name = name.encode('utf-8') if name.count('.') == 0: return __import__(name, None, None) @@ -126,35 +157,35 @@ def import_object(name): raise ImportError("No module named %s" % parts[-1]) -# Deprecated alias that was used before we dropped py25 support. -# Left here in case anyone outside Tornado is using it. -bytes_type = bytes +# Stubs to make mypy happy (and later for actual type-checking). +def raise_exc_info(exc_info): + # type: (Tuple[type, BaseException, types.TracebackType]) -> None + pass -if sys.version_info > (3,): + +def exec_in(code, glob, loc=None): + # type: (Any, Dict[str, Any], Optional[Mapping[str, Any]]) -> Any + if isinstance(code, basestring_type): + # exec(string) inherits the caller's future imports; compile + # the string first to prevent that. + code = compile(code, '', 'exec', dont_inherit=True) + exec(code, glob, loc) + + +if PY3: exec(""" def raise_exc_info(exc_info): raise exc_info[1].with_traceback(exc_info[2]) - -def exec_in(code, glob, loc=None): - if isinstance(code, str): - code = compile(code, '', 'exec', dont_inherit=True) - exec(code, glob, loc) """) else: exec(""" def raise_exc_info(exc_info): raise exc_info[0], exc_info[1], exc_info[2] - -def exec_in(code, glob, loc=None): - if isinstance(code, basestring): - # exec(string) inherits the caller's future imports; compile - # the string first to prevent that. - code = compile(code, '', 'exec', dont_inherit=True) - exec code in glob, loc """) def errno_from_exception(e): + # type: (BaseException) -> Optional[int] """Provides the errno from an Exception object. There are cases that the errno attribute was not set so we pull @@ -165,13 +196,40 @@ def errno_from_exception(e): """ if hasattr(e, 'errno'): - return e.errno + return e.errno # type: ignore elif e.args: return e.args[0] else: return None +_alphanum = frozenset( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") + + +def _re_unescape_replacement(match): + # type: (Match[str]) -> str + group = match.group(1) + if group[0] in _alphanum: + raise ValueError("cannot unescape '\\\\%s'" % group[0]) + return group + +_re_unescape_pattern = re.compile(r'\\(.)', re.DOTALL) + + +def re_unescape(s): + # type: (str) -> str + """Unescape a string escaped by `re.escape`. + + May raise ``ValueError`` for regular expressions which could not + have been produced by `re.escape` (for example, strings containing + ``\d`` cannot be unescaped). + + .. versionadded:: 4.4 + """ + return _re_unescape_pattern.sub(_re_unescape_replacement, s) + + class Configurable(object): """Base class for configurable interfaces. @@ -192,8 +250,8 @@ class Configurable(object): `configurable_base` and `configurable_default`, and use the instance method `initialize` instead of ``__init__``. 
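
The new ``re_unescape`` helper above inverts ``re.escape`` and rejects input that ``re.escape`` could not have produced. A short sketch::

    import re
    from tornado.util import re_unescape

    original = "http://example.com/?q=a+b"
    assert re_unescape(re.escape(original)) == original

    try:
        re_unescape(r"\d")            # not something re.escape could emit
    except ValueError:
        pass
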
""" - __impl_class = None - __impl_kwargs = None + __impl_class = None # type: type + __impl_kwargs = None # type: Dict[str, Any] def __new__(cls, *args, **kwargs): base = cls.configurable_base() @@ -214,6 +272,9 @@ class Configurable(object): @classmethod def configurable_base(cls): + # type: () -> Any + # TODO: This class needs https://github.com/python/typing/issues/107 + # to be fully typeable. """Returns the base class of a configurable hierarchy. This will normally return the class in which it is defined. @@ -223,10 +284,12 @@ class Configurable(object): @classmethod def configurable_default(cls): + # type: () -> type """Returns the implementation class to be used if none is configured.""" raise NotImplementedError() def initialize(self): + # type: () -> None """Initialize a `Configurable` subclass instance. Configurable classes should use `initialize` instead of ``__init__``. @@ -237,6 +300,7 @@ class Configurable(object): @classmethod def configure(cls, impl, **kwargs): + # type: (Any, **Any) -> None """Sets the class to use when the base class is instantiated. Keyword arguments will be saved and added to the arguments passed @@ -244,7 +308,7 @@ class Configurable(object): some parameters. """ base = cls.configurable_base() - if isinstance(impl, (unicode_type, bytes)): + if isinstance(impl, (str, unicode_type)): impl = import_object(impl) if impl is not None and not issubclass(impl, cls): raise ValueError("Invalid subclass of %s" % cls) @@ -253,6 +317,7 @@ class Configurable(object): @classmethod def configured_class(cls): + # type: () -> type """Returns the currently configured class.""" base = cls.configurable_base() if cls.__impl_class is None: @@ -261,11 +326,13 @@ class Configurable(object): @classmethod def _save_configuration(cls): + # type: () -> Tuple[type, Dict[str, Any]] base = cls.configurable_base() return (base.__impl_class, base.__impl_kwargs) @classmethod def _restore_configuration(cls, saved): + # type: (Tuple[type, Dict[str, Any]]) -> None base = cls.configurable_base() base.__impl_class = saved[0] base.__impl_kwargs = saved[1] @@ -279,6 +346,7 @@ class ArgReplacer(object): and similar wrappers. """ def __init__(self, func, name): + # type: (Callable, str) -> None self.name = name try: self.arg_pos = self._getargnames(func).index(name) @@ -287,6 +355,7 @@ class ArgReplacer(object): self.arg_pos = None def _getargnames(self, func): + # type: (Callable) -> List[str] try: return getargspec(func).args except TypeError: @@ -297,11 +366,12 @@ class ArgReplacer(object): # getargspec that we need here. Note that for static # functions the @cython.binding(True) decorator must # be used (for methods it works out of the box). - code = func.func_code + code = func.func_code # type: ignore return code.co_varnames[:code.co_argcount] raise def get_old_value(self, args, kwargs, default=None): + # type: (List[Any], Dict[str, Any], Any) -> Any """Returns the old value of the named argument without replacing it. Returns ``default`` if the argument is not present. @@ -312,6 +382,7 @@ class ArgReplacer(object): return kwargs.get(self.name, default) def replace(self, new_value, args, kwargs): + # type: (Any, List[Any], Dict[str, Any]) -> Tuple[Any, List[Any], Dict[str, Any]] """Replace the named argument in ``args, kwargs`` with ``new_value``. Returns ``(old_value, args, kwargs)``. 
The returned ``args`` and @@ -334,11 +405,13 @@ class ArgReplacer(object): def timedelta_to_seconds(td): + # type: (datetime.timedelta) -> float """Equivalent to td.total_seconds() (introduced in python 2.7).""" return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6) def _websocket_mask_python(mask, data): + # type: (bytes, bytes) -> bytes """Websocket masking function. `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length. @@ -347,17 +420,17 @@ def _websocket_mask_python(mask, data): This pure-python implementation may be replaced by an optimized version when available. """ - mask = array.array("B", mask) - unmasked = array.array("B", data) + mask_arr = array.array("B", mask) + unmasked_arr = array.array("B", data) for i in xrange(len(data)): - unmasked[i] = unmasked[i] ^ mask[i % 4] - if hasattr(unmasked, 'tobytes'): + unmasked_arr[i] = unmasked_arr[i] ^ mask_arr[i % 4] + if PY3: # tostring was deprecated in py32. It hasn't been removed, # but since we turn on deprecation warnings in our tests # we need to use the right one. - return unmasked.tobytes() + return unmasked_arr.tobytes() else: - return unmasked.tostring() + return unmasked_arr.tostring() if (os.environ.get('TORNADO_NO_EXTENSION') or os.environ.get('TORNADO_EXTENSION') == '0'): diff --git a/lib/tornado/web.py b/lib/tornado/web.py index c51d5f68..7e5860af 100644 --- a/lib/tornado/web.py +++ b/lib/tornado/web.py @@ -90,24 +90,27 @@ from tornado import stack_context from tornado import template from tornado.escape import utf8, _unicode from tornado.util import (import_object, ObjectDict, raise_exc_info, - unicode_type, _websocket_mask) + unicode_type, _websocket_mask, re_unescape, PY3) from tornado.httputil import split_host_and_port +if PY3: + import http.cookies as Cookie + import urllib.parse as urlparse + from urllib.parse import urlencode +else: + import Cookie + import urlparse + from urllib import urlencode try: - import Cookie # py2 -except ImportError: - import http.cookies as Cookie # py3 + import typing # noqa -try: - import urlparse # py2 + # The following types are accepted by RequestHandler.set_header + # and related methods. + _HeaderTypes = typing.Union[bytes, unicode_type, + numbers.Integral, datetime.datetime] except ImportError: - import urllib.parse as urlparse # py3 - -try: - from urllib import urlencode # py2 -except ImportError: - from urllib.parse import urlencode # py3 + pass MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1 @@ -152,7 +155,7 @@ class RequestHandler(object): SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT", "OPTIONS") - _template_loaders = {} # {path: template.BaseLoader} + _template_loaders = {} # type: typing.Dict[str, template.BaseLoader] _template_loader_lock = threading.Lock() _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]") @@ -166,6 +169,7 @@ class RequestHandler(object): self._auto_finish = True self._transforms = None # will be set in _execute self._prepared_future = None + self._headers = None # type: httputil.HTTPHeaders self.path_args = None self.path_kwargs = None self.ui = ObjectDict((n, self._ui_method(m)) for n, m in @@ -184,7 +188,7 @@ class RequestHandler(object): def initialize(self): """Hook for subclass initialization. Called for each request. - + A dictionary passed as the third argument of a url spec will be supplied as keyword arguments to initialize(). 
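
As the corrected ``initialize`` docstring above says, the third element of a URL spec is passed to ``initialize`` as keyword arguments; a sketch in which the handler name and the ``database`` value are placeholders::

    from tornado.web import Application, RequestHandler

    class ProfileHandler(RequestHandler):
        def initialize(self, database):
            # ``database`` comes from the dict in the URL spec below.
            self.database = database

        def get(self, username):
            self.write("profile for %s from %s" % (username, self.database))

    app = Application([
        (r"/user/([^/]+)", ProfileHandler, dict(database="sqlite:///users.db")),
    ])
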
@@ -313,13 +317,14 @@ class RequestHandler(object): try: self._reason = httputil.responses[status_code] except KeyError: - raise ValueError("unknown status code %d", status_code) + raise ValueError("unknown status code %d" % status_code) def get_status(self): """Returns the status code for our response.""" return self._status_code def set_header(self, name, value): + # type: (str, _HeaderTypes) -> None """Sets the given response header name and value. If a datetime is given, we automatically format it according to the @@ -329,6 +334,7 @@ class RequestHandler(object): self._headers[name] = self._convert_header_value(value) def add_header(self, name, value): + # type: (str, _HeaderTypes) -> None """Adds the given response header and value. Unlike `set_header`, `add_header` may be called multiple times @@ -345,13 +351,25 @@ class RequestHandler(object): if name in self._headers: del self._headers[name] - _INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]") + _INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]") def _convert_header_value(self, value): - if isinstance(value, bytes): - pass - elif isinstance(value, unicode_type): - value = value.encode('utf-8') + # type: (_HeaderTypes) -> str + + # Convert the input value to a str. This type check is a bit + # subtle: The bytes case only executes on python 3, and the + # unicode case only executes on python 2, because the other + # cases are covered by the first match for str. + if isinstance(value, str): + retval = value + elif isinstance(value, bytes): # py3 + # Non-ascii characters in headers are not well supported, + # but if you pass bytes, use latin1 so they pass through as-is. + retval = value.decode('latin1') + elif isinstance(value, unicode_type): # py2 + # TODO: This is inconsistent with the use of latin1 above, + # but it's been that way for a long time. Should it change? + retval = escape.utf8(value) elif isinstance(value, numbers.Integral): # return immediately since we know the converted value will be safe return str(value) @@ -361,11 +379,11 @@ class RequestHandler(object): raise TypeError("Unsupported header value %r" % value) # If \n is allowed into the header, it is possible to inject # additional headers or split the request. - if RequestHandler._INVALID_HEADER_CHAR_RE.search(value): - raise ValueError("Unsafe header value %r", value) - return value + if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval): + raise ValueError("Unsafe header value %r", retval) + return retval - _ARG_DEFAULT = [] + _ARG_DEFAULT = object() def get_argument(self, name, default=_ARG_DEFAULT, strip=True): """Returns the value of the argument with the given name. @@ -509,7 +527,7 @@ class RequestHandler(object): Additional keyword arguments are set on the Cookie.Morsel directly. - See http://docs.python.org/library/cookie.html#morsel-objects + See https://docs.python.org/2/library/cookie.html#Cookie.Morsel for available attributes. 
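
The reworked ``_convert_header_value`` above always produces a ``str``, accepting ``str``, ``bytes``, unicode, integers and ``datetime`` values, and ``get_argument``'s missing-argument sentinel is now a plain ``object()``. A sketch of a handler exercising both; the header names and query argument are invented::

    import datetime
    from tornado.web import RequestHandler

    class ExampleHandler(RequestHandler):
        def get(self):
            name = self.get_argument("name", default="world")   # missing -> default
            # datetime values are formatted per the HTTP date format,
            # integers are converted with str():
            self.set_header("X-Generated-At", datetime.datetime.utcnow())
            self.set_header("X-Retry-Count", 3)
            self.write("hello %s" % name)
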
""" # The cookie library only accepts type str, in both python 2 and 3 @@ -696,6 +714,8 @@ class RequestHandler(object): def render(self, template_name, **kwargs): """Renders the template with the given arguments as the response.""" + if self._finished: + raise RuntimeError("Cannot render() after finish()") html = self.render_string(template_name, **kwargs) # Insert the additional JS and CSS added by the modules on the page @@ -915,8 +935,8 @@ class RequestHandler(object): if self.check_etag_header(): self._write_buffer = [] self.set_status(304) - if self._status_code == 304: - assert not self._write_buffer, "Cannot send body with 304" + if self._status_code in (204, 304): + assert not self._write_buffer, "Cannot send body with %s" % self._status_code self._clear_headers_for_304() elif "Content-Length" not in self._headers: content_length = sum(len(part) for part in self._write_buffer) @@ -1072,8 +1092,8 @@ class RequestHandler(object): def get_current_user(self): user_cookie = self.get_secure_cookie("user") - if user_cookie: - return json.loads(user_cookie) + if user_cookie: + return json.loads(user_cookie) return None * It may be set as a normal variable, typically from an overridden @@ -1089,7 +1109,7 @@ class RequestHandler(object): may not, so the latter form is necessary if loading the user requires asynchronous operations. - The user object may any type of the application's choosing. + The user object may be any type of the application's choosing. """ if not hasattr(self, "_current_user"): self._current_user = self.get_current_user() @@ -1265,6 +1285,8 @@ class RequestHandler(object): raise HTTPError(403, "'_xsrf' argument missing from POST") _, token, _ = self._decode_xsrf_token(token) _, expected_token, _ = self._get_raw_xsrf_token() + if not token: + raise HTTPError(403, "'_xsrf' argument has invalid format") if not _time_independent_equals(utf8(token), utf8(expected_token)): raise HTTPError(403, "XSRF cookie does not match POST argument") @@ -1385,7 +1407,9 @@ class RequestHandler(object): match = True else: # Use a weak comparison when comparing entity-tags. - val = lambda x: x[2:] if x.startswith(b'W/') else x + def val(x): + return x[2:] if x.startswith(b'W/') else x + for etag in etags: if val(etag) == val(computed_etag): match = True @@ -1603,6 +1627,7 @@ def asynchronous(method): result = method(self, *args, **kwargs) if result is not None: result = gen.convert_yielded(result) + # If @asynchronous is used with @gen.coroutine, (but # not @gen.engine), we can automatically finish the # request when the future resolves. Additionally, @@ -2240,7 +2265,7 @@ class StaticFileHandler(RequestHandler): """ CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years - _static_hashes = {} + _static_hashes = {} # type: typing.Dict _lock = threading.Lock() # protects _static_hashes def initialize(self, path, default_filename=None): @@ -2693,6 +2718,7 @@ class OutputTransform(object): pass def transform_first_chunk(self, status_code, headers, chunk, finishing): + # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] return status_code, headers, chunk def transform_chunk(self, chunk, finishing): @@ -2713,7 +2739,8 @@ class GZipContentEncoding(OutputTransform): # beginning with "text/"). 
CONTENT_TYPES = set(["application/javascript", "application/x-javascript", "application/xml", "application/atom+xml", - "application/json", "application/xhtml+xml"]) + "application/json", "application/xhtml+xml", + "image/svg+xml"]) # Python's GzipFile defaults to level 9, while most other gzip # tools (including gzip itself) default to 6, which is probably a # better CPU/size tradeoff. @@ -2732,10 +2759,12 @@ class GZipContentEncoding(OutputTransform): return ctype.startswith('text/') or ctype in self.CONTENT_TYPES def transform_first_chunk(self, status_code, headers, chunk, finishing): + # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes] + # TODO: can/should this type be inherited from the superclass? if 'Vary' in headers: - headers['Vary'] += b', Accept-Encoding' + headers['Vary'] += ', Accept-Encoding' else: - headers['Vary'] = b'Accept-Encoding' + headers['Vary'] = 'Accept-Encoding' if self._gzipping: ctype = _unicode(headers.get("Content-Type", "")).split(";")[0] self._gzipping = self._compressible_type(ctype) and \ @@ -2966,9 +2995,11 @@ class URLSpec(object): def __init__(self, pattern, handler, kwargs=None, name=None): """Parameters: - * ``pattern``: Regular expression to be matched. Any groups - in the regex will be passed in to the handler's get/post/etc - methods as arguments. + * ``pattern``: Regular expression to be matched. Any capturing + groups in the regex will be passed in to the handler's + get/post/etc methods as arguments (by keyword if named, by + position if unnamed. Named and unnamed capturing groups may + may not be mixed in the same rule). * ``handler``: `RequestHandler` subclass to be invoked. @@ -2977,6 +3008,7 @@ class URLSpec(object): * ``name`` (optional): A name for this handler. Used by `Application.reverse_url`. + """ if not pattern.endswith('$'): pattern += '$' @@ -3024,13 +3056,19 @@ class URLSpec(object): if paren_loc >= 0: pieces.append('%s' + fragment[paren_loc + 1:]) else: - pieces.append(fragment) + try: + unescaped_fragment = re_unescape(fragment) + except ValueError as exc: + # If we can't unescape part of it, we can't + # reverse this url. + return (None, None) + pieces.append(unescaped_fragment) return (''.join(pieces), self.regex.groups) def reverse(self, *args): - assert self._path is not None, \ - "Cannot reverse url regex " + self.regex.pattern + if self._path is None: + raise ValueError("Cannot reverse url regex " + self.regex.pattern) assert len(args) == self._group_count, "required number of arguments "\ "not found" if not len(args): @@ -3268,7 +3306,7 @@ def _create_signature_v2(secret, s): def _unquote_or_none(s): - """None-safe wrapper around url_unescape to handle unamteched optional + """None-safe wrapper around url_unescape to handle unmatched optional groups correctly. 
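
Because ``_find_groups`` above now passes the literal fragments through ``re_unescape``, ``reverse_url`` round-trips patterns containing ``re.escape``-style literals, and a fragment that cannot be unescaped simply makes the URL non-reversible rather than corrupting it. A sketch; the route and name are invented::

    from tornado.web import Application, RequestHandler, url

    class UserHandler(RequestHandler):
        def get(self, user_id):
            self.write("user %s" % user_id)

    app = Application([
        url(r"/api/v1\.0/user/(\d+)", UserHandler, name="user"),
    ])

    # The escaped dot in the pattern is unescaped when reversing:
    assert app.reverse_url("user", 42) == "/api/v1.0/user/42"
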
Note that args are passed as bytes so the handler can decide what diff --git a/lib/tornado/websocket.py b/lib/tornado/websocket.py index f5e3dbd7..6e1220b3 100644 --- a/lib/tornado/websocket.py +++ b/lib/tornado/websocket.py @@ -36,18 +36,14 @@ from tornado.iostream import StreamClosedError from tornado.log import gen_log, app_log from tornado import simple_httpclient from tornado.tcpclient import TCPClient -from tornado.util import _websocket_mask +from tornado.util import _websocket_mask, PY3 -try: +if PY3: from urllib.parse import urlparse # py2 -except ImportError: + xrange = range +else: from urlparse import urlparse # py3 -try: - xrange # py2 -except NameError: - xrange = range # py3 - class WebSocketError(Exception): pass @@ -319,6 +315,19 @@ class WebSocketHandler(tornado.web.RequestHandler): browsers, since WebSockets are allowed to bypass the usual same-origin policies and don't use CORS headers. + .. warning:: + + This is an important security measure; don't disable it + without understanding the security implications. In + particular, if your authenticatino is cookie-based, you + must either restrict the origins allowed by + ``check_origin()`` or implement your own XSRF-like + protection for websocket connections. See `these + `_ + `articles + `_ + for more. + To accept all cross-origin traffic (which was the default prior to Tornado 4.0), simply override this method to always return true:: @@ -333,6 +342,7 @@ class WebSocketHandler(tornado.web.RequestHandler): return parsed_origin.netloc.endswith(".mydomain.com") .. versionadded:: 4.0 + """ parsed_origin = urlparse(origin) origin = parsed_origin.netloc diff --git a/lib/tornado/wsgi.py b/lib/tornado/wsgi.py index 59e6c559..e9ead300 100644 --- a/lib/tornado/wsgi.py +++ b/lib/tornado/wsgi.py @@ -41,12 +41,12 @@ from tornado import httputil from tornado.log import access_log from tornado import web from tornado.escape import native_str -from tornado.util import unicode_type +from tornado.util import unicode_type, PY3 -try: +if PY3: import urllib.parse as urllib_parse # py3 -except ImportError: +else: import urllib as urllib_parse # PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
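
Following the security warning added above, a sketch of restricting websocket connections to an explicit whitelist of origins instead of disabling the check; the host names are placeholders::

    from tornado.websocket import WebSocketHandler

    try:
        from urllib.parse import urlparse   # Python 3
    except ImportError:
        from urlparse import urlparse       # Python 2

    ALLOWED_ORIGINS = {"app.example.com", "admin.example.com"}

    class ChatSocket(WebSocketHandler):
        def check_origin(self, origin):
            return urlparse(origin).netloc in ALLOWED_ORIGINS

        def on_message(self, message):
            self.write_message(u"echo: " + message)
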