#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.

Here is a simple "Hello, world" example app:

.. testcode::

    import tornado.ioloop
    import tornado.web

    class MainHandler(tornado.web.RequestHandler):
        def get(self):
            self.write("Hello, world")

    if __name__ == "__main__":
        application = tornado.web.Application([
            (r"/", MainHandler),
        ])
        application.listen(8888)
        tornado.ioloop.IOLoop.current().start()

.. testoutput::
   :hide:

See the :doc:`guide` for additional information.

Thread-safety notes
-------------------

In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
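
For example (a minimal sketch -- the ``do_blocking_work`` helper and the
executor shown here are illustrative, not part of Tornado), a handler can
run blocking work on a worker thread and hop back to the main thread
before touching the response::

    from concurrent.futures import ThreadPoolExecutor

    executor = ThreadPoolExecutor(4)

    class SlowHandler(tornado.web.RequestHandler):
        @tornado.web.asynchronous
        def get(self):
            def work():
                result = do_blocking_work()
                # Hop back to the IOLoop thread before touching the handler.
                tornado.ioloop.IOLoop.current().add_callback(
                    self._on_done, result)
            executor.submit(work)

        def _on_done(self, result):
            self.write(result)
            self.finish()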

"""

from __future__ import absolute_import, division, print_function, with_statement

import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types

from io import BytesIO

from tornado.concurrent import Future
from tornado import escape
from tornado import gen
from tornado import httputil
from tornado import iostream
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.util import (import_object, ObjectDict, raise_exc_info,
                          unicode_type, _websocket_mask, re_unescape, PY3)
from tornado.httputil import split_host_and_port

if PY3:
    import http.cookies as Cookie
    import urllib.parse as urlparse
    from urllib.parse import urlencode
else:
    import Cookie
    import urlparse
    from urllib import urlencode

try:
    import typing  # noqa

    # The following types are accepted by RequestHandler.set_header
    # and related methods.
    _HeaderTypes = typing.Union[bytes, unicode_type,
                                numbers.Integral, datetime.datetime]
except ImportError:
    pass


MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
"""The oldest signed value version supported by this version of Tornado.

Signed values older than this version cannot be decoded.

.. versionadded:: 3.2.1
"""

MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
"""The newest signed value version supported by this version of Tornado.

Signed values newer than this version cannot be decoded.

.. versionadded:: 3.2.1
"""

DEFAULT_SIGNED_VALUE_VERSION = 2
"""The signed value version produced by `.RequestHandler.create_signed_value`.

May be overridden by passing a ``version`` keyword argument.

.. versionadded:: 3.2.1
"""

DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.

May be overridden by passing a ``min_version`` keyword argument.

.. versionadded:: 3.2.1
"""


class RequestHandler(object):
    """Base class for HTTP request handlers.

    Subclasses must define at least one of the methods defined in the
    "Entry points" section below.
    """
    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
                         "OPTIONS")

    _template_loaders = {}  # type: typing.Dict[str, template.BaseLoader]
    _template_loader_lock = threading.Lock()
    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")

    def __init__(self, application, request, **kwargs):
        super(RequestHandler, self).__init__()

        self.application = application
        self.request = request
        self._headers_written = False
        self._finished = False
        self._auto_finish = True
        self._transforms = None  # will be set in _execute
        self._prepared_future = None
        self._headers = None  # type: httputil.HTTPHeaders
        self.path_args = None
        self.path_kwargs = None
        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
                             application.ui_methods.items())
        # UIModules are available as both `modules` and `_tt_modules` in the
        # template namespace.  Historically only `modules` was available
        # but could be clobbered by user additions to the namespace.
        # The template {% module %} directive looks in `_tt_modules` to avoid
        # possible conflicts.
        self.ui["_tt_modules"] = _UIModuleNamespace(self,
                                                    application.ui_modules)
        self.ui["modules"] = self.ui["_tt_modules"]
        self.clear()
        self.request.connection.set_close_callback(self.on_connection_close)
        self.initialize(**kwargs)

    def initialize(self):
        """Hook for subclass initialization. Called for each request.

        A dictionary passed as the third argument of a url spec will be
        supplied as keyword arguments to initialize().

        Example::

            class ProfileHandler(RequestHandler):
                def initialize(self, database):
                    self.database = database

                def get(self, username):
                    ...

            app = Application([
                (r'/user/(.*)', ProfileHandler, dict(database=database)),
                ])
        """
        pass

    @property
    def settings(self):
        """An alias for `self.application.settings <Application.settings>`."""
        return self.application.settings

    def head(self, *args, **kwargs):
        raise HTTPError(405)

    def get(self, *args, **kwargs):
        raise HTTPError(405)

    def post(self, *args, **kwargs):
        raise HTTPError(405)

    def delete(self, *args, **kwargs):
        raise HTTPError(405)

    def patch(self, *args, **kwargs):
        raise HTTPError(405)

    def put(self, *args, **kwargs):
        raise HTTPError(405)

    def options(self, *args, **kwargs):
        raise HTTPError(405)

    def prepare(self):
        """Called at the beginning of a request before `get`/`post`/etc.

        Override this method to perform common initialization regardless
        of the request method.

        Asynchronous support: Decorate this method with `.gen.coroutine`
        or `.return_future` to make it asynchronous (the
        `asynchronous` decorator cannot be used on `prepare`).
        If this method returns a `.Future` execution will not proceed
        until the `.Future` is done.
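
        Example (a sketch; ``self.application.db`` stands in for whatever
        asynchronous client your application provides)::

            @gen.coroutine
            def prepare(self):
                self.db_connection = yield self.application.db.connect()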

        .. versionadded:: 3.1
           Asynchronous support.
        """
        pass

    def on_finish(self):
        """Called after the end of a request.

        Override this method to perform cleanup, logging, etc.
        This method is a counterpart to `prepare`.  ``on_finish`` may
        not produce any output, as it is called after the response
        has been sent to the client.
        """
        pass

    def on_connection_close(self):
        """Called in async handlers if the client closed the connection.

        Override this to clean up resources associated with
        long-lived connections.  Note that this method is called only if
        the connection was closed during asynchronous processing; if you
        need to do cleanup after every request override `on_finish`
        instead.

        Proxies may keep a connection open for a time (perhaps
        indefinitely) after the client has gone away, so this method
        may not be called promptly after the end user closes their
        connection.
        """
        if _has_stream_request_body(self.__class__):
            if not self.request.body.done():
                self.request.body.set_exception(iostream.StreamClosedError())
                self.request.body.exception()

    def clear(self):
        """Resets all headers and content for this response."""
        self._headers = httputil.HTTPHeaders({
            "Server": "TornadoServer/%s" % tornado.version,
            "Content-Type": "text/html; charset=UTF-8",
            "Date": httputil.format_timestamp(time.time()),
        })
        self.set_default_headers()
        self._write_buffer = []
        self._status_code = 200
        self._reason = httputil.responses[200]

    def set_default_headers(self):
        """Override this to set HTTP headers at the beginning of the request.

        For example, this is the place to set a custom ``Server`` header.
        Note that setting such headers in the normal flow of request
        processing may not do what you want, since headers may be reset
        during error handling.
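
        Example (a sketch; the header value is illustrative)::

            def set_default_headers(self):
                self.set_header("Server", "MyApp/1.0")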
        """
        pass

    def set_status(self, status_code, reason=None):
        """Sets the status code for our response.

        :arg int status_code: Response status code. If ``reason`` is ``None``,
            it must be present in `httplib.responses <http.client.responses>`.
        :arg string reason: Human-readable reason phrase describing the status
            code. If ``None``, it will be filled in from
            `httplib.responses <http.client.responses>`.
        """
        self._status_code = status_code
        if reason is not None:
            self._reason = escape.native_str(reason)
        else:
            try:
                self._reason = httputil.responses[status_code]
            except KeyError:
                raise ValueError("unknown status code %d" % status_code)

    def get_status(self):
        """Returns the status code for our response."""
        return self._status_code

    def set_header(self, name, value):
        # type: (str, _HeaderTypes) -> None
        """Sets the given response header name and value.

        If a datetime is given, we automatically format it according to the
        HTTP specification. If the value is not a string, we convert it to
        a string. All header values are then encoded as UTF-8.
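
        Example (sketch)::

            self.set_header("Cache-Control", "no-store")
            self.set_header("Expires", datetime.datetime.utcnow())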
        """
        self._headers[name] = self._convert_header_value(value)

    def add_header(self, name, value):
        # type: (str, _HeaderTypes) -> None
        """Adds the given response header and value.

        Unlike `set_header`, `add_header` may be called multiple times
        to return multiple values for the same header.
        """
        self._headers.add(name, self._convert_header_value(value))

    def clear_header(self, name):
        """Clears an outgoing header, undoing a previous `set_header` call.

        Note that this method does not apply to multi-valued headers
        set by `add_header`.
        """
        if name in self._headers:
            del self._headers[name]

    _INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]")

    def _convert_header_value(self, value):
        # type: (_HeaderTypes) -> str

        # Convert the input value to a str. This type check is a bit
        # subtle: The bytes case only executes on python 3, and the
        # unicode case only executes on python 2, because the other
        # cases are covered by the first match for str.
        if isinstance(value, str):
            retval = value
        elif isinstance(value, bytes):  # py3
            # Non-ascii characters in headers are not well supported,
            # but if you pass bytes, use latin1 so they pass through as-is.
            retval = value.decode('latin1')
        elif isinstance(value, unicode_type):  # py2
            # TODO: This is inconsistent with the use of latin1 above,
            # but it's been that way for a long time. Should it change?
            retval = escape.utf8(value)
        elif isinstance(value, numbers.Integral):
            # return immediately since we know the converted value will be safe
            return str(value)
        elif isinstance(value, datetime.datetime):
            return httputil.format_timestamp(value)
        else:
            raise TypeError("Unsupported header value %r" % value)
        # If \n is allowed into the header, it is possible to inject
        # additional headers or split the request.
        if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval):
            raise ValueError("Unsafe header value %r" % retval)
        return retval

    _ARG_DEFAULT = object()

    def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.
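
        Example (sketch)::

            # Raises MissingArgumentError if the request has no "q" argument:
            query = self.get_argument("q")
            # Returns None instead of raising when "page" is absent:
            page = self.get_argument("page", default=None)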
        """
        return self._get_argument(name, default, self.request.arguments, strip)

    def get_arguments(self, name, strip=True):
        """Returns a list of the arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.
        """

        # Make sure `get_arguments` isn't accidentally being called with a
        # positional argument that's assumed to be a default (like in
        # `get_argument`.)
        assert isinstance(strip, bool)

        return self._get_arguments(name, self.request.arguments, strip)

    def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request body.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.body_arguments,
                                  strip)

    def get_body_arguments(self, name, strip=True):
        """Returns a list of the body arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.body_arguments, strip)

    def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request query string.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default,
                                  self.request.query_arguments, strip)

    def get_query_arguments(self, name, strip=True):
        """Returns a list of the query arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.query_arguments, strip)

    def _get_argument(self, name, default, source, strip=True):
        args = self._get_arguments(name, source, strip=strip)
        if not args:
            if default is self._ARG_DEFAULT:
                raise MissingArgumentError(name)
            return default
        return args[-1]

    def _get_arguments(self, name, source, strip=True):
        values = []
        for v in source.get(name, []):
            v = self.decode_argument(v, name=name)
            if isinstance(v, unicode_type):
                # Get rid of any weird control chars (unless decoding gave
                # us bytes, in which case leave it alone)
                v = RequestHandler._remove_control_chars_regex.sub(" ", v)
            if strip:
                v = v.strip()
            values.append(v)
        return values

    def decode_argument(self, value, name=None):
        """Decodes an argument from the request.

        The argument has been percent-decoded and is now a byte string.
        By default, this method decodes the argument as utf-8 and returns
        a unicode string, but this may be overridden in subclasses.

        This method is used as a filter for both `get_argument()` and for
        values extracted from the url and passed to `get()`/`post()`/etc.

        The name of the argument is provided if known, but may be None
        (e.g. for unnamed groups in the url regex).
        """
        try:
            return _unicode(value)
        except UnicodeDecodeError:
            raise HTTPError(400, "Invalid unicode in %s: %r" %
                            (name or "url", value[:40]))

    @property
    def cookies(self):
        """An alias for
        `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
        return self.request.cookies

    def get_cookie(self, name, default=None):
        """Gets the value of the cookie with the given name, else default."""
        if self.request.cookies is not None and name in self.request.cookies:
            return self.request.cookies[name].value
        return default

    def set_cookie(self, name, value, domain=None, expires=None, path="/",
                   expires_days=None, **kwargs):
        """Sets the given cookie name/value with the given options.

        Additional keyword arguments are set on the Cookie.Morsel
        directly.
        See https://docs.python.org/2/library/cookie.html#Cookie.Morsel
        for available attributes.
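
        Example (sketch)::

            self.set_cookie("theme", "dark", expires_days=7, httponly=True)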
        """
        # The cookie library only accepts type str, in both python 2 and 3
        name = escape.native_str(name)
        value = escape.native_str(value)
        if re.search(r"[\x00-\x20]", name + value):
            # Don't let us accidentally inject bad stuff
            raise ValueError("Invalid cookie %r: %r" % (name, value))
        if not hasattr(self, "_new_cookie"):
            self._new_cookie = Cookie.SimpleCookie()
        if name in self._new_cookie:
            del self._new_cookie[name]
        self._new_cookie[name] = value
        morsel = self._new_cookie[name]
        if domain:
            morsel["domain"] = domain
        if expires_days is not None and not expires:
            expires = datetime.datetime.utcnow() + datetime.timedelta(
                days=expires_days)
        if expires:
            morsel["expires"] = httputil.format_timestamp(expires)
        if path:
            morsel["path"] = path
        for k, v in kwargs.items():
            if k == 'max_age':
                k = 'max-age'

            # skip falsy values for httponly and secure flags because
            # SimpleCookie sets them regardless
            if k in ['httponly', 'secure'] and not v:
                continue

            morsel[k] = v

    def clear_cookie(self, name, path="/", domain=None):
        """Deletes the cookie with the given name.

        Due to limitations of the cookie protocol, you must pass the same
        path and domain to clear a cookie as were used when that cookie
        was set (but there is no way to find out on the server side
        which values were used for a given cookie).
        """
        expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
        self.set_cookie(name, value="", path=path, expires=expires,
                        domain=domain)

    def clear_all_cookies(self, path="/", domain=None):
        """Deletes all the cookies the user sent with this request.

        See `clear_cookie` for more information on the path and domain
        parameters.

        .. versionchanged:: 3.2

           Added the ``path`` and ``domain`` parameters.
        """
        for name in self.request.cookies:
            self.clear_cookie(name, path=path, domain=domain)

    def set_secure_cookie(self, name, value, expires_days=30, version=None,
                          **kwargs):
        """Signs and timestamps a cookie so it cannot be forged.

        You must specify the ``cookie_secret`` setting in your Application
        to use this method. It should be a long, random sequence of bytes
        to be used as the HMAC secret for the signature.

        To read a cookie set with this method, use `get_secure_cookie()`.

        Note that the ``expires_days`` parameter sets the lifetime of the
        cookie in the browser, but is independent of the ``max_age_days``
        parameter to `get_secure_cookie`.

        Secure cookies may contain arbitrary byte values, not just unicode
        strings (unlike regular cookies).
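
        Example (a sketch; the cookie name and value are illustrative)::

            self.set_secure_cookie("user_id", "42", expires_days=1)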

        .. versionchanged:: 3.2.1

           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        self.set_cookie(name, self.create_signed_value(name, value,
                                                       version=version),
                        expires_days=expires_days, **kwargs)

    def create_signed_value(self, name, value, version=None):
        """Signs and timestamps a string so it cannot be forged.

        Normally used via set_secure_cookie, but provided as a separate
        method for non-cookie uses.  To decode a value not stored
        as a cookie use the optional value argument to get_secure_cookie.

        .. versionchanged:: 3.2.1

           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        self.require_setting("cookie_secret", "secure cookies")
        secret = self.application.settings["cookie_secret"]
        key_version = None
        if isinstance(secret, dict):
            if self.application.settings.get("key_version") is None:
                raise Exception("key_version setting must be used for secret_key dicts")
            key_version = self.application.settings["key_version"]

        return create_signed_value(secret, name, value, version=version,
                                   key_version=key_version)

    def get_secure_cookie(self, name, value=None, max_age_days=31,
                          min_version=None):
        """Returns the given signed cookie if it validates, or None.

        The decoded cookie value is returned as a byte string (unlike
        `get_cookie`).
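
        Example (sketch)::

            user_id = self.get_secure_cookie("user_id")
            if user_id is not None:
                user_id = user_id.decode("utf-8")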

        .. versionchanged:: 3.2.1

           Added the ``min_version`` argument.  Introduced cookie version 2;
           both versions 1 and 2 are accepted by default.
        """
        self.require_setting("cookie_secret", "secure cookies")
        if value is None:
            value = self.get_cookie(name)
        return decode_signed_value(self.application.settings["cookie_secret"],
                                   name, value, max_age_days=max_age_days,
                                   min_version=min_version)

    def get_secure_cookie_key_version(self, name, value=None):
        """Returns the signing key version of the secure cookie.

        The version is returned as int.
        """
        self.require_setting("cookie_secret", "secure cookies")
        if value is None:
            value = self.get_cookie(name)
        return get_signature_key_version(value)

    def redirect(self, url, permanent=False, status=None):
        """Sends a redirect to the given (optionally relative) URL.

        If the ``status`` argument is specified, that value is used as the
        HTTP status code; otherwise either 301 (permanent) or 302
        (temporary) is chosen based on the ``permanent`` argument.
        The default is 302 (temporary).
        """
        if self._headers_written:
            raise Exception("Cannot redirect after headers have been written")
        if status is None:
            status = 301 if permanent else 302
        else:
            assert isinstance(status, int) and 300 <= status <= 399
        self.set_status(status)
        self.set_header("Location", utf8(url))
        self.finish()

    def write(self, chunk):
        """Writes the given chunk to the output buffer.

        To write the output to the network, use the flush() method below.

        If the given chunk is a dictionary, we write it as JSON and set
        the Content-Type of the response to be ``application/json``.
        (if you want to send JSON as a different ``Content-Type``, call
        set_header *after* calling write()).

        Note that lists are not converted to JSON because of a potential
        cross-site security vulnerability.  All JSON output should be
        wrapped in a dictionary.  More details at
        http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
        https://github.com/facebook/tornado/issues/1009
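
        Example (sketch)::

            self.write({"status": "ok", "ids": [1, 2, 3]})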
        """
        if self._finished:
            raise RuntimeError("Cannot write() after finish()")
        if not isinstance(chunk, (bytes, unicode_type, dict)):
            message = "write() only accepts bytes, unicode, and dict objects"
            if isinstance(chunk, list):
                message += ". Lists not accepted for security reasons; see http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.write"
            raise TypeError(message)
        if isinstance(chunk, dict):
            chunk = escape.json_encode(chunk)
            self.set_header("Content-Type", "application/json; charset=UTF-8")
        chunk = utf8(chunk)
        self._write_buffer.append(chunk)

    def render(self, template_name, **kwargs):
        """Renders the template with the given arguments as the response."""
        if self._finished:
            raise RuntimeError("Cannot render() after finish()")
        html = self.render_string(template_name, **kwargs)

        # Insert the additional JS and CSS added by the modules on the page
        js_embed = []
        js_files = []
        css_embed = []
        css_files = []
        html_heads = []
        html_bodies = []
        for module in getattr(self, "_active_modules", {}).values():
            embed_part = module.embedded_javascript()
            if embed_part:
                js_embed.append(utf8(embed_part))
            file_part = module.javascript_files()
            if file_part:
                if isinstance(file_part, (unicode_type, bytes)):
                    js_files.append(file_part)
                else:
                    js_files.extend(file_part)
            embed_part = module.embedded_css()
            if embed_part:
                css_embed.append(utf8(embed_part))
            file_part = module.css_files()
            if file_part:
                if isinstance(file_part, (unicode_type, bytes)):
                    css_files.append(file_part)
                else:
                    css_files.extend(file_part)
            head_part = module.html_head()
            if head_part:
                html_heads.append(utf8(head_part))
            body_part = module.html_body()
            if body_part:
                html_bodies.append(utf8(body_part))

        def is_absolute(path):
            return any(path.startswith(x) for x in ["/", "http:", "https:"])
        if js_files:
            # Maintain order of JavaScript files given by modules
            paths = []
            unique_paths = set()
            for path in js_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            js = ''.join('<script src="' + escape.xhtml_escape(p) +
                         '" type="text/javascript"></script>'
                         for p in paths)
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
        if js_embed:
            js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
                b'\n'.join(js_embed) + b'\n//]]>\n</script>'
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + js + b'\n' + html[sloc:]
        if css_files:
            paths = []
            unique_paths = set()
            for path in css_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
                          'type="text/css" rel="stylesheet"/>'
                          for p in paths)
            hloc = html.index(b'</head>')
            html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
        if css_embed:
            css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
                b'\n</style>'
            hloc = html.index(b'</head>')
            html = html[:hloc] + css + b'\n' + html[hloc:]
        if html_heads:
            hloc = html.index(b'</head>')
            html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
        if html_bodies:
            hloc = html.index(b'</body>')
            html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
        self.finish(html)

    def render_string(self, template_name, **kwargs):
        """Generate the given template with the given arguments.

        We return the generated byte string (in utf8). To generate and
        write a template as a response, use render() above.
        """
        # If no template_path is specified, use the path of the calling file
        template_path = self.get_template_path()
        if not template_path:
            frame = sys._getframe(0)
            web_file = frame.f_code.co_filename
            while frame.f_code.co_filename == web_file:
                frame = frame.f_back
            template_path = os.path.dirname(frame.f_code.co_filename)
        with RequestHandler._template_loader_lock:
            if template_path not in RequestHandler._template_loaders:
                loader = self.create_template_loader(template_path)
                RequestHandler._template_loaders[template_path] = loader
            else:
                loader = RequestHandler._template_loaders[template_path]
        t = loader.load(template_name)
        namespace = self.get_template_namespace()
        namespace.update(kwargs)
        return t.generate(**namespace)

    def get_template_namespace(self):
        """Returns a dictionary to be used as the default template namespace.

        May be overridden by subclasses to add or modify values.

        The results of this method will be combined with additional
        defaults in the `tornado.template` module and keyword arguments
        to `render` or `render_string`.
        """
        namespace = dict(
            handler=self,
            request=self.request,
            current_user=self.current_user,
            locale=self.locale,
            _=self.locale.translate,
            pgettext=self.locale.pgettext,
            static_url=self.static_url,
            xsrf_form_html=self.xsrf_form_html,
            reverse_url=self.reverse_url
        )
        namespace.update(self.ui)
        return namespace

    def create_template_loader(self, template_path):
        """Returns a new template loader for the given path.

        May be overridden by subclasses.  By default returns a
        directory-based loader on the given path, using the
        ``autoescape`` and ``template_whitespace`` application
        settings.  If a ``template_loader`` application setting is
        supplied, uses that instead.
        """
        settings = self.application.settings
        if "template_loader" in settings:
            return settings["template_loader"]
        kwargs = {}
        if "autoescape" in settings:
            # autoescape=None means "no escaping", so we have to be sure
            # to only pass this kwarg if the user asked for it.
            kwargs["autoescape"] = settings["autoescape"]
        if "template_whitespace" in settings:
            kwargs["whitespace"] = settings["template_whitespace"]
        return template.Loader(template_path, **kwargs)

    def flush(self, include_footers=False, callback=None):
        """Flushes the current output buffer to the network.

        The ``callback`` argument, if given, can be used for flow control:
        it will be run when all flushed data has been written to the socket.
        Note that only one flush callback can be outstanding at a time;
        if another flush occurs before the previous flush's callback
        has been run, the previous callback will be discarded.

        .. versionchanged:: 4.0
           Now returns a `.Future` if no callback is given.
        """
        chunk = b"".join(self._write_buffer)
        self._write_buffer = []
        if not self._headers_written:
            self._headers_written = True
            for transform in self._transforms:
                self._status_code, self._headers, chunk = \
                    transform.transform_first_chunk(
                        self._status_code, self._headers,
                        chunk, include_footers)
            # Ignore the chunk and only write the headers for HEAD requests
            if self.request.method == "HEAD":
                chunk = None

            # Finalize the cookie headers (which have been stored in a side
            # object so an outgoing cookie could be overwritten before it
            # is sent).
            if hasattr(self, "_new_cookie"):
                for cookie in self._new_cookie.values():
                    self.add_header("Set-Cookie", cookie.OutputString(None))

            start_line = httputil.ResponseStartLine('',
                                                    self._status_code,
                                                    self._reason)
            return self.request.connection.write_headers(
                start_line, self._headers, chunk, callback=callback)
        else:
            for transform in self._transforms:
                chunk = transform.transform_chunk(chunk, include_footers)
            # Ignore the chunk and only write the headers for HEAD requests
            if self.request.method != "HEAD":
                return self.request.connection.write(chunk, callback=callback)
            else:
                future = Future()
                future.set_result(None)
                return future

    def finish(self, chunk=None):
        """Finishes this response, ending the HTTP request."""
        if self._finished:
            raise RuntimeError("finish() called twice")

        if chunk is not None:
            self.write(chunk)

        # Automatically support ETags and add the Content-Length header if
        # we have not flushed any content yet.
        if not self._headers_written:
            if (self._status_code == 200 and
                    self.request.method in ("GET", "HEAD") and
                    "Etag" not in self._headers):
                self.set_etag_header()
                if self.check_etag_header():
                    self._write_buffer = []
                    self.set_status(304)
            if self._status_code in (204, 304):
                assert not self._write_buffer, "Cannot send body with %s" % self._status_code
                self._clear_headers_for_304()
            elif "Content-Length" not in self._headers:
                content_length = sum(len(part) for part in self._write_buffer)
                self.set_header("Content-Length", content_length)

        if hasattr(self.request, "connection"):
            # Now that the request is finished, clear the callback we
            # set on the HTTPConnection (which would otherwise prevent the
            # garbage collection of the RequestHandler when there
            # are keepalive connections)
            self.request.connection.set_close_callback(None)

        self.flush(include_footers=True)
        self.request.finish()
        self._log()
        self._finished = True
        self.on_finish()
        # Break up a reference cycle between this handler and the
        # _ui_module closures to allow for faster GC on CPython.
        self.ui = None

    def send_error(self, status_code=500, **kwargs):
        """Sends the given HTTP error code to the browser.

        If `flush()` has already been called, it is not possible to send
        an error, so this method will simply terminate the response.
        If output has been written but not yet flushed, it will be discarded
        and replaced with the error page.

        Override `write_error()` to customize the error page that is returned.
        Additional keyword arguments are passed through to `write_error`.
        """
        if self._headers_written:
            gen_log.error("Cannot send error response after headers written")
            if not self._finished:
                # If we get an error between writing headers and finishing,
                # we are unlikely to be able to finish due to a
                # Content-Length mismatch. Try anyway to release the
                # socket.
                try:
                    self.finish()
                except Exception:
                    gen_log.error("Failed to flush partial response",
                                  exc_info=True)
            return
        self.clear()

        reason = kwargs.get('reason')
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
            if isinstance(exception, HTTPError) and exception.reason:
                reason = exception.reason
        self.set_status(status_code, reason=reason)
        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            app_log.error("Uncaught exception in write_error", exc_info=True)
        if not self._finished:
            self.finish()

    def write_error(self, status_code, **kwargs):
        """Override to implement custom error pages.

        ``write_error`` may call `write`, `render`, `set_header`, etc
        to produce output as usual.

        If this error was caused by an uncaught exception (including
        HTTPError), an ``exc_info`` triple will be available as
        ``kwargs["exc_info"]``.  Note that this exception may not be
        the "current" exception for purposes of methods like
        ``sys.exc_info()`` or ``traceback.format_exc``.
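
        Example (a sketch; ``error.html`` is a hypothetical template)::

            def write_error(self, status_code, **kwargs):
                self.render("error.html", status_code=status_code,
                            reason=self._reason)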
        """
        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
            # in debug mode, try to send a traceback
            self.set_header('Content-Type', 'text/plain')
            for line in traceback.format_exception(*kwargs["exc_info"]):
                self.write(line)
            self.finish()
        else:
            self.finish("<html><title>%(code)d: %(message)s</title>"
                        "<body>%(code)d: %(message)s</body></html>" % {
                            "code": status_code,
                            "message": self._reason,
                        })

    @property
    def locale(self):
        """The locale for the current session.

        Determined by either `get_user_locale`, which you can override to
        set the locale based on, e.g., a user preference stored in a
        database, or `get_browser_locale`, which uses the ``Accept-Language``
        header.

        .. versionchanged:: 4.1
           Added a property setter.
        """
        if not hasattr(self, "_locale"):
            self._locale = self.get_user_locale()
            if not self._locale:
                self._locale = self.get_browser_locale()
                assert self._locale
        return self._locale

    @locale.setter
    def locale(self, value):
        self._locale = value

    def get_user_locale(self):
        """Override to determine the locale from the authenticated user.

        If None is returned, we fall back to `get_browser_locale()`.

        This method should return a `tornado.locale.Locale` object,
        most likely obtained via a call like ``tornado.locale.get("en")``.
        """
        return None

    def get_browser_locale(self, default="en_US"):
        """Determines the user's locale from ``Accept-Language`` header.

        See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
        """
        if "Accept-Language" in self.request.headers:
            languages = self.request.headers["Accept-Language"].split(",")
            locales = []
            for language in languages:
                parts = language.strip().split(";")
                if len(parts) > 1 and parts[1].startswith("q="):
                    try:
                        score = float(parts[1][2:])
                    except (ValueError, TypeError):
                        score = 0.0
                else:
                    score = 1.0
                locales.append((parts[0], score))
            if locales:
                locales.sort(key=lambda pair: pair[1], reverse=True)
                codes = [l[0] for l in locales]
                return locale.get(*codes)
        return locale.get(default)

    @property
    def current_user(self):
        """The authenticated user for this request.

        This is set in one of two ways:

        * A subclass may override `get_current_user()`, which will be called
          automatically the first time ``self.current_user`` is accessed.
          `get_current_user()` will only be called once per request,
          and is cached for future access::

              def get_current_user(self):
                  user_cookie = self.get_secure_cookie("user")
                  if user_cookie:
                      return json.loads(user_cookie)
                  return None

        * It may be set as a normal variable, typically from an overridden
          `prepare()`::

              @gen.coroutine
              def prepare(self):
                  user_id_cookie = self.get_secure_cookie("user_id")
                  if user_id_cookie:
                      self.current_user = yield load_user(user_id_cookie)

        Note that `prepare()` may be a coroutine while `get_current_user()`
        may not, so the latter form is necessary if loading the user requires
        asynchronous operations.

        The user object may be any type of the application's choosing.
        """
        if not hasattr(self, "_current_user"):
            self._current_user = self.get_current_user()
        return self._current_user

    @current_user.setter
    def current_user(self, value):
        self._current_user = value

    def get_current_user(self):
        """Override to determine the current user from, e.g., a cookie.

        This method may not be a coroutine.
        """
        return None

    def get_login_url(self):
        """Override to customize the login URL based on the request.

        By default, we use the ``login_url`` application setting.
        """
        self.require_setting("login_url", "@tornado.web.authenticated")
        return self.application.settings["login_url"]

    def get_template_path(self):
        """Override to customize template path for each handler.

        By default, we use the ``template_path`` application setting.
        Return None to load templates relative to the calling file.
        """
        return self.application.settings.get("template_path")

    @property
    def xsrf_token(self):
        """The XSRF-prevention token for the current user/session.

        To prevent cross-site request forgery, we set an '_xsrf' cookie
        and include the same '_xsrf' value as an argument with all POST
        requests. If the two do not match, we reject the form submission
        as a potential forgery.

        See http://en.wikipedia.org/wiki/Cross-site_request_forgery

        .. versionchanged:: 3.2.2
           The xsrf token will now have a random mask applied in every
           request, which makes it safe to include the token in pages
           that are compressed.  See http://breachattack.com for more
           information on the issue fixed by this change.  Old (version 1)
           cookies will be converted to version 2 when this method is called
           unless the ``xsrf_cookie_version`` `Application` setting is
           set to 1.

        .. versionchanged:: 4.3
           The ``xsrf_cookie_kwargs`` `Application` setting may be
           used to supply additional cookie options (which will be
           passed directly to `set_cookie`).  For example,
           ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
           will set the ``secure`` and ``httponly`` flags on the
           ``_xsrf`` cookie.
        """
        if not hasattr(self, "_xsrf_token"):
            version, token, timestamp = self._get_raw_xsrf_token()
            output_version = self.settings.get("xsrf_cookie_version", 2)
            cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
            if output_version == 1:
                self._xsrf_token = binascii.b2a_hex(token)
            elif output_version == 2:
                mask = os.urandom(4)
                self._xsrf_token = b"|".join([
                    b"2",
                    binascii.b2a_hex(mask),
                    binascii.b2a_hex(_websocket_mask(mask, token)),
                    utf8(str(int(timestamp)))])
            else:
                raise ValueError("unknown xsrf cookie version %d" %
                                 output_version)
            if version is None:
                expires_days = 30 if self.current_user else None
                self.set_cookie("_xsrf", self._xsrf_token,
                                expires_days=expires_days,
                                **cookie_kwargs)
        return self._xsrf_token

    def _get_raw_xsrf_token(self):
        """Read or generate the xsrf token in its raw form.

        The raw_xsrf_token is a tuple containing:

        * version: the version of the cookie from which this token was read,
          or None if we generated a new token in this request.
        * token: the raw token data; random (non-ascii) bytes.
        * timestamp: the time this token was generated (will not be accurate
          for version 1 cookies)
        """
        if not hasattr(self, '_raw_xsrf_token'):
            cookie = self.get_cookie("_xsrf")
            if cookie:
                version, token, timestamp = self._decode_xsrf_token(cookie)
            else:
                version, token, timestamp = None, None, None
            if token is None:
                version = None
                token = os.urandom(16)
                timestamp = time.time()
            self._raw_xsrf_token = (version, token, timestamp)
        return self._raw_xsrf_token

    def _decode_xsrf_token(self, cookie):
        """Convert a cookie string into the tuple form returned by
        _get_raw_xsrf_token.
        """

        try:
            m = _signed_value_version_re.match(utf8(cookie))

            if m:
                version = int(m.group(1))
                if version == 2:
                    _, mask, masked_token, timestamp = cookie.split("|")

                    mask = binascii.a2b_hex(utf8(mask))
                    token = _websocket_mask(
                        mask, binascii.a2b_hex(utf8(masked_token)))
                    timestamp = int(timestamp)
                    return version, token, timestamp
                else:
                    # Treat unknown versions as not present instead of failing.
                    raise Exception("Unknown xsrf cookie version")
            else:
                version = 1
                try:
                    token = binascii.a2b_hex(utf8(cookie))
                except (binascii.Error, TypeError):
                    token = utf8(cookie)
                # We don't have a usable timestamp in older versions.
                timestamp = int(time.time())
                return (version, token, timestamp)
        except Exception:
            # Catch exceptions and return nothing instead of failing.
            gen_log.debug("Uncaught exception in _decode_xsrf_token",
                          exc_info=True)
            return None, None, None

    def check_xsrf_cookie(self):
        """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.

        To prevent cross-site request forgery, we set an ``_xsrf``
        cookie and include the same value as a non-cookie
        field with all ``POST`` requests. If the two do not match, we
        reject the form submission as a potential forgery.

        The ``_xsrf`` value may be set as either a form field named ``_xsrf``
        or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
        (the latter is accepted for compatibility with Django).

        See http://en.wikipedia.org/wiki/Cross-site_request_forgery

        Prior to release 1.1.1, this check was ignored if the HTTP header
        ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
        has been shown to be insecure and has been removed.  For more
        information please see
        http://www.djangoproject.com/weblog/2011/feb/08/security/
        http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails

        .. versionchanged:: 3.2.2
           Added support for cookie version 2.  Both versions 1 and 2 are
           supported.
        """
        token = (self.get_argument("_xsrf", None) or
                 self.request.headers.get("X-Xsrftoken") or
                 self.request.headers.get("X-Csrftoken"))
        if not token:
            raise HTTPError(403, "'_xsrf' argument missing from POST")
        _, token, _ = self._decode_xsrf_token(token)
        _, expected_token, _ = self._get_raw_xsrf_token()
        if not token:
            raise HTTPError(403, "'_xsrf' argument has invalid format")
        if not _time_independent_equals(utf8(token), utf8(expected_token)):
            raise HTTPError(403, "XSRF cookie does not match POST argument")

    def xsrf_form_html(self):
        """An HTML ``<input/>`` element to be included with all POST forms.

        It defines the ``_xsrf`` input value, which we check on all POST
        requests to prevent cross-site request forgery. If you have set
        the ``xsrf_cookies`` application setting, you must include this
        HTML within all of your HTML forms.

        In a template, this method should be called with ``{% module
        xsrf_form_html() %}``

        See `check_xsrf_cookie()` above for more information.
        """
        return '<input type="hidden" name="_xsrf" value="' + \
            escape.xhtml_escape(self.xsrf_token) + '"/>'

    def static_url(self, path, include_host=None, **kwargs):
        """Returns a static URL for the given relative static file path.

        This method requires you set the ``static_path`` setting in your
        application (which specifies the root directory of your static
        files).

        This method returns a versioned url (by default appending
        ``?v=<signature>``), which allows the static files to be
        cached indefinitely.  This can be disabled by passing
        ``include_version=False`` (in the default implementation;
        other static file implementations are not required to support
        this, but they may support other options).

        By default this method returns URLs relative to the current
        host, but if ``include_host`` is true the URL returned will be
        absolute.  If this handler has an ``include_host`` attribute,
        that value will be used as the default for all `static_url`
        calls that do not pass ``include_host`` as a keyword argument.
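
        Example (a sketch, assuming ``css/site.css`` exists under the
        configured ``static_path``)::

            url = self.static_url("css/site.css")
            # e.g. "/static/css/site.css?v=<signature>"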
|
|
|
|
|
|
|
|
"""
|
|
|
|
self.require_setting("static_path", "static_url")
|
|
|
|
get_url = self.settings.get("static_handler_class",
|
|
|
|
StaticFileHandler).make_static_url
|
|
|
|
|
|
|
|
if include_host is None:
|
|
|
|
include_host = getattr(self, "include_host", False)
|
|
|
|
|
|
|
|
if include_host:
|
|
|
|
base = self.request.protocol + "://" + self.request.host
|
|
|
|
else:
|
|
|
|
base = ""
|
|
|
|
|
|
|
|
return base + get_url(self.settings, path, **kwargs)
|
|
|
|
|
|
|
|
def require_setting(self, name, feature="this feature"):
|
|
|
|
"""Raises an exception if the given app setting is not defined."""
|
|
|
|
if not self.application.settings.get(name):
|
|
|
|
raise Exception("You must define the '%s' setting in your "
|
|
|
|
"application to use %s" % (name, feature))
|
|
|
|
|
|
|
|
def reverse_url(self, name, *args):
|
|
|
|
"""Alias for `Application.reverse_url`."""
|
|
|
|
return self.application.reverse_url(name, *args)
|
|
|
|
|
|
|
|
def compute_etag(self):
|
|
|
|
"""Computes the etag header to be used for this request.
|
|
|
|
|
|
|
|
By default uses a hash of the content written so far.
|
|
|
|
|
|
|
|
May be overridden to provide custom etag implementations,
|
|
|
|
or may return None to disable tornado's default etag support.
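For example, a handler that opts out of etag generation entirely could
be written as (sketch)::

    class NoEtagHandler(RequestHandler):
        def compute_etag(self):
            return None  # no Etag header will be set for this handler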
|
|
|
|
"""
|
|
|
|
hasher = hashlib.sha1()
|
|
|
|
for part in self._write_buffer:
|
|
|
|
hasher.update(part)
|
|
|
|
return '"%s"' % hasher.hexdigest()
|
|
|
|
|
|
|
|
def set_etag_header(self):
|
|
|
|
"""Sets the response's Etag header using ``self.compute_etag()``.
|
|
|
|
|
|
|
|
Note: no header will be set if ``compute_etag()`` returns ``None``.
|
|
|
|
|
|
|
|
This method is called automatically when the request is finished.
|
|
|
|
"""
|
|
|
|
etag = self.compute_etag()
|
|
|
|
if etag is not None:
|
|
|
|
self.set_header("Etag", etag)
|
|
|
|
|
|
|
|
def check_etag_header(self):
|
|
|
|
"""Checks the ``Etag`` header against requests's ``If-None-Match``.
|
|
|
|
|
|
|
|
Returns ``True`` if the request's Etag matches and a 304 should be
|
|
|
|
returned. For example::
|
|
|
|
|
|
|
|
self.set_etag_header()
|
|
|
|
if self.check_etag_header():
|
|
|
|
self.set_status(304)
|
|
|
|
return
|
|
|
|
|
|
|
|
This method is called automatically when the request is finished,
|
|
|
|
but may be called earlier for applications that override
|
|
|
|
`compute_etag` and want to do an early check for ``If-None-Match``
|
|
|
|
before completing the request. The ``Etag`` header should be set
|
|
|
|
(perhaps with `set_etag_header`) before calling this method.
|
|
|
|
"""
|
|
|
|
computed_etag = utf8(self._headers.get("Etag", ""))
|
|
|
|
# Find all weak and strong etag values from If-None-Match header
|
|
|
|
# because RFC 7232 allows multiple etag values in a single header.
|
|
|
|
etags = re.findall(
|
|
|
|
br'\*|(?:W/)?"[^"]*"',
|
|
|
|
utf8(self.request.headers.get("If-None-Match", ""))
|
|
|
|
)
|
|
|
|
if not computed_etag or not etags:
|
|
|
|
return False
|
|
|
|
|
|
|
|
match = False
|
|
|
|
if etags[0] == b'*':
|
|
|
|
match = True
|
|
|
|
else:
|
|
|
|
# Use a weak comparison when comparing entity-tags.
|
|
|
|
def val(x):
|
|
|
|
return x[2:] if x.startswith(b'W/') else x
|
|
|
|
|
|
|
|
for etag in etags:
|
|
|
|
if val(etag) == val(computed_etag):
|
|
|
|
match = True
|
|
|
|
break
|
|
|
|
return match
|
|
|
|
|
|
|
|
def _stack_context_handle_exception(self, type, value, traceback):
|
|
|
|
try:
|
|
|
|
# For historical reasons _handle_request_exception only takes
|
|
|
|
# the exception value instead of the full triple,
|
|
|
|
# so re-raise the exception to ensure that it's in
|
|
|
|
# sys.exc_info()
|
|
|
|
raise_exc_info((type, value, traceback))
|
|
|
|
except Exception:
|
|
|
|
self._handle_request_exception(value)
|
|
|
|
return True
|
|
|
|
|
|
|
|
@gen.coroutine
|
|
|
|
def _execute(self, transforms, *args, **kwargs):
|
|
|
|
"""Executes this request with the given output transforms."""
|
|
|
|
self._transforms = transforms
|
|
|
|
try:
|
|
|
|
if self.request.method not in self.SUPPORTED_METHODS:
|
|
|
|
raise HTTPError(405)
|
|
|
|
self.path_args = [self.decode_argument(arg) for arg in args]
|
|
|
|
self.path_kwargs = dict((k, self.decode_argument(v, name=k))
|
|
|
|
for (k, v) in kwargs.items())
|
|
|
|
# If XSRF cookies are turned on, reject form submissions without
|
|
|
|
# the proper cookie
|
|
|
|
if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
|
|
|
|
self.application.settings.get("xsrf_cookies"):
|
|
|
|
self.check_xsrf_cookie()
|
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
result = self.prepare()
|
|
|
|
if result is not None:
|
2015-08-21 18:08:34 +00:00
|
|
|
result = yield result
|
2014-06-17 04:54:00 +00:00
|
|
|
if self._prepared_future is not None:
|
|
|
|
# Tell the Application we've finished with prepare()
|
|
|
|
# and are ready for the body to arrive.
|
|
|
|
self._prepared_future.set_result(None)
|
|
|
|
if self._finished:
|
|
|
|
return
|
2014-06-11 08:34:28 +00:00
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
if _has_stream_request_body(self.__class__):
|
|
|
|
# In streaming mode request.body is a Future that signals
|
|
|
|
# the body has been completely received. The Future has no
|
|
|
|
# result; the data has been passed to self.data_received
|
|
|
|
# instead.
|
|
|
|
try:
|
|
|
|
yield self.request.body
|
|
|
|
except iostream.StreamClosedError:
|
|
|
|
return
|
2014-06-11 08:34:28 +00:00
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
method = getattr(self, self.request.method.lower())
|
|
|
|
result = method(*self.path_args, **self.path_kwargs)
|
|
|
|
if result is not None:
|
2015-08-21 18:08:34 +00:00
|
|
|
result = yield result
|
2014-06-17 04:54:00 +00:00
|
|
|
if self._auto_finish and not self._finished:
|
|
|
|
self.finish()
|
|
|
|
except Exception as e:
|
2015-04-27 19:06:19 +00:00
|
|
|
try:
|
|
|
|
self._handle_request_exception(e)
|
|
|
|
except Exception:
|
|
|
|
app_log.error("Exception in exception handler", exc_info=True)
|
2014-06-17 04:54:00 +00:00
|
|
|
if (self._prepared_future is not None and
|
|
|
|
not self._prepared_future.done()):
|
|
|
|
# In case we failed before setting _prepared_future, do it
|
|
|
|
# now (to unblock the HTTP server). Note that this is not
|
|
|
|
# in a finally block to avoid GC issues prior to Python 3.4.
|
|
|
|
self._prepared_future.set_result(None)
|
2014-06-11 08:34:28 +00:00
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
def data_received(self, chunk):
|
|
|
|
"""Implement this method to handle streamed request data.
|
2014-06-11 08:34:28 +00:00
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
Requires the `.stream_request_body` decorator.
|
|
|
|
"""
|
|
|
|
raise NotImplementedError()
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
def _log(self):
|
|
|
|
"""Logs the current request.
|
|
|
|
|
|
|
|
Sort of deprecated since this functionality was moved to the
|
|
|
|
Application, but left in place for the benefit of existing apps
|
|
|
|
that have overridden this method.
|
|
|
|
"""
|
|
|
|
self.application.log_request(self)
|
|
|
|
|
|
|
|
def _request_summary(self):
|
2015-04-27 19:06:19 +00:00
|
|
|
return "%s %s (%s)" % (self.request.method, self.request.uri,
|
|
|
|
self.request.remote_ip)
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
def _handle_request_exception(self, e):
|
2014-06-29 10:05:33 +00:00
|
|
|
if isinstance(e, Finish):
|
|
|
|
# Not an error; just finish the request without logging.
|
|
|
|
if not self._finished:
|
2015-12-23 04:15:36 +00:00
|
|
|
self.finish(*e.args)
|
2014-06-29 10:05:33 +00:00
|
|
|
return
|
2015-04-27 19:06:19 +00:00
|
|
|
try:
|
|
|
|
self.log_exception(*sys.exc_info())
|
|
|
|
except Exception:
|
|
|
|
# An error here should still get a best-effort send_error()
|
|
|
|
# to avoid leaking the connection.
|
|
|
|
app_log.error("Error in exception logger", exc_info=True)
|
2014-06-11 08:34:28 +00:00
|
|
|
if self._finished:
|
|
|
|
# Extra errors after the request has been finished should
|
|
|
|
# be logged, but there is no reason to continue to try and
|
|
|
|
# send a response.
|
|
|
|
return
|
|
|
|
if isinstance(e, HTTPError):
|
|
|
|
if e.status_code not in httputil.responses and not e.reason:
|
|
|
|
gen_log.error("Bad HTTP status code: %d", e.status_code)
|
|
|
|
self.send_error(500, exc_info=sys.exc_info())
|
|
|
|
else:
|
|
|
|
self.send_error(e.status_code, exc_info=sys.exc_info())
|
|
|
|
else:
|
|
|
|
self.send_error(500, exc_info=sys.exc_info())
|
|
|
|
|
|
|
|
def log_exception(self, typ, value, tb):
|
|
|
|
"""Override to customize logging of uncaught exceptions.
|
|
|
|
|
|
|
|
By default logs instances of `HTTPError` as warnings without
|
|
|
|
stack traces (on the ``tornado.general`` logger), and all
|
|
|
|
other exceptions as errors with stack traces (on the
|
|
|
|
``tornado.application`` logger).
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
if isinstance(value, HTTPError):
|
|
|
|
if value.log_message:
|
|
|
|
format = "%d %s: " + value.log_message
|
|
|
|
args = ([value.status_code, self._request_summary()] +
|
|
|
|
list(value.args))
|
|
|
|
gen_log.warning(format, *args)
|
|
|
|
else:
|
|
|
|
app_log.error("Uncaught exception %s\n%r", self._request_summary(),
|
|
|
|
self.request, exc_info=(typ, value, tb))
|
|
|
|
|
|
|
|
def _ui_module(self, name, module):
|
|
|
|
def render(*args, **kwargs):
|
|
|
|
if not hasattr(self, "_active_modules"):
|
|
|
|
self._active_modules = {}
|
|
|
|
if name not in self._active_modules:
|
|
|
|
self._active_modules[name] = module(self)
|
|
|
|
rendered = self._active_modules[name].render(*args, **kwargs)
|
|
|
|
return rendered
|
|
|
|
return render
|
|
|
|
|
|
|
|
def _ui_method(self, method):
|
|
|
|
return lambda *args, **kwargs: method(self, *args, **kwargs)
|
|
|
|
|
|
|
|
def _clear_headers_for_304(self):
|
|
|
|
# 304 responses should not contain entity headers (defined in
|
|
|
|
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
|
|
|
|
# not explicitly allowed by
|
|
|
|
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
|
|
|
|
headers = ["Allow", "Content-Encoding", "Content-Language",
|
|
|
|
"Content-Length", "Content-MD5", "Content-Range",
|
|
|
|
"Content-Type", "Last-Modified"]
|
|
|
|
for h in headers:
|
|
|
|
self.clear_header(h)
|
|
|
|
|
|
|
|
|
|
|
|
def asynchronous(method):
|
|
|
|
"""Wrap request handler methods with this if they are asynchronous.
|
|
|
|
|
2015-04-27 19:06:19 +00:00
|
|
|
This decorator is for callback-style asynchronous methods; for
|
|
|
|
coroutines, use the ``@gen.coroutine`` decorator without
|
|
|
|
``@asynchronous``. (It is legal for legacy reasons to use the two
|
|
|
|
decorators together provided ``@asynchronous`` is first, but
|
|
|
|
``@asynchronous`` will be ignored in this case)
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
This decorator should only be applied to the :ref:`HTTP verb
|
|
|
|
methods <verbs>`; its behavior is undefined for any other method.
|
|
|
|
This decorator does not *make* a method asynchronous; it tells
|
|
|
|
the framework that the method *is* asynchronous. For this decorator
|
|
|
|
to be useful the method must (at least sometimes) do something
|
|
|
|
asynchronous.
|
|
|
|
|
|
|
|
If this decorator is given, the response is not finished when the
|
|
|
|
method returns. It is up to the request handler to call
|
|
|
|
`self.finish() <RequestHandler.finish>` to finish the HTTP
|
|
|
|
request. Without this decorator, the request is automatically
|
2015-04-27 19:06:19 +00:00
|
|
|
finished when the ``get()`` or ``post()`` method returns. Example:
|
|
|
|
|
|
|
|
.. testcode::
|
2014-06-11 08:34:28 +00:00
|
|
|
|
2015-04-27 19:06:19 +00:00
|
|
|
class MyRequestHandler(RequestHandler):
|
|
|
|
@asynchronous
|
2014-06-11 08:34:28 +00:00
|
|
|
def get(self):
|
|
|
|
http = httpclient.AsyncHTTPClient()
|
|
|
|
http.fetch("http://friendfeed.com/", self._on_download)
|
|
|
|
|
|
|
|
def _on_download(self, response):
|
|
|
|
self.write("Downloaded!")
|
|
|
|
self.finish()
|
|
|
|
|
2015-04-27 19:06:19 +00:00
|
|
|
.. testoutput::
|
|
|
|
:hide:
|
|
|
|
|
2015-12-23 04:15:36 +00:00
|
|
|
.. versionchanged:: 3.1
|
2014-06-11 08:34:28 +00:00
|
|
|
The ability to use ``@gen.coroutine`` without ``@asynchronous``.
|
2015-04-27 19:06:19 +00:00
|
|
|
|
2015-12-23 04:15:36 +00:00
|
|
|
.. versionchanged:: 4.3 Returning anything but ``None`` or a
|
|
|
|
yieldable object from a method decorated with ``@asynchronous``
|
|
|
|
is an error. Such return values were previously ignored silently.
|
2014-06-11 08:34:28 +00:00
|
|
|
"""
|
|
|
|
# Delay the IOLoop import because it's not available on app engine.
|
|
|
|
from tornado.ioloop import IOLoop
|
2015-04-27 19:06:19 +00:00
|
|
|
|
2014-06-11 08:34:28 +00:00
|
|
|
@functools.wraps(method)
|
|
|
|
def wrapper(self, *args, **kwargs):
|
|
|
|
self._auto_finish = False
|
|
|
|
with stack_context.ExceptionStackContext(
|
|
|
|
self._stack_context_handle_exception):
|
|
|
|
result = method(self, *args, **kwargs)
|
2015-12-23 04:15:36 +00:00
|
|
|
if result is not None:
|
|
|
|
result = gen.convert_yielded(result)
|
2016-09-30 22:40:12 +00:00
|
|
|
|
2014-06-11 08:34:28 +00:00
|
|
|
# If @asynchronous is used with @gen.coroutine, (but
|
|
|
|
# not @gen.engine), we can automatically finish the
|
|
|
|
# request when the future resolves. Additionally,
|
|
|
|
# the Future will swallow any exceptions so we need
|
|
|
|
# to throw them back out to the stack context to finish
|
|
|
|
# the request.
|
|
|
|
def future_complete(f):
|
|
|
|
f.result()
|
|
|
|
if not self._finished:
|
|
|
|
self.finish()
|
|
|
|
IOLoop.current().add_future(result, future_complete)
|
|
|
|
# Once we have done this, hide the Future from our
|
|
|
|
# caller (i.e. RequestHandler._when_complete), which
|
|
|
|
# would otherwise set up its own callback and
|
|
|
|
# exception handler (resulting in exceptions being
|
|
|
|
# logged twice).
|
|
|
|
return None
|
|
|
|
return result
|
|
|
|
return wrapper
|
|
|
|
|
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
def stream_request_body(cls):
|
|
|
|
"""Apply to `RequestHandler` subclasses to enable streaming body support.
|
|
|
|
|
|
|
|
This decorator implies the following changes:
|
|
|
|
|
|
|
|
* `.HTTPServerRequest.body` is undefined, and body arguments will not
|
|
|
|
be included in `RequestHandler.get_argument`.
|
|
|
|
* `RequestHandler.prepare` is called when the request headers have been
|
|
|
|
read instead of after the entire body has been read.
|
|
|
|
* The subclass must define a method ``data_received(self, data):``, which
|
|
|
|
will be called zero or more times as data is available. Note that
|
|
|
|
if the request has an empty body, ``data_received`` may not be called.
|
|
|
|
* ``prepare`` and ``data_received`` may return Futures (such as via
|
|
|
|
``@gen.coroutine``), in which case the next method will not be called
|
|
|
|
until those futures have completed.
|
|
|
|
* The regular HTTP method (``post``, ``put``, etc) will be called after
|
|
|
|
the entire body has been read.
|
|
|
|
|
|
|
|
There is a subtle interaction between ``data_received`` and asynchronous
|
2015-04-27 19:06:19 +00:00
|
|
|
``prepare``: The first call to ``data_received`` may occur at any point
|
2014-06-17 04:54:00 +00:00
|
|
|
after the call to ``prepare`` has returned *or yielded*.
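A minimal streaming handler might look like this (a sketch; the byte
counting and URL capture group are illustrative only)::

    @stream_request_body
    class UploadHandler(RequestHandler):
        def prepare(self):
            self.bytes_read = 0

        def data_received(self, chunk):
            self.bytes_read += len(chunk)

        def put(self, filename):
            self.write("received %d bytes for %s"
                       % (self.bytes_read, filename))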
|
|
|
|
"""
|
|
|
|
if not issubclass(cls, RequestHandler):
|
|
|
|
raise TypeError("expected subclass of RequestHandler, got %r" % cls)
|
|
|
|
cls._stream_request_body = True
|
|
|
|
return cls
|
|
|
|
|
|
|
|
|
|
|
|
def _has_stream_request_body(cls):
|
|
|
|
if not issubclass(cls, RequestHandler):
|
|
|
|
raise TypeError("expected subclass of RequestHandler, got %r" % cls)
|
|
|
|
return getattr(cls, '_stream_request_body', False)
|
|
|
|
|
|
|
|
|
2014-06-11 08:34:28 +00:00
|
|
|
def removeslash(method):
|
|
|
|
"""Use this decorator to remove trailing slashes from the request path.
|
|
|
|
|
|
|
|
For example, a request to ``/foo/`` would redirect to ``/foo`` with this
|
|
|
|
decorator. Your request handler mapping should use a regular expression
|
|
|
|
like ``r'/foo/*'`` in conjunction with using the decorator.
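For example (sketch)::

    class FooHandler(RequestHandler):
        @removeslash
        def get(self):
            self.write("foo")

    application = Application([
        (r"/foo/*", FooHandler),
    ])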
|
|
|
|
"""
|
|
|
|
@functools.wraps(method)
|
|
|
|
def wrapper(self, *args, **kwargs):
|
|
|
|
if self.request.path.endswith("/"):
|
|
|
|
if self.request.method in ("GET", "HEAD"):
|
|
|
|
uri = self.request.path.rstrip("/")
|
|
|
|
if uri: # don't try to redirect '/' to ''
|
|
|
|
if self.request.query:
|
|
|
|
uri += "?" + self.request.query
|
2014-06-29 10:05:33 +00:00
|
|
|
self.redirect(uri, permanent=True)
|
2014-06-11 08:34:28 +00:00
|
|
|
return
|
|
|
|
else:
|
|
|
|
raise HTTPError(404)
|
|
|
|
return method(self, *args, **kwargs)
|
|
|
|
return wrapper
|
|
|
|
|
|
|
|
|
|
|
|
def addslash(method):
|
|
|
|
"""Use this decorator to add a missing trailing slash to the request path.
|
|
|
|
|
|
|
|
For example, a request to ``/foo`` would redirect to ``/foo/`` with this
|
|
|
|
decorator. Your request handler mapping should use a regular expression
|
|
|
|
like ``r'/foo/?'`` in conjunction with using the decorator.
|
|
|
|
"""
|
|
|
|
@functools.wraps(method)
|
|
|
|
def wrapper(self, *args, **kwargs):
|
|
|
|
if not self.request.path.endswith("/"):
|
|
|
|
if self.request.method in ("GET", "HEAD"):
|
|
|
|
uri = self.request.path + "/"
|
|
|
|
if self.request.query:
|
|
|
|
uri += "?" + self.request.query
|
2014-06-29 10:05:33 +00:00
|
|
|
self.redirect(uri, permanent=True)
|
2014-06-11 08:34:28 +00:00
|
|
|
return
|
|
|
|
raise HTTPError(404)
|
|
|
|
return method(self, *args, **kwargs)
|
|
|
|
return wrapper
|
|
|
|
|
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
class Application(httputil.HTTPServerConnectionDelegate):
|
2014-06-11 08:34:28 +00:00
|
|
|
"""A collection of request handlers that make up a web application.
|
|
|
|
|
|
|
|
Instances of this class are callable and can be passed directly to
|
|
|
|
HTTPServer to serve the application::
|
|
|
|
|
|
|
|
application = web.Application([
|
|
|
|
(r"/", MainPageHandler),
|
|
|
|
])
|
|
|
|
http_server = httpserver.HTTPServer(application)
|
|
|
|
http_server.listen(8080)
|
2015-04-27 19:06:19 +00:00
|
|
|
ioloop.IOLoop.current().start()
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
The constructor for this class takes in a list of `URLSpec` objects
|
|
|
|
or (regexp, request_class) tuples. When we receive requests, we
|
|
|
|
iterate over the list in order and instantiate an instance of the
|
|
|
|
first request class whose regexp matches the request path.
|
|
|
|
The request class can be specified as either a class object or a
|
|
|
|
(fully-qualified) name.
|
|
|
|
|
|
|
|
Each tuple can contain additional elements, which correspond to the
|
2015-12-23 04:15:36 +00:00
|
|
|
arguments to the `URLSpec` constructor. (Prior to Tornado 3.2,
|
2014-06-11 08:34:28 +00:00
|
|
|
only tuples of two or three elements were allowed).
|
|
|
|
|
|
|
|
A dictionary may be passed as the third element of the tuple,
|
|
|
|
which will be used as keyword arguments to the handler's
|
|
|
|
constructor and `~RequestHandler.initialize` method. This pattern
|
|
|
|
is used for the `StaticFileHandler` in this example (note that a
|
|
|
|
`StaticFileHandler` can be installed automatically with the
|
|
|
|
static_path setting described below)::
|
|
|
|
|
|
|
|
application = web.Application([
|
|
|
|
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
|
|
|
|
])
|
|
|
|
|
|
|
|
We support virtual hosts with the `add_handlers` method, which takes in
|
|
|
|
a host regular expression as the first argument::
|
|
|
|
|
|
|
|
application.add_handlers(r"www\.myhost\.com", [
|
|
|
|
(r"/article/([0-9]+)", ArticleHandler),
|
|
|
|
])
|
|
|
|
|
|
|
|
You can serve static files by sending the ``static_path`` setting
|
|
|
|
as a keyword argument. We will serve those files from the
|
|
|
|
``/static/`` URI (this is configurable with the
|
|
|
|
``static_url_prefix`` setting), and we will serve ``/favicon.ico``
|
|
|
|
and ``/robots.txt`` from the same directory. A custom subclass of
|
|
|
|
`StaticFileHandler` can be specified with the
|
|
|
|
``static_handler_class`` setting.
|
|
|
|
|
|
|
|
"""
|
|
|
|
def __init__(self, handlers=None, default_host="", transforms=None,
|
2014-06-17 04:54:00 +00:00
|
|
|
**settings):
|
2014-06-11 08:34:28 +00:00
|
|
|
if transforms is None:
|
|
|
|
self.transforms = []
|
2014-10-14 04:24:01 +00:00
|
|
|
if settings.get("compress_response") or settings.get("gzip"):
|
2014-06-11 08:34:28 +00:00
|
|
|
self.transforms.append(GZipContentEncoding)
|
|
|
|
else:
|
|
|
|
self.transforms = transforms
|
|
|
|
self.handlers = []
|
|
|
|
self.named_handlers = {}
|
|
|
|
self.default_host = default_host
|
|
|
|
self.settings = settings
|
|
|
|
self.ui_modules = {'linkify': _linkify,
|
|
|
|
'xsrf_form_html': _xsrf_form_html,
|
|
|
|
'Template': TemplateModule,
|
|
|
|
}
|
|
|
|
self.ui_methods = {}
|
|
|
|
self._load_ui_modules(settings.get("ui_modules", {}))
|
|
|
|
self._load_ui_methods(settings.get("ui_methods", {}))
|
|
|
|
if self.settings.get("static_path"):
|
|
|
|
path = self.settings["static_path"]
|
|
|
|
handlers = list(handlers or [])
|
|
|
|
static_url_prefix = settings.get("static_url_prefix",
|
|
|
|
"/static/")
|
|
|
|
static_handler_class = settings.get("static_handler_class",
|
|
|
|
StaticFileHandler)
|
|
|
|
static_handler_args = settings.get("static_handler_args", {})
|
|
|
|
static_handler_args['path'] = path
|
|
|
|
for pattern in [re.escape(static_url_prefix) + r"(.*)",
|
|
|
|
r"/(favicon\.ico)", r"/(robots\.txt)"]:
|
|
|
|
handlers.insert(0, (pattern, static_handler_class,
|
|
|
|
static_handler_args))
|
|
|
|
if handlers:
|
|
|
|
self.add_handlers(".*$", handlers)
|
|
|
|
|
|
|
|
if self.settings.get('debug'):
|
|
|
|
self.settings.setdefault('autoreload', True)
|
|
|
|
self.settings.setdefault('compiled_template_cache', False)
|
|
|
|
self.settings.setdefault('static_hash_cache', False)
|
|
|
|
self.settings.setdefault('serve_traceback', True)
|
|
|
|
|
|
|
|
# Automatically reload modified modules
|
2014-06-17 04:54:00 +00:00
|
|
|
if self.settings.get('autoreload'):
|
2014-06-11 08:34:28 +00:00
|
|
|
from tornado import autoreload
|
|
|
|
autoreload.start()
|
|
|
|
|
|
|
|
def listen(self, port, address="", **kwargs):
|
|
|
|
"""Starts an HTTP server for this application on the given port.
|
|
|
|
|
|
|
|
This is a convenience alias for creating an `.HTTPServer`
|
|
|
|
object and calling its listen method. Keyword arguments not
|
|
|
|
supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
|
|
|
|
`.HTTPServer` constructor. For advanced uses
|
|
|
|
(e.g. multi-process mode), do not use this method; create an
|
|
|
|
`.HTTPServer` and call its
|
|
|
|
`.TCPServer.bind`/`.TCPServer.start` methods directly.
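For example, a forking multi-process setup might look like this (a
sketch; it assumes ``application`` is an `Application` instance and
the port is illustrative)::

    from tornado.httpserver import HTTPServer
    from tornado.ioloop import IOLoop

    server = HTTPServer(application)
    server.bind(8888)
    server.start(0)  # fork one process per CPU
    IOLoop.current().start()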
|
|
|
|
|
|
|
|
Note that after calling this method you still need to call
|
2015-04-27 19:06:19 +00:00
|
|
|
``IOLoop.current().start()`` to start the server.
|
2015-12-23 04:15:36 +00:00
|
|
|
|
|
|
|
Returns the `.HTTPServer` object.
|
|
|
|
|
|
|
|
.. versionchanged:: 4.3
|
|
|
|
Now returns the `.HTTPServer` object.
|
2014-06-11 08:34:28 +00:00
|
|
|
"""
|
|
|
|
# import is here rather than top level because HTTPServer
|
|
|
|
# is not importable on appengine
|
|
|
|
from tornado.httpserver import HTTPServer
|
|
|
|
server = HTTPServer(self, **kwargs)
|
|
|
|
server.listen(port, address)
|
2015-12-23 04:15:36 +00:00
|
|
|
return server
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
def add_handlers(self, host_pattern, host_handlers):
|
|
|
|
"""Appends the given handlers to our handler list.
|
|
|
|
|
|
|
|
Host patterns are processed sequentially in the order they were
|
|
|
|
added. All matching patterns will be considered.
|
|
|
|
"""
|
|
|
|
if not host_pattern.endswith("$"):
|
|
|
|
host_pattern += "$"
|
|
|
|
handlers = []
|
|
|
|
# The handlers with the wildcard host_pattern are a special
|
|
|
|
# case - they're added in the constructor but should have lower
|
|
|
|
# precedence than the more-precise handlers added later.
|
|
|
|
# If a wildcard handler group exists, it should always be last
|
|
|
|
# in the list, so insert new groups just before it.
|
|
|
|
if self.handlers and self.handlers[-1][0].pattern == '.*$':
|
|
|
|
self.handlers.insert(-1, (re.compile(host_pattern), handlers))
|
|
|
|
else:
|
|
|
|
self.handlers.append((re.compile(host_pattern), handlers))
|
|
|
|
|
|
|
|
for spec in host_handlers:
|
|
|
|
if isinstance(spec, (tuple, list)):
|
|
|
|
assert len(spec) in (2, 3, 4)
|
|
|
|
spec = URLSpec(*spec)
|
|
|
|
handlers.append(spec)
|
|
|
|
if spec.name:
|
|
|
|
if spec.name in self.named_handlers:
|
|
|
|
app_log.warning(
|
|
|
|
"Multiple handlers named %s; replacing previous value",
|
|
|
|
spec.name)
|
|
|
|
self.named_handlers[spec.name] = spec
|
|
|
|
|
|
|
|
def add_transform(self, transform_class):
|
|
|
|
self.transforms.append(transform_class)
|
|
|
|
|
|
|
|
def _get_host_handlers(self, request):
|
2015-04-27 19:06:19 +00:00
|
|
|
host = split_host_and_port(request.host.lower())[0]
|
2014-06-11 08:34:28 +00:00
|
|
|
matches = []
|
|
|
|
for pattern, handlers in self.handlers:
|
|
|
|
if pattern.match(host):
|
|
|
|
matches.extend(handlers)
|
|
|
|
# Look for default host if not behind load balancer (for debugging)
|
|
|
|
if not matches and "X-Real-Ip" not in request.headers:
|
|
|
|
for pattern, handlers in self.handlers:
|
|
|
|
if pattern.match(self.default_host):
|
|
|
|
matches.extend(handlers)
|
|
|
|
return matches or None
|
|
|
|
|
|
|
|
def _load_ui_methods(self, methods):
|
|
|
|
if isinstance(methods, types.ModuleType):
|
|
|
|
self._load_ui_methods(dict((n, getattr(methods, n))
|
|
|
|
for n in dir(methods)))
|
|
|
|
elif isinstance(methods, list):
|
|
|
|
for m in methods:
|
|
|
|
self._load_ui_methods(m)
|
|
|
|
else:
|
|
|
|
for name, fn in methods.items():
|
|
|
|
if not name.startswith("_") and hasattr(fn, "__call__") \
|
|
|
|
and name[0].lower() == name[0]:
|
|
|
|
self.ui_methods[name] = fn
|
|
|
|
|
|
|
|
def _load_ui_modules(self, modules):
|
|
|
|
if isinstance(modules, types.ModuleType):
|
|
|
|
self._load_ui_modules(dict((n, getattr(modules, n))
|
|
|
|
for n in dir(modules)))
|
|
|
|
elif isinstance(modules, list):
|
|
|
|
for m in modules:
|
|
|
|
self._load_ui_modules(m)
|
|
|
|
else:
|
|
|
|
assert isinstance(modules, dict)
|
|
|
|
for name, cls in modules.items():
|
|
|
|
try:
|
|
|
|
if issubclass(cls, UIModule):
|
|
|
|
self.ui_modules[name] = cls
|
|
|
|
except TypeError:
|
|
|
|
pass
|
|
|
|
|
2015-04-27 19:06:19 +00:00
|
|
|
def start_request(self, server_conn, request_conn):
|
2014-06-17 04:54:00 +00:00
|
|
|
# Modern HTTPServer interface
|
2015-04-27 19:06:19 +00:00
|
|
|
return _RequestDispatcher(self, request_conn)
|
2014-06-11 08:34:28 +00:00
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
def __call__(self, request):
|
|
|
|
# Legacy HTTPServer interface
|
|
|
|
dispatcher = _RequestDispatcher(self, None)
|
|
|
|
dispatcher.set_request(request)
|
|
|
|
return dispatcher.execute()
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
def reverse_url(self, name, *args):
|
|
|
|
"""Returns a URL path for handler named ``name``
|
|
|
|
|
|
|
|
The handler must be added to the application as a named `URLSpec`.
|
|
|
|
|
|
|
|
Args will be substituted for capturing groups in the `URLSpec` regex.
|
|
|
|
They will be converted to strings if necessary, encoded as utf8,
|
|
|
|
and url-escaped.
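For example (a sketch; ``ArticleHandler`` is illustrative)::

    application = Application([
        url(r"/article/([0-9]+)", ArticleHandler, name="article"),
    ])
    application.reverse_url("article", 42)  # returns "/article/42"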
|
|
|
|
"""
|
|
|
|
if name in self.named_handlers:
|
|
|
|
return self.named_handlers[name].reverse(*args)
|
|
|
|
raise KeyError("%s not found in named urls" % name)
|
|
|
|
|
|
|
|
def log_request(self, handler):
|
|
|
|
"""Writes a completed HTTP request to the logs.
|
|
|
|
|
|
|
|
By default writes to the python root logger. To change
|
|
|
|
this behavior either subclass Application and override this method,
|
|
|
|
or pass a function in the application settings dictionary as
|
|
|
|
``log_function``.
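For example, a custom ``log_function`` might look like this (a sketch;
``handlers`` is a placeholder for your handler list)::

    def my_log_function(handler):
        print(handler.get_status(), handler._request_summary())

    application = Application(handlers, log_function=my_log_function)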
|
|
|
|
"""
|
|
|
|
if "log_function" in self.settings:
|
|
|
|
self.settings["log_function"](handler)
|
|
|
|
return
|
|
|
|
if handler.get_status() < 400:
|
|
|
|
log_method = access_log.info
|
|
|
|
elif handler.get_status() < 500:
|
|
|
|
log_method = access_log.warning
|
|
|
|
else:
|
|
|
|
log_method = access_log.error
|
|
|
|
request_time = 1000.0 * handler.request.request_time()
|
|
|
|
log_method("%d %s %.2fms", handler.get_status(),
|
|
|
|
handler._request_summary(), request_time)
|
|
|
|
|
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
class _RequestDispatcher(httputil.HTTPMessageDelegate):
|
|
|
|
def __init__(self, application, connection):
|
|
|
|
self.application = application
|
|
|
|
self.connection = connection
|
|
|
|
self.request = None
|
|
|
|
self.chunks = []
|
|
|
|
self.handler_class = None
|
|
|
|
self.handler_kwargs = None
|
|
|
|
self.path_args = []
|
|
|
|
self.path_kwargs = {}
|
|
|
|
|
|
|
|
def headers_received(self, start_line, headers):
|
|
|
|
self.set_request(httputil.HTTPServerRequest(
|
2015-04-27 19:06:19 +00:00
|
|
|
connection=self.connection, start_line=start_line,
|
|
|
|
headers=headers))
|
2014-06-17 04:54:00 +00:00
|
|
|
if self.stream_request_body:
|
|
|
|
self.request.body = Future()
|
|
|
|
return self.execute()
|
|
|
|
|
|
|
|
def set_request(self, request):
|
|
|
|
self.request = request
|
|
|
|
self._find_handler()
|
|
|
|
self.stream_request_body = _has_stream_request_body(self.handler_class)
|
|
|
|
|
|
|
|
def _find_handler(self):
|
|
|
|
# Identify the handler to use as soon as we have the request.
|
|
|
|
# Save url path arguments for later.
|
|
|
|
app = self.application
|
|
|
|
handlers = app._get_host_handlers(self.request)
|
|
|
|
if not handlers:
|
|
|
|
self.handler_class = RedirectHandler
|
2015-04-27 19:06:19 +00:00
|
|
|
self.handler_kwargs = dict(url="%s://%s/"
|
|
|
|
% (self.request.protocol,
|
|
|
|
app.default_host))
|
2014-06-17 04:54:00 +00:00
|
|
|
return
|
|
|
|
for spec in handlers:
|
|
|
|
match = spec.regex.match(self.request.path)
|
|
|
|
if match:
|
|
|
|
self.handler_class = spec.handler_class
|
|
|
|
self.handler_kwargs = spec.kwargs
|
|
|
|
if spec.regex.groups:
|
|
|
|
# Pass matched groups to the handler. Since
|
|
|
|
# match.groups() includes both named and
|
|
|
|
# unnamed groups, we want to use either groups
|
|
|
|
# or groupdict but not both.
|
|
|
|
if spec.regex.groupindex:
|
|
|
|
self.path_kwargs = dict(
|
|
|
|
(str(k), _unquote_or_none(v))
|
|
|
|
for (k, v) in match.groupdict().items())
|
|
|
|
else:
|
|
|
|
self.path_args = [_unquote_or_none(s)
|
|
|
|
for s in match.groups()]
|
|
|
|
return
|
|
|
|
if app.settings.get('default_handler_class'):
|
|
|
|
self.handler_class = app.settings['default_handler_class']
|
|
|
|
self.handler_kwargs = app.settings.get(
|
|
|
|
'default_handler_args', {})
|
|
|
|
else:
|
|
|
|
self.handler_class = ErrorHandler
|
|
|
|
self.handler_kwargs = dict(status_code=404)
|
|
|
|
|
|
|
|
def data_received(self, data):
|
|
|
|
if self.stream_request_body:
|
|
|
|
return self.handler.data_received(data)
|
|
|
|
else:
|
|
|
|
self.chunks.append(data)
|
|
|
|
|
|
|
|
def finish(self):
|
|
|
|
if self.stream_request_body:
|
|
|
|
self.request.body.set_result(None)
|
|
|
|
else:
|
|
|
|
self.request.body = b''.join(self.chunks)
|
|
|
|
self.request._parse_body()
|
|
|
|
self.execute()
|
|
|
|
|
|
|
|
def on_connection_close(self):
|
|
|
|
if self.stream_request_body:
|
|
|
|
self.handler.on_connection_close()
|
|
|
|
else:
|
|
|
|
self.chunks = None
|
|
|
|
|
|
|
|
def execute(self):
|
|
|
|
# If template cache is disabled (usually in the debug mode),
|
|
|
|
# re-compile templates and reload static files on every
|
|
|
|
# request so you don't need to restart to see changes
|
|
|
|
if not self.application.settings.get("compiled_template_cache", True):
|
|
|
|
with RequestHandler._template_loader_lock:
|
|
|
|
for loader in RequestHandler._template_loaders.values():
|
|
|
|
loader.reset()
|
|
|
|
if not self.application.settings.get('static_hash_cache', True):
|
|
|
|
StaticFileHandler.reset()
|
|
|
|
|
|
|
|
self.handler = self.handler_class(self.application, self.request,
|
|
|
|
**self.handler_kwargs)
|
|
|
|
transforms = [t(self.request) for t in self.application.transforms]
|
|
|
|
|
|
|
|
if self.stream_request_body:
|
|
|
|
self.handler._prepared_future = Future()
|
|
|
|
# Note that if an exception escapes handler._execute it will be
|
2015-04-27 19:06:19 +00:00
|
|
|
# trapped in the Future it returns (which we are ignoring here,
|
|
|
|
# leaving it to be logged when the Future is GC'd).
|
2014-06-17 04:54:00 +00:00
|
|
|
# However, that shouldn't happen because _execute has a blanket
|
|
|
|
# except handler, and we cannot easily access the IOLoop here to
|
2015-04-27 19:06:19 +00:00
|
|
|
# call add_future (because of the requirement to remain compatible
|
|
|
|
# with WSGI)
|
2015-12-23 04:15:36 +00:00
|
|
|
self.handler._execute(transforms, *self.path_args,
|
|
|
|
**self.path_kwargs)
|
2014-06-17 04:54:00 +00:00
|
|
|
# If we are streaming the request body, then execute() is finished
|
|
|
|
# when the handler has prepared to receive the body. If not,
|
|
|
|
# it doesn't matter when execute() finishes (so we return None)
|
|
|
|
return self.handler._prepared_future
|
|
|
|
|
|
|
|
|
2014-06-11 08:34:28 +00:00
|
|
|
class HTTPError(Exception):
|
|
|
|
"""An exception that will turn into an HTTP error response.
|
|
|
|
|
|
|
|
Raising an `HTTPError` is a convenient alternative to calling
|
|
|
|
`RequestHandler.send_error` since it automatically ends the
|
|
|
|
current function.
|
|
|
|
|
2014-06-29 10:05:33 +00:00
|
|
|
To customize the response sent with an `HTTPError`, override
|
|
|
|
`RequestHandler.write_error`.
|
|
|
|
|
2014-06-11 08:34:28 +00:00
|
|
|
:arg int status_code: HTTP status code. Must be listed in
|
|
|
|
`httplib.responses <http.client.responses>` unless the ``reason``
|
|
|
|
keyword argument is given.
|
|
|
|
:arg string log_message: Message to be written to the log for this error
|
|
|
|
(will not be shown to the user unless the `Application` is in debug
|
|
|
|
mode). May contain ``%s``-style placeholders, which will be filled
|
|
|
|
in with remaining positional parameters.
|
|
|
|
:arg string reason: Keyword-only argument. The HTTP "reason" phrase
|
|
|
|
to pass in the status line along with ``status_code``. Normally
|
|
|
|
determined automatically from ``status_code``, but can be used
|
|
|
|
to use a non-standard numeric code.
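For example (a sketch; the handler and its validation rule are
illustrative)::

    class ArticleHandler(RequestHandler):
        def get(self, article_id):
            if not article_id.isdigit():
                # The log message (with its %s placeholder) is written
                # to the logs only; the client sees a plain 400 page.
                raise HTTPError(400, "bad article id %s", article_id)
            self.write("article " + article_id)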
|
|
|
|
"""
|
2015-12-23 04:15:36 +00:00
|
|
|
def __init__(self, status_code=500, log_message=None, *args, **kwargs):
|
2014-06-11 08:34:28 +00:00
|
|
|
self.status_code = status_code
|
|
|
|
self.log_message = log_message
|
|
|
|
self.args = args
|
|
|
|
self.reason = kwargs.get('reason', None)
|
2015-04-27 19:06:19 +00:00
|
|
|
if log_message and not args:
|
|
|
|
self.log_message = log_message.replace('%', '%%')
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
def __str__(self):
|
|
|
|
message = "HTTP %d: %s" % (
|
|
|
|
self.status_code,
|
|
|
|
self.reason or httputil.responses.get(self.status_code, 'Unknown'))
|
|
|
|
if self.log_message:
|
|
|
|
return message + " (" + (self.log_message % self.args) + ")"
|
|
|
|
else:
|
|
|
|
return message
|
|
|
|
|
|
|
|
|
2014-06-29 10:05:33 +00:00
|
|
|
class Finish(Exception):
|
|
|
|
"""An exception that ends the request without producing an error response.
|
|
|
|
|
2015-12-23 04:15:36 +00:00
|
|
|
When `Finish` is raised in a `RequestHandler`, the request will
|
|
|
|
end (calling `RequestHandler.finish` if it hasn't already been
|
|
|
|
called), but the error-handling methods (including
|
|
|
|
`RequestHandler.write_error`) will not be called.
|
|
|
|
|
|
|
|
If `Finish()` was created with no arguments, the pending response
|
|
|
|
will be sent as-is. If `Finish()` was given an argument, that
|
|
|
|
argument will be passed to `RequestHandler.finish()`.
|
2014-06-29 10:05:33 +00:00
|
|
|
|
|
|
|
This can be a more convenient way to implement custom error pages
|
|
|
|
than overriding ``write_error`` (especially in library code)::
|
|
|
|
|
|
|
|
if self.current_user is None:
|
|
|
|
self.set_status(401)
|
|
|
|
self.set_header('WWW-Authenticate', 'Basic realm="something"')
|
|
|
|
raise Finish()
|
2015-12-23 04:15:36 +00:00
|
|
|
|
|
|
|
.. versionchanged:: 4.3
|
|
|
|
Arguments passed to ``Finish()`` will be passed on to
|
|
|
|
`RequestHandler.finish`.
|
2014-06-29 10:05:33 +00:00
|
|
|
"""
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
2014-06-11 08:34:28 +00:00
|
|
|
class MissingArgumentError(HTTPError):
|
|
|
|
"""Exception raised by `RequestHandler.get_argument`.
|
|
|
|
|
|
|
|
This is a subclass of `HTTPError`, so if it is uncaught a 400 response
|
|
|
|
code will be used instead of 500 (and a stack trace will not be logged).
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
def __init__(self, arg_name):
|
|
|
|
super(MissingArgumentError, self).__init__(
|
|
|
|
400, 'Missing argument %s' % arg_name)
|
|
|
|
self.arg_name = arg_name
|
|
|
|
|
|
|
|
|
|
|
|
class ErrorHandler(RequestHandler):
|
|
|
|
"""Generates an error response with ``status_code`` for all requests."""
|
|
|
|
def initialize(self, status_code):
|
|
|
|
self.set_status(status_code)
|
|
|
|
|
|
|
|
def prepare(self):
|
|
|
|
raise HTTPError(self._status_code)
|
|
|
|
|
|
|
|
def check_xsrf_cookie(self):
|
|
|
|
# POSTs to an ErrorHandler don't actually have side effects,
|
|
|
|
# so we don't need to check the xsrf token. This allows POSTs
|
|
|
|
# to the wrong url to return a 404 instead of 403.
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
|
|
class RedirectHandler(RequestHandler):
|
|
|
|
"""Redirects the client to the given URL for all GET requests.
|
|
|
|
|
|
|
|
You should provide the keyword argument ``url`` to the handler, e.g.::
|
|
|
|
|
|
|
|
application = web.Application([
|
|
|
|
(r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
|
|
|
|
])
|
|
|
|
"""
|
|
|
|
def initialize(self, url, permanent=True):
|
|
|
|
self._url = url
|
|
|
|
self._permanent = permanent
|
|
|
|
|
|
|
|
def get(self):
|
|
|
|
self.redirect(self._url, permanent=self._permanent)
|
|
|
|
|
|
|
|
|
|
|
|
class StaticFileHandler(RequestHandler):
|
|
|
|
"""A simple handler that can serve static content from a directory.
|
|
|
|
|
|
|
|
A `StaticFileHandler` is configured automatically if you pass the
|
|
|
|
``static_path`` keyword argument to `Application`. This handler
|
|
|
|
can be customized with the ``static_url_prefix``, ``static_handler_class``,
|
|
|
|
and ``static_handler_args`` settings.
|
|
|
|
|
|
|
|
To map an additional path to this handler for a static data directory
|
|
|
|
you would add a line to your application like::
|
|
|
|
|
|
|
|
application = web.Application([
|
|
|
|
(r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
|
|
|
|
])
|
|
|
|
|
|
|
|
The handler constructor requires a ``path`` argument, which specifies the
|
|
|
|
local root directory of the content to be served.
|
|
|
|
|
|
|
|
Note that a capture group in the regex is required to parse the value for
|
|
|
|
the ``path`` argument to the get() method (different from the constructor
|
|
|
|
argument above); see `URLSpec` for details.
|
|
|
|
|
2015-08-21 18:08:34 +00:00
|
|
|
To serve a file like ``index.html`` automatically when a directory is
|
|
|
|
requested, set ``static_handler_args=dict(default_filename="index.html")``
|
|
|
|
in your application settings, or add ``default_filename`` as an initializer
|
|
|
|
argument for your ``StaticFileHandler``.
|
|
|
|
|
2014-06-11 08:34:28 +00:00
|
|
|
To maximize the effectiveness of browser caching, this class supports
|
|
|
|
versioned urls (by default using the argument ``?v=``). If a version
|
|
|
|
is given, we instruct the browser to cache this file indefinitely.
|
|
|
|
`make_static_url` (also available as `RequestHandler.static_url`) can
|
|
|
|
be used to construct a versioned url.
|
|
|
|
|
|
|
|
This handler is intended primarily for use in development and light-duty
|
|
|
|
file serving; for heavy traffic it will be more efficient to use
|
|
|
|
a dedicated static file server (such as nginx or Apache). We support
|
|
|
|
the HTTP ``Accept-Ranges`` mechanism to return partial content (because
|
|
|
|
some browsers require this functionality to be present to seek in
|
2015-08-21 18:08:34 +00:00
|
|
|
HTML5 audio or video).
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
**Subclassing notes**
|
|
|
|
|
|
|
|
This class is designed to be extensible by subclassing, but because
|
|
|
|
of the way static urls are generated with class methods rather than
|
|
|
|
instance methods, the inheritance patterns are somewhat unusual.
|
|
|
|
Be sure to use the ``@classmethod`` decorator when overriding a
|
|
|
|
class method. Instance methods may use the attributes ``self.path``,
|
|
|
|
``self.absolute_path``, and ``self.modified``.
|
|
|
|
|
|
|
|
Subclasses should only override methods discussed in this section;
|
|
|
|
overriding other methods is error-prone. Overriding
|
|
|
|
``StaticFileHandler.get`` is particularly problematic due to the
|
|
|
|
tight coupling with ``compute_etag`` and other methods.
|
|
|
|
|
|
|
|
To change the way static urls are generated (e.g. to match the behavior
|
|
|
|
of another server or CDN), override `make_static_url`, `parse_url_path`,
|
|
|
|
`get_cache_time`, and/or `get_version`.
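For example, a subclass that points generated urls at a CDN might look
like this (a sketch; the hostname is illustrative and versioning is
omitted)::

    class CDNStaticFileHandler(StaticFileHandler):
        @classmethod
        def make_static_url(cls, settings, path, include_version=True):
            return "https://cdn.example.com/" + path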
|
|
|
|
|
|
|
|
To replace all interaction with the filesystem (e.g. to serve
|
|
|
|
static content from a database), override `get_content`,
|
|
|
|
`get_content_size`, `get_modified_time`, `get_absolute_path`, and
|
|
|
|
`validate_absolute_path`.
|
|
|
|
|
|
|
|
.. versionchanged:: 3.1
|
|
|
|
Many of the methods for subclasses were added in Tornado 3.1.
|
|
|
|
"""
|
|
|
|
CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years
|
|
|
|
|
2016-09-30 22:40:12 +00:00
|
|
|
_static_hashes = {} # type: typing.Dict
|
2014-06-11 08:34:28 +00:00
|
|
|
_lock = threading.Lock() # protects _static_hashes
|
|
|
|
|
|
|
|
def initialize(self, path, default_filename=None):
|
|
|
|
self.root = path
|
|
|
|
self.default_filename = default_filename
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def reset(cls):
|
|
|
|
with cls._lock:
|
|
|
|
cls._static_hashes = {}
|
|
|
|
|
|
|
|
def head(self, path):
|
2014-06-17 04:54:00 +00:00
|
|
|
return self.get(path, include_body=False)
|
2014-06-11 08:34:28 +00:00
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
@gen.coroutine
|
2014-06-11 08:34:28 +00:00
|
|
|
def get(self, path, include_body=True):
|
|
|
|
# Set up our path instance variables.
|
|
|
|
self.path = self.parse_url_path(path)
|
|
|
|
del path # make sure we don't refer to path instead of self.path again
|
|
|
|
absolute_path = self.get_absolute_path(self.root, self.path)
|
|
|
|
self.absolute_path = self.validate_absolute_path(
|
|
|
|
self.root, absolute_path)
|
|
|
|
if self.absolute_path is None:
|
|
|
|
return
|
|
|
|
|
|
|
|
self.modified = self.get_modified_time()
|
|
|
|
self.set_headers()
|
|
|
|
|
|
|
|
if self.should_return_304():
|
|
|
|
self.set_status(304)
|
|
|
|
return
|
|
|
|
|
|
|
|
request_range = None
|
|
|
|
range_header = self.request.headers.get("Range")
|
|
|
|
if range_header:
|
|
|
|
# As per RFC 2616 14.16, if an invalid Range header is specified,
|
|
|
|
# the request will be treated as if the header didn't exist.
|
|
|
|
request_range = httputil._parse_request_range(range_header)
|
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
size = self.get_content_size()
|
2014-06-11 08:34:28 +00:00
|
|
|
if request_range:
|
|
|
|
start, end = request_range
|
|
|
|
if (start is not None and start >= size) or end == 0:
|
|
|
|
# As per RFC 2616 14.35.1, a range is not satisfiable only: if
|
|
|
|
# the first requested byte is equal to or greater than the
|
|
|
|
# content, or when a suffix with length 0 is specified
|
|
|
|
self.set_status(416) # Range Not Satisfiable
|
|
|
|
self.set_header("Content-Type", "text/plain")
|
|
|
|
self.set_header("Content-Range", "bytes */%s" % (size, ))
|
|
|
|
return
|
|
|
|
if start is not None and start < 0:
|
|
|
|
start += size
|
|
|
|
if end is not None and end > size:
|
|
|
|
# Clients sometimes blindly use a large range to limit their
|
|
|
|
# download size; cap the endpoint at the actual file size.
|
|
|
|
end = size
|
|
|
|
# Note: only return HTTP 206 if less than the entire range has been
|
|
|
|
# requested. Not only is this semantically correct, but Chrome
|
|
|
|
# refuses to play audio if it gets an HTTP 206 in response to
|
|
|
|
# ``Range: bytes=0-``.
|
|
|
|
if size != (end or size) - (start or 0):
|
|
|
|
self.set_status(206) # Partial Content
|
|
|
|
self.set_header("Content-Range",
|
|
|
|
httputil._get_content_range(start, end, size))
|
|
|
|
else:
|
|
|
|
start = end = None
|
2014-06-17 04:54:00 +00:00
|
|
|
|
|
|
|
if start is not None and end is not None:
|
|
|
|
content_length = end - start
|
|
|
|
elif end is not None:
|
|
|
|
content_length = end
|
|
|
|
elif start is not None:
|
|
|
|
content_length = size - start
|
|
|
|
else:
|
|
|
|
content_length = size
|
|
|
|
self.set_header("Content-Length", content_length)
|
|
|
|
|
|
|
|
if include_body:
|
|
|
|
content = self.get_content(self.absolute_path, start, end)
|
2014-10-14 04:24:01 +00:00
|
|
|
if isinstance(content, bytes):
|
2014-06-17 04:54:00 +00:00
|
|
|
content = [content]
|
|
|
|
for chunk in content:
|
2014-10-14 04:24:01 +00:00
|
|
|
try:
|
|
|
|
self.write(chunk)
|
|
|
|
yield self.flush()
|
|
|
|
except iostream.StreamClosedError:
|
|
|
|
return
|
2014-06-17 04:54:00 +00:00
|
|
|
else:
|
2014-06-11 08:34:28 +00:00
|
|
|
assert self.request.method == "HEAD"
|
|
|
|
|
|
|
|
def compute_etag(self):
|
|
|
|
"""Sets the ``Etag`` header based on static url version.
|
|
|
|
|
|
|
|
This allows efficient ``If-None-Match`` checks against cached
|
|
|
|
versions, and sends the correct ``Etag`` for a partial response
|
|
|
|
(i.e. the same ``Etag`` as the full file).
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
version_hash = self._get_cached_version(self.absolute_path)
|
|
|
|
if not version_hash:
|
|
|
|
return None
|
|
|
|
return '"%s"' % (version_hash, )
|
|
|
|
|
|
|
|
def set_headers(self):
|
|
|
|
"""Sets the content and caching headers on the response.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
self.set_header("Accept-Ranges", "bytes")
|
|
|
|
self.set_etag_header()
|
|
|
|
|
|
|
|
if self.modified is not None:
|
|
|
|
self.set_header("Last-Modified", self.modified)
|
|
|
|
|
|
|
|
content_type = self.get_content_type()
|
|
|
|
if content_type:
|
|
|
|
self.set_header("Content-Type", content_type)
|
|
|
|
|
2015-04-27 19:06:19 +00:00
|
|
|
cache_time = self.get_cache_time(self.path, self.modified,
|
|
|
|
content_type)
|
2014-06-11 08:34:28 +00:00
|
|
|
if cache_time > 0:
|
|
|
|
self.set_header("Expires", datetime.datetime.utcnow() +
|
|
|
|
datetime.timedelta(seconds=cache_time))
|
|
|
|
self.set_header("Cache-Control", "max-age=" + str(cache_time))
|
|
|
|
|
|
|
|
self.set_extra_headers(self.path)
|
|
|
|
|
|
|
|
def should_return_304(self):
|
|
|
|
"""Returns True if the headers indicate that we should return 304.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
if self.check_etag_header():
|
|
|
|
return True
|
|
|
|
|
|
|
|
# Check the If-Modified-Since, and don't send the result if the
|
|
|
|
# content has not been modified
|
|
|
|
ims_value = self.request.headers.get("If-Modified-Since")
|
|
|
|
if ims_value is not None:
|
|
|
|
date_tuple = email.utils.parsedate(ims_value)
|
|
|
|
if date_tuple is not None:
|
|
|
|
if_since = datetime.datetime(*date_tuple[:6])
|
|
|
|
if if_since >= self.modified:
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def get_absolute_path(cls, root, path):
|
|
|
|
"""Returns the absolute location of ``path`` relative to ``root``.
|
|
|
|
|
|
|
|
``root`` is the path configured for this `StaticFileHandler`
|
|
|
|
(in most cases the ``static_path`` `Application` setting).
|
|
|
|
|
|
|
|
This class method may be overridden in subclasses. By default
|
|
|
|
it returns a filesystem path, but other strings may be used
|
|
|
|
as long as they are unique and understood by the subclass's
|
|
|
|
overridden `get_content`.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
abspath = os.path.abspath(os.path.join(root, path))
|
|
|
|
return abspath
|
|
|
|
|
|
|
|
def validate_absolute_path(self, root, absolute_path):
|
|
|
|
"""Validate and return the absolute path.
|
|
|
|
|
|
|
|
``root`` is the configured path for the `StaticFileHandler`,
|
|
|
|
and ``path`` is the result of `get_absolute_path`
|
|
|
|
|
|
|
|
This is an instance method called during request processing,
|
|
|
|
so it may raise `HTTPError` or use methods like
|
|
|
|
`RequestHandler.redirect` (return None after redirecting to
|
|
|
|
halt further processing). This is where 404 errors for missing files
|
|
|
|
are generated.
|
|
|
|
|
|
|
|
This method may modify the path before returning it, but note that
|
|
|
|
any such modifications will not be understood by `make_static_url`.
|
|
|
|
|
|
|
|
In instance methods, this method's result is available as
|
|
|
|
``self.absolute_path``.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
2015-08-21 18:08:34 +00:00
|
|
|
# os.path.abspath strips a trailing /.
|
|
|
|
# We must add it back to `root` so that we only match files
|
|
|
|
# in a directory named `root` instead of files starting with
|
|
|
|
# that prefix.
|
2015-12-23 04:15:36 +00:00
|
|
|
root = os.path.abspath(root)
|
|
|
|
if not root.endswith(os.path.sep):
|
|
|
|
# abspath always removes a trailing slash, except when
|
|
|
|
# root is '/'. This is an unusual case, but several projects
|
|
|
|
# have independently discovered this technique to disable
|
|
|
|
# Tornado's path validation and (hopefully) do their own,
|
|
|
|
# so we need to support it.
|
|
|
|
root += os.path.sep
|
2015-08-21 18:08:34 +00:00
|
|
|
# The trailing slash also needs to be temporarily added back
|
|
|
|
# the requested path so a request to root/ will match.
|
2014-06-11 08:34:28 +00:00
|
|
|
if not (absolute_path + os.path.sep).startswith(root):
|
|
|
|
raise HTTPError(403, "%s is not in root static directory",
|
|
|
|
self.path)
|
|
|
|
if (os.path.isdir(absolute_path) and
|
|
|
|
self.default_filename is not None):
|
|
|
|
# need to look at the request.path here for when path is empty
|
|
|
|
# but there is some prefix to the path that was already
|
|
|
|
# trimmed by the routing
|
|
|
|
if not self.request.path.endswith("/"):
|
|
|
|
self.redirect(self.request.path + "/", permanent=True)
|
|
|
|
return
|
|
|
|
absolute_path = os.path.join(absolute_path, self.default_filename)
|
|
|
|
if not os.path.exists(absolute_path):
|
|
|
|
raise HTTPError(404)
|
|
|
|
if not os.path.isfile(absolute_path):
|
|
|
|
raise HTTPError(403, "%s is not a file", self.path)
|
|
|
|
return absolute_path
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def get_content(cls, abspath, start=None, end=None):
|
|
|
|
"""Retrieve the content of the requested resource which is located
|
|
|
|
at the given absolute path.
|
|
|
|
|
|
|
|
This class method may be overridden by subclasses. Note that its
|
|
|
|
signature is different from other overridable class methods
|
|
|
|
(no ``settings`` argument); this is deliberate to ensure that
|
|
|
|
``abspath`` is able to stand on its own as a cache key.
|
|
|
|
|
|
|
|
This method should either return a byte string or an iterator
|
|
|
|
of byte strings. The latter is preferred for large files
|
|
|
|
as it helps reduce memory fragmentation.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
with open(abspath, "rb") as file:
|
|
|
|
if start is not None:
|
|
|
|
file.seek(start)
|
|
|
|
if end is not None:
|
|
|
|
remaining = end - (start or 0)
|
|
|
|
else:
|
|
|
|
remaining = None
|
|
|
|
while True:
|
|
|
|
chunk_size = 64 * 1024
|
|
|
|
if remaining is not None and remaining < chunk_size:
|
|
|
|
chunk_size = remaining
|
|
|
|
chunk = file.read(chunk_size)
|
|
|
|
if chunk:
|
|
|
|
if remaining is not None:
|
|
|
|
remaining -= len(chunk)
|
|
|
|
yield chunk
|
|
|
|
else:
|
|
|
|
if remaining is not None:
|
|
|
|
assert remaining == 0
|
|
|
|
return
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def get_content_version(cls, abspath):
|
|
|
|
"""Returns a version string for the resource at the given path.
|
|
|
|
|
|
|
|
This class method may be overridden by subclasses. The
|
|
|
|
default implementation is a hash of the file's contents.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
data = cls.get_content(abspath)
|
|
|
|
hasher = hashlib.md5()
|
2014-10-14 04:24:01 +00:00
|
|
|
if isinstance(data, bytes):
|
2014-06-11 08:34:28 +00:00
|
|
|
hasher.update(data)
|
|
|
|
else:
|
|
|
|
for chunk in data:
|
|
|
|
hasher.update(chunk)
|
|
|
|
return hasher.hexdigest()
|
|
|
|
|
|
|
|
def _stat(self):
|
|
|
|
if not hasattr(self, '_stat_result'):
|
|
|
|
self._stat_result = os.stat(self.absolute_path)
|
|
|
|
return self._stat_result
|
|
|
|
|
|
|
|
def get_content_size(self):
|
|
|
|
"""Retrieve the total size of the resource at the given path.
|
|
|
|
|
2014-06-17 04:54:00 +00:00
|
|
|
This method may be overridden by subclasses.
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
2014-06-17 04:54:00 +00:00
|
|
|
|
|
|
|
.. versionchanged:: 4.0
|
|
|
|
This method is now always called, instead of only when
|
|
|
|
partial results are requested.
|
2014-06-11 08:34:28 +00:00
|
|
|
"""
|
|
|
|
stat_result = self._stat()
|
|
|
|
return stat_result[stat.ST_SIZE]
|
|
|
|
|
|
|
|
def get_modified_time(self):
|
|
|
|
"""Returns the time that ``self.absolute_path`` was last modified.
|
|
|
|
|
|
|
|
May be overridden in subclasses. Should return a `~datetime.datetime`
|
|
|
|
object or None.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
stat_result = self._stat()
|
2015-04-27 19:06:19 +00:00
|
|
|
modified = datetime.datetime.utcfromtimestamp(
|
|
|
|
stat_result[stat.ST_MTIME])
|
2014-06-11 08:34:28 +00:00
|
|
|
return modified
|
|
|
|
|
|
|
|
def get_content_type(self):
|
|
|
|
"""Returns the ``Content-Type`` header to be used for this request.
|
|
|
|
|
|
|
|
.. versionadded:: 3.1
|
|
|
|
"""
|
|
|
|
mime_type, encoding = mimetypes.guess_type(self.absolute_path)
|
2015-08-21 18:08:34 +00:00
|
|
|
# per RFC 6713, use the appropriate type for a gzip compressed file
|
|
|
|
if encoding == "gzip":
|
|
|
|
return "application/gzip"
|
|
|
|
# As of 2015-07-21 there is no bzip2 encoding defined at
|
|
|
|
# http://www.iana.org/assignments/media-types/media-types.xhtml
|
|
|
|
# So for that (and any other encoding), use octet-stream.
|
|
|
|
elif encoding is not None:
|
|
|
|
return "application/octet-stream"
|
|
|
|
elif mime_type is not None:
|
|
|
|
return mime_type
|
|
|
|
# if mime_type not detected, use application/octet-stream
|
|
|
|
else:
|
|
|
|
return "application/octet-stream"
|
2014-06-11 08:34:28 +00:00
|
|
|
|
|
|
|
def set_extra_headers(self, path):
|
|
|
|
"""For subclass to add extra headers to the response"""
|
|
|
|
pass
|
|
|
|
|
|
|
|
def get_cache_time(self, path, modified, mime_type):
|
|
|
|
"""Override to customize cache control behavior.
|
|
|
|
|
|
|
|
Return a positive number of seconds to make the result
|
|
|
|
cacheable for that amount of time, or 0 to mark the resource as
|
|
|
|
cacheable for an unspecified amount of time (subject to
|
|
|
|
browser heuristics).
|
|
|
|
|
|
|
|
By default returns cache expiry of 10 years for resources requested
|
|
|
|
with ``v`` argument.
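For example, a subclass that caches every response for one hour might
use (sketch)::

    class OneHourStaticFileHandler(StaticFileHandler):
        def get_cache_time(self, path, modified, mime_type):
            return 3600  # seconds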
|
|
|
|
"""
|
|
|
|
return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def make_static_url(cls, settings, path, include_version=True):
|
|
|
|
"""Constructs a versioned url for the given path.
|
|
|
|
|
|
|
|
This method may be overridden in subclasses (but note that it
|
|
|
|
is a class method rather than an instance method). Subclasses
|
|
|
|
are only required to implement the signature
|
|
|
|
``make_static_url(cls, settings, path)``; other keyword
|
|
|
|
arguments may be passed through `~RequestHandler.static_url`
|
|
|
|
but are not standard.
|
|
|
|
|
|
|
|
``settings`` is the `Application.settings` dictionary. ``path``
|
|
|
|
is the static path being requested. The url returned should be
|
|
|
|
relative to the current host.
|
|
|
|
|
|
|
|
``include_version`` determines whether the generated URL should
|
|
|
|
include the query string containing the version hash of the
|
|
|
|
file corresponding to the given ``path``.
|
|
|
|
|
|
|
|
"""
|
|
|
|
url = settings.get('static_url_prefix', '/static/') + path
|
|
|
|
if not include_version:
|
|
|
|
return url
|
|
|
|
|
|
|
|
version_hash = cls.get_version(settings, path)
|
|
|
|
if not version_hash:
|
|
|
|
return url
|
|
|
|
|
|
|
|
return '%s?v=%s' % (url, version_hash)

    def parse_url_path(self, url_path):
        """Converts a static URL path into a filesystem path.

        ``url_path`` is the path component of the URL with
        ``static_url_prefix`` removed. The return value should be a
        filesystem path relative to ``static_path``.

        This is the inverse of `make_static_url`.
        """
        if os.path.sep != "/":
            url_path = url_path.replace("/", os.path.sep)
        return url_path

    @classmethod
    def get_version(cls, settings, path):
        """Generate the version string to be used in static URLs.

        ``settings`` is the `Application.settings` dictionary and ``path``
        is the relative location of the requested asset on the filesystem.
        The returned value should be a string, or ``None`` if no version
        could be determined.

        .. versionchanged:: 3.1
           This method was previously recommended for subclasses to override;
           `get_content_version` is now preferred as it allows the base
           class to handle caching of the result.
        """
        abs_path = cls.get_absolute_path(settings['static_path'], path)
        return cls._get_cached_version(abs_path)

    @classmethod
    def _get_cached_version(cls, abs_path):
        with cls._lock:
            hashes = cls._static_hashes
            if abs_path not in hashes:
                try:
                    hashes[abs_path] = cls.get_content_version(abs_path)
                except Exception:
                    gen_log.error("Could not open static file %r", abs_path)
                    hashes[abs_path] = None
            hsh = hashes.get(abs_path)
            if hsh:
                return hsh
        return None


class FallbackHandler(RequestHandler):
    """A `RequestHandler` that wraps another HTTP server callback.

    The fallback is a callable object that accepts an
    `~.httputil.HTTPServerRequest`, such as an `Application` or
    `tornado.wsgi.WSGIContainer`. This is most useful to use both
    Tornado ``RequestHandlers`` and WSGI in the same server. Typical
    usage::

        wsgi_app = tornado.wsgi.WSGIContainer(
            django.core.handlers.wsgi.WSGIHandler())
        application = tornado.web.Application([
            (r"/foo", FooHandler),
            (r".*", FallbackHandler, dict(fallback=wsgi_app)),
        ])
    """
    def initialize(self, fallback):
        self.fallback = fallback

    def prepare(self):
        self.fallback(self.request)
        self._finished = True


class OutputTransform(object):
    """A transform modifies the result of an HTTP request (e.g., GZip encoding)

    Applications are not expected to create their own OutputTransforms
    or interact with them directly; the framework chooses which transforms
    (if any) to apply.
    """
    def __init__(self, request):
        pass

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes]
        return status_code, headers, chunk

    def transform_chunk(self, chunk, finishing):
        return chunk
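
# A minimal sketch (not part of the original module) of the transform
# contract, kept as comments so the module itself is unchanged: a transform
# sees the status line and headers once, then every chunk of the body.
# The framework applies transforms itself; applications normally only select
# them, e.g. via ``Application(..., transforms=[...])``. The class name
# below is hypothetical.
#
#     class ByteCountingTransform(OutputTransform):
#         def __init__(self, request):
#             self.bytes_seen = 0
#
#         def transform_first_chunk(self, status_code, headers, chunk, finishing):
#             # Headers may still be modified here before they are written out.
#             self.bytes_seen += len(chunk)
#             return status_code, headers, chunk
#
#         def transform_chunk(self, chunk, finishing):
#             self.bytes_seen += len(chunk)
#             return chunk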


class GZipContentEncoding(OutputTransform):
    """Applies the gzip content encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11

    .. versionchanged:: 4.0
        Now compresses all mime types beginning with ``text/``, instead
        of just a whitelist. (the whitelist is still used for certain
        non-text mime types).
    """
    # Whitelist of compressible mime types (in addition to any types
    # beginning with "text/").
    CONTENT_TYPES = set(["application/javascript", "application/x-javascript",
                         "application/xml", "application/atom+xml",
                         "application/json", "application/xhtml+xml",
                         "image/svg+xml"])
    # Python's GzipFile defaults to level 9, while most other gzip
    # tools (including gzip itself) default to 6, which is probably a
    # better CPU/size tradeoff.
    GZIP_LEVEL = 6
    # Responses that are too short are unlikely to benefit from gzipping
    # after considering the "Content-Encoding: gzip" header and the header
    # inside the gzip encoding.
    # Note that responses written in multiple chunks will be compressed
    # regardless of size.
    MIN_LENGTH = 1024

    def __init__(self, request):
        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")

    def _compressible_type(self, ctype):
        return ctype.startswith('text/') or ctype in self.CONTENT_TYPES

    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # type: (int, httputil.HTTPHeaders, bytes, bool) -> typing.Tuple[int, httputil.HTTPHeaders, bytes]
        # TODO: can/should this type be inherited from the superclass?
        if 'Vary' in headers:
            headers['Vary'] += ', Accept-Encoding'
        else:
            headers['Vary'] = 'Accept-Encoding'
        if self._gzipping:
            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
            self._gzipping = self._compressible_type(ctype) and \
                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
                ("Content-Encoding" not in headers)
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = BytesIO()
            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value,
                                            compresslevel=self.GZIP_LEVEL)
            chunk = self.transform_chunk(chunk, finishing)
            if "Content-Length" in headers:
                # The original content length is no longer correct.
                # If this is the last (and only) chunk, we can set the new
                # content-length; otherwise we remove it and fall back to
                # chunked encoding.
                if finishing:
                    headers["Content-Length"] = str(len(chunk))
                else:
                    del headers["Content-Length"]
        return status_code, headers, chunk

    def transform_chunk(self, chunk, finishing):
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            chunk = self._gzip_value.getvalue()
            self._gzip_value.truncate(0)
            self._gzip_value.seek(0)
        return chunk
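
# Illustrative usage (not part of the original module): response compression
# is normally switched on through application settings rather than by
# instantiating this transform directly. A sketch, assuming the standard
# ``compress_response`` setting:
#
#     application = tornado.web.Application(
#         [(r"/", MainHandler)],
#         compress_response=True,  # adds GZipContentEncoding to the transforms
#     )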


def authenticated(method):
    """Decorate methods with this to require that the user be logged in.

    If the user is not logged in, they will be redirected to the configured
    `login url <RequestHandler.get_login_url>`.

    If you configure a login url with a query parameter, Tornado will
    assume you know what you're doing and use it as-is. If not, it
    will add a `next` parameter so the login page knows where to send
    you once you're logged in.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if not self.current_user:
            if self.request.method in ("GET", "HEAD"):
                url = self.get_login_url()
                if "?" not in url:
                    if urlparse.urlsplit(url).scheme:
                        # if login url is absolute, make next absolute too
                        next_url = self.request.full_url()
                    else:
                        next_url = self.request.uri
                    url += "?" + urlencode(dict(next=next_url))
                self.redirect(url)
                return
            raise HTTPError(403)
        return method(self, *args, **kwargs)
    return wrapper
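
# A short usage sketch (not part of the original module), assuming a handler
# that defines ``get_current_user`` and an application configured with a
# ``login_url`` setting; the handler and URLs are hypothetical:
#
#     class ProfileHandler(tornado.web.RequestHandler):
#         def get_current_user(self):
#             return self.get_secure_cookie("user")
#
#         @tornado.web.authenticated
#         def get(self):
#             # Only reached when current_user is set; otherwise the
#             # decorator redirects to /login?next=/profile.
#             self.write(b"Hello, " + self.current_user)
#
#     application = tornado.web.Application(
#         [(r"/profile", ProfileHandler)],
#         login_url="/login", cookie_secret="__CHANGE_ME__")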


class UIModule(object):
    """A re-usable, modular UI unit on a page.

    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.

    Subclasses of UIModule must override the `render` method.
    """
    def __init__(self, handler):
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.locale = handler.locale

    @property
    def current_user(self):
        return self.handler.current_user

    def render(self, *args, **kwargs):
        """Override in subclasses to return this module's output."""
        raise NotImplementedError()

    def embedded_javascript(self):
        """Override to return a JavaScript string
        to be embedded in the page."""
        return None

    def javascript_files(self):
        """Override to return a list of JavaScript files needed by this module.

        If the return values are relative paths, they will be passed to
        `RequestHandler.static_url`; otherwise they will be used as-is.
        """
        return None

    def embedded_css(self):
        """Override to return a CSS string
        that will be embedded in the page."""
        return None

    def css_files(self):
        """Override to return a list of CSS files required by this module.

        If the return values are relative paths, they will be passed to
        `RequestHandler.static_url`; otherwise they will be used as-is.
        """
        return None

    def html_head(self):
        """Override to return an HTML string that will be put in the <head/>
        element.
        """
        return None

    def html_body(self):
        """Override to return an HTML string that will be put at the end of
        the <body/> element.
        """
        return None

    def render_string(self, path, **kwargs):
        """Renders a template and returns it as a string."""
        return self.handler.render_string(path, **kwargs)
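
# A minimal sketch (not part of the original module) of a UIModule subclass
# and how it would be wired up; the module, handler, and template names are
# hypothetical:
#
#     class Entry(tornado.web.UIModule):
#         def render(self, entry, show_comments=False):
#             return self.render_string(
#                 "module-entry.html", entry=entry, show_comments=show_comments)
#
#         def embedded_css(self):
#             return ".entry { margin-bottom: 1em; }"
#
#     application = tornado.web.Application(
#         [(r"/", HomeHandler)],
#         ui_modules={"Entry": Entry})
#
# A template could then invoke it with ``{% module Entry(entry) %}``; the
# embedded CSS is collected and inserted into the rendered page.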


class _linkify(UIModule):
    def render(self, text, **kwargs):
        return escape.linkify(text, **kwargs)


class _xsrf_form_html(UIModule):
    def render(self):
        return self.handler.xsrf_form_html()


class TemplateModule(UIModule):
    """UIModule that simply renders the given template.

    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    but the module version gets its own namespace (with kwargs passed to
    Template()) instead of inheriting the outer template's namespace.

    Templates rendered through this module also get access to UIModule's
    automatic javascript/css features. Simply call set_resources
    inside the template and give it keyword arguments corresponding to
    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    Note that these resources are output once per template file, not once
    per instantiation of the template, so they must not depend on
    any arguments to the template.
    """
    def __init__(self, handler):
        super(TemplateModule, self).__init__(handler)
        # keep resources in both a list and a dict to preserve order
        self._resource_list = []
        self._resource_dict = {}

    def render(self, path, **kwargs):
        def set_resources(**kwargs):
            if path not in self._resource_dict:
                self._resource_list.append(kwargs)
                self._resource_dict[path] = kwargs
            else:
                if self._resource_dict[path] != kwargs:
                    raise ValueError("set_resources called with different "
                                     "resources for the same template")
            return ""
        return self.render_string(path, set_resources=set_resources,
                                  **kwargs)

    def _get_resources(self, key):
        return (r[key] for r in self._resource_list if key in r)

    def embedded_javascript(self):
        return "\n".join(self._get_resources("embedded_javascript"))

    def javascript_files(self):
        result = []
        for f in self._get_resources("javascript_files"):
            if isinstance(f, (unicode_type, bytes)):
                result.append(f)
            else:
                result.extend(f)
        return result

    def embedded_css(self):
        return "\n".join(self._get_resources("embedded_css"))

    def css_files(self):
        result = []
        for f in self._get_resources("css_files"):
            if isinstance(f, (unicode_type, bytes)):
                result.append(f)
            else:
                result.extend(f)
        return result

    def html_head(self):
        return "".join(self._get_resources("html_head"))

    def html_body(self):
        return "".join(self._get_resources("html_body"))


class _UIModuleNamespace(object):
    """Lazy namespace which creates UIModule proxies bound to a handler."""
    def __init__(self, handler, ui_modules):
        self.handler = handler
        self.ui_modules = ui_modules

    def __getitem__(self, key):
        return self.handler._ui_module(key, self.ui_modules[key])

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError as e:
            raise AttributeError(str(e))


class URLSpec(object):
    """Specifies mappings between URLs and handlers."""
    def __init__(self, pattern, handler, kwargs=None, name=None):
        """Parameters:

        * ``pattern``: Regular expression to be matched. Any capturing
          groups in the regex will be passed in to the handler's
          get/post/etc methods as arguments (by keyword if named, by
          position if unnamed. Named and unnamed capturing groups
          may not be mixed in the same rule).

        * ``handler``: `RequestHandler` subclass to be invoked.

        * ``kwargs`` (optional): A dictionary of additional arguments
          to be passed to the handler's constructor.

        * ``name`` (optional): A name for this handler. Used by
          `Application.reverse_url`.
        """
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)
        assert len(self.regex.groupindex) in (0, self.regex.groups), \
            ("groups in url regexes must either be all named or all "
             "positional: %r" % self.regex.pattern)

        if isinstance(handler, str):
            # import the Module and instantiate the class
            # Must be a fully qualified name (module.ClassName)
            handler = import_object(handler)

        self.handler_class = handler
        self.kwargs = kwargs or {}
        self.name = name
        self._path, self._group_count = self._find_groups()

    def __repr__(self):
        return '%s(%r, %s, kwargs=%r, name=%r)' % \
            (self.__class__.__name__, self.regex.pattern,
             self.handler_class, self.kwargs, self.name)

    def _find_groups(self):
        """Returns a tuple (reverse string, group count) for a url.

        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        if pattern.startswith('^'):
            pattern = pattern[1:]
        if pattern.endswith('$'):
            pattern = pattern[:-1]

        if self.regex.groups != pattern.count('('):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return (None, None)

        pieces = []
        for fragment in pattern.split('('):
            if ')' in fragment:
                paren_loc = fragment.index(')')
                if paren_loc >= 0:
                    pieces.append('%s' + fragment[paren_loc + 1:])
            else:
                try:
                    unescaped_fragment = re_unescape(fragment)
                except ValueError as exc:
                    # If we can't unescape part of it, we can't
                    # reverse this url.
                    return (None, None)
                pieces.append(unescaped_fragment)

        return (''.join(pieces), self.regex.groups)

    def reverse(self, *args):
        if self._path is None:
            raise ValueError("Cannot reverse url regex " + self.regex.pattern)
        assert len(args) == self._group_count, "required number of arguments "\
            "not found"
        if not len(args):
            return self._path
        converted_args = []
        for a in args:
            if not isinstance(a, (unicode_type, bytes)):
                a = str(a)
            converted_args.append(escape.url_escape(utf8(a), plus=False))
        return self._path % tuple(converted_args)


url = URLSpec
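
# A brief illustration (not part of the original module) of named URLSpecs
# and URL reversal; the handler and route names are hypothetical:
#
#     application = tornado.web.Application([
#         url(r"/entry/([^/]+)", EntryHandler, name="entry"),
#     ])
#
# Inside a handler or template, the capturing group is filled back in:
#
#     self.reverse_url("entry", "hello-world")   # -> "/entry/hello-world"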


if hasattr(hmac, 'compare_digest'):  # python 3.3
    _time_independent_equals = hmac.compare_digest
else:
    def _time_independent_equals(a, b):
        if len(a) != len(b):
            return False
        result = 0
        if isinstance(a[0], int):  # python3 byte strings
            for x, y in zip(a, b):
                result |= x ^ y
        else:  # python2
            for x, y in zip(a, b):
                result |= ord(x) ^ ord(y)
        return result == 0
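
# Usage note (added for illustration): this helper compares signatures
# without short-circuiting on the first differing byte, so comparison time
# does not reveal how much of an attacker-supplied signature was correct.
# For example, _time_independent_equals(b"abcd", b"abce") examines all four
# byte pairs before returning False.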


def create_signed_value(secret, name, value, version=None, clock=None,
                        key_version=None):
    if version is None:
        version = DEFAULT_SIGNED_VALUE_VERSION
    if clock is None:
        clock = time.time

    timestamp = utf8(str(int(clock())))
    value = base64.b64encode(utf8(value))
    if version == 1:
        signature = _create_signature_v1(secret, name, value, timestamp)
        value = b"|".join([value, timestamp, signature])
        return value
    elif version == 2:
        # The v2 format consists of a version number and a series of
        # length-prefixed fields "%d:%s", the last of which is a
        # signature, all separated by pipes.  All numbers are in
        # decimal format with no leading zeros.  The signature is an
        # HMAC-SHA256 of the whole string up to that point, including
        # the final pipe.
        #
        # The fields are:
        # - format version (i.e. 2; no length prefix)
        # - key version (integer, default is 0)
        # - timestamp (integer seconds since epoch)
        # - name (not encoded; assumed to be ~alphanumeric)
        # - value (base64-encoded)
        # - signature (hex-encoded; no length prefix)
        def format_field(s):
            return utf8("%d:" % len(s)) + utf8(s)
        to_sign = b"|".join([
            b"2",
            format_field(str(key_version or 0)),
            format_field(timestamp),
            format_field(name),
            format_field(value),
            b''])

        if isinstance(secret, dict):
            assert key_version is not None, 'Key version must be set when sign key dict is used'
            assert version >= 2, 'Version must be at least 2 for key version support'
            secret = secret[key_version]

        signature = _create_signature_v2(secret, to_sign)
        return to_sign + signature
    else:
        raise ValueError("Unsupported version %d" % version)
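
# For illustration (not part of the original module): with the v2 format
# described above, signing name "foo" and value "bar" at timestamp
# 1300000000 with key version 0 produces a byte string shaped like
#
#     2|1:0|10:1300000000|3:foo|4:YmFy|<64 hex chars of HMAC-SHA256>
#
# where "YmFy" is the base64 encoding of "bar" and the trailing field is
# the signature of everything up to and including the final pipe.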


# A leading version number in decimal
# with no leading zeros, followed by a pipe.
_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")


def _get_version(value):
    # Figures out what version value is.  Version 1 did not include an
    # explicit version field and started with arbitrary base64 data,
    # which makes this tricky.
    m = _signed_value_version_re.match(value)
    if m is None:
        version = 1
    else:
        try:
            version = int(m.group(1))
            if version > 999:
                # Certain payloads from the version-less v1 format may
                # be parsed as valid integers.  Due to base64 padding
                # restrictions, this can only happen for numbers whose
                # length is a multiple of 4, so we can treat all
                # numbers up to 999 as versions, and for the rest we
                # fall back to v1 format.
                version = 1
        except ValueError:
            version = 1
    return version


def decode_signed_value(secret, name, value, max_age_days=31,
                        clock=None, min_version=None):
    if clock is None:
        clock = time.time
    if min_version is None:
        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    if min_version > 2:
        raise ValueError("Unsupported min_version %d" % min_version)
    if not value:
        return None

    value = utf8(value)
    version = _get_version(value)

    if version < min_version:
        return None
    if version == 1:
        return _decode_signed_value_v1(secret, name, value,
                                       max_age_days, clock)
    elif version == 2:
        return _decode_signed_value_v2(secret, name, value,
                                       max_age_days, clock)
    else:
        return None
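
# A round-trip sketch (not part of the original module); handlers normally
# go through RequestHandler.set_secure_cookie/get_secure_cookie, which call
# these helpers with the application's ``cookie_secret``:
#
#     secret = "__CHANGE_ME__"
#     signed = create_signed_value(secret, "user", "alice")
#     assert decode_signed_value(secret, "user", signed) == b"alice"
#     # Tampering with the payload or signature makes decoding return None.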


def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    parts = utf8(value).split(b"|")
    if len(parts) != 3:
        return None
    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    if not _time_independent_equals(parts[2], signature):
        gen_log.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    if timestamp < clock() - max_age_days * 86400:
        gen_log.warning("Expired cookie %r", value)
        return None
    if timestamp > clock() + 31 * 86400:
        # _cookie_signature does not hash a delimiter between the
        # parts of the cookie, so an attacker could transfer trailing
        # digits from the payload to the timestamp without altering the
        # signature.  For backwards compatibility, sanity-check timestamp
        # here instead of modifying _cookie_signature.
        gen_log.warning("Cookie timestamp in future; possible tampering %r",
                        value)
        return None
    if parts[1].startswith(b"0"):
        gen_log.warning("Tampered cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except Exception:
        return None


def _decode_fields_v2(value):
    def _consume_field(s):
        length, _, rest = s.partition(b':')
        n = int(length)
        field_value = rest[:n]
        # In python 3, indexing bytes returns small integers; we must
        # use a slice to get a byte string as in python 2.
        if rest[n:n + 1] != b'|':
            raise ValueError("malformed v2 signed value field")
        rest = rest[n + 1:]
        return field_value, rest

    rest = value[2:]  # remove version number
    key_version, rest = _consume_field(rest)
    timestamp, rest = _consume_field(rest)
    name_field, rest = _consume_field(rest)
    value_field, passed_sig = _consume_field(rest)
    return int(key_version), timestamp, name_field, value_field, passed_sig


def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    try:
        key_version, timestamp, name_field, value_field, passed_sig = _decode_fields_v2(value)
    except ValueError:
        return None
    signed_string = value[:-len(passed_sig)]

    if isinstance(secret, dict):
        try:
            secret = secret[key_version]
        except KeyError:
            return None

    expected_sig = _create_signature_v2(secret, signed_string)
    if not _time_independent_equals(passed_sig, expected_sig):
        return None
    if name_field != utf8(name):
        return None
    timestamp = int(timestamp)
    if timestamp < clock() - max_age_days * 86400:
        # The signature has expired.
        return None
    try:
        return base64.b64decode(value_field)
    except Exception:
        return None


def get_signature_key_version(value):
    value = utf8(value)
    version = _get_version(value)
    if version < 2:
        return None
    try:
        key_version, _, _, _, _ = _decode_fields_v2(value)
    except ValueError:
        return None

    return key_version
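
# Illustration of key rotation (not part of the original module), assuming
# the ``cookie_secret``-as-dict and ``key_version`` application settings
# that build on these helpers:
#
#     secrets = {0: "old secret", 1: "new secret"}
#     signed = create_signed_value(secrets, "user", "alice", key_version=1)
#     assert get_signature_key_version(signed) == 1
#     # Decoding looks up the right key from the dict by that version:
#     assert decode_signed_value(secrets, "user", signed) == b"alice"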


def _create_signature_v1(secret, *parts):
    hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    for part in parts:
        hash.update(utf8(part))
    return utf8(hash.hexdigest())


def _create_signature_v2(secret, s):
    hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    hash.update(utf8(s))
    return utf8(hash.hexdigest())


def _unquote_or_none(s):
    """None-safe wrapper around url_unescape to handle unmatched optional
    groups correctly.

    Note that args are passed as bytes so the handler can decide what
    encoding to use.
    """
    if s is None:
        return s
    return escape.url_unescape(s, encoding=None, plus=False)