Mirror of https://github.com/SickGear/SickGear.git (synced 2024-12-01 00:43:37 +00:00)

Commit 2cb0fe9693 (parent 204b4ad425): Update Tornado Web Server 4.5.dev1 (92f29b8) to 4.5.dev1 (38e493e).

15 changed files with 995 additions and 328 deletions
Changelog hunk:

@@ -23,6 +23,7 @@
 * Update SimpleJSON library 3.8.1 (6022794) to 3.10.0 (c52efea)
 * Update Six compatibility library 1.10.0 (r405) to 1.10.0 (r433)
 * Update socks from SocksiPy 1.0 to PySocks 1.6.5 (b4323df)
+* Update Tornado Web Server 4.5.dev1 (92f29b8) to 4.5.dev1 (38e493e)

 [develop changelog]

@ -31,7 +31,7 @@ import sys
|
||||||
|
|
||||||
from tornado.log import app_log
|
from tornado.log import app_log
|
||||||
from tornado.stack_context import ExceptionStackContext, wrap
|
from tornado.stack_context import ExceptionStackContext, wrap
|
||||||
from tornado.util import raise_exc_info, ArgReplacer
|
from tornado.util import raise_exc_info, ArgReplacer, is_finalizing
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from concurrent import futures
|
from concurrent import futures
|
||||||
|
@ -123,8 +123,8 @@ class _TracebackLogger(object):
|
||||||
self.exc_info = None
|
self.exc_info = None
|
||||||
self.formatted_tb = None
|
self.formatted_tb = None
|
||||||
|
|
||||||
def __del__(self):
|
def __del__(self, is_finalizing=is_finalizing):
|
||||||
if self.formatted_tb:
|
if not is_finalizing() and self.formatted_tb:
|
||||||
app_log.error('Future exception was never retrieved: %s',
|
app_log.error('Future exception was never retrieved: %s',
|
||||||
''.join(self.formatted_tb).rstrip())
|
''.join(self.formatted_tb).rstrip())
|
||||||
|
|
||||||
|
@ -329,8 +329,8 @@ class Future(object):
|
||||||
# cycle are never destroyed. It's no longer the case on Python 3.4 thanks to
|
# cycle are never destroyed. It's no longer the case on Python 3.4 thanks to
|
||||||
# the PEP 442.
|
# the PEP 442.
|
||||||
if _GC_CYCLE_FINALIZERS:
|
if _GC_CYCLE_FINALIZERS:
|
||||||
def __del__(self):
|
def __del__(self, is_finalizing=is_finalizing):
|
||||||
if not self._log_traceback:
|
if is_finalizing() or not self._log_traceback:
|
||||||
# set_exception() was not called, or result() or exception()
|
# set_exception() was not called, or result() or exception()
|
||||||
# has consumed the exception
|
# has consumed the exception
|
||||||
return
|
return
|
||||||
|
|
|
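
The two hunks above (tornado/concurrent.py, `_TracebackLogger.__del__` and `Future.__del__`) guard shutdown-time logging behind `is_finalizing()`, binding it as a default argument so the check still works while module globals are being torn down. A minimal sketch of the same pattern outside Tornado; the `PendingErrorLogger` class is hypothetical and only illustrates the technique:

try:
    from sys import is_finalizing
except ImportError:
    # Older interpreters: emulate with atexit, as the util.py hunk further
    # down in this commit does.
    import atexit
    _exited = []
    atexit.register(_exited.append, None)

    def is_finalizing():
        return bool(_exited)


class PendingErrorLogger(object):
    """Illustrative only: reports an unhandled error from __del__."""

    def __init__(self, message):
        self.message = message

    def __del__(self, is_finalizing=is_finalizing):
        # Binding is_finalizing as a default argument keeps it reachable even
        # during interpreter exit; skipping the report then avoids touching
        # half-destroyed logging machinery.
        if not is_finalizing() and self.message:
            print('error was never retrieved: %s' % self.message)
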
@ -273,10 +273,11 @@ def _make_coroutine_wrapper(func, replace_callback):
|
||||||
"""
|
"""
|
||||||
# On Python 3.5, set the coroutine flag on our generator, to allow it
|
# On Python 3.5, set the coroutine flag on our generator, to allow it
|
||||||
# to be used with 'await'.
|
# to be used with 'await'.
|
||||||
|
wrapped = func
|
||||||
if hasattr(types, 'coroutine'):
|
if hasattr(types, 'coroutine'):
|
||||||
func = types.coroutine(func)
|
func = types.coroutine(func)
|
||||||
|
|
||||||
@functools.wraps(func)
|
@functools.wraps(wrapped)
|
||||||
def wrapper(*args, **kwargs):
|
def wrapper(*args, **kwargs):
|
||||||
future = TracebackFuture()
|
future = TracebackFuture()
|
||||||
|
|
||||||
|
@ -328,9 +329,19 @@ def _make_coroutine_wrapper(func, replace_callback):
|
||||||
future = None
|
future = None
|
||||||
future.set_result(result)
|
future.set_result(result)
|
||||||
return future
|
return future
|
||||||
|
|
||||||
|
wrapper.__wrapped__ = wrapped
|
||||||
|
wrapper.__tornado_coroutine__ = True
|
||||||
return wrapper
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def is_coroutine_function(func):
|
||||||
|
"""Return whether *func* is a coroutine function, i.e. a function
|
||||||
|
wrapped with `~.gen.coroutine`.
|
||||||
|
"""
|
||||||
|
return getattr(func, '__tornado_coroutine__', False)
|
||||||
|
|
||||||
|
|
||||||
class Return(Exception):
|
class Return(Exception):
|
||||||
"""Special exception to return a value from a `coroutine`.
|
"""Special exception to return a value from a `coroutine`.
|
||||||
|
|
||||||
|
|
|
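
The tornado/gen.py hunks above keep the undecorated function as `wrapper.__wrapped__` and mark the wrapper with `__tornado_coroutine__`, which the new `is_coroutine_function()` helper reads. A small usage sketch, assuming this Tornado version is on the import path:

from tornado import gen


@gen.coroutine
def decorated():
    raise gen.Return(42)


def plain():
    return 42


# is_coroutine_function() only checks the __tornado_coroutine__ marker set by
# the decorator, so neither function is actually called.
print(gen.is_coroutine_function(decorated))  # True
print(gen.is_coroutine_function(plain))      # False
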
@ -341,13 +341,15 @@ class HTTPRequest(object):
|
||||||
Allowed values are implementation-defined; ``curl_httpclient``
|
Allowed values are implementation-defined; ``curl_httpclient``
|
||||||
supports "basic" and "digest"; ``simple_httpclient`` only supports
|
supports "basic" and "digest"; ``simple_httpclient`` only supports
|
||||||
"basic"
|
"basic"
|
||||||
:arg float connect_timeout: Timeout for initial connection in seconds
|
:arg float connect_timeout: Timeout for initial connection in seconds,
|
||||||
:arg float request_timeout: Timeout for entire request in seconds
|
default 20 seconds
|
||||||
|
:arg float request_timeout: Timeout for entire request in seconds,
|
||||||
|
default 20 seconds
|
||||||
:arg if_modified_since: Timestamp for ``If-Modified-Since`` header
|
:arg if_modified_since: Timestamp for ``If-Modified-Since`` header
|
||||||
:type if_modified_since: `datetime` or `float`
|
:type if_modified_since: `datetime` or `float`
|
||||||
:arg bool follow_redirects: Should redirects be followed automatically
|
:arg bool follow_redirects: Should redirects be followed automatically
|
||||||
or return the 3xx response?
|
or return the 3xx response? Default True.
|
||||||
:arg int max_redirects: Limit for ``follow_redirects``
|
:arg int max_redirects: Limit for ``follow_redirects``, default 5.
|
||||||
:arg string user_agent: String to send as ``User-Agent`` header
|
:arg string user_agent: String to send as ``User-Agent`` header
|
||||||
:arg bool decompress_response: Request a compressed response from
|
:arg bool decompress_response: Request a compressed response from
|
||||||
the server and decompress it after downloading. Default is True.
|
the server and decompress it after downloading. Default is True.
|
||||||
|
@ -381,9 +383,9 @@ class HTTPRequest(object):
|
||||||
:arg string proxy_auth_mode: HTTP proxy Authentication mode;
|
:arg string proxy_auth_mode: HTTP proxy Authentication mode;
|
||||||
default is "basic". supports "basic" and "digest"
|
default is "basic". supports "basic" and "digest"
|
||||||
:arg bool allow_nonstandard_methods: Allow unknown values for ``method``
|
:arg bool allow_nonstandard_methods: Allow unknown values for ``method``
|
||||||
argument?
|
argument? Default is False.
|
||||||
:arg bool validate_cert: For HTTPS requests, validate the server's
|
:arg bool validate_cert: For HTTPS requests, validate the server's
|
||||||
certificate?
|
certificate? Default is True.
|
||||||
:arg string ca_certs: filename of CA certificates in PEM format,
|
:arg string ca_certs: filename of CA certificates in PEM format,
|
||||||
or None to use defaults. See note below when used with
|
or None to use defaults. See note below when used with
|
||||||
``curl_httpclient``.
|
``curl_httpclient``.
|
||||||
|
|
|
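
The tornado/httpclient.py hunks above only document existing `HTTPRequest` defaults (20 second timeouts, 5 redirects, and so on). A sketch of a request that spells those defaults out explicitly; the URL is a placeholder:

from tornado.httpclient import HTTPClient, HTTPRequest

request = HTTPRequest(
    'https://example.com/',
    connect_timeout=20.0,             # documented default: 20 seconds
    request_timeout=20.0,             # documented default: 20 seconds
    follow_redirects=True,            # documented default: True
    max_redirects=5,                  # documented default: 5
    allow_nonstandard_methods=False,  # documented default: False
    validate_cert=True,               # documented default: True
)

client = HTTPClient()
response = client.fetch(request)
print(response.code)
client.close()
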
@ -179,12 +179,45 @@ class HTTPServer(TCPServer, Configurable,
|
||||||
conn.start_serving(self)
|
conn.start_serving(self)
|
||||||
|
|
||||||
def start_request(self, server_conn, request_conn):
|
def start_request(self, server_conn, request_conn):
|
||||||
return _ServerRequestAdapter(self, server_conn, request_conn)
|
if isinstance(self.request_callback, httputil.HTTPServerConnectionDelegate):
|
||||||
|
delegate = self.request_callback.start_request(server_conn, request_conn)
|
||||||
|
else:
|
||||||
|
delegate = _CallableAdapter(self.request_callback, request_conn)
|
||||||
|
|
||||||
|
if self.xheaders:
|
||||||
|
delegate = _ProxyAdapter(delegate, request_conn)
|
||||||
|
|
||||||
|
return delegate
|
||||||
|
|
||||||
def on_close(self, server_conn):
|
def on_close(self, server_conn):
|
||||||
self._connections.remove(server_conn)
|
self._connections.remove(server_conn)
|
||||||
|
|
||||||
|
|
||||||
|
class _CallableAdapter(httputil.HTTPMessageDelegate):
|
||||||
|
def __init__(self, request_callback, request_conn):
|
||||||
|
self.connection = request_conn
|
||||||
|
self.request_callback = request_callback
|
||||||
|
self.request = None
|
||||||
|
self.delegate = None
|
||||||
|
self._chunks = []
|
||||||
|
|
||||||
|
def headers_received(self, start_line, headers):
|
||||||
|
self.request = httputil.HTTPServerRequest(
|
||||||
|
connection=self.connection, start_line=start_line,
|
||||||
|
headers=headers)
|
||||||
|
|
||||||
|
def data_received(self, chunk):
|
||||||
|
self._chunks.append(chunk)
|
||||||
|
|
||||||
|
def finish(self):
|
||||||
|
self.request.body = b''.join(self._chunks)
|
||||||
|
self.request._parse_body()
|
||||||
|
self.request_callback(self.request)
|
||||||
|
|
||||||
|
def on_connection_close(self):
|
||||||
|
self._chunks = None
|
||||||
|
|
||||||
|
|
||||||
class _HTTPRequestContext(object):
|
class _HTTPRequestContext(object):
|
||||||
def __init__(self, stream, address, protocol):
|
def __init__(self, stream, address, protocol):
|
||||||
self.address = address
|
self.address = address
|
||||||
|
@ -247,58 +280,27 @@ class _HTTPRequestContext(object):
|
||||||
self.protocol = self._orig_protocol
|
self.protocol = self._orig_protocol
|
||||||
|
|
||||||
|
|
||||||
class _ServerRequestAdapter(httputil.HTTPMessageDelegate):
|
class _ProxyAdapter(httputil.HTTPMessageDelegate):
|
||||||
"""Adapts the `HTTPMessageDelegate` interface to the interface expected
|
def __init__(self, delegate, request_conn):
|
||||||
by our clients.
|
|
||||||
"""
|
|
||||||
def __init__(self, server, server_conn, request_conn):
|
|
||||||
self.server = server
|
|
||||||
self.connection = request_conn
|
self.connection = request_conn
|
||||||
self.request = None
|
self.delegate = delegate
|
||||||
if isinstance(server.request_callback,
|
|
||||||
httputil.HTTPServerConnectionDelegate):
|
|
||||||
self.delegate = server.request_callback.start_request(
|
|
||||||
server_conn, request_conn)
|
|
||||||
self._chunks = None
|
|
||||||
else:
|
|
||||||
self.delegate = None
|
|
||||||
self._chunks = []
|
|
||||||
|
|
||||||
def headers_received(self, start_line, headers):
|
def headers_received(self, start_line, headers):
|
||||||
if self.server.xheaders:
|
|
||||||
self.connection.context._apply_xheaders(headers)
|
self.connection.context._apply_xheaders(headers)
|
||||||
if self.delegate is None:
|
|
||||||
self.request = httputil.HTTPServerRequest(
|
|
||||||
connection=self.connection, start_line=start_line,
|
|
||||||
headers=headers)
|
|
||||||
else:
|
|
||||||
return self.delegate.headers_received(start_line, headers)
|
return self.delegate.headers_received(start_line, headers)
|
||||||
|
|
||||||
def data_received(self, chunk):
|
def data_received(self, chunk):
|
||||||
if self.delegate is None:
|
|
||||||
self._chunks.append(chunk)
|
|
||||||
else:
|
|
||||||
return self.delegate.data_received(chunk)
|
return self.delegate.data_received(chunk)
|
||||||
|
|
||||||
def finish(self):
|
def finish(self):
|
||||||
if self.delegate is None:
|
|
||||||
self.request.body = b''.join(self._chunks)
|
|
||||||
self.request._parse_body()
|
|
||||||
self.server.request_callback(self.request)
|
|
||||||
else:
|
|
||||||
self.delegate.finish()
|
self.delegate.finish()
|
||||||
self._cleanup()
|
self._cleanup()
|
||||||
|
|
||||||
def on_connection_close(self):
|
def on_connection_close(self):
|
||||||
if self.delegate is None:
|
|
||||||
self._chunks = None
|
|
||||||
else:
|
|
||||||
self.delegate.on_connection_close()
|
self.delegate.on_connection_close()
|
||||||
self._cleanup()
|
self._cleanup()
|
||||||
|
|
||||||
def _cleanup(self):
|
def _cleanup(self):
|
||||||
if self.server.xheaders:
|
|
||||||
self.connection.context._unapply_xheaders()
|
self.connection.context._unapply_xheaders()
|
||||||
|
|
||||||
|
|
||||||
HTTPRequest = httputil.HTTPServerRequest
|
HTTPRequest = httputil.HTTPServerRequest
|
||||||
|
|
|
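
The tornado/httpserver.py hunks above split the old `_ServerRequestAdapter` into `_CallableAdapter` (for plain callables), direct dispatch for `HTTPServerConnectionDelegate` targets, and `_ProxyAdapter` for the ``xheaders`` case. A sketch of the plain-callable path that `_CallableAdapter` now serves; the handler body and port are illustrative:

from tornado import httputil, ioloop
from tornado.httpserver import HTTPServer


def handle_request(request):
    # Old-style callable: receives a fully parsed HTTPServerRequest after
    # _CallableAdapter has buffered the body and called _parse_body().
    body = b'hello from a callable\n'
    request.connection.write_headers(
        httputil.ResponseStartLine('HTTP/1.1', 200, 'OK'),
        httputil.HTTPHeaders({'Content-Length': str(len(body))}))
    request.connection.write(body)
    request.connection.finish()


# xheaders=True additionally wraps the delegate in _ProxyAdapter so the
# X-Real-Ip/X-Forwarded-For context is applied and later unapplied.
server = HTTPServer(handle_request, xheaders=True)
server.listen(8888)
ioloop.IOLoop.current().start()
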
@ -337,7 +337,7 @@ class HTTPServerRequest(object):
|
||||||
"""
|
"""
|
||||||
def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None,
|
def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None,
|
||||||
body=None, host=None, files=None, connection=None,
|
body=None, host=None, files=None, connection=None,
|
||||||
start_line=None):
|
start_line=None, server_connection=None):
|
||||||
if start_line is not None:
|
if start_line is not None:
|
||||||
method, uri, version = start_line
|
method, uri, version = start_line
|
||||||
self.method = method
|
self.method = method
|
||||||
|
@ -352,8 +352,10 @@ class HTTPServerRequest(object):
|
||||||
self.protocol = getattr(context, 'protocol', "http")
|
self.protocol = getattr(context, 'protocol', "http")
|
||||||
|
|
||||||
self.host = host or self.headers.get("Host") or "127.0.0.1"
|
self.host = host or self.headers.get("Host") or "127.0.0.1"
|
||||||
|
self.host_name = split_host_and_port(self.host.lower())[0]
|
||||||
self.files = files or {}
|
self.files = files or {}
|
||||||
self.connection = connection
|
self.connection = connection
|
||||||
|
self.server_connection = server_connection
|
||||||
self._start_time = time.time()
|
self._start_time = time.time()
|
||||||
self._finish_time = None
|
self._finish_time = None
|
||||||
|
|
||||||
|
@ -379,10 +381,18 @@ class HTTPServerRequest(object):
|
||||||
self._cookies = Cookie.SimpleCookie()
|
self._cookies = Cookie.SimpleCookie()
|
||||||
if "Cookie" in self.headers:
|
if "Cookie" in self.headers:
|
||||||
try:
|
try:
|
||||||
self._cookies.load(
|
parsed = parse_cookie(self.headers["Cookie"])
|
||||||
native_str(self.headers["Cookie"]))
|
|
||||||
except Exception:
|
except Exception:
|
||||||
self._cookies = {}
|
pass
|
||||||
|
else:
|
||||||
|
for k, v in parsed.items():
|
||||||
|
try:
|
||||||
|
self._cookies[k] = v
|
||||||
|
except Exception:
|
||||||
|
# SimpleCookie imposes some restrictions on keys;
|
||||||
|
# parse_cookie does not. Discard any cookies
|
||||||
|
# with disallowed keys.
|
||||||
|
pass
|
||||||
return self._cookies
|
return self._cookies
|
||||||
|
|
||||||
def write(self, chunk, callback=None):
|
def write(self, chunk, callback=None):
|
||||||
|
@ -909,3 +919,82 @@ def split_host_and_port(netloc):
|
||||||
host = netloc
|
host = netloc
|
||||||
port = None
|
port = None
|
||||||
return (host, port)
|
return (host, port)
|
||||||
|
|
||||||
|
_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
|
||||||
|
_QuotePatt = re.compile(r"[\\].")
|
||||||
|
_nulljoin = ''.join
|
||||||
|
|
||||||
|
def _unquote_cookie(str):
|
||||||
|
"""Handle double quotes and escaping in cookie values.
|
||||||
|
|
||||||
|
This method is copied verbatim from the Python 3.5 standard
|
||||||
|
library (http.cookies._unquote) so we don't have to depend on
|
||||||
|
non-public interfaces.
|
||||||
|
"""
|
||||||
|
# If there aren't any doublequotes,
|
||||||
|
# then there can't be any special characters. See RFC 2109.
|
||||||
|
if str is None or len(str) < 2:
|
||||||
|
return str
|
||||||
|
if str[0] != '"' or str[-1] != '"':
|
||||||
|
return str
|
||||||
|
|
||||||
|
# We have to assume that we must decode this string.
|
||||||
|
# Down to work.
|
||||||
|
|
||||||
|
# Remove the "s
|
||||||
|
str = str[1:-1]
|
||||||
|
|
||||||
|
# Check for special sequences. Examples:
|
||||||
|
# \012 --> \n
|
||||||
|
# \" --> "
|
||||||
|
#
|
||||||
|
i = 0
|
||||||
|
n = len(str)
|
||||||
|
res = []
|
||||||
|
while 0 <= i < n:
|
||||||
|
o_match = _OctalPatt.search(str, i)
|
||||||
|
q_match = _QuotePatt.search(str, i)
|
||||||
|
if not o_match and not q_match: # Neither matched
|
||||||
|
res.append(str[i:])
|
||||||
|
break
|
||||||
|
# else:
|
||||||
|
j = k = -1
|
||||||
|
if o_match:
|
||||||
|
j = o_match.start(0)
|
||||||
|
if q_match:
|
||||||
|
k = q_match.start(0)
|
||||||
|
if q_match and (not o_match or k < j): # QuotePatt matched
|
||||||
|
res.append(str[i:k])
|
||||||
|
res.append(str[k+1])
|
||||||
|
i = k + 2
|
||||||
|
else: # OctalPatt matched
|
||||||
|
res.append(str[i:j])
|
||||||
|
res.append(chr(int(str[j+1:j+4], 8)))
|
||||||
|
i = j + 4
|
||||||
|
return _nulljoin(res)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_cookie(cookie):
|
||||||
|
"""Parse a ``Cookie`` HTTP header into a dict of name/value pairs.
|
||||||
|
|
||||||
|
This function attempts to mimic browser cookie parsing behavior;
|
||||||
|
it specifically does not follow any of the cookie-related RFCs
|
||||||
|
(because browsers don't either).
|
||||||
|
|
||||||
|
The algorithm used is identical to that used by Django version 1.9.10.
|
||||||
|
|
||||||
|
.. versionadded:: 4.4.2
|
||||||
|
"""
|
||||||
|
cookiedict = {}
|
||||||
|
for chunk in cookie.split(str(';')):
|
||||||
|
if str('=') in chunk:
|
||||||
|
key, val = chunk.split(str('='), 1)
|
||||||
|
else:
|
||||||
|
# Assume an empty name per
|
||||||
|
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
|
||||||
|
key, val = str(''), chunk
|
||||||
|
key, val = key.strip(), val.strip()
|
||||||
|
if key or val:
|
||||||
|
# unquote using Python's algorithm.
|
||||||
|
cookiedict[key] = _unquote_cookie(val)
|
||||||
|
return cookiedict
|
||||||
|
|
|
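
The tornado/httputil.py hunks above switch the request's ``cookies`` property to the new browser-style `parse_cookie()` (with `_unquote_cookie` copied from the Python 3.5 standard library) and silently drop cookies whose keys `SimpleCookie` rejects. A quick illustration of the lenient parser:

from tornado.httputil import parse_cookie

header = 'session="abc\\"123"; theme=dark; =orphan'
parsed = parse_cookie(header)
# Quoted/escaped values are unescaped; a value without a name keeps an empty key.
print(parsed['session'])  # abc"123
print(parsed['theme'])    # dark
print(parsed[''])         # orphan
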
@ -28,6 +28,7 @@ In addition to I/O events, the `IOLoop` can also schedule time-based events.
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function, with_statement
|
from __future__ import absolute_import, division, print_function, with_statement
|
||||||
|
|
||||||
|
import collections
|
||||||
import datetime
|
import datetime
|
||||||
import errno
|
import errno
|
||||||
import functools
|
import functools
|
||||||
|
@ -693,8 +694,7 @@ class PollIOLoop(IOLoop):
|
||||||
self.time_func = time_func or time.time
|
self.time_func = time_func or time.time
|
||||||
self._handlers = {}
|
self._handlers = {}
|
||||||
self._events = {}
|
self._events = {}
|
||||||
self._callbacks = []
|
self._callbacks = collections.deque()
|
||||||
self._callback_lock = threading.Lock()
|
|
||||||
self._timeouts = []
|
self._timeouts = []
|
||||||
self._cancellations = 0
|
self._cancellations = 0
|
||||||
self._running = False
|
self._running = False
|
||||||
|
@ -712,7 +712,6 @@ class PollIOLoop(IOLoop):
|
||||||
self.READ)
|
self.READ)
|
||||||
|
|
||||||
def close(self, all_fds=False):
|
def close(self, all_fds=False):
|
||||||
with self._callback_lock:
|
|
||||||
self._closing = True
|
self._closing = True
|
||||||
self.remove_handler(self._waker.fileno())
|
self.remove_handler(self._waker.fileno())
|
||||||
if all_fds:
|
if all_fds:
|
||||||
|
@ -800,9 +799,7 @@ class PollIOLoop(IOLoop):
|
||||||
while True:
|
while True:
|
||||||
# Prevent IO event starvation by delaying new callbacks
|
# Prevent IO event starvation by delaying new callbacks
|
||||||
# to the next iteration of the event loop.
|
# to the next iteration of the event loop.
|
||||||
with self._callback_lock:
|
ncallbacks = len(self._callbacks)
|
||||||
callbacks = self._callbacks
|
|
||||||
self._callbacks = []
|
|
||||||
|
|
||||||
# Add any timeouts that have come due to the callback list.
|
# Add any timeouts that have come due to the callback list.
|
||||||
# Do not run anything until we have determined which ones
|
# Do not run anything until we have determined which ones
|
||||||
|
@ -831,14 +828,14 @@ class PollIOLoop(IOLoop):
|
||||||
if x.callback is not None]
|
if x.callback is not None]
|
||||||
heapq.heapify(self._timeouts)
|
heapq.heapify(self._timeouts)
|
||||||
|
|
||||||
for callback in callbacks:
|
for i in range(ncallbacks):
|
||||||
self._run_callback(callback)
|
self._run_callback(self._callbacks.popleft())
|
||||||
for timeout in due_timeouts:
|
for timeout in due_timeouts:
|
||||||
if timeout.callback is not None:
|
if timeout.callback is not None:
|
||||||
self._run_callback(timeout.callback)
|
self._run_callback(timeout.callback)
|
||||||
# Closures may be holding on to a lot of memory, so allow
|
# Closures may be holding on to a lot of memory, so allow
|
||||||
# them to be freed before we go into our poll wait.
|
# them to be freed before we go into our poll wait.
|
||||||
callbacks = callback = due_timeouts = timeout = None
|
due_timeouts = timeout = None
|
||||||
|
|
||||||
if self._callbacks:
|
if self._callbacks:
|
||||||
# If any callbacks or timeouts called add_callback,
|
# If any callbacks or timeouts called add_callback,
|
||||||
|
@ -934,36 +931,20 @@ class PollIOLoop(IOLoop):
|
||||||
self._cancellations += 1
|
self._cancellations += 1
|
||||||
|
|
||||||
def add_callback(self, callback, *args, **kwargs):
|
def add_callback(self, callback, *args, **kwargs):
|
||||||
if thread.get_ident() != self._thread_ident:
|
|
||||||
# If we're not on the IOLoop's thread, we need to synchronize
|
|
||||||
# with other threads, or waking logic will induce a race.
|
|
||||||
with self._callback_lock:
|
|
||||||
if self._closing:
|
if self._closing:
|
||||||
return
|
return
|
||||||
list_empty = not self._callbacks
|
# Blindly insert into self._callbacks. This is safe even
|
||||||
|
# from signal handlers because deque.append is atomic.
|
||||||
self._callbacks.append(functools.partial(
|
self._callbacks.append(functools.partial(
|
||||||
stack_context.wrap(callback), *args, **kwargs))
|
stack_context.wrap(callback), *args, **kwargs))
|
||||||
if list_empty:
|
if thread.get_ident() != self._thread_ident:
|
||||||
# If we're not in the IOLoop's thread, and we added the
|
# This will write one byte but Waker.consume() reads many
|
||||||
# first callback to an empty list, we may need to wake it
|
# at once, so it's ok to write even when not strictly
|
||||||
# up (it may wake up on its own, but an occasional extra
|
# necessary.
|
||||||
# wake is harmless). Waking up a polling IOLoop is
|
|
||||||
# relatively expensive, so we try to avoid it when we can.
|
|
||||||
self._waker.wake()
|
self._waker.wake()
|
||||||
else:
|
else:
|
||||||
if self._closing:
|
# If we're on the IOLoop's thread, we don't need to wake anyone.
|
||||||
return
|
pass
|
||||||
# If we're on the IOLoop's thread, we don't need the lock,
|
|
||||||
# since we don't need to wake anyone, just add the
|
|
||||||
# callback. Blindly insert into self._callbacks. This is
|
|
||||||
# safe even from signal handlers because the GIL makes
|
|
||||||
# list.append atomic. One subtlety is that if the signal
|
|
||||||
# is interrupting another thread holding the
|
|
||||||
# _callback_lock block in IOLoop.start, we may modify
|
|
||||||
# either the old or new version of self._callbacks, but
|
|
||||||
# either way will work.
|
|
||||||
self._callbacks.append(functools.partial(
|
|
||||||
stack_context.wrap(callback), *args, **kwargs))
|
|
||||||
|
|
||||||
def add_callback_from_signal(self, callback, *args, **kwargs):
|
def add_callback_from_signal(self, callback, *args, **kwargs):
|
||||||
with stack_context.NullContext():
|
with stack_context.NullContext():
|
||||||
|
|
|
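
The tornado/ioloop.py hunks above replace the locked callback list with a `collections.deque`: `add_callback` no longer takes `_callback_lock` because `deque.append` is atomic under the GIL, and each loop iteration drains only the callbacks that were queued when it started. A toy sketch of that pattern (not the IOLoop itself):

import collections

callbacks = collections.deque()


def add_callback(callback):
    # Safe from other threads and signal handlers: deque.append is atomic.
    callbacks.append(callback)


def run_one_iteration():
    # Snapshot the length first so callbacks scheduled while running are
    # deferred to the next iteration, preventing I/O starvation.
    ncallbacks = len(callbacks)
    for _ in range(ncallbacks):
        callbacks.popleft()()


add_callback(lambda: print('first'))
add_callback(lambda: print('second'))
run_one_iteration()
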
@ -96,6 +96,9 @@ else:
|
||||||
# thread now.
|
# thread now.
|
||||||
u'foo'.encode('idna')
|
u'foo'.encode('idna')
|
||||||
|
|
||||||
|
# For undiagnosed reasons, 'latin1' codec may also need to be preloaded.
|
||||||
|
u'foo'.encode('latin1')
|
||||||
|
|
||||||
# These errnos indicate that a non-blocking operation must be retried
|
# These errnos indicate that a non-blocking operation must be retried
|
||||||
# at a later time. On most platforms they're the same value, but on
|
# at a later time. On most platforms they're the same value, but on
|
||||||
# some they differ.
|
# some they differ.
|
||||||
|
|
|
@ -3,8 +3,24 @@ from __future__ import absolute_import, division, print_function, with_statement
|
||||||
|
|
||||||
import errno
|
import errno
|
||||||
import socket
|
import socket
|
||||||
|
import time
|
||||||
|
|
||||||
from tornado.platform import interface
|
from tornado.platform import interface
|
||||||
|
from tornado.util import errno_from_exception
|
||||||
|
|
||||||
|
def try_close(f):
|
||||||
|
# Avoid issue #875 (race condition when using the file in another
|
||||||
|
# thread).
|
||||||
|
for i in range(10):
|
||||||
|
try:
|
||||||
|
f.close()
|
||||||
|
except IOError:
|
||||||
|
# Yield to another thread
|
||||||
|
time.sleep(1e-3)
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
# Try a last time and let raise
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
|
||||||
class Waker(interface.Waker):
|
class Waker(interface.Waker):
|
||||||
|
@ -45,7 +61,7 @@ class Waker(interface.Waker):
|
||||||
break # success
|
break # success
|
||||||
except socket.error as detail:
|
except socket.error as detail:
|
||||||
if (not hasattr(errno, 'WSAEADDRINUSE') or
|
if (not hasattr(errno, 'WSAEADDRINUSE') or
|
||||||
detail[0] != errno.WSAEADDRINUSE):
|
errno_from_exception(detail) != errno.WSAEADDRINUSE):
|
||||||
# "Address already in use" is the only error
|
# "Address already in use" is the only error
|
||||||
# I've seen on two WinXP Pro SP2 boxes, under
|
# I've seen on two WinXP Pro SP2 boxes, under
|
||||||
# Pythons 2.3.5 and 2.4.1.
|
# Pythons 2.3.5 and 2.4.1.
|
||||||
|
@ -75,7 +91,7 @@ class Waker(interface.Waker):
|
||||||
def wake(self):
|
def wake(self):
|
||||||
try:
|
try:
|
||||||
self.writer.send(b"x")
|
self.writer.send(b"x")
|
||||||
except (IOError, socket.error):
|
except (IOError, socket.error, ValueError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def consume(self):
|
def consume(self):
|
||||||
|
@ -89,4 +105,4 @@ class Waker(interface.Waker):
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
self.reader.close()
|
self.reader.close()
|
||||||
self.writer.close()
|
try_close(self.writer)
|
||||||
|
|
|
@ -21,7 +21,7 @@ from __future__ import absolute_import, division, print_function, with_statement
|
||||||
import fcntl
|
import fcntl
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from tornado.platform import interface
|
from tornado.platform import common, interface
|
||||||
|
|
||||||
|
|
||||||
def set_close_exec(fd):
|
def set_close_exec(fd):
|
||||||
|
@ -53,7 +53,7 @@ class Waker(interface.Waker):
|
||||||
def wake(self):
|
def wake(self):
|
||||||
try:
|
try:
|
||||||
self.writer.write(b"x")
|
self.writer.write(b"x")
|
||||||
except IOError:
|
except (IOError, ValueError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def consume(self):
|
def consume(self):
|
||||||
|
@ -67,4 +67,4 @@ class Waker(interface.Waker):
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
self.reader.close()
|
self.reader.close()
|
||||||
self.writer.close()
|
common.try_close(self.writer)
|
||||||
|
|
|
@ -67,7 +67,7 @@ def cpu_count():
|
||||||
pass
|
pass
|
||||||
try:
|
try:
|
||||||
return os.sysconf("SC_NPROCESSORS_CONF")
|
return os.sysconf("SC_NPROCESSORS_CONF")
|
||||||
except ValueError:
|
except (AttributeError, ValueError):
|
||||||
pass
|
pass
|
||||||
gen_log.error("Could not detect number of processors; assuming 1")
|
gen_log.error("Could not detect number of processors; assuming 1")
|
||||||
return 1
|
return 1
|
||||||
|
|
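
The tornado/process.py hunk above widens the `except` clause because `os.sysconf` does not exist on Windows, so probing it raises `AttributeError` rather than `ValueError`. A hedged sketch of the fallback chain that clause sits in (an approximation, not a copy of the Tornado function):

import os


def cpu_count_sketch():
    try:
        import multiprocessing
        return multiprocessing.cpu_count()
    except (ImportError, NotImplementedError):
        pass
    try:
        return os.sysconf("SC_NPROCESSORS_CONF")
    except (AttributeError, ValueError):
        # AttributeError: os.sysconf missing (e.g. Windows);
        # ValueError: the sysconf name is unknown on this platform.
        pass
    return 1  # Tornado logs an error and assumes one processor here


print(cpu_count_sketch())
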
lib/tornado/routing.py (new file, 611 lines)
@ -0,0 +1,611 @@
|
||||||
|
# Copyright 2015 The Tornado Authors
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""Basic routing implementation.
|
||||||
|
|
||||||
|
Tornado routes HTTP requests to appropriate handlers using `Router` class implementations.
|
||||||
|
|
||||||
|
`Router` interface extends `~.httputil.HTTPServerConnectionDelegate` to provide additional
|
||||||
|
routing capabilities. This also means that any `Router` implementation can be used directly
|
||||||
|
as a ``request_callback`` for `~.httpserver.HTTPServer` constructor.
|
||||||
|
|
||||||
|
`Router` subclass must implement a ``find_handler`` method to provide a suitable
|
||||||
|
`~.httputil.HTTPMessageDelegate` instance to handle the request:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class CustomRouter(Router):
|
||||||
|
def find_handler(self, request, **kwargs):
|
||||||
|
# some routing logic providing a suitable HTTPMessageDelegate instance
|
||||||
|
return MessageDelegate(request.connection)
|
||||||
|
|
||||||
|
class MessageDelegate(HTTPMessageDelegate):
|
||||||
|
def __init__(self, connection):
|
||||||
|
self.connection = connection
|
||||||
|
|
||||||
|
def finish(self):
|
||||||
|
self.connection.write_headers(
|
||||||
|
ResponseStartLine("HTTP/1.1", 200, "OK"),
|
||||||
|
HTTPHeaders({"Content-Length": "2"}),
|
||||||
|
b"OK")
|
||||||
|
self.connection.finish()
|
||||||
|
|
||||||
|
router = CustomRouter()
|
||||||
|
server = HTTPServer(router)
|
||||||
|
|
||||||
|
The main responsibility of `Router` implementation is to provide a mapping from a request
|
||||||
|
to `~.httputil.HTTPMessageDelegate` instance that will handle this request. In the example above
|
||||||
|
we can see that routing is possible even without instantiating an `~.web.Application`.
|
||||||
|
|
||||||
|
For routing to `~.web.RequestHandler` implementations we need an `~.web.Application` instance.
|
||||||
|
`~.web.Application.get_handler_delegate` provides a convenient way to create
|
||||||
|
`~.httputil.HTTPMessageDelegate` for a given request and `~.web.RequestHandler`.
|
||||||
|
|
||||||
|
Here is a simple example of how we can we route to `~.web.RequestHandler` subclasses
|
||||||
|
by HTTP method:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
resources = {}
|
||||||
|
|
||||||
|
class GetResource(RequestHandler):
|
||||||
|
def get(self, path):
|
||||||
|
if path not in resources:
|
||||||
|
raise HTTPError(404)
|
||||||
|
|
||||||
|
self.finish(resources[path])
|
||||||
|
|
||||||
|
class PostResource(RequestHandler):
|
||||||
|
def post(self, path):
|
||||||
|
resources[path] = self.request.body
|
||||||
|
|
||||||
|
class HTTPMethodRouter(Router):
|
||||||
|
def __init__(self, app):
|
||||||
|
self.app = app
|
||||||
|
|
||||||
|
def find_handler(self, request, **kwargs):
|
||||||
|
handler = GetResource if request.method == "GET" else PostResource
|
||||||
|
return self.app.get_handler_delegate(request, handler, path_args=[request.path])
|
||||||
|
|
||||||
|
router = HTTPMethodRouter(Application())
|
||||||
|
server = HTTPServer(router)
|
||||||
|
|
||||||
|
`ReversibleRouter` interface adds the ability to distinguish between the routes and
|
||||||
|
reverse them to the original urls using route's name and additional arguments.
|
||||||
|
`~.web.Application` is itself an implementation of `ReversibleRouter` class.
|
||||||
|
|
||||||
|
`RuleRouter` and `ReversibleRuleRouter` are implementations of `Router` and `ReversibleRouter`
|
||||||
|
interfaces and can be used for creating rule-based routing configurations.
|
||||||
|
|
||||||
|
Rules are instances of `Rule` class. They contain a `Matcher`, which provides the logic for
|
||||||
|
determining whether the rule is a match for a particular request and a target, which can be
|
||||||
|
one of the following.
|
||||||
|
|
||||||
|
1) An instance of `~.httputil.HTTPServerConnectionDelegate`:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
router = RuleRouter([
|
||||||
|
Rule(PathMatches("/handler"), ConnectionDelegate()),
|
||||||
|
# ... more rules
|
||||||
|
])
|
||||||
|
|
||||||
|
class ConnectionDelegate(HTTPServerConnectionDelegate):
|
||||||
|
def start_request(self, server_conn, request_conn):
|
||||||
|
return MessageDelegate(request_conn)
|
||||||
|
|
||||||
|
2) A callable accepting a single argument of `~.httputil.HTTPServerRequest` type:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
router = RuleRouter([
|
||||||
|
Rule(PathMatches("/callable"), request_callable)
|
||||||
|
])
|
||||||
|
|
||||||
|
def request_callable(request):
|
||||||
|
request.write(b"HTTP/1.1 200 OK\\r\\nContent-Length: 2\\r\\n\\r\\nOK")
|
||||||
|
request.finish()
|
||||||
|
|
||||||
|
3) Another `Router` instance:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
router = RuleRouter([
|
||||||
|
Rule(PathMatches("/router.*"), CustomRouter())
|
||||||
|
])
|
||||||
|
|
||||||
|
Of course a nested `RuleRouter` or a `~.web.Application` is allowed:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
router = RuleRouter([
|
||||||
|
Rule(HostMatches("example.com"), RuleRouter([
|
||||||
|
Rule(PathMatches("/app1/.*"), Application([(r"/app1/handler", Handler)]))),
|
||||||
|
]))
|
||||||
|
])
|
||||||
|
|
||||||
|
server = HTTPServer(router)
|
||||||
|
|
||||||
|
In the example below `RuleRouter` is used to route between applications:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
app1 = Application([
|
||||||
|
(r"/app1/handler", Handler1),
|
||||||
|
# other handlers ...
|
||||||
|
])
|
||||||
|
|
||||||
|
app2 = Application([
|
||||||
|
(r"/app2/handler", Handler2),
|
||||||
|
# other handlers ...
|
||||||
|
])
|
||||||
|
|
||||||
|
router = RuleRouter([
|
||||||
|
Rule(PathMatches("/app1.*"), app1),
|
||||||
|
Rule(PathMatches("/app2.*"), app2)
|
||||||
|
])
|
||||||
|
|
||||||
|
server = HTTPServer(router)
|
||||||
|
|
||||||
|
For more information on application-level routing see docs for `~.web.Application`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function, with_statement
|
||||||
|
|
||||||
|
import re
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
from tornado import httputil
|
||||||
|
from tornado.httpserver import _CallableAdapter
|
||||||
|
from tornado.escape import url_escape, url_unescape, utf8
|
||||||
|
from tornado.log import app_log
|
||||||
|
from tornado.util import basestring_type, import_object, re_unescape, unicode_type
|
||||||
|
|
||||||
|
try:
|
||||||
|
import typing # noqa
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Router(httputil.HTTPServerConnectionDelegate):
|
||||||
|
"""Abstract router interface."""
|
||||||
|
|
||||||
|
def find_handler(self, request, **kwargs):
|
||||||
|
# type: (httputil.HTTPServerRequest, typing.Any)->httputil.HTTPMessageDelegate
|
||||||
|
"""Must be implemented to return an appropriate instance of `~.httputil.HTTPMessageDelegate`
|
||||||
|
that can serve the request.
|
||||||
|
Routing implementations may pass additional kwargs to extend the routing logic.
|
||||||
|
|
||||||
|
:arg httputil.HTTPServerRequest request: current HTTP request.
|
||||||
|
:arg kwargs: additional keyword arguments passed by routing implementation.
|
||||||
|
:returns: an instance of `~.httputil.HTTPMessageDelegate` that will be used to
|
||||||
|
process the request.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def start_request(self, server_conn, request_conn):
|
||||||
|
return _RoutingDelegate(self, server_conn, request_conn)
|
||||||
|
|
||||||
|
|
||||||
|
class ReversibleRouter(Router):
|
||||||
|
"""Abstract router interface for routers that can handle named routes
|
||||||
|
and support reversing them to original urls.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def reverse_url(self, name, *args):
|
||||||
|
"""Returns url string for a given route name and arguments
|
||||||
|
or ``None`` if no match is found.
|
||||||
|
|
||||||
|
:arg str name: route name.
|
||||||
|
:arg args: url parameters.
|
||||||
|
:returns: parametrized url string for a given route name (or ``None``).
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
class _RoutingDelegate(httputil.HTTPMessageDelegate):
|
||||||
|
def __init__(self, router, server_conn, request_conn):
|
||||||
|
self.server_conn = server_conn
|
||||||
|
self.request_conn = request_conn
|
||||||
|
self.delegate = None
|
||||||
|
self.router = router # type: Router
|
||||||
|
|
||||||
|
def headers_received(self, start_line, headers):
|
||||||
|
request = httputil.HTTPServerRequest(
|
||||||
|
connection=self.request_conn,
|
||||||
|
server_connection=self.server_conn,
|
||||||
|
start_line=start_line, headers=headers)
|
||||||
|
|
||||||
|
self.delegate = self.router.find_handler(request)
|
||||||
|
return self.delegate.headers_received(start_line, headers)
|
||||||
|
|
||||||
|
def data_received(self, chunk):
|
||||||
|
return self.delegate.data_received(chunk)
|
||||||
|
|
||||||
|
def finish(self):
|
||||||
|
self.delegate.finish()
|
||||||
|
|
||||||
|
def on_connection_close(self):
|
||||||
|
self.delegate.on_connection_close()
|
||||||
|
|
||||||
|
|
||||||
|
class RuleRouter(Router):
|
||||||
|
"""Rule-based router implementation."""
|
||||||
|
|
||||||
|
def __init__(self, rules=None):
|
||||||
|
"""Constructs a router from an ordered list of rules::
|
||||||
|
|
||||||
|
RuleRouter([
|
||||||
|
Rule(PathMatches("/handler"), Target),
|
||||||
|
# ... more rules
|
||||||
|
])
|
||||||
|
|
||||||
|
You can also omit explicit `Rule` constructor and use tuples of arguments::
|
||||||
|
|
||||||
|
RuleRouter([
|
||||||
|
(PathMatches("/handler"), Target),
|
||||||
|
])
|
||||||
|
|
||||||
|
`PathMatches` is a default matcher, so the example above can be simplified::
|
||||||
|
|
||||||
|
RuleRouter([
|
||||||
|
("/handler", Target),
|
||||||
|
])
|
||||||
|
|
||||||
|
In the examples above, ``Target`` can be a nested `Router` instance, an instance of
|
||||||
|
`~.httputil.HTTPServerConnectionDelegate` or an old-style callable, accepting a request argument.
|
||||||
|
|
||||||
|
:arg rules: a list of `Rule` instances or tuples of `Rule`
|
||||||
|
constructor arguments.
|
||||||
|
"""
|
||||||
|
self.rules = [] # type: typing.List[Rule]
|
||||||
|
if rules:
|
||||||
|
self.add_rules(rules)
|
||||||
|
|
||||||
|
def add_rules(self, rules):
|
||||||
|
"""Appends new rules to the router.
|
||||||
|
|
||||||
|
:arg rules: a list of Rule instances (or tuples of arguments, which are
|
||||||
|
passed to Rule constructor).
|
||||||
|
"""
|
||||||
|
for rule in rules:
|
||||||
|
if isinstance(rule, (tuple, list)):
|
||||||
|
assert len(rule) in (2, 3, 4)
|
||||||
|
if isinstance(rule[0], basestring_type):
|
||||||
|
rule = Rule(PathMatches(rule[0]), *rule[1:])
|
||||||
|
else:
|
||||||
|
rule = Rule(*rule)
|
||||||
|
|
||||||
|
self.rules.append(self.process_rule(rule))
|
||||||
|
|
||||||
|
def process_rule(self, rule):
|
||||||
|
"""Override this method for additional preprocessing of each rule.
|
||||||
|
|
||||||
|
:arg Rule rule: a rule to be processed.
|
||||||
|
:returns: the same or modified Rule instance.
|
||||||
|
"""
|
||||||
|
return rule
|
||||||
|
|
||||||
|
def find_handler(self, request, **kwargs):
|
||||||
|
for rule in self.rules:
|
||||||
|
target_params = rule.matcher.match(request)
|
||||||
|
if target_params is not None:
|
||||||
|
if rule.target_kwargs:
|
||||||
|
target_params['target_kwargs'] = rule.target_kwargs
|
||||||
|
|
||||||
|
delegate = self.get_target_delegate(
|
||||||
|
rule.target, request, **target_params)
|
||||||
|
|
||||||
|
if delegate is not None:
|
||||||
|
return delegate
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_target_delegate(self, target, request, **target_params):
|
||||||
|
"""Returns an instance of `~.httputil.HTTPMessageDelegate` for a
|
||||||
|
Rule's target. This method is called by `~.find_handler` and can be
|
||||||
|
extended to provide additional target types.
|
||||||
|
|
||||||
|
:arg target: a Rule's target.
|
||||||
|
:arg httputil.HTTPServerRequest request: current request.
|
||||||
|
:arg target_params: additional parameters that can be useful
|
||||||
|
for `~.httputil.HTTPMessageDelegate` creation.
|
||||||
|
"""
|
||||||
|
if isinstance(target, Router):
|
||||||
|
return target.find_handler(request, **target_params)
|
||||||
|
|
||||||
|
elif isinstance(target, httputil.HTTPServerConnectionDelegate):
|
||||||
|
return target.start_request(request.server_connection, request.connection)
|
||||||
|
|
||||||
|
elif callable(target):
|
||||||
|
return _CallableAdapter(
|
||||||
|
partial(target, **target_params), request.connection
|
||||||
|
)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class ReversibleRuleRouter(ReversibleRouter, RuleRouter):
|
||||||
|
"""A rule-based router that implements ``reverse_url`` method.
|
||||||
|
|
||||||
|
Each rule added to this router may have a ``name`` attribute that can be
|
||||||
|
used to reconstruct an original uri. The actual reconstruction takes place
|
||||||
|
in a rule's matcher (see `Matcher.reverse`).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, rules=None):
|
||||||
|
self.named_rules = {} # type: typing.Dict[str]
|
||||||
|
super(ReversibleRuleRouter, self).__init__(rules)
|
||||||
|
|
||||||
|
def process_rule(self, rule):
|
||||||
|
rule = super(ReversibleRuleRouter, self).process_rule(rule)
|
||||||
|
|
||||||
|
if rule.name:
|
||||||
|
if rule.name in self.named_rules:
|
||||||
|
app_log.warning(
|
||||||
|
"Multiple handlers named %s; replacing previous value",
|
||||||
|
rule.name)
|
||||||
|
self.named_rules[rule.name] = rule
|
||||||
|
|
||||||
|
return rule
|
||||||
|
|
||||||
|
def reverse_url(self, name, *args):
|
||||||
|
if name in self.named_rules:
|
||||||
|
return self.named_rules[name].matcher.reverse(*args)
|
||||||
|
|
||||||
|
for rule in self.rules:
|
||||||
|
if isinstance(rule.target, ReversibleRouter):
|
||||||
|
reversed_url = rule.target.reverse_url(name, *args)
|
||||||
|
if reversed_url is not None:
|
||||||
|
return reversed_url
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class Rule(object):
|
||||||
|
"""A routing rule."""
|
||||||
|
|
||||||
|
def __init__(self, matcher, target, target_kwargs=None, name=None):
|
||||||
|
"""Constructs a Rule instance.
|
||||||
|
|
||||||
|
:arg Matcher matcher: a `Matcher` instance used for determining
|
||||||
|
whether the rule should be considered a match for a specific
|
||||||
|
request.
|
||||||
|
:arg target: a Rule's target (typically a ``RequestHandler`` or
|
||||||
|
`~.httputil.HTTPServerConnectionDelegate` subclass or even a nested `Router`,
|
||||||
|
depending on routing implementation).
|
||||||
|
:arg dict target_kwargs: a dict of parameters that can be useful
|
||||||
|
at the moment of target instantiation (for example, ``status_code``
|
||||||
|
for a ``RequestHandler`` subclass). They end up in
|
||||||
|
``target_params['target_kwargs']`` of `RuleRouter.get_target_delegate`
|
||||||
|
method.
|
||||||
|
:arg str name: the name of the rule that can be used to find it
|
||||||
|
in `ReversibleRouter.reverse_url` implementation.
|
||||||
|
"""
|
||||||
|
if isinstance(target, str):
|
||||||
|
# import the Module and instantiate the class
|
||||||
|
# Must be a fully qualified name (module.ClassName)
|
||||||
|
target = import_object(target)
|
||||||
|
|
||||||
|
self.matcher = matcher # type: Matcher
|
||||||
|
self.target = target
|
||||||
|
self.target_kwargs = target_kwargs if target_kwargs else {}
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def reverse(self, *args):
|
||||||
|
return self.matcher.reverse(*args)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '%s(%r, %s, kwargs=%r, name=%r)' % \
|
||||||
|
(self.__class__.__name__, self.matcher,
|
||||||
|
self.target, self.target_kwargs, self.name)
|
||||||
|
|
||||||
|
|
||||||
|
class Matcher(object):
|
||||||
|
"""Represents a matcher for request features."""
|
||||||
|
|
||||||
|
def match(self, request):
|
||||||
|
"""Matches current instance against the request.
|
||||||
|
|
||||||
|
:arg httputil.HTTPServerRequest request: current HTTP request
|
||||||
|
:returns: a dict of parameters to be passed to the target handler
|
||||||
|
(for example, ``handler_kwargs``, ``path_args``, ``path_kwargs``
|
||||||
|
can be passed for proper `~.web.RequestHandler` instantiation).
|
||||||
|
An empty dict is a valid (and common) return value to indicate a match
|
||||||
|
when the argument-passing features are not used.
|
||||||
|
``None`` must be returned to indicate that there is no match."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def reverse(self, *args):
|
||||||
|
"""Reconstructs full url from matcher instance and additional arguments."""
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class AnyMatches(Matcher):
|
||||||
|
"""Matches any request."""
|
||||||
|
|
||||||
|
def match(self, request):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
class HostMatches(Matcher):
|
||||||
|
"""Matches requests from hosts specified by ``host_pattern`` regex."""
|
||||||
|
|
||||||
|
def __init__(self, host_pattern):
|
||||||
|
if isinstance(host_pattern, basestring_type):
|
||||||
|
if not host_pattern.endswith("$"):
|
||||||
|
host_pattern += "$"
|
||||||
|
self.host_pattern = re.compile(host_pattern)
|
||||||
|
else:
|
||||||
|
self.host_pattern = host_pattern
|
||||||
|
|
||||||
|
def match(self, request):
|
||||||
|
if self.host_pattern.match(request.host_name):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class DefaultHostMatches(Matcher):
|
||||||
|
"""Matches requests from host that is equal to application's default_host.
|
||||||
|
Always returns no match if ``X-Real-Ip`` header is present.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, application, host_pattern):
|
||||||
|
self.application = application
|
||||||
|
self.host_pattern = host_pattern
|
||||||
|
|
||||||
|
def match(self, request):
|
||||||
|
# Look for default host if not behind load balancer (for debugging)
|
||||||
|
if "X-Real-Ip" not in request.headers:
|
||||||
|
if self.host_pattern.match(self.application.default_host):
|
||||||
|
return {}
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class PathMatches(Matcher):
|
||||||
|
"""Matches requests with paths specified by ``path_pattern`` regex."""
|
||||||
|
|
||||||
|
def __init__(self, path_pattern):
|
||||||
|
if isinstance(path_pattern, basestring_type):
|
||||||
|
if not path_pattern.endswith('$'):
|
||||||
|
path_pattern += '$'
|
||||||
|
self.regex = re.compile(path_pattern)
|
||||||
|
else:
|
||||||
|
self.regex = path_pattern
|
||||||
|
|
||||||
|
assert len(self.regex.groupindex) in (0, self.regex.groups), \
|
||||||
|
("groups in url regexes must either be all named or all "
|
||||||
|
"positional: %r" % self.regex.pattern)
|
||||||
|
|
||||||
|
self._path, self._group_count = self._find_groups()
|
||||||
|
|
||||||
|
def match(self, request):
|
||||||
|
match = self.regex.match(request.path)
|
||||||
|
if match is None:
|
||||||
|
return None
|
||||||
|
if not self.regex.groups:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
path_args, path_kwargs = [], {}
|
||||||
|
|
||||||
|
# Pass matched groups to the handler. Since
|
||||||
|
# match.groups() includes both named and
|
||||||
|
# unnamed groups, we want to use either groups
|
||||||
|
# or groupdict but not both.
|
||||||
|
if self.regex.groupindex:
|
||||||
|
path_kwargs = dict(
|
||||||
|
(str(k), _unquote_or_none(v))
|
||||||
|
for (k, v) in match.groupdict().items())
|
||||||
|
else:
|
||||||
|
path_args = [_unquote_or_none(s) for s in match.groups()]
|
||||||
|
|
||||||
|
return dict(path_args=path_args, path_kwargs=path_kwargs)
|
||||||
|
|
||||||
|
def reverse(self, *args):
|
||||||
|
if self._path is None:
|
||||||
|
raise ValueError("Cannot reverse url regex " + self.regex.pattern)
|
||||||
|
assert len(args) == self._group_count, "required number of arguments " \
|
||||||
|
"not found"
|
||||||
|
if not len(args):
|
||||||
|
return self._path
|
||||||
|
converted_args = []
|
||||||
|
for a in args:
|
||||||
|
if not isinstance(a, (unicode_type, bytes)):
|
||||||
|
a = str(a)
|
||||||
|
converted_args.append(url_escape(utf8(a), plus=False))
|
||||||
|
return self._path % tuple(converted_args)
|
||||||
|
|
||||||
|
def _find_groups(self):
|
||||||
|
"""Returns a tuple (reverse string, group count) for a url.
|
||||||
|
|
||||||
|
For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
|
||||||
|
would return ('/%s/%s/', 2).
|
||||||
|
"""
|
||||||
|
pattern = self.regex.pattern
|
||||||
|
if pattern.startswith('^'):
|
||||||
|
pattern = pattern[1:]
|
||||||
|
if pattern.endswith('$'):
|
||||||
|
pattern = pattern[:-1]
|
||||||
|
|
||||||
|
if self.regex.groups != pattern.count('('):
|
||||||
|
# The pattern is too complicated for our simplistic matching,
|
||||||
|
# so we can't support reversing it.
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
pieces = []
|
||||||
|
for fragment in pattern.split('('):
|
||||||
|
if ')' in fragment:
|
||||||
|
paren_loc = fragment.index(')')
|
||||||
|
if paren_loc >= 0:
|
||||||
|
pieces.append('%s' + fragment[paren_loc + 1:])
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
unescaped_fragment = re_unescape(fragment)
|
||||||
|
except ValueError as exc:
|
||||||
|
# If we can't unescape part of it, we can't
|
||||||
|
# reverse this url.
|
||||||
|
return (None, None)
|
||||||
|
pieces.append(unescaped_fragment)
|
||||||
|
|
||||||
|
return ''.join(pieces), self.regex.groups
|
||||||
|
|
||||||
|
|
||||||
|
class URLSpec(Rule):
|
||||||
|
"""Specifies mappings between URLs and handlers.
|
||||||
|
|
||||||
|
.. versionchanged: 4.5
|
||||||
|
`URLSpec` is now a subclass of a `Rule` with `PathMatches` matcher and is preserved for
|
||||||
|
backwards compatibility.
|
||||||
|
"""
|
||||||
|
def __init__(self, pattern, handler, kwargs=None, name=None):
|
||||||
|
"""Parameters:
|
||||||
|
|
||||||
|
* ``pattern``: Regular expression to be matched. Any capturing
|
||||||
|
groups in the regex will be passed in to the handler's
|
||||||
|
get/post/etc methods as arguments (by keyword if named, by
|
||||||
|
position if unnamed. Named and unnamed capturing groups may
|
||||||
|
may not be mixed in the same rule).
|
||||||
|
|
||||||
|
* ``handler``: `~.web.RequestHandler` subclass to be invoked.
|
||||||
|
|
||||||
|
* ``kwargs`` (optional): A dictionary of additional arguments
|
||||||
|
to be passed to the handler's constructor.
|
||||||
|
|
||||||
|
* ``name`` (optional): A name for this handler. Used by
|
||||||
|
`~.web.Application.reverse_url`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
super(URLSpec, self).__init__(PathMatches(pattern), handler, kwargs, name)
|
||||||
|
|
||||||
|
self.regex = self.matcher.regex
|
||||||
|
self.handler_class = self.target
|
||||||
|
self.kwargs = kwargs
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '%s(%r, %s, kwargs=%r, name=%r)' % \
|
||||||
|
(self.__class__.__name__, self.regex.pattern,
|
||||||
|
self.handler_class, self.kwargs, self.name)
|
||||||
|
|
||||||
|
|
||||||
|
def _unquote_or_none(s):
|
||||||
|
"""None-safe wrapper around url_unescape to handle unmatched optional
|
||||||
|
groups correctly.
|
||||||
|
|
||||||
|
Note that args are passed as bytes so the handler can decide what
|
||||||
|
encoding to use.
|
||||||
|
"""
|
||||||
|
if s is None:
|
||||||
|
return s
|
||||||
|
return url_unescape(s, encoding=None, plus=False)
|
|
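
The new lib/tornado/routing.py above also supports named rules and URL reversal through `ReversibleRuleRouter`. A small sketch combining a named rule, a plain-callable target (served via `_CallableAdapter`), and `reverse_url`; the handler body and port are illustrative:

from tornado import httputil, ioloop
from tornado.httpserver import HTTPServer
from tornado.routing import PathMatches, ReversibleRuleRouter, Rule


def item_handler(request):
    body = ('you asked for %s\n' % request.path).encode('utf-8')
    request.connection.write_headers(
        httputil.ResponseStartLine('HTTP/1.1', 200, 'OK'),
        httputil.HTTPHeaders({'Content-Length': str(len(body))}))
    request.connection.write(body)
    request.connection.finish()


router = ReversibleRuleRouter([
    Rule(PathMatches(r'/item/([0-9]+)'), item_handler, name='item'),
])

# PathMatches.reverse() rebuilds a concrete path from the named rule's pattern.
print(router.reverse_url('item', 42))  # /item/42

server = HTTPServer(router)  # any Router works directly as a request_callback
server.listen(8888)
ioloop.IOLoop.current().start()
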
@ -21,6 +21,7 @@ import errno
|
||||||
import os
|
import os
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
|
from tornado import gen
|
||||||
from tornado.log import app_log
|
from tornado.log import app_log
|
||||||
from tornado.ioloop import IOLoop
|
from tornado.ioloop import IOLoop
|
||||||
from tornado.iostream import IOStream, SSLIOStream
|
from tornado.iostream import IOStream, SSLIOStream
|
||||||
|
@ -109,6 +110,7 @@ class TCPServer(object):
|
||||||
self._sockets = {} # fd -> socket object
|
self._sockets = {} # fd -> socket object
|
||||||
self._pending_sockets = []
|
self._pending_sockets = []
|
||||||
self._started = False
|
self._started = False
|
||||||
|
self._stopped = False
|
||||||
self.max_buffer_size = max_buffer_size
|
self.max_buffer_size = max_buffer_size
|
||||||
self.read_chunk_size = read_chunk_size
|
self.read_chunk_size = read_chunk_size
|
||||||
|
|
||||||
|
@ -227,7 +229,11 @@ class TCPServer(object):
|
||||||
Requests currently in progress may still continue after the
|
Requests currently in progress may still continue after the
|
||||||
server is stopped.
|
server is stopped.
|
||||||
"""
|
"""
|
||||||
|
if self._stopped:
|
||||||
|
return
|
||||||
|
self._stopped = True
|
||||||
for fd, sock in self._sockets.items():
|
for fd, sock in self._sockets.items():
|
||||||
|
assert sock.fileno() == fd
|
||||||
self.io_loop.remove_handler(fd)
|
self.io_loop.remove_handler(fd)
|
||||||
sock.close()
|
sock.close()
|
||||||
|
|
||||||
|
@ -285,8 +291,10 @@ class TCPServer(object):
|
||||||
stream = IOStream(connection, io_loop=self.io_loop,
|
stream = IOStream(connection, io_loop=self.io_loop,
|
||||||
max_buffer_size=self.max_buffer_size,
|
max_buffer_size=self.max_buffer_size,
|
||||||
read_chunk_size=self.read_chunk_size)
|
read_chunk_size=self.read_chunk_size)
|
||||||
|
|
||||||
future = self.handle_stream(stream, address)
|
future = self.handle_stream(stream, address)
|
||||||
if future is not None:
|
if future is not None:
|
||||||
self.io_loop.add_future(future, lambda f: f.result())
|
self.io_loop.add_future(gen.convert_yielded(future),
|
||||||
|
lambda f: f.result())
|
||||||
except Exception:
|
except Exception:
|
||||||
app_log.error("Error in connection callback", exc_info=True)
|
app_log.error("Error in connection callback", exc_info=True)
|
||||||
|
|
|
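
The tornado/tcpserver.py hunks above make `stop()` idempotent via `_stopped` and, more importantly, pass `handle_stream`'s return value through `gen.convert_yielded()`, so any yieldable (including a coroutine) is accepted, not just a plain `Future`. A sketch of a coroutine-based stream handler; the echo logic and port are illustrative:

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer


class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        # The returned yieldable is passed through gen.convert_yielded()
        # before being handed to IOLoop.add_future().
        try:
            while True:
                line = yield stream.read_until(b'\n')
                yield stream.write(line)
        except StreamClosedError:
            pass


server = EchoServer()
server.listen(8889)
IOLoop.current().start()
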
@ -13,6 +13,7 @@ and `.Resolver`.
|
||||||
from __future__ import absolute_import, division, print_function, with_statement
|
from __future__ import absolute_import, division, print_function, with_statement
|
||||||
|
|
||||||
import array
|
import array
|
||||||
|
import atexit
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
@ -66,6 +67,23 @@ else:
|
||||||
_BaseString = Union[bytes, unicode_type]
|
_BaseString = Union[bytes, unicode_type]
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
from sys import is_finalizing
|
||||||
|
except ImportError:
|
||||||
|
# Emulate it
|
||||||
|
def _get_emulated_is_finalizing():
|
||||||
|
L = []
|
||||||
|
atexit.register(lambda: L.append(None))
|
||||||
|
|
||||||
|
def is_finalizing():
|
||||||
|
# Not referencing any globals here
|
||||||
|
return L != []
|
||||||
|
|
||||||
|
return is_finalizing
|
||||||
|
|
||||||
|
is_finalizing = _get_emulated_is_finalizing()
|
||||||
|
|
||||||
|
|
||||||
class ObjectDict(_ObjectDictBase):
|
class ObjectDict(_ObjectDictBase):
|
||||||
"""Makes a dictionary behave like an object, with attribute-style access.
|
"""Makes a dictionary behave like an object, with attribute-style access.
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -77,6 +77,7 @@ import time
|
||||||
import tornado
|
import tornado
|
||||||
import traceback
|
import traceback
|
||||||
import types
|
import types
|
||||||
|
from inspect import isclass
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
from tornado.concurrent import Future
|
from tornado.concurrent import Future
|
||||||
|
@ -89,9 +90,13 @@ from tornado.log import access_log, app_log, gen_log
|
||||||
from tornado import stack_context
|
from tornado import stack_context
|
||||||
from tornado import template
|
from tornado import template
|
||||||
from tornado.escape import utf8, _unicode
|
from tornado.escape import utf8, _unicode
|
||||||
from tornado.util import (import_object, ObjectDict, raise_exc_info,
|
from tornado.routing import (AnyMatches, DefaultHostMatches, HostMatches,
|
||||||
unicode_type, _websocket_mask, re_unescape, PY3)
|
ReversibleRouter, Rule, ReversibleRuleRouter,
|
||||||
from tornado.httputil import split_host_and_port
|
URLSpec)
|
||||||
|
from tornado.util import (ObjectDict, raise_exc_info,
|
||||||
|
unicode_type, _websocket_mask, PY3)
|
||||||
|
|
||||||
|
url = URLSpec
|
||||||
|
|
||||||
if PY3:
|
if PY3:
|
||||||
import http.cookies as Cookie
|
import http.cookies as Cookie
|
||||||
|
@@ -1670,6 +1675,9 @@ def stream_request_body(cls):
     There is a subtle interaction between ``data_received`` and asynchronous
     ``prepare``: The first call to ``data_received`` may occur at any point
     after the call to ``prepare`` has returned *or yielded*.
+
+    See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/master/demos/file_upload/>`_
+    for example usage.
     """
     if not issubclass(cls, RequestHandler):
         raise TypeError("expected subclass of RequestHandler, got %r", cls)
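The linked demo shows a full upload server; a compact sketch of the decorator in use (handler name and route are illustrative)::

    from tornado import web
    from tornado.ioloop import IOLoop


    @web.stream_request_body
    class UploadHandler(web.RequestHandler):
        def prepare(self):
            # data_received may start firing at any point after prepare returns.
            self.bytes_read = 0

        def data_received(self, chunk):
            # Called repeatedly with body chunks instead of buffering the
            # whole request in memory before post() runs.
            self.bytes_read += len(chunk)

        def post(self):
            self.write("received %d bytes" % self.bytes_read)


    if __name__ == "__main__":
        web.Application([(r"/upload", UploadHandler)]).listen(8888)
        IOLoop.current().start()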
@@ -1727,7 +1735,38 @@ def addslash(method):
     return wrapper


-class Application(httputil.HTTPServerConnectionDelegate):
+class _ApplicationRouter(ReversibleRuleRouter):
+    """Routing implementation used internally by `Application`.
+
+    Provides a binding between `Application` and `RequestHandler`.
+    This implementation extends `~.routing.ReversibleRuleRouter` in a couple of ways:
+        * it allows to use `RequestHandler` subclasses as `~.routing.Rule` target and
+        * it allows to use a list/tuple of rules as `~.routing.Rule` target.
+    ``process_rule`` implementation will substitute this list with an appropriate
+    `_ApplicationRouter` instance.
+    """
+
+    def __init__(self, application, rules=None):
+        assert isinstance(application, Application)
+        self.application = application
+        super(_ApplicationRouter, self).__init__(rules)
+
+    def process_rule(self, rule):
+        rule = super(_ApplicationRouter, self).process_rule(rule)
+
+        if isinstance(rule.target, (list, tuple)):
+            rule.target = _ApplicationRouter(self.application, rule.target)
+
+        return rule
+
+    def get_target_delegate(self, target, request, **target_params):
+        if isclass(target) and issubclass(target, RequestHandler):
+            return self.application.get_handler_delegate(request, target, **target_params)
+
+        return super(_ApplicationRouter, self).get_target_delegate(target, request, **target_params)
+
+
+class Application(ReversibleRouter):
     """A collection of request handlers that make up a web application.

     Instances of this class are callable and can be passed directly to
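Because `Application` now subclasses `ReversibleRouter`, it can itself be nested inside another router. A rough sketch of what that enables, assuming the `tornado.routing` API introduced alongside this change (application contents are illustrative)::

    from tornado.httpserver import HTTPServer
    from tornado.ioloop import IOLoop
    from tornado.routing import PathMatches, Rule, RuleRouter
    from tornado.web import Application, RequestHandler


    class Handler1(RequestHandler):
        def get(self):
            self.write("app1")


    class Handler2(RequestHandler):
        def get(self):
            self.write("app2")


    app1 = Application([(r"/app1/handler", Handler1)])
    app2 = Application([(r"/app2/handler", Handler2)])

    # Dispatch whole applications by path prefix; each Application is a
    # router in its own right, so the outer RuleRouter delegates to it.
    router = RuleRouter([
        Rule(PathMatches(r"/app1.*"), app1),
        Rule(PathMatches(r"/app2.*"), app2),
    ])

    if __name__ == "__main__":
        HTTPServer(router).listen(8888)
        IOLoop.current().start()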
@@ -1740,19 +1779,34 @@ class Application(httputil.HTTPServerConnectionDelegate):
         http_server.listen(8080)
         ioloop.IOLoop.current().start()

-    The constructor for this class takes in a list of `URLSpec` objects
-    or (regexp, request_class) tuples. When we receive requests, we
-    iterate over the list in order and instantiate an instance of the
-    first request class whose regexp matches the request path.
-    The request class can be specified as either a class object or a
-    (fully-qualified) name.
-
-    Each tuple can contain additional elements, which correspond to the
-    arguments to the `URLSpec` constructor.  (Prior to Tornado 3.2,
-    only tuples of two or three elements were allowed).
-
-    A dictionary may be passed as the third element of the tuple,
-    which will be used as keyword arguments to the handler's
+    The constructor for this class takes in a list of `~.routing.Rule`
+    objects or tuples of values corresponding to the arguments of
+    `~.routing.Rule` constructor: ``(matcher, target, [target_kwargs], [name])``,
+    the values in square brackets being optional. The default matcher is
+    `~.routing.PathMatches`, so ``(regexp, target)`` tuples can also be used
+    instead of ``(PathMatches(regexp), target)``.
+
+    A common routing target is a `RequestHandler` subclass, but you can also
+    use lists of rules as a target, which create a nested routing configuration::
+
+        application = web.Application([
+            (HostMatches("example.com"), [
+                (r"/", MainPageHandler),
+                (r"/feed", FeedHandler),
+            ]),
+        ])
+
+    In addition to this you can use nested `~.routing.Router` instances,
+    `~.httputil.HTTPMessageDelegate` subclasses and callables as routing targets
+    (see `~.routing` module docs for more information).
+
+    When we receive requests, we iterate over the list in order and
+    instantiate an instance of the first request class whose regexp
+    matches the request path. The request class can be specified as
+    either a class object or a (fully-qualified) name.
+
+    A dictionary may be passed as the third element (``target_kwargs``)
+    of the tuple, which will be used as keyword arguments to the handler's
     constructor and `~RequestHandler.initialize` method.  This pattern
     is used for the `StaticFileHandler` in this example (note that a
     `StaticFileHandler` can be installed automatically with the
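The tuple form documented above maps straight onto `~.routing.Rule` arguments. A small sketch of both the full tuple and the regexp shorthand (handler, kwargs, and names are illustrative)::

    from tornado import web
    from tornado.routing import PathMatches


    class ArticleHandler(web.RequestHandler):
        def initialize(self, source="db"):
            self.source = source

        def get(self, article_id):
            self.write("article %s from %s" % (article_id, self.source))


    # (matcher, target, target_kwargs, name) -- the last two elements are
    # optional, and a bare regexp string is shorthand for PathMatches(regexp).
    application = web.Application([
        (PathMatches(r"/article/([0-9]+)"), ArticleHandler,
         dict(source="cache"), "article"),
        (r"/archive/([0-9]+)", ArticleHandler),  # plain (regexp, target) form
    ])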
@@ -1769,6 +1823,9 @@ class Application(httputil.HTTPServerConnectionDelegate):
             (r"/article/([0-9]+)", ArticleHandler),
         ])

+    If there's no match for the current request's host, then ``default_host``
+    parameter value is matched against host regular expressions.
+
     You can serve static files by sending the ``static_path`` setting
     as a keyword argument. We will serve those files from the
     ``/static/`` URI (this is configurable with the
@@ -1778,7 +1835,7 @@ class Application(httputil.HTTPServerConnectionDelegate):
     ``static_handler_class`` setting.

     """
-    def __init__(self, handlers=None, default_host="", transforms=None,
+    def __init__(self, handlers=None, default_host=None, transforms=None,
                  **settings):
         if transforms is None:
             self.transforms = []
@@ -1786,8 +1843,6 @@ class Application(httputil.HTTPServerConnectionDelegate):
             self.transforms.append(GZipContentEncoding)
         else:
             self.transforms = transforms
-        self.handlers = []
-        self.named_handlers = {}
         self.default_host = default_host
         self.settings = settings
         self.ui_modules = {'linkify': _linkify,
@@ -1810,8 +1865,6 @@ class Application(httputil.HTTPServerConnectionDelegate):
                             r"/(favicon\.ico)", r"/(robots\.txt)"]:
                 handlers.insert(0, (pattern, static_handler_class,
                                     static_handler_args))
-        if handlers:
-            self.add_handlers(".*$", handlers)

         if self.settings.get('debug'):
             self.settings.setdefault('autoreload', True)
@@ -1819,6 +1872,11 @@ class Application(httputil.HTTPServerConnectionDelegate):
             self.settings.setdefault('static_hash_cache', False)
             self.settings.setdefault('serve_traceback', True)

+        self.wildcard_router = _ApplicationRouter(self, handlers)
+        self.default_router = _ApplicationRouter(self, [
+            Rule(AnyMatches(), self.wildcard_router)
+        ])
+
         # Automatically reload modified modules
         if self.settings.get('autoreload'):
            from tornado import autoreload
@@ -1856,47 +1914,20 @@ class Application(httputil.HTTPServerConnectionDelegate):
         Host patterns are processed sequentially in the order they were
         added. All matching patterns will be considered.
         """
-        if not host_pattern.endswith("$"):
-            host_pattern += "$"
-        handlers = []
-        # The handlers with the wildcard host_pattern are a special
-        # case - they're added in the constructor but should have lower
-        # precedence than the more-precise handlers added later.
-        # If a wildcard handler group exists, it should always be last
-        # in the list, so insert new groups just before it.
-        if self.handlers and self.handlers[-1][0].pattern == '.*$':
-            self.handlers.insert(-1, (re.compile(host_pattern), handlers))
-        else:
-            self.handlers.append((re.compile(host_pattern), handlers))
-
-        for spec in host_handlers:
-            if isinstance(spec, (tuple, list)):
-                assert len(spec) in (2, 3, 4)
-                spec = URLSpec(*spec)
-            handlers.append(spec)
-            if spec.name:
-                if spec.name in self.named_handlers:
-                    app_log.warning(
-                        "Multiple handlers named %s; replacing previous value",
-                        spec.name)
-                self.named_handlers[spec.name] = spec
+        host_matcher = HostMatches(host_pattern)
+        rule = Rule(host_matcher, _ApplicationRouter(self, host_handlers))
+
+        self.default_router.rules.insert(-1, rule)
+
+        if self.default_host is not None:
+            self.wildcard_router.add_rules([(
+                DefaultHostMatches(self, host_matcher.host_pattern),
+                host_handlers
+            )])

     def add_transform(self, transform_class):
         self.transforms.append(transform_class)

-    def _get_host_handlers(self, request):
-        host = split_host_and_port(request.host.lower())[0]
-        matches = []
-        for pattern, handlers in self.handlers:
-            if pattern.match(host):
-                matches.extend(handlers)
-        # Look for default host if not behind load balancer (for debugging)
-        if not matches and "X-Real-Ip" not in request.headers:
-            for pattern, handlers in self.handlers:
-                if pattern.match(self.default_host):
-                    matches.extend(handlers)
-        return matches or None
-
     def _load_ui_methods(self, methods):
         if isinstance(methods, types.ModuleType):
             self._load_ui_methods(dict((n, getattr(methods, n))
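``add_handlers`` now installs a `HostMatches` rule in front of the wildcard group instead of maintaining a handlers list by hand; usage from application code is unchanged. A minimal sketch with illustrative host and handler names::

    from tornado import web


    class MainHandler(web.RequestHandler):
        def get(self):
            self.write("main site")


    class ApiHandler(web.RequestHandler):
        def get(self):
            self.write("api root")


    app = web.Application([(r"/", MainHandler)])

    # Host-specific handlers are inserted ahead of the wildcard group created
    # by the constructor, so requests to api.example.com hit ApiHandler first.
    app.add_handlers(r"api\.example\.com", [(r"/", ApiHandler)])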
@@ -1926,16 +1957,40 @@ class Application(httputil.HTTPServerConnectionDelegate):
         except TypeError:
             pass

-    def start_request(self, server_conn, request_conn):
-        # Modern HTTPServer interface
-        return _RequestDispatcher(self, request_conn)
-
     def __call__(self, request):
         # Legacy HTTPServer interface
-        dispatcher = _RequestDispatcher(self, None)
-        dispatcher.set_request(request)
+        dispatcher = self.find_handler(request)
         return dispatcher.execute()

+    def find_handler(self, request, **kwargs):
+        route = self.default_router.find_handler(request)
+        if route is not None:
+            return route
+
+        if self.settings.get('default_handler_class'):
+            return self.get_handler_delegate(
+                request,
+                self.settings['default_handler_class'],
+                self.settings.get('default_handler_args', {}))
+
+        return self.get_handler_delegate(
+            request, ErrorHandler, {'status_code': 404})
+
+    def get_handler_delegate(self, request, target_class, target_kwargs=None,
+                             path_args=None, path_kwargs=None):
+        """Returns `~.httputil.HTTPMessageDelegate` that can serve a request
+        for application and `RequestHandler` subclass.
+
+        :arg httputil.HTTPServerRequest request: current HTTP request.
+        :arg RequestHandler target_class: a `RequestHandler` class.
+        :arg dict target_kwargs: keyword arguments for ``target_class`` constructor.
+        :arg list path_args: positional arguments for ``target_class`` HTTP method that
+            will be executed while handling a request (``get``, ``post`` or any other).
+        :arg dict path_kwargs: keyword arguments for ``target_class`` HTTP method.
+        """
+        return _HandlerDelegate(
+            self, request, target_class, target_kwargs, path_args, path_kwargs)
+
     def reverse_url(self, name, *args):
         """Returns a URL path for handler named ``name``

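``find_handler`` falls back to ``default_handler_class`` (with ``default_handler_args``) before resorting to the built-in 404 `ErrorHandler`, so a custom catch-all page keeps working the same way as before. A brief sketch (handler name is illustrative)::

    from tornado import web


    class NotFoundHandler(web.RequestHandler):
        def prepare(self):
            # Overriding prepare() covers every HTTP method, not just GET.
            self.set_status(404)
            self.finish("custom 404 page")


    app = web.Application(
        [],  # no routes match, so every request reaches the fallback
        default_handler_class=NotFoundHandler,
    )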
@@ -1945,8 +2000,10 @@ class Application(httputil.HTTPServerConnectionDelegate):
         They will be converted to strings if necessary, encoded as utf8,
         and url-escaped.
         """
-        if name in self.named_handlers:
-            return self.named_handlers[name].reverse(*args)
+        reversed_url = self.default_router.reverse_url(name, *args)
+        if reversed_url is not None:
+            return reversed_url
+
         raise KeyError("%s not found in named urls" % name)

     def log_request(self, handler):
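Named URLs are now resolved through the router tree rather than a ``named_handlers`` dict, but ``reverse_url`` behaves as before: known names are reversed, unknown names raise ``KeyError``. A quick sketch with an illustrative route::

    from tornado import web


    class UserHandler(web.RequestHandler):
        def get(self, user_id):
            self.write("user %s" % user_id)


    app = web.Application([
        web.url(r"/user/([0-9]+)", UserHandler, name="user"),
    ])

    # Arguments are converted to strings and url-escaped before substitution.
    assert app.reverse_url("user", 42) == "/user/42"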
@@ -1971,67 +2028,24 @@ class Application(httputil.HTTPServerConnectionDelegate):
                    handler._request_summary(), request_time)


-class _RequestDispatcher(httputil.HTTPMessageDelegate):
-    def __init__(self, application, connection):
+class _HandlerDelegate(httputil.HTTPMessageDelegate):
+    def __init__(self, application, request, handler_class, handler_kwargs,
+                 path_args, path_kwargs):
         self.application = application
-        self.connection = connection
-        self.request = None
+        self.connection = request.connection
+        self.request = request
+        self.handler_class = handler_class
+        self.handler_kwargs = handler_kwargs or {}
+        self.path_args = path_args or []
+        self.path_kwargs = path_kwargs or {}
         self.chunks = []
-        self.handler_class = None
-        self.handler_kwargs = None
-        self.path_args = []
-        self.path_kwargs = {}
+        self.stream_request_body = _has_stream_request_body(self.handler_class)

     def headers_received(self, start_line, headers):
-        self.set_request(httputil.HTTPServerRequest(
-            connection=self.connection, start_line=start_line,
-            headers=headers))
         if self.stream_request_body:
             self.request.body = Future()
             return self.execute()

-    def set_request(self, request):
-        self.request = request
-        self._find_handler()
-        self.stream_request_body = _has_stream_request_body(self.handler_class)
-
-    def _find_handler(self):
-        # Identify the handler to use as soon as we have the request.
-        # Save url path arguments for later.
-        app = self.application
-        handlers = app._get_host_handlers(self.request)
-        if not handlers:
-            self.handler_class = RedirectHandler
-            self.handler_kwargs = dict(url="%s://%s/"
-                                       % (self.request.protocol,
-                                          app.default_host))
-            return
-        for spec in handlers:
-            match = spec.regex.match(self.request.path)
-            if match:
-                self.handler_class = spec.handler_class
-                self.handler_kwargs = spec.kwargs
-                if spec.regex.groups:
-                    # Pass matched groups to the handler.  Since
-                    # match.groups() includes both named and
-                    # unnamed groups, we want to use either groups
-                    # or groupdict but not both.
-                    if spec.regex.groupindex:
-                        self.path_kwargs = dict(
-                            (str(k), _unquote_or_none(v))
-                            for (k, v) in match.groupdict().items())
-                    else:
-                        self.path_args = [_unquote_or_none(s)
-                                          for s in match.groups()]
-                return
-        if app.settings.get('default_handler_class'):
-            self.handler_class = app.settings['default_handler_class']
-            self.handler_kwargs = app.settings.get(
-                'default_handler_args', {})
-        else:
-            self.handler_class = ErrorHandler
-            self.handler_kwargs = dict(status_code=404)
-
     def data_received(self, data):
         if self.stream_request_body:
             return self.handler.data_received(data)
@@ -2188,13 +2202,29 @@ class RedirectHandler(RequestHandler):
         application = web.Application([
             (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
         ])
+
+    `RedirectHandler` supports regular expression substitutions. E.g., to
+    swap the first and second parts of a path while preserving the remainder::
+
+        application = web.Application([
+            (r"/(.*?)/(.*?)/(.*)", web.RedirectHandler, {"url": "/{1}/{0}/{2}"}),
+        ])
+
+    The final URL is formatted with `str.format` and the substrings that match
+    the capturing groups. In the above example, a request to "/a/b/c" would be
+    formatted like::
+
+        str.format("/{1}/{0}/{2}", "a", "b", "c")  # -> "/b/a/c"
+
+    Use Python's :ref:`format string syntax <formatstrings>` to customize how
+    values are substituted.
     """
     def initialize(self, url, permanent=True):
         self._url = url
         self._permanent = permanent

-    def get(self):
-        self.redirect(self._url, permanent=self._permanent)
+    def get(self, *args):
+        self.redirect(self._url.format(*args), permanent=self._permanent)


 class StaticFileHandler(RequestHandler):
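With ``get`` now accepting path arguments and formatting them into ``url``, a single rule can rewrite a whole family of paths. A small sketch using a hypothetical /photos prefix::

    from tornado import web

    app = web.Application([
        # A request for /photos/cat.jpg is redirected (302, since permanent
        # is False) to /images/cat.jpg; {0} receives the first capture group.
        (r"/photos/(.*)", web.RedirectHandler,
         {"url": "/images/{0}", "permanent": False}),
    ])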
@@ -2990,99 +3020,6 @@ class _UIModuleNamespace(object):
             raise AttributeError(str(e))


-class URLSpec(object):
-    """Specifies mappings between URLs and handlers."""
-    def __init__(self, pattern, handler, kwargs=None, name=None):
-        """Parameters:
-
-        * ``pattern``: Regular expression to be matched. Any capturing
-          groups in the regex will be passed in to the handler's
-          get/post/etc methods as arguments (by keyword if named, by
-          position if unnamed. Named and unnamed capturing groups may
-          may not be mixed in the same rule).
-
-        * ``handler``: `RequestHandler` subclass to be invoked.
-
-        * ``kwargs`` (optional): A dictionary of additional arguments
-          to be passed to the handler's constructor.
-
-        * ``name`` (optional): A name for this handler.  Used by
-          `Application.reverse_url`.
-
-        """
-        if not pattern.endswith('$'):
-            pattern += '$'
-        self.regex = re.compile(pattern)
-        assert len(self.regex.groupindex) in (0, self.regex.groups), \
-            ("groups in url regexes must either be all named or all "
-             "positional: %r" % self.regex.pattern)
-
-        if isinstance(handler, str):
-            # import the Module and instantiate the class
-            # Must be a fully qualified name (module.ClassName)
-            handler = import_object(handler)
-
-        self.handler_class = handler
-        self.kwargs = kwargs or {}
-        self.name = name
-        self._path, self._group_count = self._find_groups()
-
-    def __repr__(self):
-        return '%s(%r, %s, kwargs=%r, name=%r)' % \
-            (self.__class__.__name__, self.regex.pattern,
-             self.handler_class, self.kwargs, self.name)
-
-    def _find_groups(self):
-        """Returns a tuple (reverse string, group count) for a url.
-
-        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
-        would return ('/%s/%s/', 2).
-        """
-        pattern = self.regex.pattern
-        if pattern.startswith('^'):
-            pattern = pattern[1:]
-        if pattern.endswith('$'):
-            pattern = pattern[:-1]
-
-        if self.regex.groups != pattern.count('('):
-            # The pattern is too complicated for our simplistic matching,
-            # so we can't support reversing it.
-            return (None, None)
-
-        pieces = []
-        for fragment in pattern.split('('):
-            if ')' in fragment:
-                paren_loc = fragment.index(')')
-                if paren_loc >= 0:
-                    pieces.append('%s' + fragment[paren_loc + 1:])
-            else:
-                try:
-                    unescaped_fragment = re_unescape(fragment)
-                except ValueError as exc:
-                    # If we can't unescape part of it, we can't
-                    # reverse this url.
-                    return (None, None)
-                pieces.append(unescaped_fragment)
-
-        return (''.join(pieces), self.regex.groups)
-
-    def reverse(self, *args):
-        if self._path is None:
-            raise ValueError("Cannot reverse url regex " + self.regex.pattern)
-        assert len(args) == self._group_count, "required number of arguments "\
-            "not found"
-        if not len(args):
-            return self._path
-        converted_args = []
-        for a in args:
-            if not isinstance(a, (unicode_type, bytes)):
-                a = str(a)
-            converted_args.append(escape.url_escape(utf8(a), plus=False))
-        return self._path % tuple(converted_args)
-
-url = URLSpec
-
-
 if hasattr(hmac, 'compare_digest'):  # python 3.3
     _time_independent_equals = hmac.compare_digest
 else:
@@ -3303,15 +3240,3 @@ def _create_signature_v2(secret, s):
     hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
     hash.update(utf8(s))
     return utf8(hash.hexdigest())
-
-
-def _unquote_or_none(s):
-    """None-safe wrapper around url_unescape to handle unmatched optional
-    groups correctly.
-
-    Note that args are passed as bytes so the handler can decide what
-    encoding to use.
-    """
-    if s is None:
-        return s
-    return escape.url_unescape(s, encoding=None, plus=False)