http.fetch(self._oauth_request_token_url(**args),
functools.partial(self._on_access_token, redirect_uri, client_id,
- client_secret, callback, fields))
+ client_secret, callback, fields))
def _on_access_token(self, redirect_uri, client_id, client_secret,
future, fields, response):
_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and
sys.version_info >= (3, 4))
+
class ReturnValueIgnoredError(Exception):
    # NOTE(review): marker exception with no payload of its own. The name
    # suggests it is raised when a callback-style function returns a value
    # that would otherwise be silently dropped -- confirm against the
    # raising call sites, which are outside this chunk.
    pass
# This class and associated code in the future object is derived
# from the Trollius project, a backport of asyncio to Python 2.x - 3.x
+
class _TracebackLogger(object):
"""Helper to log a traceback upon destruction if not cleared.
curl_log = logging.getLogger('tornado.curl_httpclient')
+
class CurlAsyncHTTPClient(AsyncHTTPClient):
def initialize(self, io_loop, max_clients=10, defaults=None):
super(CurlAsyncHTTPClient, self).initialize(io_loop, defaults=defaults)
curl.setopt(pycurl.HEADERFUNCTION,
functools.partial(self._curl_header_callback,
- headers, request.header_callback))
+ headers, request.header_callback))
if request.streaming_callback:
write_function = lambda chunk: self.io_loop.add_callback(
request.streaming_callback, chunk)
curl.setopt(pycurl.USERPWD, native_str(userpwd))
curl_log.debug("%s %s (username: %r)", request.method, request.url,
- request.auth_username)
+ request.auth_username)
else:
curl.unsetopt(pycurl.USERPWD)
curl_log.debug("%s %s", request.method, request.url)
super(Return, self).__init__()
self.value = value
+
class WaitIterator(object):
"""Provides an iterator to yield the results of futures as they finish.
# Lists containing YieldPoints require stack contexts;
# other lists are handled via multi_future in convert_yielded.
if (isinstance(yielded, list) and
- any(isinstance(f, YieldPoint) for f in yielded)):
+ any(isinstance(f, YieldPoint) for f in yielded)):
yielded = Multi(yielded)
elif (isinstance(yielded, dict) and
- any(isinstance(f, YieldPoint) for f in yielded.values())):
+ any(isinstance(f, YieldPoint) for f in yielded.values())):
yielded = Multi(yielded)
if isinstance(yielded, YieldPoint):
def __init__(self):
    # Intentionally empty constructor. NOTE(review): the enclosing class
    # header is outside this chunk, so it is not visible whether skipping
    # the base-class __init__ is deliberate (e.g. to suppress Exception
    # argument handling) -- confirm against the full class definition.
    pass
+
class _ExceptionLoggingContext(object):
"""Used with the ``with`` statement when calling delegate methods to
log any exceptions with the given logger. Any exceptions caught are
self.logger.error("Uncaught exception", exc_info=(typ, value, tb))
raise _QuietException
+
class HTTP1ConnectionParameters(object):
"""Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`.
"""
# 1xx responses should never indicate the presence of
# a body.
if ('Content-Length' in headers or
- 'Transfer-Encoding' in headers):
+ 'Transfer-Encoding' in headers):
raise httputil.HTTPInputError(
"Response code %d cannot have body" % code)
# TODO: client delegates will get headers_received twice
import doctest
return doctest.DocTestSuite()
+
def split_host_and_port(netloc):
"""Returns ``(host, port)`` tuple from ``netloc``.
_ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,)
#######################################################
+
+
class StreamClosedError(IOError):
"""Exception raised by `IOStream` methods when the stream is closed.
# quiet as well.
# https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0
if (err.args[0] in _ERRNO_CONNRESET or
- err.args[0] == errno.EBADF):
+ err.args[0] == errno.EBADF):
return self.close(exc_info=True)
raise
except AttributeError:
# Default backlog used when calling sock.listen()
_DEFAULT_BACKLOG = 128
+
def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
backlog=_DEFAULT_BACKLOG, flags=None):
"""Creates listening sockets bound to the given port and address.
# Re-raise the original asyncio error, not the trollius one.
raise e
+
class BaseAsyncIOLoop(IOLoop):
def initialize(self, asyncio_loop, close_loop=False):
self.asyncio_loop = asyncio_loop
super(AsyncIOLoop, self).initialize(asyncio.new_event_loop(),
close_loop=True)
+
def to_tornado_future(asyncio_future):
    """Bridge an ``asyncio.Future`` into a `tornado.concurrent.Future`.

    Completion (result or exception) is propagated from *asyncio_future*
    to the returned Tornado future via ``chain_future``.
    """
    tornado_fut = tornado.concurrent.Future()
    tornado.concurrent.chain_future(asyncio_future, tornado_fut)
    return tornado_fut
+
def to_asyncio_future(tornado_future):
"""Convert a `tornado.concurrent.Future` to an ``asyncio.Future``."""
af = asyncio.Future()
# Closed connections are reported as errors by epoll and kqueue,
# but as zero-byte reads by select, so when errors are requested
# we need to listen for both read and error.
- #self.read_fds.add(fd)
+ # self.read_fds.add(fd)
def modify(self, fd, events):
self.unregister(fd)
body_present = (self.request.body is not None or
self.request.body_producer is not None)
if ((body_expected and not body_present) or
- (body_present and not body_expected)):
+ (body_present and not body_expected)):
raise ValueError(
'Body must %sbe None for method %s (unelss '
'allow_nonstandard_methods is true)' %
skipIfNoSingleDispatch = unittest.skipIf(
gen.singledispatch is None, "singledispatch module not present")
+
@unittest.skipIf(asyncio is None, "asyncio module not present")
class AsyncIOLoopTest(AsyncTestCase):
def get_new_ioloop(self):
self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9")
def test_squeeze(self):
- self.assertEqual(squeeze(u('sequences of whitespace chars'))
- , u('sequences of whitespace chars'))
-
+ self.assertEqual(squeeze(u('sequences of whitespace chars')), u('sequences of whitespace chars'))
+
def test_recursive_unicode(self):
tests = {
'dict': {b"foo": b"bar"},
yield gen.with_timeout(datetime.timedelta(seconds=3600),
executor.submit(lambda: None))
+
class WaitIteratorTest(AsyncTestCase):
@gen_test
def test_empty_iterator(self):
while not dg.done():
dr = yield dg.next()
if dg.current_index == "f1":
- self.assertTrue(dg.current_future==f1 and dr==24,
+ self.assertTrue(dg.current_future == f1 and dr == 24,
"WaitIterator dict status incorrect")
elif dg.current_index == "f2":
- self.assertTrue(dg.current_future==f2 and dr==42,
+ self.assertTrue(dg.current_future == f2 and dr == 42,
"WaitIterator dict status incorrect")
else:
self.fail("got bad WaitIterator index {}".format(
futures[3].set_result(84)
if iteration < 8:
- self.io_loop.add_callback(self.finish_coroutines, iteration+1, futures)
+ self.io_loop.add_callback(self.finish_coroutines, iteration + 1, futures)
@gen_test
def test_iterator(self):
# Twisted's reactor does not. The removeReader call fails and so
# do all future removeAll calls (which our tests do at cleanup).
#
- #def test_post_307(self):
+ # def test_post_307(self):
# response = self.fetch("/redirect?status=307&url=/post",
# method="POST", body=b"arg1=foo&arg2=bar")
# self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
def test_if_modified_since(self):
http_date = datetime.datetime.utcnow()
request = HTTPRequest('http://example.com', if_modified_since=http_date)
- self.assertEqual(request.headers,
- {'If-Modified-Since': format_timestamp(http_date)})
+ self.assertEqual(request.headers,
+ {'If-Modified-Since': format_timestamp(http_date)})
# and cpython's unicodeobject.c (which defines the implementation
# of unicode_type.splitlines(), and uses a different list than TR13).
newlines = [
- u('\u001b'), # VERTICAL TAB
- u('\u001c'), # FILE SEPARATOR
- u('\u001d'), # GROUP SEPARATOR
- u('\u001e'), # RECORD SEPARATOR
- u('\u0085'), # NEXT LINE
- u('\u2028'), # LINE SEPARATOR
- u('\u2029'), # PARAGRAPH SEPARATOR
- ]
+ u('\u001b'), # VERTICAL TAB
+ u('\u001c'), # FILE SEPARATOR
+ u('\u001d'), # GROUP SEPARATOR
+ u('\u001e'), # RECORD SEPARATOR
+ u('\u0085'), # NEXT LINE
+ u('\u2028'), # LINE SEPARATOR
+ u('\u2029'), # PARAGRAPH SEPARATOR
+ ]
for newline in newlines:
# Try the utf8 and latin1 representations of each newline
for encoding in ['utf8', 'latin1']:
[('Cr', 'cr\rMore: more'),
('Crlf', 'crlf'),
('Lf', 'lf'),
- ])
+ ])
class FormatTimestampTest(unittest.TestCase):
# Use a NullContext to keep the exception from being caught by
# AsyncTestCase.
with NullContext():
- self.io_loop.add_callback(lambda: 1/0)
+ self.io_loop.add_callback(lambda: 1 / 0)
self.io_loop.add_callback(self.stop)
with ExpectLog(app_log, "Exception in callback"):
self.wait()
@gen.coroutine
def callback():
self.io_loop.add_callback(self.stop)
- 1/0
+ 1 / 0
self.io_loop.add_callback(callback)
with ExpectLog(app_log, "Exception in callback"):
self.wait()
def test_spawn_callback(self):
# An added callback runs in the test's stack_context, so will be
# re-arised in wait().
- self.io_loop.add_callback(lambda: 1/0)
+ self.io_loop.add_callback(lambda: 1 / 0)
with self.assertRaises(ZeroDivisionError):
self.wait()
# A spawned callback is run directly on the IOLoop, so it will be
# logged without stopping the test.
- self.io_loop.spawn_callback(lambda: 1/0)
+ self.io_loop.spawn_callback(lambda: 1 / 0)
self.io_loop.add_callback(self.stop)
with ExpectLog(app_log, "Exception in callback"):
self.wait()
with self.assertRaises((ssl.SSLError, socket.error)):
yield server_future
-
@unittest.skipIf(not hasattr(ssl, 'create_default_context'),
'ssl.create_default_context not present')
@gen_test
self.assertEqual(locale.format_date(date, full_format=True),
'April 28, 2013 at 6:35 pm')
- self.assertEqual(locale.format_date(datetime.datetime.utcnow() - datetime.timedelta(seconds=2), full_format=False),
+ self.assertEqual(locale.format_date(datetime.datetime.utcnow() - datetime.timedelta(seconds=2), full_format=False),
'2 seconds ago')
self.assertEqual(locale.format_date(datetime.datetime.utcnow() - datetime.timedelta(minutes=2), full_format=False),
'2 minutes ago')
yield self.resolver.resolve('an invalid domain', 80,
socket.AF_UNSPEC)
+
def _failing_getaddrinfo(*args):
    """Stand-in for ``socket.getaddrinfo`` that always fails.

    Accepts (and ignores) any arguments so it can be dropped in wherever
    ``getaddrinfo`` is mocked; unconditionally raises ``socket.gaierror``
    to simulate a failed DNS lookup.
    """
    raise socket.gaierror("mock: lookup failed")
+
@skipIfNoNetwork
class BlockingResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
skipIfNoSingleDispatch = unittest.skipIf(
gen.singledispatch is None, "singledispatch module not present")
+
def save_signal_handlers():
saved = {}
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]:
skipIfNoIPv6 = unittest.skipIf(not socket.has_ipv6, 'ipv6 support not present')
+
def refusing_port():
"""Returns a local port number that will refuse all connections.
headers = response.headers.get_list("Set-Cookie")
self.assertEqual(sorted(headers),
["foo=bar; Max-Age=10; Path=/"])
-
+
def test_set_cookie_expires_days(self):
response = self.fetch("/set_expires_days")
header = response.headers.get("Set-Cookie")
token2 = self.get_token()
# Each token can be used to authenticate its own request.
for token in (self.xsrf_token, token2):
- response = self.fetch(
+ response = self.fetch(
"/", method="POST",
body=urllib_parse.urlencode(dict(_xsrf=token)),
headers=self.cookie_headers(token))
class ErrorInOnMessageHandler(TestWebSocketHandler):
def on_message(self, message):
- 1/0
+ 1 / 0
class HeaderHandler(TestWebSocketHandler):
ws.close()
yield self.close_future
+
class WebSocketTest(WebSocketBaseTestCase):
def get_app(self):
self.close_future = Future()
headers = {'Origin': 'http://127.0.0.1:%d' % port}
ws = yield websocket_connect(HTTPRequest(url, headers=headers),
- io_loop=self.io_loop)
+ io_loop=self.io_loop)
ws.write_message('hello')
response = yield ws.read_message()
self.assertEqual(response, 'hello')
headers = {'Origin': 'http://127.0.0.1:%d/something' % port}
ws = yield websocket_connect(HTTPRequest(url, headers=headers),
- io_loop=self.io_loop)
+ io_loop=self.io_loop)
ws.write_message('hello')
response = yield ws.read_message()
self.assertEqual(response, 'hello')
return unmasked.tostring()
if (os.environ.get('TORNADO_NO_EXTENSION') or
- os.environ.get('TORNADO_EXTENSION') == '0'):
+ os.environ.get('TORNADO_EXTENSION') == '0'):
# These environment variables exist to make it easier to do performance
# comparisons; they are not guaranteed to remain supported in the future.
_websocket_mask = _websocket_mask_python
from tornado.util import _websocket_mask
try:
- from urllib.parse import urlparse # py2
+ from urllib.parse import urlparse # py2
except ImportError:
- from urlparse import urlparse # py3
+ from urlparse import urlparse # py3
try:
xrange # py2
else:
origin = self.request.headers.get("Sec-Websocket-Origin", None)
-
# If there was an origin header, check to make sure it matches
# according to check_origin. When the origin is None, we assume it
# did not come from a browser and that it can be passed on.
extensions = self._parse_extensions_header(self.request.headers)
for ext in extensions:
if (ext[0] == 'permessage-deflate' and
- self._compression_options is not None):
+ self._compression_options is not None):
# TODO: negotiate parameters if compression_options
# specifies limits.
self._create_compressors('server', ext[1])
if ('client_max_window_bits' in ext[1] and
- ext[1]['client_max_window_bits'] is None):
+ ext[1]['client_max_window_bits'] is None):
# Don't echo an offered client_max_window_bits
# parameter with no value.
del ext[1]['client_max_window_bits']
extensions = self._parse_extensions_header(headers)
for ext in extensions:
if (ext[0] == 'permessage-deflate' and
- self._compression_options is not None):
+ self._compression_options is not None):
self._create_compressors('client', ext[1])
else:
raise ValueError("unsupported extension %r", ext)