git.ipfire.org Git - thirdparty/tornado.git/commitdiff
Autopep8 formatting updates.
author    Ben Darnell <ben@bendarnell.com>    Sun, 25 May 2014 03:35:27 +0000 (23:35 -0400)
committer Ben Darnell <ben@bendarnell.com>    Sun, 25 May 2014 03:35:27 +0000 (23:35 -0400)
27 files changed:
tornado/auth.py
tornado/concurrent.py
tornado/gen.py
tornado/http1connection.py
tornado/httpclient.py
tornado/httpserver.py
tornado/httputil.py
tornado/iostream.py
tornado/log.py
tornado/netutil.py
tornado/platform/auto.py
tornado/simple_httpclient.py
tornado/tcpclient.py
tornado/tcpserver.py
tornado/template.py
tornado/test/curl_httpclient_test.py
tornado/test/gen_test.py
tornado/test/httpserver_test.py
tornado/test/iostream_test.py
tornado/test/runtests.py
tornado/test/tcpclient_test.py
tornado/test/template_test.py
tornado/test/web_test.py
tornado/testing.py
tornado/web.py
tornado/websocket.py
tornado/wsgi.py

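The hunks below are pure autopep8 whitespace changes: two blank lines before top-level classes and functions, extra indentation for wrapped conditions so they stand apart from the block that follows, and spaces around arithmetic operators. A minimal sketch of the same kind of pass, assuming the autopep8 package is installed; the autopep8 version and options actually used for this commit are not recorded on this page:

    # Run autopep8's programmatic API over a snippet that has the same issues
    # the diffs below fix: missing blank lines before top-level definitions
    # and an under-indented continuation line.
    import autopep8

    BEFORE = (
        "import socket\n"
        "class AuthError(Exception):\n"
        "    pass\n"
        "def is_local(addr):\n"
        "    return (addr is not None and\n"
        "        addr[0] == '127.0.0.1')\n"
    )

    print(autopep8.fix_code(BEFORE))
    # The output keeps the logic unchanged: two blank lines appear before
    # `class AuthError` and `def is_local`, and the wrapped condition is
    # re-indented to satisfy the continuation-line checks.  Operator-spacing
    # fixes such as b'a'*4096 -> b'a' * 4096 may need the E226 check selected
    # explicitly, depending on autopep8's default ignore list.
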
index 6b1dd10e798a153695ed387fe6b16221856d6a71..6892f91aa7aaab204a99246069d11a83be9b3a02 100644 (file)
@@ -78,6 +78,7 @@ try:
 except NameError:
     long = int  # py3
 
+
 class AuthError(Exception):
     pass
 
index 2b4b1e9573dfcc70ddea6fbc329d322e083b38bf..63b0a8c138aff53468718b4500b55782bacfc443 100644 (file)
@@ -184,6 +184,7 @@ if futures is None:
 else:
     FUTURES = (futures.Future, Future)
 
+
 def is_future(x):
     return isinstance(x, FUTURES)
 
index e136a6a7b0863345c28e4a838f275dc566fb2590..4d1dc6e115eba0fd4cefbc5810fee9eeff8dd83c 100644 (file)
@@ -556,7 +556,7 @@ _null_future.set_result(None)
 
 moment = Future()
 moment.__doc__ = \
-"""A special object which may be yielded to allow the IOLoop to run for
+    """A special object which may be yielded to allow the IOLoop to run for
 one iteration.
 
 This is not needed in normal use but it can be helpful in long-running
@@ -568,6 +568,7 @@ Usage: ``yield gen.moment``
 """
 moment.set_result(None)
 
+
 class Runner(object):
     """Internal implementation of `tornado.gen.engine`.
 
@@ -732,7 +733,6 @@ class Runner(object):
                 "yielded unknown object %r" % (yielded,)))
         return True
 
-
     def result_callback(self, key):
         return stack_context.wrap(_argument_adapter(
             functools.partial(self.set_result, key)))
@@ -753,6 +753,7 @@ class Runner(object):
 
 Arguments = collections.namedtuple('Arguments', ['args', 'kwargs'])
 
+
 def _argument_adapter(callback):
     """Returns a function that when invoked runs ``callback`` with one arg.
 
index d71a3244a0b04cb52532743dbaf7d4104c211fdd..1490267a10c307e4d4e633ed71fb6df41e878158 100644 (file)
@@ -30,6 +30,7 @@ from tornado.log import gen_log, app_log
 from tornado import stack_context
 from tornado.util import GzipDecompressor
 
+
 class HTTP1ConnectionParameters(object):
     """Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`.
     """
@@ -54,6 +55,7 @@ class HTTP1ConnectionParameters(object):
         self.body_timeout = body_timeout
         self.use_gzip = use_gzip
 
+
 class HTTP1Connection(httputil.HTTPConnection):
     """Implements the HTTP/1.x protocol.
 
@@ -128,8 +130,8 @@ class HTTP1Connection(httputil.HTTPConnection):
         need_delegate_close = False
         try:
             header_future = self.stream.read_until_regex(
-                        b"\r?\n\r?\n",
-                        max_bytes=self.params.max_header_size)
+                b"\r?\n\r?\n",
+                max_bytes=self.params.max_header_size)
             if self.params.header_timeout is None:
                 header_data = yield header_future
             else:
@@ -163,7 +165,7 @@ class HTTP1Connection(httputil.HTTPConnection):
             skip_body = False
             if self.is_client:
                 if (self._request_start_line is not None and
-                    self._request_start_line.method == 'HEAD'):
+                        self._request_start_line.method == 'HEAD'):
                     skip_body = True
                 code = start_line.code
                 if code == 304:
@@ -174,7 +176,7 @@ class HTTP1Connection(httputil.HTTPConnection):
                     yield self._read_message(delegate)
             else:
                 if (headers.get("Expect") == "100-continue" and
-                    not self._write_finished):
+                        not self._write_finished):
                     self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
             if not skip_body:
                 body_future = self._read_body(headers, delegate)
@@ -199,8 +201,8 @@ class HTTP1Connection(httputil.HTTPConnection):
             # response, and we're not detached, register a close callback
             # on the stream (we didn't need one while we were reading)
             if (not self._finish_future.done() and
-                self.stream is not None and
-                not self.stream.closed()):
+                    self.stream is not None and
+                    not self.stream.closed()):
                 self.stream.set_close_callback(self._on_connection_close)
                 yield self._finish_future
             if self.is_client and self._disconnect_on_finish:
@@ -383,8 +385,8 @@ class HTTP1Connection(httputil.HTTPConnection):
     def finish(self):
         """Implements `.HTTPConnection.finish`."""
         if (self._expected_content_remaining is not None and
-            self._expected_content_remaining != 0 and
-            not self.stream.closed()):
+                self._expected_content_remaining != 0 and
+                not self.stream.closed()):
             self.stream.close()
             raise httputil.HTTPOutputException(
                 "Tried to write %d bytes less than Content-Length" %
index dd2d9798bc89eddbf0cdd8ae179c1d02bcd7a76d..94a4593aaae8dc98c71fb3f5ada6d9e73b32f975 100644 (file)
@@ -500,7 +500,7 @@ class HTTPResponse(object):
             self.effective_url = effective_url
         if error is None:
             if self.code < 200 or self.code >= 300:
-                self.error = HTTPError(self.code, message=self.reason, 
+                self.error = HTTPError(self.code, message=self.reason,
                                        response=self)
             else:
                 self.error = None
index 597db20e46d97550999002a1c8511503f8e25373..469374e1b54a0a6468ab49a30f62952df3cf1840 100644 (file)
@@ -190,7 +190,7 @@ class _HTTPRequestContext(object):
             self.address_family = None
         # In HTTPServerRequest we want an IP, not a full socket address.
         if (self.address_family in (socket.AF_INET, socket.AF_INET6) and
-            address is not None):
+                address is not None):
             self.remote_ip = address[0]
         else:
             # Unix (or other) socket; fake the remote address.
@@ -204,7 +204,6 @@ class _HTTPRequestContext(object):
         self._orig_remote_ip = self.remote_ip
         self._orig_protocol = self.protocol
 
-
     def __str__(self):
         if self.address_family in (socket.AF_INET, socket.AF_INET6):
             return self.remote_ip
index ee2f130eaa2e53e19e2d4aca14c58659e3f170d1..077fdc2f920012ef54ca0ed01c8204494eb39a41 100644 (file)
@@ -58,7 +58,9 @@ try:
     from ssl import SSLError
 except ImportError:
     # ssl is unavailable on app engine.
-    class SSLError(Exception): pass
+    class SSLError(Exception):
+        pass
+
 
 class _NormalizedHeaderCache(dict):
     """Dynamic cached mapping of header names to Http-Header-Case.
index df362b78355b99c857ddff1770b1c0a54cdbc5ab..cad8a6f732966aebee58905013fdd1a0a50da0dd 100644 (file)
@@ -72,7 +72,6 @@ class StreamClosedError(IOError):
     pass
 
 
-
 class UnsatisfiableReadError(Exception):
     """Exception raised when a read cannot be satisfied.
 
@@ -107,7 +106,7 @@ class BaseIOStream(object):
         # A chunk size that is too close to max_buffer_size can cause
         # spurious failures.
         self.read_chunk_size = min(read_chunk_size or 65536,
-                                   self.max_buffer_size//2)
+                                   self.max_buffer_size // 2)
         self.error = None
         self._read_buffer = collections.deque()
         self._write_buffer = collections.deque()
@@ -366,7 +365,7 @@ class BaseIOStream(object):
                 self._connect_future = None
             for future in futures:
                 if (isinstance(self.error, (socket.error, IOError)) and
-                    errno_from_exception(self.error) in _ERRNO_CONNRESET):
+                        errno_from_exception(self.error) in _ERRNO_CONNRESET):
                     # Treat connection resets as closed connections so
                     # clients only have to catch one kind of exception
                     # to avoid logging.
@@ -542,7 +541,7 @@ class BaseIOStream(object):
                 #
                 # If we've reached target_bytes, we know we're done.
                 if (target_bytes is not None and
-                    self._read_buffer_size >= target_bytes):
+                        self._read_buffer_size >= target_bytes):
                     break
 
                 # Otherwise, we need to call the more expensive find_read_pos.
@@ -731,7 +730,7 @@ class BaseIOStream(object):
 
     def _check_max_bytes(self, delimiter, size):
         if (self._read_max_bytes is not None and
-            size > self._read_max_bytes):
+                size > self._read_max_bytes):
             raise UnsatisfiableReadError(
                 "delimiter %r not found within %d bytes" % (
                     delimiter, self._read_max_bytes))
@@ -999,10 +998,10 @@ class IOStream(BaseIOStream):
         .. versionadded:: 3.3
         """
         if (self._read_callback or self._read_future or
-            self._write_callback or self._write_future or
-            self._connect_callback or self._connect_future or
-            self._pending_callbacks or self._closed or
-            self._read_buffer or self._write_buffer):
+                self._write_callback or self._write_future or
+                self._connect_callback or self._connect_future or
+                self._pending_callbacks or self._closed or
+                self._read_buffer or self._write_buffer):
             raise ValueError("IOStream is not idle; cannot convert to SSL")
         if ssl_options is None:
             ssl_options = {}
@@ -1033,7 +1032,6 @@ class IOStream(BaseIOStream):
         ssl_stream.read_chunk_size = self.read_chunk_size
         return future
 
-
     def _handle_connect(self):
         err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
         if err != 0:
index 306220cd24fc12ec2bf5ca3f694e993ec9449cd0..7066466414a7fc8b3d4b8286c453ffdbc8005644 100644 (file)
@@ -83,10 +83,10 @@ class LogFormatter(logging.Formatter):
     DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s'
     DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S'
     DEFAULT_COLORS = {
-        logging.DEBUG:      4,  # Blue
-        logging.INFO:       2,  # Green
-        logging.WARNING:    3,  # Yellow
-        logging.ERROR:      1,  # Red
+        logging.DEBUG: 4,  # Blue
+        logging.INFO: 2,  # Green
+        logging.WARNING: 3,  # Yellow
+        logging.ERROR: 1,  # Red
     }
 
     def __init__(self, color=True, fmt=DEFAULT_FORMAT,
index 7e37d0eaa9f89f6ba7bb391e787ba9a9b248ef9f..a9e05d1e6158db3cecf44360d185f76eadc28eaf 100644 (file)
@@ -95,7 +95,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags
                                       0, flags)):
         af, socktype, proto, canonname, sockaddr = res
         if (platform.system() == 'Darwin' and address == 'localhost' and
-            af == socket.AF_INET6 and sockaddr[3] != 0):
+                af == socket.AF_INET6 and sockaddr[3] != 0):
             # Mac OS X includes a link-local address fe80::1%lo0 in the
             # getaddrinfo results for 'localhost'.  However, the firewall
             # doesn't understand that this is a local address and will
index 1dbb3f4169f68f1e5d481011d2fa6f265b457f71..ddfe06b4a5e2c33ea2b12f90ce3196c53bf4b09a 100644 (file)
@@ -32,7 +32,8 @@ if os.name == 'nt':
     from tornado.platform.windows import set_close_exec
 elif 'APPENGINE_RUNTIME' in os.environ:
     from tornado.platform.common import Waker
-    def set_close_exec(fd): pass
+    def set_close_exec(fd):
+        pass
 else:
     from tornado.platform.posix import set_close_exec, Waker
 
index ddbaa9c0041ac4a0441e5a67e15181268b4bd275..06d7ecfa9fb8ba6213976fce9fe90f498a681e82 100644 (file)
@@ -41,12 +41,14 @@ try:
 except ImportError:
     certifi = None
 
+
 def _default_ca_certs():
     if certifi is None:
         raise Exception("The 'certifi' package is required to use https "
                         "in simple_httpclient")
     return certifi.where()
 
+
 class SimpleAsyncHTTPClient(AsyncHTTPClient):
     """Non-blocking HTTP client with no external dependencies.
 
@@ -316,13 +318,13 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
         if not self.request.allow_nonstandard_methods:
             if self.request.method in ("POST", "PATCH", "PUT"):
                 if (self.request.body is None and
-                    self.request.body_producer is None):
+                        self.request.body_producer is None):
                     raise AssertionError(
                         'Body must not be empty for "%s" request'
                         % self.request.method)
             else:
                 if (self.request.body is not None or
-                    self.request.body_producer is not None):
+                        self.request.body_producer is not None):
                     raise AssertionError(
                         'Body must be empty for "%s" request'
                         % self.request.method)
@@ -339,7 +341,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
         if self.request.use_gzip:
             self.request.headers["Accept-Encoding"] = "gzip"
         req_path = ((self.parsed.path or '/') +
-                   (('?' + self.parsed.query) if self.parsed.query else ''))
+                    (('?' + self.parsed.query) if self.parsed.query else ''))
         self.stream.set_nodelay(True)
         self.connection = HTTP1Connection(
             self.stream, True,
@@ -374,7 +376,6 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
         if start_read:
             self._read_response()
 
-
     def _read_response(self):
         # Ensure that any exception raised in read_response ends up in our
         # stack context.
index 4354a2ba2cc302fab618021107b8b2514bc9db08..d49eb5cd60b38fe61fb8e46b2a06713fe7264c4e 100644 (file)
@@ -29,6 +29,7 @@ from tornado.netutil import Resolver
 
 _INITIAL_CONNECT_TIMEOUT = 0.3
 
+
 class _Connector(object):
     """A stateless implementation of the "Happy Eyeballs" algorithm.
 
index d5e28df6efbdfc28d914564472b7d99fc20b6fd2..427acec5758f2728279a1e64c30dca23f91262c6 100644 (file)
@@ -34,6 +34,7 @@ except ImportError:
     # ssl is not available on Google App Engine.
     ssl = None
 
+
 class TCPServer(object):
     r"""A non-blocking, single-threaded TCP server.
 
index 8bb0ac0aaecee43f4ce88c6bd0c047f32aea6f7f..4dcec5d5f617f92591f6ac503f3de14a1ee93052 100644 (file)
@@ -784,7 +784,7 @@ def _parse(reader, template, in_block=None, in_loop=None):
         if allowed_parents is not None:
             if not in_block:
                 raise ParseError("%s outside %s block" %
-                                (operator, allowed_parents))
+                                 (operator, allowed_parents))
             if in_block not in allowed_parents:
                 raise ParseError("%s block cannot be attached to %s block" % (operator, in_block))
             body.chunks.append(_IntermediateControlBlock(contents, line))
index 259041b06965ee86e62d4d32edb791ecf6a7aa23..3873cf1e3182f8911649755fa00bcff5ba8976eb 100644 (file)
@@ -67,14 +67,17 @@ class DigestAuthHandler(RequestHandler):
                             'Digest realm="%s", nonce="%s", opaque="%s"' %
                             (realm, nonce, opaque))
 
+
 class CustomReasonHandler(RequestHandler):
     def get(self):
         self.set_status(200, "Custom reason")
 
+
 class CustomFailReasonHandler(RequestHandler):
     def get(self):
         self.set_status(400, "Custom reason")
 
+
 @unittest.skipIf(pycurl is None, "pycurl module not present")
 class CurlHTTPClientTestCase(AsyncHTTPTestCase):
     def setUp(self):
index 254d5175ae0d479348ac562a08e7a40599cc622a..a15cdf73a152f970986937d233ecc06f42ecd0d1 100644 (file)
@@ -1031,7 +1031,7 @@ class WithTimeoutTest(AsyncTestCase):
         self.io_loop.add_timeout(datetime.timedelta(seconds=0.1),
                                  lambda: future.set_result('asdf'))
         result = yield gen.with_timeout(datetime.timedelta(seconds=3600),
-                                         future)
+                                        future)
         self.assertEqual(result, 'asdf')
 
     @gen_test
index 8387c23cf5bb7f4c256c77e096abb5013ddab9a9..4701569d5ff102b6a95bdfece14ae04bc805c62a 100644 (file)
@@ -735,6 +735,7 @@ class GzipBaseTest(object):
         response = self.fetch('/', method='POST', body='foo=bar')
         self.assertEquals(json_decode(response.body), {u('foo'): [u('bar')]})
 
+
 class GzipTest(GzipBaseTest, AsyncHTTPTestCase):
     def get_httpserver_options(self):
         return dict(gzip=True)
@@ -743,6 +744,7 @@ class GzipTest(GzipBaseTest, AsyncHTTPTestCase):
         response = self.post_gzip('foo=bar')
         self.assertEquals(json_decode(response.body), {u('foo'): [u('bar')]})
 
+
 class GzipUnsupportedTest(GzipBaseTest, AsyncHTTPTestCase):
     def test_gzip_unsupported(self):
         # Gzip support is opt-in; without it the server fails to parse
@@ -818,10 +820,10 @@ class StreamingChunkSizeTest(AsyncHTTPTestCase):
         self.assertEqual([16, 16, 16, 2], chunks)
 
     def test_compressed_body(self):
-         self.fetch_chunk_sizes(body=self.compress(self.BODY),
-                                headers={'Content-Encoding': 'gzip'})
-         # Compression creates irregular boundaries so the assertions
-         # in fetch_chunk_sizes are as specific as we can get.
+        self.fetch_chunk_sizes(body=self.compress(self.BODY),
+                               headers={'Content-Encoding': 'gzip'})
+        # Compression creates irregular boundaries so the assertions
+        # in fetch_chunk_sizes are as specific as we can get.
 
     def test_chunked_body(self):
         def body_producer(write):
@@ -943,41 +945,41 @@ class BodyLimitsTest(AsyncHTTPTestCase):
         return SimpleAsyncHTTPClient(io_loop=self.io_loop)
 
     def test_small_body(self):
-        response = self.fetch('/buffered', method='PUT', body=b'a'*4096)
+        response = self.fetch('/buffered', method='PUT', body=b'a' * 4096)
         self.assertEqual(response.body, b'4096')
-        response = self.fetch('/streaming', method='PUT', body=b'a'*4096)
+        response = self.fetch('/streaming', method='PUT', body=b'a' * 4096)
         self.assertEqual(response.body, b'4096')
 
     def test_large_body_buffered(self):
         with ExpectLog(gen_log, '.*Content-Length too long'):
-            response = self.fetch('/buffered', method='PUT', body=b'a'*10240)
+            response = self.fetch('/buffered', method='PUT', body=b'a' * 10240)
         self.assertEqual(response.code, 599)
 
     def test_large_body_buffered_chunked(self):
         with ExpectLog(gen_log, '.*chunked body too large'):
             response = self.fetch('/buffered', method='PUT',
-                                  body_producer=lambda write: write(b'a'*10240))
+                                  body_producer=lambda write: write(b'a' * 10240))
         self.assertEqual(response.code, 599)
 
     def test_large_body_streaming(self):
         with ExpectLog(gen_log, '.*Content-Length too long'):
-            response = self.fetch('/streaming', method='PUT', body=b'a'*10240)
+            response = self.fetch('/streaming', method='PUT', body=b'a' * 10240)
         self.assertEqual(response.code, 599)
 
     def test_large_body_streaming_chunked(self):
         with ExpectLog(gen_log, '.*chunked body too large'):
             response = self.fetch('/streaming', method='PUT',
-                                  body_producer=lambda write: write(b'a'*10240))
+                                  body_producer=lambda write: write(b'a' * 10240))
         self.assertEqual(response.code, 599)
 
     def test_large_body_streaming_override(self):
         response = self.fetch('/streaming?expected_size=10240', method='PUT',
-                              body=b'a'*10240)
+                              body=b'a' * 10240)
         self.assertEqual(response.body, b'10240')
 
     def test_large_body_streaming_chunked_override(self):
         response = self.fetch('/streaming?expected_size=10240', method='PUT',
-                              body_producer=lambda write: write(b'a'*10240))
+                              body_producer=lambda write: write(b'a' * 10240))
         self.assertEqual(response.body, b'10240')
 
     @gen_test
@@ -1004,7 +1006,7 @@ class BodyLimitsTest(AsyncHTTPTestCase):
             # Use a raw stream so we can make sure it's all on one connection.
             stream.write(b'PUT /streaming?expected_size=10240 HTTP/1.1\r\n'
                          b'Content-Length: 10240\r\n\r\n')
-            stream.write(b'a'*10240)
+            stream.write(b'a' * 10240)
             headers, response = yield gen.Task(read_stream_body, stream)
             self.assertEqual(response, b'10240')
             # Without the ?expected_size parameter, we get the old default value
index b3b3e82d6974304fbbb91f4384361dff44318c64..e9d241a5dcb4978b9b192a5a6b9d6cff671f22a0 100644 (file)
@@ -19,12 +19,14 @@ import socket
 import ssl
 import sys
 
+
 def _server_ssl_options():
     return dict(
         certfile=os.path.join(os.path.dirname(__file__), 'test.crt'),
         keyfile=os.path.join(os.path.dirname(__file__), 'test.key'),
     )
 
+
 class HelloHandler(RequestHandler):
     def get(self):
         self.write("Hello")
@@ -719,6 +721,7 @@ class TestIOStreamMixin(object):
             server.close()
             client.close()
 
+
 class TestIOStreamWebHTTP(TestIOStreamWebMixin, AsyncHTTPTestCase):
     def _make_client_iostream(self):
         return IOStream(socket.socket(), io_loop=self.io_loop)
index c1c5746b07680a7b477de93b636d4364b0c94ad1..a80b80b9268017af0d4c6f2963c8e55500528fa6 100644 (file)
@@ -66,6 +66,7 @@ class TornadoTextTestRunner(unittest.TextTestRunner):
             self.stream.write("\n")
         return result
 
+
 def main():
     # The -W command-line option does not work in a virtualenv with
     # python 3 (as of virtualenv 1.7), so configure warnings
index ae0e6a06c19bab031a53d33830d5754b8bd5b551..a9dfe5a5af9e01de393a123ad949a89e2ec4f769 100644 (file)
@@ -31,6 +31,7 @@ from tornado.test.util import skipIfNoIPv6, unittest
 # and AF_INET6 because some installations do not have AF_INET6.
 AF1, AF2 = 1, 2
 
+
 class TestTCPServer(TCPServer):
     def __init__(self, family):
         super(TestTCPServer, self).__init__()
@@ -47,6 +48,7 @@ class TestTCPServer(TCPServer):
         for stream in self.streams:
             stream.close()
 
+
 class TCPClientTest(AsyncTestCase):
     def setUp(self):
         super(TCPClientTest, self).setUp()
@@ -76,7 +78,6 @@ class TCPClientTest(AsyncTestCase):
         if socket.AF_INET6 not in families:
             self.skipTest("localhost does not resolve to ipv6")
 
-
     @gen_test
     def do_test_connect(self, family, host):
         port = self.start_server(family)
index e9291d16d0f72dd5ff1f0cf1afc53a5441ec03b5..6d8b624eb9405f25a00207164ff835190d73f926 100644 (file)
@@ -397,7 +397,7 @@ raw: {% raw name %}""",
   #}{{i
   }}{% end
 %}""",
-                         })
+                             })
         self.assertEqual(loader.load("foo.txt").generate(items=range(5)),
                          b"0, 1, 2, 3, 4")
 
index d7762a730aac55e53551a2a77f83ac14982a0ce0..f3d592b0ce70b6c92860ac24bc3d5fc87b596b0a 100644 (file)
@@ -102,14 +102,14 @@ class SecureCookieV1Test(unittest.TestCase):
         sig = match.group(2)
         self.assertEqual(
             _create_signature_v1(handler.application.settings["cookie_secret"],
-                              'foo', '12345678', timestamp),
+                                 'foo', '12345678', timestamp),
             sig)
         # shifting digits from payload to timestamp doesn't alter signature
         # (this is not desirable behavior, just confirming that that's how it
         # works)
         self.assertEqual(
             _create_signature_v1(handler.application.settings["cookie_secret"],
-                              'foo', '1234', b'5678' + timestamp),
+                                 'foo', '1234', b'5678' + timestamp),
             sig)
         # tamper with the cookie
         handler._cookies['foo'] = utf8('1234|5678%s|%s' % (
index 4bf37b89d7a95872d20db27caee0246e7e90fa46..dc30e94ffa1eb91dc25aff214534f1bc8ed3443d 100644 (file)
@@ -131,6 +131,7 @@ class _TestMethodWrapper(object):
         """
         return getattr(self.orig_method, name)
 
+
 class AsyncTestCase(unittest.TestCase):
     """`~unittest.TestCase` subclass for testing `.IOLoop`-based
     asynchronous code.
index 011db9e6d490f61b038c763dfd401f10dce6d41e..cc05fabfc0bfea5a3547233f0fa00223fdcb5e49 100644 (file)
@@ -1302,7 +1302,7 @@ class RequestHandler(object):
         except Exception as e:
             self._handle_request_exception(e)
             if (self._prepared_future is not None and
-                not self._prepared_future.done()):
+                    not self._prepared_future.done()):
                 # In case we failed before setting _prepared_future, do it
                 # now (to unblock the HTTP server).  Note that this is not
                 # in a finally block to avoid GC issues prior to Python 3.4.
@@ -1859,7 +1859,7 @@ class _RequestDispatcher(httputil.HTTPMessageDelegate):
             StaticFileHandler.reset()
 
         self.handler = self.handler_class(self.application, self.request,
-                                     **self.handler_kwargs)
+                                          **self.handler_kwargs)
         transforms = [t(self.request) for t in self.application.transforms]
 
         if self.stream_request_body:
@@ -1876,7 +1876,6 @@ class _RequestDispatcher(httputil.HTTPMessageDelegate):
         return self.handler._prepared_future
 
 
-
 class HTTPError(Exception):
     """An exception that will turn into an HTTP error response.
 
@@ -2813,7 +2812,8 @@ def create_signed_value(secret, name, value, version=None, clock=None):
 # A leading version number in decimal with no leading zeros, followed by a pipe.
 _signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
 
-def decode_signed_value(secret, name, value, max_age_days=31, clock=None,min_version=None):
+
+def decode_signed_value(secret, name, value, max_age_days=31, clock=None, min_version=None):
     if clock is None:
         clock = time.time
     if min_version is None:
@@ -2853,6 +2853,7 @@ def decode_signed_value(secret, name, value, max_age_days=31, clock=None,min_ver
     else:
         return None
 
+
 def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
     parts = utf8(value).split(b"|")
     if len(parts) != 3:
@@ -2889,9 +2890,9 @@ def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
         field_value = rest[:n]
         # In python 3, indexing bytes returns small integers; we must
         # use a slice to get a byte string as in python 2.
-        if rest[n:n+1] != b'|':
+        if rest[n:n + 1] != b'|':
             raise ValueError("malformed v2 signed value field")
-        rest = rest[n+1:]
+        rest = rest[n + 1:]
         return field_value, rest
     rest = value[2:]  # remove version number
     try:
@@ -2924,11 +2925,13 @@ def _create_signature_v1(secret, *parts):
         hash.update(utf8(part))
     return utf8(hash.hexdigest())
 
+
 def _create_signature_v2(secret, s):
     hash = hmac.new(utf8(secret), digestmod=hashlib.sha256)
     hash.update(utf8(s))
     return utf8(hash.hexdigest())
 
+
 def _unquote_or_none(s):
     """None-safe wrapper around url_unescape to handle unamteched optional
     groups correctly.
index 3db215dba3f2db181670c27a88c51228eff094d4..bfb90d180d24dfed9428949a4614543dfa16efc0 100644 (file)
@@ -960,7 +960,7 @@ def _websocket_mask_python(mask, data):
         return unmasked.tostring()
 
 if (os.environ.get('TORNADO_NO_EXTENSION') or
-    os.environ.get('TORNADO_EXTENSION') == '0'):
+        os.environ.get('TORNADO_EXTENSION') == '0'):
     # These environment variables exist to make it easier to do performance
     # comparisons; they are not guaranteed to remain supported in the future.
     _websocket_mask = _websocket_mask_python
index a42b5b0bae52c3236afc96005400ad9b9a325dea..0052d674cfbefe63fdd28d8dcae3efb3ed1c4bda 100644 (file)
@@ -127,7 +127,7 @@ class _WSGIConnection(httputil.HTTPConnection):
 
     def finish(self):
         if (self._expected_content_remaining is not None and
-            self._expected_content_remaining != 0):
+                self._expected_content_remaining != 0):
             self._error = httputil.HTTPOutputException(
                 "Tried to write %d bytes less than Content-Length" %
                 self._expected_content_remaining)
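
Because every hunk in this commit touches only blank lines, indentation, and spacing, one quick sanity check is that the abstract syntax tree of each touched module is unchanged. A small sketch of that check using only the standard library; "old/" and "new/" are hypothetical checkouts of the parent commit and this commit, and FILES would list the 27 paths shown at the top of this page:

    # Compare AST dumps before and after the reformat; ast.dump() omits line
    # and column information by default, so sources that differ only in
    # whitespace and comments produce identical dumps.
    import ast

    FILES = [
        "tornado/auth.py",
        "tornado/gen.py",
        "tornado/http1connection.py",
        "tornado/web.py",
        # ... the remaining paths from the file list above
    ]

    def same_ast(old_path, new_path):
        with open(old_path) as f:
            old_tree = ast.parse(f.read())
        with open(new_path) as f:
            new_tree = ast.parse(f.read())
        return ast.dump(old_tree) == ast.dump(new_tree)

    for path in FILES:
        assert same_ast("old/" + path, "new/" + path), path
    print("all modules structurally unchanged")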