*: Run the new autopep8
author      Ben Darnell <ben@bendarnell.com>
            Mon, 10 Apr 2017 03:07:00 +0000 (23:07 -0400)
committer   Ben Darnell <ben@bendarnell.com>
            Mon, 10 Apr 2017 03:07:00 +0000 (23:07 -0400)
Mainly adds a bunch of blank lines.
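The commit does not record the exact autopep8 command that was run, so the snippet below is only a sketch of how a pass like this can be reproduced through autopep8's Python API (the --in-place CLI mode is the more likely route); the file path and the use of default, non-aggressive options are assumptions:

    # Sketch: re-run a conservative autopep8 pass over one file (assumed defaults).
    import autopep8

    path = "tornado/escape.py"  # any of the 27 files listed below
    with open(path) as f:
        source = f.read()

    # fix_code() applies pycodestyle-driven fixes; with default options the
    # result here is mostly added blank lines and whitespace changes.
    fixed = autopep8.fix_code(source)

    with open(path, "w") as f:
        f.write(fixed)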

27 files changed:
tornado/autoreload.py
tornado/concurrent.py
tornado/curl_httpclient.py
tornado/escape.py
tornado/gen.py
tornado/httpclient.py
tornado/httpserver.py
tornado/httputil.py
tornado/iostream.py
tornado/netutil.py
tornado/platform/asyncio.py
tornado/platform/common.py
tornado/platform/interface.py
tornado/platform/twisted.py
tornado/stack_context.py
tornado/test/concurrent_test.py
tornado/test/gen_test.py
tornado/test/runtests.py
tornado/test/stack_context_test.py
tornado/test/tcpserver_test.py
tornado/test/web_test.py
tornado/test/windows_test.py
tornado/test/wsgi_test.py
tornado/testing.py
tornado/util.py
tornado/web.py
tornado/websocket.py

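Most hunks below follow one pattern: pycodestyle's E305 check expects two blank lines between the end of a top-level def or class and the next piece of module-level code, and autopep8 inserts the missing line. A minimal sketch of the rule, using the first file below (tornado/autoreload.py) with the function body elided:

    import os


    # Before this commit there was a single blank line between the end of
    # _reload() and the module-level _USAGE assignment, which pycodestyle
    # flags as E305 ("expected 2 blank lines after class or function
    # definition"); autopep8 inserts the second blank line shown here.
    def _reload():
        # body elided; the real function ends by exiting the process uncleanly
        os._exit(0)


    _USAGE = """\
    Usage:
      python -m tornado.autoreload -m module.to.run [args...]
    """

The remaining changes are mechanical fixes of the same kind: spaces around arithmetic operators (10*1024*1024 becomes 10 * 1024 * 1024), a space after commas and before inline comments, '# ' at the start of comments, and the removal of the occasional extra blank line between methods.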
diff --git a/tornado/autoreload.py b/tornado/autoreload.py
index 5b50ce9332ea575de3bd943d138d0b961717d47e..60571efe71fe83504ab1b84bbf16f5315b155071 100644
@@ -243,6 +243,7 @@ def _reload():
             # unwind, so just exit uncleanly.
             os._exit(0)
 
+
 _USAGE = """\
 Usage:
   python -m tornado.autoreload -m module.to.run [args...]
diff --git a/tornado/concurrent.py b/tornado/concurrent.py
index 312fdf4ff4852d6b524c3371a970ef1963df8b5d..667e6b1788ecfe426e1f02b084d01840fd1929a3 100644
@@ -343,6 +343,7 @@ class Future(object):
             app_log.error('Future %r exception was never retrieved: %s',
                           self, ''.join(tb).rstrip())
 
+
 TracebackFuture = Future
 
 if futures is None:
@@ -367,6 +368,7 @@ class DummyExecutor(object):
     def shutdown(self, wait=True):
         pass
 
+
 dummy_executor = DummyExecutor()
 
 
diff --git a/tornado/curl_httpclient.py b/tornado/curl_httpclient.py
index ab54bc0b14e55e815c6160ced271b14ad7a63616..eef4a17a6b4a7e6e30c2db0ab5b1f44c2791ebea 100644
@@ -278,9 +278,9 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
         if curl_log.isEnabledFor(logging.DEBUG):
             curl.setopt(pycurl.VERBOSE, 1)
             curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
-        if hasattr(pycurl,'PROTOCOLS'): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
-            curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP|pycurl.PROTO_HTTPS)
-            curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP|pycurl.PROTO_HTTPS)
+        if hasattr(pycurl, 'PROTOCOLS'):  # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
+            curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
+            curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
         return curl
 
     def _curl_setup_request(self, curl, request, buffer, headers):
diff --git a/tornado/escape.py b/tornado/escape.py
index c4b2fa3b480ec864adfa61e73c0a2dc71157ee92..2ca3fe3fe883249d6cacc457d5d0565b04804731 100644
@@ -199,6 +199,7 @@ def utf8(value):
         )
     return value.encode("utf-8")
 
+
 _TO_UNICODE_TYPES = (unicode_type, type(None))
 
 
@@ -216,6 +217,7 @@ def to_unicode(value):
         )
     return value.decode("utf-8")
 
+
 # to_unicode was previously named _unicode not because it was private,
 # but to avoid conflicts with the built-in unicode() function/type
 _unicode = to_unicode
@@ -264,6 +266,7 @@ def recursive_unicode(obj):
     else:
         return obj
 
+
 # I originally used the regex from
 # http://daringfireball.net/2010/07/improved_regex_for_matching_urls
 # but it gets all exponential on certain patterns (such as too many trailing
@@ -391,4 +394,5 @@ def _build_unicode_map():
         unicode_map[name] = unichr(value)
     return unicode_map
 
+
 _HTML_UNICODE_MAP = _build_unicode_map()
diff --git a/tornado/gen.py b/tornado/gen.py
index 9ac1d6e35efd0e8233557c13e957d6266eed18cf..0ce4dc9f2bf62337759ffc294b0464ed7d34a4a9 100644
@@ -245,6 +245,7 @@ def coroutine(func, replace_callback=True):
     """
     return _make_coroutine_wrapper(func, replace_callback=True)
 
+
 # Ties lifetime of runners to their result futures. Github Issue #1769
 # Generators, like any object in Python, must be strong referenced
 # in order to not be cleaned up by the garbage collector. When using
@@ -264,6 +265,7 @@ def coroutine(func, replace_callback=True):
 # Runner alive.
 _futures_to_runners = weakref.WeakKeyDictionary()
 
+
 def _make_coroutine_wrapper(func, replace_callback):
     """The inner workings of ``@gen.coroutine`` and ``@gen.engine``.
 
@@ -718,6 +720,7 @@ def multi(children, quiet_exceptions=()):
     else:
         return multi_future(children, quiet_exceptions=quiet_exceptions)
 
+
 Multi = multi
 
 
@@ -1169,6 +1172,7 @@ class Runner(object):
             self.stack_context_deactivate()
             self.stack_context_deactivate = None
 
+
 Arguments = collections.namedtuple('Arguments', ['args', 'kwargs'])
 
 
@@ -1188,6 +1192,7 @@ def _argument_adapter(callback):
             callback(None)
     return wrapper
 
+
 # Convert Awaitables into Futures. It is unfortunately possible
 # to have infinite recursion here if those Awaitables assume that
 # we're using a different coroutine runner and yield objects
@@ -1276,6 +1281,7 @@ def convert_yielded(yielded):
     else:
         raise BadYieldError("yielded unknown object %r" % (yielded,))
 
+
 if singledispatch is not None:
     convert_yielded = singledispatch(convert_yielded)
 
diff --git a/tornado/httpclient.py b/tornado/httpclient.py
index 6f45367ca4d7be9c4a8a0ab65ed4bd0b685b8e5e..8436ece46993755a167dcc0ac217940261543c1d 100644
@@ -673,5 +673,6 @@ def main():
             print(native_str(response.body))
     client.close()
 
+
 if __name__ == "__main__":
     main()
diff --git a/tornado/httpserver.py b/tornado/httpserver.py
index 6ae32e6f7666c2e52d1c7cde9b112d1af0e60e41..d757be188df1b10b3ef36e70c589ce110051adfb 100644
@@ -321,4 +321,5 @@ class _ProxyAdapter(httputil.HTTPMessageDelegate):
     def _cleanup(self):
         self.connection.context._unapply_xheaders()
 
+
 HTTPRequest = httputil.HTTPServerRequest
diff --git a/tornado/httputil.py b/tornado/httputil.py
index 39a27f78c08ad0169f1ce488997f85bd442f4ed2..dc206fc8cba6b489beb0f647dcb1c75ab7455a4e 100644
@@ -99,6 +99,7 @@ class _NormalizedHeaderCache(dict):
             del self[old_key]
         return normalized
 
+
 _normalized_headers = _NormalizedHeaderCache(1000)
 
 
@@ -936,10 +937,12 @@ def split_host_and_port(netloc):
         port = None
     return (host, port)
 
+
 _OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
 _QuotePatt = re.compile(r"[\\].")
 _nulljoin = ''.join
 
+
 def _unquote_cookie(str):
     """Handle double quotes and escaping in cookie values.
 
@@ -981,11 +984,11 @@ def _unquote_cookie(str):
             k = q_match.start(0)
         if q_match and (not o_match or k < j):     # QuotePatt matched
             res.append(str[i:k])
-            res.append(str[k+1])
+            res.append(str[k + 1])
             i = k + 2
         else:                                      # OctalPatt matched
             res.append(str[i:j])
-            res.append(chr(int(str[j+1:j+4], 8)))
+            res.append(chr(int(str[j + 1:j + 4], 8)))
             i = j + 4
     return _nulljoin(res)
 
diff --git a/tornado/iostream.py b/tornado/iostream.py
index 691251ad2ac03df4a0f78131985bacdedee8bf34..a1619c497415aa863246fd6dee364ee52f4fc973 100644
@@ -908,8 +908,8 @@ class BaseIOStream(object):
         assert loc <= self._read_buffer_size
         # Slice the bytearray buffer into bytes, without intermediate copying
         b = (memoryview(self._read_buffer)
-                       [self._read_buffer_pos:self._read_buffer_pos + loc]
-                       ).tobytes()
+             [self._read_buffer_pos:self._read_buffer_pos + loc]
+             ).tobytes()
         self._read_buffer_pos += loc
         self._read_buffer_size -= loc
         # Amortized O(1) shrink
diff --git a/tornado/netutil.py b/tornado/netutil.py
index 9653421fedde75038b8f5bf3d57148a39ad69488..c34c8c8bb5bdba6c6348be5c7418584b554fdf6d 100644
@@ -200,6 +200,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
         sockets.append(sock)
     return sockets
 
+
 if hasattr(socket, 'AF_UNIX'):
     def bind_unix_socket(file, mode=0o600, backlog=_DEFAULT_BACKLOG):
         """Creates a listening unix socket.
diff --git a/tornado/platform/asyncio.py b/tornado/platform/asyncio.py
index 549a1cc00c0bc405cbc708e55bd2a73132f55606..830ee1f3b1b68d97fc1871798f59740155e18403 100644
@@ -30,7 +30,7 @@ from tornado import stack_context
 try:
     # Import the real asyncio module for py33+ first.  Older versions of the
     # trollius backport also use this name.
-    import asyncio # type: ignore
+    import asyncio  # type: ignore
 except ImportError as e:
     # Asyncio itself isn't available; see if trollius is (backport to py26+).
     try:
@@ -217,5 +217,6 @@ def to_asyncio_future(tornado_future):
     tornado.concurrent.chain_future(tornado_future, af)
     return af
 
+
 if hasattr(convert_yielded, 'register'):
     convert_yielded.register(asyncio.Future, to_tornado_future)  # type: ignore
diff --git a/tornado/platform/common.py b/tornado/platform/common.py
index f51f49acc88dac05e7761d940dd387e6d728ce40..a73f8db7fbac40834662126b874aa1fa79093d2e 100644
@@ -8,6 +8,7 @@ import time
 from tornado.platform import interface
 from tornado.util import errno_from_exception
 
+
 def try_close(f):
     # Avoid issue #875 (race condition when using the file in another
     # thread).
diff --git a/tornado/platform/interface.py b/tornado/platform/interface.py
index e4d92736a9b688af7e4c108f8cb77f3bc53471fe..c0ef2905c3d30ab604e8e3f62153d1f3d035edb2 100644
@@ -62,5 +62,6 @@ class Waker(object):
         """Closes the waker's file descriptor(s)."""
         raise NotImplementedError()
 
+
 def monotonic_time():
     raise NotImplementedError()
diff --git a/tornado/platform/twisted.py b/tornado/platform/twisted.py
index ec269413b27da463d30876a72e513b6797f95e28..0f9787e84d7033a07eb138ed82adf9e1de86ad84 100644
@@ -574,6 +574,7 @@ class TwistedResolver(Resolver):
         ]
         raise gen.Return(result)
 
+
 if hasattr(gen.convert_yielded, 'register'):
     @gen.convert_yielded.register(Deferred)  # type: ignore
     def _(d):
diff --git a/tornado/stack_context.py b/tornado/stack_context.py
index 74e393856cb7f354aa824ff03cfe4b82603945e9..61ae51f4eb10641d2bcb43afeacc11cd316e3321 100644
@@ -82,6 +82,8 @@ class StackContextInconsistentError(Exception):
 class _State(threading.local):
     def __init__(self):
         self.contexts = (tuple(), None)
+
+
 _state = _State()
 
 
diff --git a/tornado/test/concurrent_test.py b/tornado/test/concurrent_test.py
index fd0f4a67fe3516e2a14f1b8cee4dc1be43f7ee64..4d89f572375e61f9806dafb94417a1d2384775d3 100644
@@ -178,7 +178,7 @@ class ReturnFutureTest(AsyncTestCase):
         @gen.coroutine
         def f():
             yield gen.moment
-            1/0
+            1 / 0
 
         g = f()
 
diff --git a/tornado/test/gen_test.py b/tornado/test/gen_test.py
index de803a8b3bdcb9c937299a066f2b10c0c8003ded..fea4c644978ad5f4fe841ff52c208f42e5b4f4e1 100644
@@ -26,6 +26,7 @@ try:
 except ImportError:
     futures = None
 
+
 class GenEngineTest(AsyncTestCase):
     def setUp(self):
         super(GenEngineTest, self).setUp()
@@ -1444,6 +1445,7 @@ class RunnerGCTest(AsyncTestCase):
         """Runners shouldn't GC if future is alive"""
         # Create the weakref
         weakref_scope = [None]
+
         def callback():
             gc.collect(2)
             weakref_scope[0]().set_result(123)
@@ -1460,5 +1462,6 @@ class RunnerGCTest(AsyncTestCase):
             tester()
         )
 
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/tornado/test/runtests.py b/tornado/test/runtests.py
index 81ae9a27db512040bb9f6be342f6ddd1f807749c..b81c5f225ebdbcbbe6aa888c75611c663880c533 100644
@@ -185,5 +185,6 @@ def main():
                           log_counter.warning_count, log_counter.error_count)
             sys.exit(1)
 
+
 if __name__ == '__main__':
     main()
diff --git a/tornado/test/stack_context_test.py b/tornado/test/stack_context_test.py
index d55e0ee35257618b01fb3fd2bf2693994c8d277c..59d25474c3acdbaf699e83a70601df5b24d2b92e 100644
@@ -284,5 +284,6 @@ class StackContextTest(AsyncTestCase):
             f1)
         self.assertEqual(self.active_contexts, [])
 
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/tornado/test/tcpserver_test.py b/tornado/test/tcpserver_test.py
index 18473a55e947c3e1ba8614a29fab8b0ecf5318f9..9afb54202a572718cfee924a01111e3f6608bcc1 100644
@@ -68,4 +68,3 @@ class TCPServerTest(AsyncTestCase):
         server.add_socket(sock)
         server.stop()
         server.stop()
-
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index 913818f99a6a9e2b514c8565d75d257840b354dc..d79ea52c1be27111e387f1a23bbd8c5954ae2258 100644
@@ -1574,7 +1574,6 @@ class GzipTestCase(SimpleHandlerTestCase):
                 response.headers.get('X-Consumed-Content-Encoding')),
             'gzip')
 
-
     def test_gzip(self):
         response = self.fetch('/')
         self.assert_compressed(response)
@@ -1605,6 +1604,7 @@ class GzipTestCase(SimpleHandlerTestCase):
         self.assertEqual([s.strip() for s in response.headers['Vary'].split(',')],
                          ['Accept-Language', 'Cookie', 'Accept-Encoding'])
 
+
 @wsgi_safe
 class PathArgsInPrepareTest(WebTestCase):
     class Handler(RequestHandler):
diff --git a/tornado/test/windows_test.py b/tornado/test/windows_test.py
index f136c8aab0160de1ab2f9a7b7f13ca802a85b211..e5cb33813909def00d43621ffed025a8022be884 100644
@@ -8,6 +8,7 @@ from tornado.platform.auto import set_close_exec
 
 skipIfNonWindows = unittest.skipIf(os.name != 'nt', 'non-windows platform')
 
+
 @skipIfNonWindows
 class WindowsTest(unittest.TestCase):
     def test_set_close_exec(self):
diff --git a/tornado/test/wsgi_test.py b/tornado/test/wsgi_test.py
index 78da7853fee49afeed773567979d2b39444a70a3..e6ccc82ae0c8c63d4c04d8966f7375f87e5d362c 100644
@@ -83,6 +83,8 @@ def wrap_web_tests_application():
                 return WSGIContainer(validator(self.app))
         result["WSGIApplication_" + cls.__name__] = WSGIApplicationWrappedTest
     return result
+
+
 globals().update(wrap_web_tests_application())
 
 
@@ -96,4 +98,6 @@ def wrap_web_tests_adapter():
                 return WSGIContainer(validator(WSGIAdapter(self.app)))
         result["WSGIAdapter_" + cls.__name__] = WSGIAdapterWrappedTest
     return result
+
+
 globals().update(wrap_web_tests_adapter())
diff --git a/tornado/testing.py b/tornado/testing.py
index eff2684d2c1bccace94120928cb76933f719a046..74d04b6000b8f2ac61a3bf2793e1cfe27cc3098a 100644
@@ -737,5 +737,6 @@ def main(**kwargs):
             gen_log.error('FAIL')
         raise
 
+
 if __name__ == '__main__':
     main()
diff --git a/tornado/util.py b/tornado/util.py
index b9960cf742247862c573eca6c578a035d77ec545..981b94c8eaeba783f832e6c0480ec1f0485415d3 100644
@@ -236,6 +236,7 @@ def _re_unescape_replacement(match):
         raise ValueError("cannot unescape '\\\\%s'" % group[0])
     return group
 
+
 _re_unescape_pattern = re.compile(r'\\(.)', re.DOTALL)
 
 
@@ -454,6 +455,7 @@ def _websocket_mask_python(mask, data):
     else:
         return unmasked_arr.tostring()
 
+
 if (os.environ.get('TORNADO_NO_EXTENSION') or
         os.environ.get('TORNADO_EXTENSION') == '0'):
     # These environment variables exist to make it easier to do performance
diff --git a/tornado/web.py b/tornado/web.py
index 6d6ce92e7ab9abf8c0eecb9e022ff8dca7303c8e..8ff52e9ce5046b39709c40307c1874aabe1dc717 100644
@@ -3119,6 +3119,7 @@ def create_signed_value(secret, name, value, version=None, clock=None,
     else:
         raise ValueError("Unsupported version %d" % version)
 
+
 # A leading version number in decimal
 # with no leading zeros, followed by a pipe.
 _signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
diff --git a/tornado/websocket.py b/tornado/websocket.py
index 0af9e8f8e4ed137ed15f7b7d23f6412ed5e03f05..1668a596855f5d3d4587490af0ba5d990dd466f3 100644
@@ -285,7 +285,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
 
            Added ``compression_level`` and ``mem_level``.
         """
-        #TODO: Add wbits option.
+        # TODO: Add wbits option.
         return None
 
     def open(self, *args, **kwargs):
@@ -603,7 +603,6 @@ class WebSocketProtocol13(WebSocketProtocol):
         self.last_ping = 0
         self.last_pong = 0
 
-
     def accept_connection(self):
         try:
             self._handle_websocket_headers()
@@ -826,7 +825,7 @@ class WebSocketProtocol13(WebSocketProtocol):
         new_len = self._frame_length
         if self._fragmented_message_buffer is not None:
             new_len += len(self._fragmented_message_buffer)
-        if new_len > (self.handler.max_message_size or 10*1024*1024):
+        if new_len > (self.handler.max_message_size or 10 * 1024 * 1024):
             self.close(1009, "message too big")
             return
         self.stream.read_bytes(
@@ -997,7 +996,7 @@ class WebSocketProtocol13(WebSocketProtocol):
         if self.ping_interval > 0:
             self.last_ping = self.last_pong = IOLoop.current().time()
             self.ping_callback = PeriodicCallback(
-                self.periodic_ping, self.ping_interval*1000)
+                self.periodic_ping, self.ping_interval * 1000)
             self.ping_callback.start()
 
     def periodic_ping(self):
@@ -1015,7 +1014,7 @@ class WebSocketProtocol13(WebSocketProtocol):
         now = IOLoop.current().time()
         since_last_pong = now - self.last_pong
         since_last_ping = now - self.last_ping
-        if (since_last_ping < 2*self.ping_interval and
+        if (since_last_ping < 2 * self.ping_interval and
                 since_last_pong > self.ping_timeout):
             self.close()
             return