git.ipfire.org Git - thirdparty/tornado.git/commitdiff
Switch from root logger to separate loggers.
authorBen Darnell <ben@bendarnell.com>
Sun, 9 Sep 2012 03:51:31 +0000 (20:51 -0700)
committerBen Darnell <ben@bendarnell.com>
Sun, 9 Sep 2012 03:54:06 +0000 (20:54 -0700)
Rather than using one logger per module, use three semantically-defined
loggers: tornado.access, tornado.application, and tornado.general.

19 files changed:
tornado/auth.py
tornado/autoreload.py
tornado/curl_httpclient.py
tornado/database.py
tornado/httpserver.py
tornado/httputil.py
tornado/ioloop.py
tornado/iostream.py
tornado/locale.py
tornado/log.py [new file with mode: 0644]
tornado/netutil.py
tornado/platform/twisted.py
tornado/process.py
tornado/simple_httpclient.py
tornado/template.py
tornado/testing.py
tornado/web.py
tornado/websocket.py
tornado/wsgi.py

index 14d9d7753ce5483c7d7fda8d0181aacc26fda2e6..016c5b94ed7a5a2fb7858e3a51085f5b37d2ad8b 100644 (file)
@@ -50,7 +50,6 @@ import base64
 import binascii
 import hashlib
 import hmac
-import logging
 import time
 import urllib
 import urlparse
@@ -59,6 +58,7 @@ import uuid
 from tornado import httpclient
 from tornado import escape
 from tornado.httputil import url_concat
+from tornado.log import gen_log
 from tornado.util import bytes_type, b
 
 
@@ -150,7 +150,7 @@ class OpenIdMixin(object):
 
     def _on_authentication_verified(self, callback, response):
         if response.error or b("is_valid:true") not in response.body:
-            logging.warning("Invalid OpenID response: %s", response.error or
+            gen_log.warning("Invalid OpenID response: %s", response.error or
                             response.body)
             callback(None)
             return
@@ -263,14 +263,14 @@ class OAuthMixin(object):
         oauth_verifier = self.get_argument("oauth_verifier", None)
         request_cookie = self.get_cookie("_oauth_request_token")
         if not request_cookie:
-            logging.warning("Missing OAuth request token cookie")
+            gen_log.warning("Missing OAuth request token cookie")
             callback(None)
             return
         self.clear_cookie("_oauth_request_token")
         cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
         if cookie_key != request_key:
-            logging.info((cookie_key, request_key, request_cookie))
-            logging.warning("Request token does not match cookie")
+            gen_log.info((cookie_key, request_key, request_cookie))
+            gen_log.warning("Request token does not match cookie")
             callback(None)
             return
         token = dict(key=cookie_key, secret=cookie_secret)
@@ -348,7 +348,7 @@ class OAuthMixin(object):
 
     def _on_access_token(self, callback, response):
         if response.error:
-            logging.warning("Could not fetch access token")
+            gen_log.warning("Could not fetch access token")
             callback(None)
             return
 
@@ -547,7 +547,7 @@ class TwitterMixin(OAuthMixin):
 
     def _on_twitter_request(self, callback, response):
         if response.error:
-            logging.warning("Error response %s fetching %s", response.error,
+            gen_log.warning("Error response %s fetching %s", response.error,
                             response.request.url)
             callback(None)
             return
@@ -669,7 +669,7 @@ class FriendFeedMixin(OAuthMixin):
 
     def _on_friendfeed_request(self, callback, response):
         if response.error:
-            logging.warning("Error response %s fetching %s", response.error,
+            gen_log.warning("Error response %s fetching %s", response.error,
                             response.request.url)
             callback(None)
             return
@@ -930,17 +930,17 @@ class FacebookMixin(object):
 
     def _parse_response(self, callback, response):
         if response.error:
-            logging.warning("HTTP error from Facebook: %s", response.error)
+            gen_log.warning("HTTP error from Facebook: %s", response.error)
             callback(None)
             return
         try:
             json = escape.json_decode(response.body)
         except Exception:
-            logging.warning("Invalid JSON from Facebook: %r", response.body)
+            gen_log.warning("Invalid JSON from Facebook: %r", response.body)
             callback(None)
             return
         if isinstance(json, dict) and json.get("error_code"):
-            logging.warning("Facebook error: %d: %r", json["error_code"],
+            gen_log.warning("Facebook error: %d: %r", json["error_code"],
                             json.get("error_msg"))
             callback(None)
             return
@@ -1007,7 +1007,7 @@ class FacebookGraphMixin(OAuth2Mixin):
     def _on_access_token(self, redirect_uri, client_id, client_secret,
                         callback, fields, response):
         if response.error:
-            logging.warning('Facebook auth error: %s' % str(response))
+            gen_log.warning('Facebook auth error: %s' % str(response))
             callback(None)
             return
 
@@ -1090,7 +1090,7 @@ class FacebookGraphMixin(OAuth2Mixin):
 
     def _on_facebook_request(self, callback, response):
         if response.error:
-            logging.warning("Error response %s fetching %s", response.error,
+            gen_log.warning("Error response %s fetching %s", response.error,
                             response.request.url)
             callback(None)
             return
index 55a10d10cc5aa2ff2d6f2386fd3aa6477175f389..ebd3f8b13c770deb987d999446c9974b23a0ae3a 100644 (file)
@@ -67,7 +67,6 @@ if __name__ == "__main__":
         del sys.path[0]
 
 import functools
-import logging
 import os
 import pkgutil
 import sys
@@ -75,6 +74,7 @@ import types
 import subprocess
 
 from tornado import ioloop
+from tornado.log import gen_log
 from tornado import process
 
 try:
@@ -177,7 +177,7 @@ def _check_file(modify_times, path):
         modify_times[path] = modified
         return
     if modify_times[path] != modified:
-        logging.info("%s modified; restarting server", path)
+        gen_log.info("%s modified; restarting server", path)
         _reload()
 
 
@@ -272,13 +272,13 @@ def main():
                 # module) will see the right things.
                 exec f.read() in globals(), globals()
     except SystemExit, e:
-        logging.info("Script exited with status %s", e.code)
+        gen_log.info("Script exited with status %s", e.code)
     except Exception, e:
-        logging.warning("Script exited with uncaught exception", exc_info=True)
+        gen_log.warning("Script exited with uncaught exception", exc_info=True)
         if isinstance(e, SyntaxError):
             watch(e.filename)
     else:
-        logging.info("Script exited normally")
+        gen_log.info("Script exited normally")
     # restore sys.argv so subsequent executions will include autoreload
     sys.argv = original_argv
 
index 6017017be36ec7931aea788480112c6ae6eb507b..3bdecaa064194a8c4ba39ef9317fcb78d7eb2635 100644 (file)
@@ -27,6 +27,7 @@ import time
 
 from tornado import httputil
 from tornado import ioloop
+from tornado.log import gen_log
 from tornado import stack_context
 
 from tornado.escape import utf8
@@ -51,7 +52,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             # socket_action is found in pycurl since 7.18.2 (it's been
             # in libcurl longer than that but wasn't accessible to
             # python).
-            logging.warning("socket_action method missing from pycurl; "
+            gen_log.warning("socket_action method missing from pycurl; "
                             "falling back to socket_all. Upgrading "
                             "libcurl and pycurl will improve performance")
             self._socket_action = \
@@ -263,7 +264,7 @@ class CurlError(HTTPError):
 
 def _curl_create():
     curl = pycurl.Curl()
-    if logging.getLogger().isEnabledFor(logging.DEBUG):
+    if gen_log.isEnabledFor(logging.DEBUG):
         curl.setopt(pycurl.VERBOSE, 1)
         curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)
     return curl
@@ -386,11 +387,11 @@ def _curl_setup_request(curl, request, buffer, headers):
         userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
         curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
         curl.setopt(pycurl.USERPWD, utf8(userpwd))
-        logging.debug("%s %s (username: %r)", request.method, request.url,
+        gen_log.debug("%s %s (username: %r)", request.method, request.url,
                       request.auth_username)
     else:
         curl.unsetopt(pycurl.USERPWD)
-        logging.debug("%s %s", request.method, request.url)
+        gen_log.debug("%s %s", request.method, request.url)
 
     if request.client_cert is not None:
         curl.setopt(pycurl.SSLCERT, request.client_cert)
@@ -426,12 +427,12 @@ def _curl_header_callback(headers, header_line):
 def _curl_debug(debug_type, debug_msg):
     debug_types = ('I', '<', '>', '<', '>')
     if debug_type == 0:
-        logging.debug('%s', debug_msg.strip())
+        gen_log.debug('%s', debug_msg.strip())
     elif debug_type in (1, 2):
         for line in debug_msg.splitlines():
-            logging.debug('%s %s', debug_types[debug_type], line)
+            gen_log.debug('%s %s', debug_types[debug_type], line)
     elif debug_type == 4:
-        logging.debug('%s %r', debug_types[debug_type], debug_msg)
+        gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
 
 if __name__ == "__main__":
     AsyncHTTPClient.configure(CurlAsyncHTTPClient)
index 982c5db5045ddd612fa23ebe97594ce520869a28..f680bc34e4028459f69011dbc87861be47220311 100644 (file)
@@ -20,9 +20,10 @@ from __future__ import absolute_import, division, with_statement
 
 import copy
 import itertools
-import logging
 import time
 
+from tornado.log import gen_log
+
 try:
     import MySQLdb.constants
     import MySQLdb.converters
@@ -83,7 +84,7 @@ class Connection(object):
         try:
             self.reconnect()
         except Exception:
-            logging.error("Cannot connect to MySQL on %s", self.host,
+            gen_log.error("Cannot connect to MySQL on %s", self.host,
                           exc_info=True)
 
     def __del__(self):
@@ -207,7 +208,7 @@ class Connection(object):
         try:
             return cursor.execute(query, parameters)
         except OperationalError:
-            logging.error("Error connecting to MySQL on %s", self.host)
+            gen_log.error("Error connecting to MySQL on %s", self.host)
             self.close()
             raise
 
index 274e5268cf36dbe004c4fbd48af6aa867ab4c045..566f4a575c3cb84a3355a96f79343117f4d93e5e 100644 (file)
@@ -27,13 +27,13 @@ This module also defines the `HTTPRequest` class which is exposed via
 from __future__ import absolute_import, division, with_statement
 
 import Cookie
-import logging
 import socket
 import time
 
 from tornado.escape import native_str, parse_qs_bytes
 from tornado import httputil
 from tornado import iostream
+from tornado.log import gen_log
 from tornado.netutil import TCPServer
 from tornado import stack_context
 from tornado.util import b, bytes_type
@@ -267,7 +267,7 @@ class HTTPConnection(object):
 
             self.request_callback(self._request)
         except _BadRequestException, e:
-            logging.info("Malformed HTTP request from %s: %s",
+            gen_log.info("Malformed HTTP request from %s: %s",
                          self.address[0], e)
             self.close()
             return
index 0f8a8438d43582ac32a47cadf6e24fc0a2765ae3..a7d543cc23571ae45f11c2902bd3f9d87a4879d2 100644 (file)
 
 from __future__ import absolute_import, division, with_statement
 
-import logging
 import urllib
 import re
 
 from tornado.escape import native_str, parse_qs_bytes, utf8
+from tornado.log import gen_log
 from tornado.util import b, ObjectDict
 
 
@@ -228,7 +228,7 @@ def parse_body_arguments(content_type, body, arguments, files):
                 parse_multipart_form_data(utf8(v), body, arguments, files)
                 break
         else:
-            logging.warning("Invalid multipart/form-data")
+            gen_log.warning("Invalid multipart/form-data")
 
 
 def parse_multipart_form_data(boundary, data, arguments, files):
@@ -247,7 +247,7 @@ def parse_multipart_form_data(boundary, data, arguments, files):
         boundary = boundary[1:-1]
     final_boundary_index = data.rfind(b("--") + boundary + b("--"))
     if final_boundary_index == -1:
-        logging.warning("Invalid multipart/form-data: no final boundary")
+        gen_log.warning("Invalid multipart/form-data: no final boundary")
         return
     parts = data[:final_boundary_index].split(b("--") + boundary + b("\r\n"))
     for part in parts:
@@ -255,17 +255,17 @@ def parse_multipart_form_data(boundary, data, arguments, files):
             continue
         eoh = part.find(b("\r\n\r\n"))
         if eoh == -1:
-            logging.warning("multipart/form-data missing headers")
+            gen_log.warning("multipart/form-data missing headers")
             continue
         headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
         disp_header = headers.get("Content-Disposition", "")
         disposition, disp_params = _parse_header(disp_header)
         if disposition != "form-data" or not part.endswith(b("\r\n")):
-            logging.warning("Invalid multipart/form-data")
+            gen_log.warning("Invalid multipart/form-data")
             continue
         value = part[eoh + 4:-2]
         if not disp_params.get("name"):
-            logging.warning("multipart/form-data value missing name")
+            gen_log.warning("multipart/form-data value missing name")
             continue
         name = disp_params["name"]
         if disp_params.get("filename"):
index 70eb5564b217d8e5fbf199bc33916f3d451e679a..db0eeb7e747c6d0daa6a48317daaf9bc06171cf2 100644 (file)
@@ -32,13 +32,13 @@ import datetime
 import errno
 import heapq
 import os
-import logging
 import select
 import thread
 import threading
 import time
 import traceback
 
+from tornado.log import app_log, gen_log
 from tornado import stack_context
 
 try:
@@ -191,7 +191,7 @@ class IOLoop(object):
                 try:
                     os.close(fd)
                 except Exception:
-                    logging.debug("error closing fd %s", fd, exc_info=True)
+                    gen_log.debug("error closing fd %s", fd, exc_info=True)
         self._waker.close()
         self._impl.close()
 
@@ -211,7 +211,7 @@ class IOLoop(object):
         try:
             self._impl.unregister(fd)
         except (OSError, IOError):
-            logging.debug("Error deleting fd from IOLoop", exc_info=True)
+            gen_log.debug("Error deleting fd from IOLoop", exc_info=True)
 
     def set_blocking_signal_threshold(self, seconds, action):
         """Sends a signal if the ioloop is blocked for more than s seconds.
@@ -225,8 +225,8 @@ class IOLoop(object):
         too long.
         """
         if not hasattr(signal, "setitimer"):
-            logging.error("set_blocking_signal_threshold requires a signal module "
-                       "with the setitimer method")
+            gen_log.error("set_blocking_signal_threshold requires a signal module "
+                           "with the setitimer method")
             return
         self._blocking_signal_threshold = seconds
         if seconds is not None:
@@ -244,9 +244,9 @@ class IOLoop(object):
 
         For use with set_blocking_signal_threshold.
         """
-        logging.warning('IOLoop blocked for %f seconds in\n%s',
-                        self._blocking_signal_threshold,
-                        ''.join(traceback.format_stack(frame)))
+        gen_log.warning('IOLoop blocked for %f seconds in\n%s',
+                         self._blocking_signal_threshold,
+                         ''.join(traceback.format_stack(frame)))
 
     def start(self):
         """Starts the I/O loop.
@@ -330,10 +330,10 @@ class IOLoop(object):
                         # Happens when the client closes the connection
                         pass
                     else:
-                        logging.error("Exception in I/O handler for fd %s",
+                        app_log.error("Exception in I/O handler for fd %s",
                                       fd, exc_info=True)
                 except Exception:
-                    logging.error("Exception in I/O handler for fd %s",
+                    app_log.error("Exception in I/O handler for fd %s",
                                   fd, exc_info=True)
         # reset the stopped flag so another start/stop pair can be issued
         self._stopped = False
@@ -432,7 +432,7 @@ class IOLoop(object):
         The exception itself is not passed explicitly, but is available
         in sys.exc_info.
         """
-        logging.error("Exception in callback %r", callback, exc_info=True)
+        app_log.error("Exception in callback %r", callback, exc_info=True)
 
 
 class _Timeout(object):
@@ -501,7 +501,7 @@ class PeriodicCallback(object):
         try:
             self.callback()
         except Exception:
-            logging.error("Error in periodic callback", exc_info=True)
+            app_log.error("Error in periodic callback", exc_info=True)
         self._schedule_next()
 
     def _schedule_next(self):
@@ -668,5 +668,5 @@ else:
         # All other systems
         import sys
         if "linux" in sys.platform:
-            logging.warning("epoll module not found; using select()")
+            gen_log.warning("epoll module not found; using select()")
         _poll = _Select
index 25caf3ee6aa4b329c7b8e69d82ab94339d4b17d4..8713d8c9ef599ff45dfa48fdf6c1e21be522ab23 100644 (file)
@@ -20,13 +20,13 @@ from __future__ import absolute_import, division, with_statement
 
 import collections
 import errno
-import logging
 import os
 import socket
 import sys
 import re
 
 from tornado import ioloop
+from tornado.log import gen_log, app_log
 from tornado import stack_context
 from tornado.util import b, bytes_type
 
@@ -133,7 +133,7 @@ class IOStream(object):
             # localhost, so handle them the same way as an error
             # reported later in _handle_connect.
             if e.args[0] not in (errno.EINPROGRESS, errno.EWOULDBLOCK):
-                logging.warning("Connect error on fd %d: %s",
+                gen_log.warning("Connect error on fd %d: %s",
                                 self.socket.fileno(), e)
                 self.close()
                 return
@@ -258,7 +258,7 @@ class IOStream(object):
 
     def _handle_events(self, fd, events):
         if not self.socket:
-            logging.warning("Got events for closed stream %d", fd)
+            gen_log.warning("Got events for closed stream %d", fd)
             return
         try:
             if events & self.io_loop.READ:
@@ -293,7 +293,7 @@ class IOStream(object):
                 self._state = state
                 self.io_loop.update_handler(self.socket.fileno(), self._state)
         except Exception:
-            logging.error("Uncaught exception, closing connection.",
+            gen_log.error("Uncaught exception, closing connection.",
                           exc_info=True)
             self.close()
             raise
@@ -304,7 +304,7 @@ class IOStream(object):
             try:
                 callback(*args)
             except Exception:
-                logging.error("Uncaught exception, closing connection.",
+                app_log.error("Uncaught exception, closing connection.",
                               exc_info=True)
                 # Close the socket on an uncaught exception from a user callback
                 # (It would eventually get closed when the socket object is
@@ -357,7 +357,7 @@ class IOStream(object):
             finally:
                 self._pending_callbacks -= 1
         except Exception:
-            logging.warning("error on read", exc_info=True)
+            gen_log.warning("error on read", exc_info=True)
             self.close()
             return
         if self._read_from_buffer():
@@ -422,7 +422,7 @@ class IOStream(object):
             chunk = self._read_from_socket()
         except socket.error, e:
             # ssl.SSLError is a subclass of socket.error
-            logging.warning("Read error on %d: %s",
+            gen_log.warning("Read error on %d: %s",
                             self.socket.fileno(), e)
             self.close()
             raise
@@ -431,7 +431,7 @@ class IOStream(object):
         self._read_buffer.append(chunk)
         self._read_buffer_size += len(chunk)
         if self._read_buffer_size >= self.max_buffer_size:
-            logging.error("Reached maximum read buffer size")
+            gen_log.error("Reached maximum read buffer size")
             self.close()
             raise IOError("Reached maximum read buffer size")
         return len(chunk)
@@ -504,7 +504,7 @@ class IOStream(object):
             # an error state before the socket becomes writable, so
             # in that case a connection failure would be handled by the
             # error path in _handle_events instead of here.
-            logging.warning("Connect error on fd %d: %s",
+            gen_log.warning("Connect error on fd %d: %s",
                             self.socket.fileno(), errno.errorcode[err])
             self.close()
             return
@@ -544,7 +544,7 @@ class IOStream(object):
                     self._write_buffer_frozen = True
                     break
                 else:
-                    logging.warning("Write error on %d: %s",
+                    gen_log.warning("Write error on %d: %s",
                                     self.socket.fileno(), e)
                     self.close()
                     return
@@ -656,7 +656,7 @@ class SSLIOStream(IOStream):
                     peer = self.socket.getpeername()
                 except:
                     peer = '(not connected)'
-                logging.warning("SSL Error on %d %s: %s",
+                gen_log.warning("SSL Error on %d %s: %s",
                                 self.socket.fileno(), peer, err)
                 return self.close()
             raise
index 9f8ee7ea8217c1b6da85a1f10c8dcd05d757aa0d..918f0c415f7aa8a5454d3cc1ea19a62e04410bed 100644 (file)
@@ -43,11 +43,11 @@ from __future__ import absolute_import, division, with_statement
 
 import csv
 import datetime
-import logging
 import os
 import re
 
 from tornado import escape
+from tornado.log import gen_log
 
 _default_locale = "en_US"
 _translations = {}
@@ -118,7 +118,7 @@ def load_translations(directory):
             continue
         locale, extension = path.split(".")
         if not re.match("[a-z]+(_[A-Z]+)?$", locale):
-            logging.error("Unrecognized locale %r (path: %s)", locale,
+            gen_log.error("Unrecognized locale %r (path: %s)", locale,
                           os.path.join(directory, path))
             continue
         full_path = os.path.join(directory, path)
@@ -142,13 +142,13 @@ def load_translations(directory):
             else:
                 plural = "unknown"
             if plural not in ("plural", "singular", "unknown"):
-                logging.error("Unrecognized plural indicator %r in %s line %d",
+                gen_log.error("Unrecognized plural indicator %r in %s line %d",
                               plural, path, i + 1)
                 continue
             _translations[locale].setdefault(plural, {})[english] = translation
         f.close()
     _supported_locales = frozenset(_translations.keys() + [_default_locale])
-    logging.debug("Supported locales: %s", sorted(_supported_locales))
+    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
 
 
 def load_gettext_translations(directory, domain):
@@ -184,11 +184,11 @@ def load_gettext_translations(directory, domain):
             _translations[lang] = gettext.translation(domain, directory,
                                                       languages=[lang])
         except Exception, e:
-            logging.error("Cannot load translation for '%s': %s", lang, str(e))
+            gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
             continue
     _supported_locales = frozenset(_translations.keys() + [_default_locale])
     _use_gettext = True
-    logging.debug("Supported locales: %s", sorted(_supported_locales))
+    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
 
 
 def get_supported_locales():
diff --git a/tornado/log.py b/tornado/log.py
new file mode 100644 (file)
index 0000000..96b9998
--- /dev/null
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+#
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import absolute_import, division, with_statement
+
+import logging
+
+# Per-request logging for Tornado's HTTP servers (and potentially other servers
+# in the future)
+access_log = logging.getLogger("tornado.access")
+
+# Logging of errors from application code (i.e. uncaught exceptions from
+# callbacks)
+app_log = logging.getLogger("tornado.application")
+
+# General logging, i.e. everything else
+gen_log = logging.getLogger("tornado.general")
index cf9bc9c41f20862e417ded0c089b35d0e0801835..d796a2ae221c681a101d54880e8e662be79e6644 100644 (file)
@@ -19,7 +19,6 @@
 from __future__ import absolute_import, division, with_statement
 
 import errno
-import logging
 import os
 import socket
 import stat
@@ -27,6 +26,7 @@ import stat
 from tornado import process
 from tornado.ioloop import IOLoop
 from tornado.iostream import IOStream, SSLIOStream
+from tornado.log import app_log
 from tornado.platform.auto import set_close_exec
 
 try:
@@ -234,7 +234,7 @@ class TCPServer(object):
                 stream = IOStream(connection, io_loop=self.io_loop)
             self.handle_stream(stream, address)
         except Exception:
-            logging.error("Error in connection callback", exc_info=True)
+            app_log.error("Error in connection callback", exc_info=True)
 
 
 def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
index 5513964075b9199530d4229661ef601052149ccd..2c094bd8c2631a21847e9650225baff148647366 100644 (file)
@@ -47,7 +47,6 @@ This module has been tested with Twisted versions 11.0.0, 11.1.0, and 12.0.0
 from __future__ import absolute_import, division, with_statement
 
 import functools
-import logging
 import time
 
 from twisted.internet.posixbase import PosixReactorBase
@@ -60,6 +59,7 @@ from zope.interface import implementer
 
 import tornado
 import tornado.ioloop
+from tornado.log import app_log
 from tornado.stack_context import NullContext
 from tornado.ioloop import IOLoop
 
@@ -80,7 +80,7 @@ class TornadoDelayedCall(object):
         try:
             self._func()
         except:
-            logging.error("_called caught exception", exc_info=True)
+            app_log.error("_called caught exception", exc_info=True)
 
     def getTime(self):
         return self._time
index 28a61bcd3e9392b9bc07f577630cfafa60e14e70..a2c63b83183201a9c948a139c2bc80429f2098b8 100644 (file)
@@ -19,7 +19,6 @@
 from __future__ import absolute_import, division, with_statement
 
 import errno
-import logging
 import os
 import sys
 import time
@@ -27,6 +26,7 @@ import time
 from binascii import hexlify
 
 from tornado import ioloop
+from tornado.log import gen_log
 
 try:
     import multiprocessing  # Python 2.6+
@@ -45,7 +45,7 @@ def cpu_count():
         return os.sysconf("SC_NPROCESSORS_CONF")
     except ValueError:
         pass
-    logging.error("Could not detect number of processors; assuming 1")
+    gen_log.error("Could not detect number of processors; assuming 1")
     return 1
 
 
@@ -98,7 +98,7 @@ def fork_processes(num_processes, max_restarts=100):
         raise RuntimeError("Cannot run in multiple processes: IOLoop instance "
                            "has already been initialized. You cannot call "
                            "IOLoop.instance() before calling start_processes()")
-    logging.info("Starting %d processes", num_processes)
+    gen_log.info("Starting %d processes", num_processes)
     children = {}
 
     def start_child(i):
@@ -128,13 +128,13 @@ def fork_processes(num_processes, max_restarts=100):
             continue
         id = children.pop(pid)
         if os.WIFSIGNALED(status):
-            logging.warning("child %d (pid %d) killed by signal %d, restarting",
+            gen_log.warning("child %d (pid %d) killed by signal %d, restarting",
                             id, pid, os.WTERMSIG(status))
         elif os.WEXITSTATUS(status) != 0:
-            logging.warning("child %d (pid %d) exited with status %d, restarting",
+            gen_log.warning("child %d (pid %d) exited with status %d, restarting",
                             id, pid, os.WEXITSTATUS(status))
         else:
-            logging.info("child %d (pid %d) exited normally", id, pid)
+            gen_log.info("child %d (pid %d) exited normally", id, pid)
             continue
         num_restarts += 1
         if num_restarts > max_restarts:
index 1223620ab5915395449d27faffe229fac8e443a1..6bd10c17ce718126d1ced1034e6e5250955ce560 100644 (file)
@@ -5,6 +5,7 @@ from tornado.escape import utf8, _unicode, native_str
 from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main
 from tornado.httputil import HTTPHeaders
 from tornado.iostream import IOStream, SSLIOStream
+from tornado.log import gen_log
 from tornado import stack_context
 from tornado.util import b, GzipDecompressor
 
@@ -13,7 +14,6 @@ import collections
 import contextlib
 import copy
 import functools
-import logging
 import os.path
 import re
 import socket
@@ -99,7 +99,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
         self.queue.append((request, callback))
         self._process_queue()
         if self.queue:
-            logging.debug("max_clients limit reached, request queued. "
+            gen_log.debug("max_clients limit reached, request queued. "
                           "%d active, %d queued requests." % (
                     len(self.active), len(self.queue)))
 
@@ -317,7 +317,7 @@ class _HTTPConnection(object):
         try:
             yield
         except Exception, e:
-            logging.warning("uncaught exception", exc_info=True)
+            gen_log.warning("uncaught exception", exc_info=True)
             self._run_callback(HTTPResponse(self.request, 599, error=e,
                                 request_time=time.time() - self.start_time,
                                 ))
index 14306d9a529032f60780e6f94d55ad58df317417..a7efa47e7497a01906ddcadf3be1f25f40f82017 100644 (file)
@@ -184,13 +184,13 @@ from __future__ import absolute_import, division, with_statement
 import cStringIO
 import datetime
 import linecache
-import logging
 import os.path
 import posixpath
 import re
 import threading
 
 from tornado import escape
+from tornado.log import app_log
 from tornado.util import bytes_type, ObjectDict
 
 _DEFAULT_AUTOESCAPE = "xhtml_escape"
@@ -229,7 +229,7 @@ class Template(object):
                 "exec")
         except Exception:
             formatted_code = _format_code(self.code).rstrip()
-            logging.error("%s code:\n%s", self.name, formatted_code)
+            app_log.error("%s code:\n%s", self.name, formatted_code)
             raise
 
     def generate(self, **kwargs):
@@ -261,7 +261,7 @@ class Template(object):
             return execute()
         except Exception:
             formatted_code = _format_code(self.code).rstrip()
-            logging.error("%s code:\n%s", self.name, formatted_code)
+            app_log.error("%s code:\n%s", self.name, formatted_code)
             raise
 
     def _generate_python(self, loader, compress_whitespace):
index c597e3a77f1a9cb6acc832c42c11169fef2603e7..07f53d7745f03c683f521cf6a485d6c1306f9e65 100644 (file)
@@ -33,6 +33,7 @@ except ImportError:
     HTTPServer = None
     IOLoop = None
     SimpleAsyncHTTPClient = None
+from tornado.log import gen_log
 from tornado.stack_context import StackContext, NullContext
 from tornado.util import raise_exc_info
 import contextlib
@@ -364,7 +365,7 @@ class LogTrapTestCase(unittest.TestCase):
         old_stream = handler.stream
         try:
             handler.stream = StringIO()
-            logging.info("RUNNING TEST: " + str(self))
+            gen_log.info("RUNNING TEST: " + str(self))
             old_error_count = len(result.failures) + len(result.errors)
             super(LogTrapTestCase, self).run(result)
             new_error_count = len(result.failures) + len(result.errors)
@@ -440,9 +441,9 @@ def main(**kwargs):
             unittest.main(defaultTest="all", argv=argv, **kwargs)
     except SystemExit, e:
         if e.code == 0:
-            logging.info('PASS')
+            gen_log.info('PASS')
         else:
-            logging.error('FAIL')
+            gen_log.error('FAIL')
         if not options.autoreload:
             raise
     if options.autoreload:
index eb64011d0b98b2e4fcaedf3079c03076d11e6323..af398181bb0d3e7e49757e8813f877a6bf1d8be2 100644 (file)
@@ -63,7 +63,6 @@ import hashlib
 import hmac
 import httplib
 import itertools
-import logging
 import mimetypes
 import os.path
 import re
@@ -80,6 +79,7 @@ import uuid
 
 from tornado import escape
 from tornado import locale
+from tornado.log import access_log, app_log, gen_log
 from tornado import stack_context
 from tornado import template
 from tornado.escape import utf8, _unicode
@@ -736,7 +736,7 @@ class RequestHandler(object):
         Additional keyword arguments are passed through to `write_error`.
         """
         if self._headers_written:
-            logging.error("Cannot send error response after headers written")
+            gen_log.error("Cannot send error response after headers written")
             if not self._finished:
                 self.finish()
             return
@@ -745,7 +745,7 @@ class RequestHandler(object):
         try:
             self.write_error(status_code, **kwargs)
         except Exception:
-            logging.error("Uncaught exception in write_error", exc_info=True)
+            app_log.error("Uncaught exception in write_error", exc_info=True)
         if not self._finished:
             self.finish()
 
@@ -985,7 +985,7 @@ class RequestHandler(object):
                 return callback(*args, **kwargs)
             except Exception, e:
                 if self._headers_written:
-                    logging.error("Exception after headers written",
+                    app_log.error("Exception after headers written",
                                   exc_info=True)
                 else:
                     self._handle_request_exception(e)
@@ -1074,14 +1074,14 @@ class RequestHandler(object):
             if e.log_message:
                 format = "%d %s: " + e.log_message
                 args = [e.status_code, self._request_summary()] + list(e.args)
-                logging.warning(format, *args)
+                gen_log.warning(format, *args)
             if e.status_code not in httplib.responses:
-                logging.error("Bad HTTP status code: %d", e.status_code)
+                gen_log.error("Bad HTTP status code: %d", e.status_code)
                 self.send_error(500, exc_info=sys.exc_info())
             else:
                 self.send_error(e.status_code, exc_info=sys.exc_info())
         else:
-            logging.error("Uncaught exception %s\n%r", self._request_summary(),
+            app_log.error("Uncaught exception %s\n%r", self._request_summary(),
                           self.request, exc_info=True)
             self.send_error(500, exc_info=sys.exc_info())
 
@@ -1328,7 +1328,7 @@ class Application(object):
             handlers.append(spec)
             if spec.name:
                 if spec.name in self.named_handlers:
-                    logging.warning(
+                    app_log.warning(
                         "Multiple handlers named %s; replacing previous value",
                         spec.name)
                 self.named_handlers[spec.name] = spec
@@ -1452,11 +1452,11 @@ class Application(object):
             self.settings["log_function"](handler)
             return
         if handler.get_status() < 400:
-            log_method = logging.info
+            log_method = access_log.info
         elif handler.get_status() < 500:
-            log_method = logging.warning
+            log_method = access_log.warning
         else:
-            log_method = logging.error
+            log_method = access_log.error
         request_time = 1000.0 * handler.request.request_time()
         log_method("%d %s %.2fms", handler.get_status(),
                    handler._request_summary(), request_time)
@@ -1655,7 +1655,7 @@ class StaticFileHandler(RequestHandler):
                     hashes[abs_path] = hashlib.md5(f.read()).hexdigest()
                     f.close()
                 except Exception:
-                    logging.error("Could not open static file %r", path)
+                    gen_log.error("Could not open static file %r", path)
                     hashes[abs_path] = None
             hsh = hashes.get(abs_path)
             if hsh:
@@ -2039,11 +2039,11 @@ def decode_signed_value(secret, name, value, max_age_days=31):
         return None
     signature = _create_signature(secret, name, parts[0], parts[1])
     if not _time_independent_equals(parts[2], signature):
-        logging.warning("Invalid cookie signature %r", value)
+        gen_log.warning("Invalid cookie signature %r", value)
         return None
     timestamp = int(parts[1])
     if timestamp < time.time() - max_age_days * 86400:
-        logging.warning("Expired cookie %r", value)
+        gen_log.warning("Expired cookie %r", value)
         return None
     if timestamp > time.time() + 31 * 86400:
         # _cookie_signature does not hash a delimiter between the
@@ -2051,10 +2051,10 @@ def decode_signed_value(secret, name, value, max_age_days=31):
         # digits from the payload to the timestamp without altering the
         # signature.  For backwards compatibility, sanity-check timestamp
         # here instead of modifying _cookie_signature.
-        logging.warning("Cookie timestamp in future; possible tampering %r", value)
+        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
         return None
     if parts[1].startswith(b("0")):
-        logging.warning("Tampered cookie %r", value)
+        gen_log.warning("Tampered cookie %r", value)
         return None
     try:
         return base64.b64decode(parts[0])
index 266b114b0306676cbe81d55556e455e6eeb3b119..9bc1e053bc87e54de130ba63dd8deaa26b039f15 100644 (file)
@@ -23,13 +23,13 @@ from __future__ import absolute_import, division, with_statement
 import array
 import functools
 import hashlib
-import logging
 import struct
 import time
 import base64
 import tornado.escape
 import tornado.web
 
+from tornado.log import gen_log, app_log
 from tornado.util import bytes_type, b
 
 
@@ -257,7 +257,7 @@ class WebSocketProtocol(object):
             try:
                 return callback(*args, **kwargs)
             except Exception:
-                logging.error("Uncaught exception in %s",
+                app_log.error("Uncaught exception in %s",
                               self.request.path, exc_info=True)
                 self._abort()
         return wrapper
@@ -289,7 +289,7 @@ class WebSocketProtocol76(WebSocketProtocol):
         try:
             self._handle_websocket_headers()
         except ValueError:
-            logging.debug("Malformed WebSocket request received")
+            gen_log.debug("Malformed WebSocket request received")
             self._abort()
             return
 
@@ -344,7 +344,7 @@ class WebSocketProtocol76(WebSocketProtocol):
         try:
             challenge_response = self.challenge_response(challenge)
         except ValueError:
-            logging.debug("Malformed key data in WebSocket request")
+            gen_log.debug("Malformed key data in WebSocket request")
             self._abort()
             return
         self._write_response(challenge_response)
@@ -457,7 +457,7 @@ class WebSocketProtocol13(WebSocketProtocol):
             self._handle_websocket_headers()
             self._accept_connection()
         except ValueError:
-            logging.debug("Malformed WebSocket request received")
+            gen_log.debug("Malformed WebSocket request received")
             self._abort()
             return
 
index ca34ff3e8829a787af888b6584d33b7806aff0de..dd2488e852b27d5d56eed8f7ac22f08abbc083a2 100644 (file)
@@ -33,7 +33,6 @@ from __future__ import absolute_import, division, with_statement
 
 import Cookie
 import httplib
-import logging
 import sys
 import time
 import tornado
@@ -41,6 +40,7 @@ import urllib
 
 from tornado import escape
 from tornado import httputil
+from tornado.log import access_log
 from tornado import web
 from tornado.escape import native_str, utf8, parse_qs_bytes
 from tornado.util import b, bytes_type
@@ -302,11 +302,11 @@ class WSGIContainer(object):
 
     def _log(self, status_code, request):
         if status_code < 400:
-            log_method = logging.info
+            log_method = access_log.info
         elif status_code < 500:
-            log_method = logging.warning
+            log_method = access_log.warning
         else:
-            log_method = logging.error
+            log_method = access_log.error
         request_time = 1000.0 * request.request_time()
         summary = request.method + " " + request.uri + " (" + \
             request.remote_ip + ")"