import binascii
import hashlib
import hmac
-import logging
import time
import urllib
import urlparse
from tornado import httpclient
from tornado import escape
from tornado.httputil import url_concat
+from tornado.log import gen_log
from tornado.util import bytes_type, b
def _on_authentication_verified(self, callback, response):
if response.error or b("is_valid:true") not in response.body:
- logging.warning("Invalid OpenID response: %s", response.error or
+ gen_log.warning("Invalid OpenID response: %s", response.error or
response.body)
callback(None)
return
oauth_verifier = self.get_argument("oauth_verifier", None)
request_cookie = self.get_cookie("_oauth_request_token")
if not request_cookie:
- logging.warning("Missing OAuth request token cookie")
+ gen_log.warning("Missing OAuth request token cookie")
callback(None)
return
self.clear_cookie("_oauth_request_token")
cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
if cookie_key != request_key:
- logging.info((cookie_key, request_key, request_cookie))
- logging.warning("Request token does not match cookie")
+ gen_log.info((cookie_key, request_key, request_cookie))
+ gen_log.warning("Request token does not match cookie")
callback(None)
return
token = dict(key=cookie_key, secret=cookie_secret)
def _on_access_token(self, callback, response):
if response.error:
- logging.warning("Could not fetch access token")
+ gen_log.warning("Could not fetch access token")
callback(None)
return
def _on_twitter_request(self, callback, response):
if response.error:
- logging.warning("Error response %s fetching %s", response.error,
+ gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
def _on_friendfeed_request(self, callback, response):
if response.error:
- logging.warning("Error response %s fetching %s", response.error,
+ gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
def _parse_response(self, callback, response):
if response.error:
- logging.warning("HTTP error from Facebook: %s", response.error)
+ gen_log.warning("HTTP error from Facebook: %s", response.error)
callback(None)
return
try:
json = escape.json_decode(response.body)
except Exception:
- logging.warning("Invalid JSON from Facebook: %r", response.body)
+ gen_log.warning("Invalid JSON from Facebook: %r", response.body)
callback(None)
return
if isinstance(json, dict) and json.get("error_code"):
- logging.warning("Facebook error: %d: %r", json["error_code"],
+ gen_log.warning("Facebook error: %d: %r", json["error_code"],
json.get("error_msg"))
callback(None)
return
def _on_access_token(self, redirect_uri, client_id, client_secret,
callback, fields, response):
if response.error:
- logging.warning('Facebook auth error: %s' % str(response))
+ gen_log.warning('Facebook auth error: %s' % str(response))
callback(None)
return
def _on_facebook_request(self, callback, response):
if response.error:
- logging.warning("Error response %s fetching %s", response.error,
+ gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
return
del sys.path[0]
import functools
-import logging
import os
import pkgutil
import sys
import subprocess
from tornado import ioloop
+from tornado.log import gen_log
from tornado import process
try:
modify_times[path] = modified
return
if modify_times[path] != modified:
- logging.info("%s modified; restarting server", path)
+ gen_log.info("%s modified; restarting server", path)
_reload()
# module) will see the right things.
exec f.read() in globals(), globals()
except SystemExit, e:
- logging.info("Script exited with status %s", e.code)
+ gen_log.info("Script exited with status %s", e.code)
except Exception, e:
- logging.warning("Script exited with uncaught exception", exc_info=True)
+ gen_log.warning("Script exited with uncaught exception", exc_info=True)
if isinstance(e, SyntaxError):
watch(e.filename)
else:
- logging.info("Script exited normally")
+ gen_log.info("Script exited normally")
# restore sys.argv so subsequent executions will include autoreload
sys.argv = original_argv
from tornado import httputil
from tornado import ioloop
+from tornado.log import gen_log
from tornado import stack_context
from tornado.escape import utf8
# socket_action is found in pycurl since 7.18.2 (it's been
# in libcurl longer than that but wasn't accessible to
# python).
- logging.warning("socket_action method missing from pycurl; "
+ gen_log.warning("socket_action method missing from pycurl; "
"falling back to socket_all. Upgrading "
"libcurl and pycurl will improve performance")
self._socket_action = \
def _curl_create():
curl = pycurl.Curl()
- if logging.getLogger().isEnabledFor(logging.DEBUG):
+ if gen_log.isEnabledFor(logging.DEBUG):
curl.setopt(pycurl.VERBOSE, 1)
curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)
return curl
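Because _curl_create keys pycurl's VERBOSE mode off the effective level of the "tornado.general" logger, enabling verbose transfer tracing after this change is a one-liner with the standard logging module (a usage sketch, not part of the diff; it assumes a handler that admits DEBUG records is installed):

import logging

# Raising tornado.general to DEBUG makes _curl_create set pycurl.VERBOSE,
# routing transfer details through _curl_debug to gen_log.debug.
logging.getLogger("tornado.general").setLevel(logging.DEBUG)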
userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
curl.setopt(pycurl.USERPWD, utf8(userpwd))
- logging.debug("%s %s (username: %r)", request.method, request.url,
+ gen_log.debug("%s %s (username: %r)", request.method, request.url,
request.auth_username)
else:
curl.unsetopt(pycurl.USERPWD)
- logging.debug("%s %s", request.method, request.url)
+ gen_log.debug("%s %s", request.method, request.url)
if request.client_cert is not None:
curl.setopt(pycurl.SSLCERT, request.client_cert)
def _curl_debug(debug_type, debug_msg):
debug_types = ('I', '<', '>', '<', '>')
if debug_type == 0:
- logging.debug('%s', debug_msg.strip())
+ gen_log.debug('%s', debug_msg.strip())
elif debug_type in (1, 2):
for line in debug_msg.splitlines():
- logging.debug('%s %s', debug_types[debug_type], line)
+ gen_log.debug('%s %s', debug_types[debug_type], line)
elif debug_type == 4:
- logging.debug('%s %r', debug_types[debug_type], debug_msg)
+ gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
if __name__ == "__main__":
AsyncHTTPClient.configure(CurlAsyncHTTPClient)
import copy
import itertools
-import logging
import time
+from tornado.log import gen_log
+
try:
import MySQLdb.constants
import MySQLdb.converters
try:
self.reconnect()
except Exception:
- logging.error("Cannot connect to MySQL on %s", self.host,
+ gen_log.error("Cannot connect to MySQL on %s", self.host,
exc_info=True)
def __del__(self):
try:
return cursor.execute(query, parameters)
except OperationalError:
- logging.error("Error connecting to MySQL on %s", self.host)
+ gen_log.error("Error connecting to MySQL on %s", self.host)
self.close()
raise
from __future__ import absolute_import, division, with_statement
import Cookie
-import logging
import socket
import time
from tornado.escape import native_str, parse_qs_bytes
from tornado import httputil
from tornado import iostream
+from tornado.log import gen_log
from tornado.netutil import TCPServer
from tornado import stack_context
from tornado.util import b, bytes_type
self.request_callback(self._request)
except _BadRequestException, e:
- logging.info("Malformed HTTP request from %s: %s",
+ gen_log.info("Malformed HTTP request from %s: %s",
self.address[0], e)
self.close()
return
from __future__ import absolute_import, division, with_statement
-import logging
import urllib
import re
from tornado.escape import native_str, parse_qs_bytes, utf8
+from tornado.log import gen_log
from tornado.util import b, ObjectDict
parse_multipart_form_data(utf8(v), body, arguments, files)
break
else:
- logging.warning("Invalid multipart/form-data")
+ gen_log.warning("Invalid multipart/form-data")
def parse_multipart_form_data(boundary, data, arguments, files):
boundary = boundary[1:-1]
final_boundary_index = data.rfind(b("--") + boundary + b("--"))
if final_boundary_index == -1:
- logging.warning("Invalid multipart/form-data: no final boundary")
+ gen_log.warning("Invalid multipart/form-data: no final boundary")
return
parts = data[:final_boundary_index].split(b("--") + boundary + b("\r\n"))
for part in parts:
continue
eoh = part.find(b("\r\n\r\n"))
if eoh == -1:
- logging.warning("multipart/form-data missing headers")
+ gen_log.warning("multipart/form-data missing headers")
continue
headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
disp_header = headers.get("Content-Disposition", "")
disposition, disp_params = _parse_header(disp_header)
if disposition != "form-data" or not part.endswith(b("\r\n")):
- logging.warning("Invalid multipart/form-data")
+ gen_log.warning("Invalid multipart/form-data")
continue
value = part[eoh + 4:-2]
if not disp_params.get("name"):
- logging.warning("multipart/form-data value missing name")
+ gen_log.warning("multipart/form-data value missing name")
continue
name = disp_params["name"]
if disp_params.get("filename"):
import errno
import heapq
import os
-import logging
import select
import thread
import threading
import time
import traceback
+from tornado.log import app_log, gen_log
from tornado import stack_context
try:
try:
os.close(fd)
except Exception:
- logging.debug("error closing fd %s", fd, exc_info=True)
+ gen_log.debug("error closing fd %s", fd, exc_info=True)
self._waker.close()
self._impl.close()
try:
self._impl.unregister(fd)
except (OSError, IOError):
- logging.debug("Error deleting fd from IOLoop", exc_info=True)
+ gen_log.debug("Error deleting fd from IOLoop", exc_info=True)
def set_blocking_signal_threshold(self, seconds, action):
"""Sends a signal if the ioloop is blocked for more than s seconds.
too long.
"""
if not hasattr(signal, "setitimer"):
- logging.error("set_blocking_signal_threshold requires a signal module "
- "with the setitimer method")
+ gen_log.error("set_blocking_signal_threshold requires a signal module "
+ "with the setitimer method")
return
self._blocking_signal_threshold = seconds
if seconds is not None:
For use with set_blocking_signal_threshold.
"""
- logging.warning('IOLoop blocked for %f seconds in\n%s',
- self._blocking_signal_threshold,
- ''.join(traceback.format_stack(frame)))
+ gen_log.warning('IOLoop blocked for %f seconds in\n%s',
+ self._blocking_signal_threshold,
+ ''.join(traceback.format_stack(frame)))
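log_stack is the signal handler intended for set_blocking_signal_threshold; a minimal usage sketch (assuming a platform whose signal module provides setitimer, per the check above, with a threshold chosen only for illustration):

from tornado import ioloop

# Warn via gen_log with a stack trace whenever a callback blocks the
# loop for more than two seconds.
io_loop = ioloop.IOLoop.instance()
io_loop.set_blocking_signal_threshold(2, io_loop.log_stack)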
def start(self):
"""Starts the I/O loop.
# Happens when the client closes the connection
pass
else:
- logging.error("Exception in I/O handler for fd %s",
+ app_log.error("Exception in I/O handler for fd %s",
fd, exc_info=True)
except Exception:
- logging.error("Exception in I/O handler for fd %s",
+ app_log.error("Exception in I/O handler for fd %s",
fd, exc_info=True)
# reset the stopped flag so another start/stop pair can be issued
self._stopped = False
The exception itself is not passed explicitly, but is available
in sys.exc_info.
"""
- logging.error("Exception in callback %r", callback, exc_info=True)
+ app_log.error("Exception in callback %r", callback, exc_info=True)
class _Timeout(object):
try:
self.callback()
except Exception:
- logging.error("Error in periodic callback", exc_info=True)
+ app_log.error("Error in periodic callback", exc_info=True)
self._schedule_next()
def _schedule_next(self):
# All other systems
import sys
if "linux" in sys.platform:
- logging.warning("epoll module not found; using select()")
+ gen_log.warning("epoll module not found; using select()")
_poll = _Select
import collections
import errno
-import logging
import os
import socket
import sys
import re
from tornado import ioloop
+from tornado.log import gen_log, app_log
from tornado import stack_context
from tornado.util import b, bytes_type
# localhost, so handle them the same way as an error
# reported later in _handle_connect.
if e.args[0] not in (errno.EINPROGRESS, errno.EWOULDBLOCK):
- logging.warning("Connect error on fd %d: %s",
+ gen_log.warning("Connect error on fd %d: %s",
self.socket.fileno(), e)
self.close()
return
def _handle_events(self, fd, events):
if not self.socket:
- logging.warning("Got events for closed stream %d", fd)
+ gen_log.warning("Got events for closed stream %d", fd)
return
try:
if events & self.io_loop.READ:
self._state = state
self.io_loop.update_handler(self.socket.fileno(), self._state)
except Exception:
- logging.error("Uncaught exception, closing connection.",
+ gen_log.error("Uncaught exception, closing connection.",
exc_info=True)
self.close()
raise
try:
callback(*args)
except Exception:
- logging.error("Uncaught exception, closing connection.",
+ app_log.error("Uncaught exception, closing connection.",
exc_info=True)
# Close the socket on an uncaught exception from a user callback
# (It would eventually get closed when the socket object is
finally:
self._pending_callbacks -= 1
except Exception:
- logging.warning("error on read", exc_info=True)
+ gen_log.warning("error on read", exc_info=True)
self.close()
return
if self._read_from_buffer():
chunk = self._read_from_socket()
except socket.error, e:
# ssl.SSLError is a subclass of socket.error
- logging.warning("Read error on %d: %s",
+ gen_log.warning("Read error on %d: %s",
self.socket.fileno(), e)
self.close()
raise
self._read_buffer.append(chunk)
self._read_buffer_size += len(chunk)
if self._read_buffer_size >= self.max_buffer_size:
- logging.error("Reached maximum read buffer size")
+ gen_log.error("Reached maximum read buffer size")
self.close()
raise IOError("Reached maximum read buffer size")
return len(chunk)
# an error state before the socket becomes writable, so
# in that case a connection failure would be handled by the
# error path in _handle_events instead of here.
- logging.warning("Connect error on fd %d: %s",
+ gen_log.warning("Connect error on fd %d: %s",
self.socket.fileno(), errno.errorcode[err])
self.close()
return
self._write_buffer_frozen = True
break
else:
- logging.warning("Write error on %d: %s",
+ gen_log.warning("Write error on %d: %s",
self.socket.fileno(), e)
self.close()
return
peer = self.socket.getpeername()
except:
peer = '(not connected)'
- logging.warning("SSL Error on %d %s: %s",
+ gen_log.warning("SSL Error on %d %s: %s",
self.socket.fileno(), peer, err)
return self.close()
raise
import csv
import datetime
-import logging
import os
import re
from tornado import escape
+from tornado.log import gen_log
_default_locale = "en_US"
_translations = {}
continue
locale, extension = path.split(".")
if not re.match("[a-z]+(_[A-Z]+)?$", locale):
- logging.error("Unrecognized locale %r (path: %s)", locale,
+ gen_log.error("Unrecognized locale %r (path: %s)", locale,
os.path.join(directory, path))
continue
full_path = os.path.join(directory, path)
else:
plural = "unknown"
if plural not in ("plural", "singular", "unknown"):
- logging.error("Unrecognized plural indicator %r in %s line %d",
+ gen_log.error("Unrecognized plural indicator %r in %s line %d",
plural, path, i + 1)
continue
_translations[locale].setdefault(plural, {})[english] = translation
f.close()
_supported_locales = frozenset(_translations.keys() + [_default_locale])
- logging.debug("Supported locales: %s", sorted(_supported_locales))
+ gen_log.debug("Supported locales: %s", sorted(_supported_locales))
def load_gettext_translations(directory, domain):
_translations[lang] = gettext.translation(domain, directory,
languages=[lang])
except Exception, e:
- logging.error("Cannot load translation for '%s': %s", lang, str(e))
+ gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
continue
_supported_locales = frozenset(_translations.keys() + [_default_locale])
_use_gettext = True
- logging.debug("Supported locales: %s", sorted(_supported_locales))
+ gen_log.debug("Supported locales: %s", sorted(_supported_locales))
def get_supported_locales():
--- /dev/null
+++ tornado/log.py
+#!/usr/bin/env python
+#
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import absolute_import, division, with_statement
+
+import logging
+
+# Per-request logging for Tornado's HTTP servers (and potentially other servers
+# in the future)
+access_log = logging.getLogger("tornado.access")
+
+# Logging of errors from application code (i.e. uncaught exceptions from
+# callbacks)
+app_log = logging.getLogger("tornado.application")
+
+# General logging, i.e. everything else
+gen_log = logging.getLogger("tornado.general")
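These three loggers hang off the standard library's logging hierarchy, so applications can tune each stream independently; a minimal configuration sketch (handler and level choices here are illustrative assumptions, not part of this change):

import logging

# One stream handler on the root logger catches all three tornado.* loggers.
logging.basicConfig(level=logging.INFO)
# Quiet per-request access lines for 2xx/3xx responses, keep 4xx/5xx.
logging.getLogger("tornado.access").setLevel(logging.WARNING)
# Keep full detail for uncaught application exceptions and general messages.
logging.getLogger("tornado.application").setLevel(logging.INFO)
logging.getLogger("tornado.general").setLevel(logging.INFO)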
from __future__ import absolute_import, division, with_statement
import errno
-import logging
import os
import socket
import stat
from tornado import process
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream, SSLIOStream
+from tornado.log import app_log
from tornado.platform.auto import set_close_exec
try:
stream = IOStream(connection, io_loop=self.io_loop)
self.handle_stream(stream, address)
except Exception:
- logging.error("Error in connection callback", exc_info=True)
+ app_log.error("Error in connection callback", exc_info=True)
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
from __future__ import absolute_import, division, with_statement
import functools
-import logging
import time
from twisted.internet.posixbase import PosixReactorBase
import tornado
import tornado.ioloop
+from tornado.log import app_log
from tornado.stack_context import NullContext
from tornado.ioloop import IOLoop
try:
self._func()
except:
- logging.error("_called caught exception", exc_info=True)
+ app_log.error("_called caught exception", exc_info=True)
def getTime(self):
return self._time
from __future__ import absolute_import, division, with_statement
import errno
-import logging
import os
import sys
import time
from binascii import hexlify
from tornado import ioloop
+from tornado.log import gen_log
try:
import multiprocessing # Python 2.6+
return os.sysconf("SC_NPROCESSORS_CONF")
except ValueError:
pass
- logging.error("Could not detect number of processors; assuming 1")
+ gen_log.error("Could not detect number of processors; assuming 1")
return 1
raise RuntimeError("Cannot run in multiple processes: IOLoop instance "
"has already been initialized. You cannot call "
"IOLoop.instance() before calling start_processes()")
- logging.info("Starting %d processes", num_processes)
+ gen_log.info("Starting %d processes", num_processes)
children = {}
def start_child(i):
continue
id = children.pop(pid)
if os.WIFSIGNALED(status):
- logging.warning("child %d (pid %d) killed by signal %d, restarting",
+ gen_log.warning("child %d (pid %d) killed by signal %d, restarting",
id, pid, os.WTERMSIG(status))
elif os.WEXITSTATUS(status) != 0:
- logging.warning("child %d (pid %d) exited with status %d, restarting",
+ gen_log.warning("child %d (pid %d) exited with status %d, restarting",
id, pid, os.WEXITSTATUS(status))
else:
- logging.info("child %d (pid %d) exited normally", id, pid)
+ gen_log.info("child %d (pid %d) exited normally", id, pid)
continue
num_restarts += 1
if num_restarts > max_restarts:
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main
from tornado.httputil import HTTPHeaders
from tornado.iostream import IOStream, SSLIOStream
+from tornado.log import gen_log
from tornado import stack_context
from tornado.util import b, GzipDecompressor
import contextlib
import copy
import functools
-import logging
import os.path
import re
import socket
self.queue.append((request, callback))
self._process_queue()
if self.queue:
- logging.debug("max_clients limit reached, request queued. "
+ gen_log.debug("max_clients limit reached, request queued. "
"%d active, %d queued requests." % (
len(self.active), len(self.queue)))
try:
yield
except Exception, e:
- logging.warning("uncaught exception", exc_info=True)
+ gen_log.warning("uncaught exception", exc_info=True)
self._run_callback(HTTPResponse(self.request, 599, error=e,
request_time=time.time() - self.start_time,
))
import cStringIO
import datetime
import linecache
-import logging
import os.path
import posixpath
import re
import threading
from tornado import escape
+from tornado.log import app_log
from tornado.util import bytes_type, ObjectDict
_DEFAULT_AUTOESCAPE = "xhtml_escape"
"exec")
except Exception:
formatted_code = _format_code(self.code).rstrip()
- logging.error("%s code:\n%s", self.name, formatted_code)
+ app_log.error("%s code:\n%s", self.name, formatted_code)
raise
def generate(self, **kwargs):
return execute()
except Exception:
formatted_code = _format_code(self.code).rstrip()
- logging.error("%s code:\n%s", self.name, formatted_code)
+ app_log.error("%s code:\n%s", self.name, formatted_code)
raise
def _generate_python(self, loader, compress_whitespace):
HTTPServer = None
IOLoop = None
SimpleAsyncHTTPClient = None
+from tornado.log import gen_log
from tornado.stack_context import StackContext, NullContext
from tornado.util import raise_exc_info
import contextlib
old_stream = handler.stream
try:
handler.stream = StringIO()
- logging.info("RUNNING TEST: " + str(self))
+ gen_log.info("RUNNING TEST: " + str(self))
old_error_count = len(result.failures) + len(result.errors)
super(LogTrapTestCase, self).run(result)
new_error_count = len(result.failures) + len(result.errors)
unittest.main(defaultTest="all", argv=argv, **kwargs)
except SystemExit, e:
if e.code == 0:
- logging.info('PASS')
+ gen_log.info('PASS')
else:
- logging.error('FAIL')
+ gen_log.error('FAIL')
if not options.autoreload:
raise
if options.autoreload:
import hmac
import httplib
import itertools
-import logging
import mimetypes
import os.path
import re
from tornado import escape
from tornado import locale
+from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
Additional keyword arguments are passed through to `write_error`.
"""
if self._headers_written:
- logging.error("Cannot send error response after headers written")
+ gen_log.error("Cannot send error response after headers written")
if not self._finished:
self.finish()
return
try:
self.write_error(status_code, **kwargs)
except Exception:
- logging.error("Uncaught exception in write_error", exc_info=True)
+ app_log.error("Uncaught exception in write_error", exc_info=True)
if not self._finished:
self.finish()
return callback(*args, **kwargs)
except Exception, e:
if self._headers_written:
- logging.error("Exception after headers written",
+ app_log.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
if e.log_message:
format = "%d %s: " + e.log_message
args = [e.status_code, self._request_summary()] + list(e.args)
- logging.warning(format, *args)
+ gen_log.warning(format, *args)
if e.status_code not in httplib.responses:
- logging.error("Bad HTTP status code: %d", e.status_code)
+ gen_log.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exc_info=sys.exc_info())
else:
self.send_error(e.status_code, exc_info=sys.exc_info())
else:
- logging.error("Uncaught exception %s\n%r", self._request_summary(),
+ app_log.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=True)
self.send_error(500, exc_info=sys.exc_info())
handlers.append(spec)
if spec.name:
if spec.name in self.named_handlers:
- logging.warning(
+ app_log.warning(
"Multiple handlers named %s; replacing previous value",
spec.name)
self.named_handlers[spec.name] = spec
self.settings["log_function"](handler)
return
if handler.get_status() < 400:
- log_method = logging.info
+ log_method = access_log.info
elif handler.get_status() < 500:
- log_method = logging.warning
+ log_method = access_log.warning
else:
- log_method = logging.error
+ log_method = access_log.error
request_time = 1000.0 * handler.request.request_time()
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time)
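Since Application.log_request defers to a "log_function" setting when one is supplied (the self.settings["log_function"](handler) call above), the default level selection can be replaced wholesale; a hedged sketch of such an override, where the empty handlers list is only a placeholder for an application's URL specs:

import tornado.web
from tornado.log import access_log

def log_errors_only(handler):
    # Illustrative replacement for the default request logger: drop
    # successful requests but keep the same summary format otherwise.
    if handler.get_status() < 400:
        return
    request_time = 1000.0 * handler.request.request_time()
    access_log.warning("%d %s %.2fms", handler.get_status(),
                       handler._request_summary(), request_time)

handlers = []  # placeholder URL spec list for illustration
application = tornado.web.Application(handlers, log_function=log_errors_only)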
hashes[abs_path] = hashlib.md5(f.read()).hexdigest()
f.close()
except Exception:
- logging.error("Could not open static file %r", path)
+ gen_log.error("Could not open static file %r", path)
hashes[abs_path] = None
hsh = hashes.get(abs_path)
if hsh:
return None
signature = _create_signature(secret, name, parts[0], parts[1])
if not _time_independent_equals(parts[2], signature):
- logging.warning("Invalid cookie signature %r", value)
+ gen_log.warning("Invalid cookie signature %r", value)
return None
timestamp = int(parts[1])
if timestamp < time.time() - max_age_days * 86400:
- logging.warning("Expired cookie %r", value)
+ gen_log.warning("Expired cookie %r", value)
return None
if timestamp > time.time() + 31 * 86400:
# _cookie_signature does not hash a delimiter between the
# parts of the cookie, so an attacker could shift trailing
# digits from the payload to the timestamp without altering the
# signature. For backwards compatibility, sanity-check timestamp
# here instead of modifying _cookie_signature.
- logging.warning("Cookie timestamp in future; possible tampering %r", value)
+ gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
return None
if parts[1].startswith(b("0")):
- logging.warning("Tampered cookie %r", value)
+ gen_log.warning("Tampered cookie %r", value)
return None
try:
return base64.b64decode(parts[0])
import array
import functools
import hashlib
-import logging
import struct
import time
import base64
import tornado.escape
import tornado.web
+from tornado.log import gen_log, app_log
from tornado.util import bytes_type, b
try:
return callback(*args, **kwargs)
except Exception:
- logging.error("Uncaught exception in %s",
+ app_log.error("Uncaught exception in %s",
self.request.path, exc_info=True)
self._abort()
return wrapper
try:
self._handle_websocket_headers()
except ValueError:
- logging.debug("Malformed WebSocket request received")
+ gen_log.debug("Malformed WebSocket request received")
self._abort()
return
try:
challenge_response = self.challenge_response(challenge)
except ValueError:
- logging.debug("Malformed key data in WebSocket request")
+ gen_log.debug("Malformed key data in WebSocket request")
self._abort()
return
self._write_response(challenge_response)
self._handle_websocket_headers()
self._accept_connection()
except ValueError:
- logging.debug("Malformed WebSocket request received")
+ gen_log.debug("Malformed WebSocket request received")
self._abort()
return
import Cookie
import httplib
-import logging
import sys
import time
import tornado
from tornado import escape
from tornado import httputil
+from tornado.log import access_log
from tornado import web
from tornado.escape import native_str, utf8, parse_qs_bytes
from tornado.util import b, bytes_type
def _log(self, status_code, request):
if status_code < 400:
- log_method = logging.info
+ log_method = access_log.info
elif status_code < 500:
- log_method = logging.warning
+ log_method = access_log.warning
else:
- log_method = logging.error
+ log_method = access_log.error
request_time = 1000.0 * request.request_time()
summary = request.method + " " + request.uri + " (" + \
request.remote_ip + ")"