def _cursor_execute(self, cursor, statement, parameters, context=None):
if self.engine._should_log_info:
self.engine.logger.info(statement)
- self.engine.logger.info(repr(parameters))
+ self.engine.logger.info("%r", parameters)
try:
self.dialect.do_execute(cursor, statement, parameters, context=context)
except Exception, e:
def _cursor_executemany(self, cursor, statement, parameters, context=None):
if self.engine._should_log_info:
self.engine.logger.info(statement)
- self.engine.logger.info(repr(parameters))
+ self.engine.logger.info("%r", parameters)
try:
self.dialect.do_executemany(cursor, statement, parameters, context=context)
except Exception, e:
self.__parent = parent
self.__row = row
if self.__parent._echo:
- self.__parent.context.engine.logger.debug("Row " + repr(row))
+ self.__parent.context.engine.logger.debug("Row %r", row)
def close(self):
"""Close the parent ResultProxy."""
if self._echo:
self.context.engine.logger.debug(
- "Col " + repr(tuple(x[0] for x in metadata)))
+ "Col %r", tuple(x[0] for x in metadata))
def __key_fallback(self):
# create a closure without 'self' to avoid circular references
def _post_init(self):
if self._should_log_info:
- self.logger.info(str(self) + " setup primary join %s" % self.primaryjoin)
- self.logger.info(str(self) + " setup secondary join %s" % self.secondaryjoin)
- self.logger.info(str(self) + " synchronize pairs [%s]" % ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs))
- self.logger.info(str(self) + " secondary synchronize pairs [%s]" % ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])))
- self.logger.info(str(self) + " local/remote pairs [%s]" % ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs))
- self.logger.info(str(self) + " relation direction %s" % self.direction)
+ self.logger.info("%s setup primary join %s", self, self.primaryjoin)
+ self.logger.info("%s setup secondary join %s", self, self.secondaryjoin)
+ self.logger.info("%s synchronize pairs [%s]", self, ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs))
+ self.logger.info("%s secondary synchronize pairs [%s]", self, ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])))
+ self.logger.info("%s local/remote pairs [%s]", self, ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs))
+ self.logger.info("%s relation direction %s", self, self.direction)
if self.uselist is None and self.direction is MANYTOONE:
self.uselist = False
if isnew:
state.expire_attributes([key])
if self._should_log_debug:
- self.logger.debug("%s deferring load" % self)
+ self.logger.debug("%s deferring load", self)
return (new_execute, None)
log.class_logger(ColumnLoader)
def init_class_attribute(self, mapper):
self.is_class_level = True
- self.logger.info("%s register managed composite attribute" % self)
+ self.logger.info("%s register managed composite attribute", self)
def copy(obj):
if obj is None:
if isnew:
state.expire_attributes([key])
if self._should_log_debug:
- self.logger.debug("%s deferring load" % self)
+ self.logger.debug("%s deferring load", self)
return (new_execute, None)
else:
def new_execute(state, dict_, row, **flags):
super(LazyLoader, self).init()
(self.__lazywhere, self.__bind_to_col, self._equated_columns) = self._create_lazy_clause(self.parent_property)
- self.logger.info("%s lazy loading clause %s" % (self, self.__lazywhere))
+ self.logger.info("%s lazy loading clause %s", self, self.__lazywhere)
# determine if our "lazywhere" clause is the same as the mapper's
# get() clause. then we can just use mapper.get()
# if object is not in the overall session, do nothing
if not self.session._contains_state(state):
if self._should_log_debug:
- self.logger.debug("object %s not part of session, not registering for flush" %
- (mapperutil.state_str(state)))
+ self.logger.debug("object %s not part of session, not registering for flush",
+ mapperutil.state_str(state))
return
if self._should_log_debug:
- self.logger.debug("register object for flush: %s isdelete=%s listonly=%s postupdate=%s"
- % (mapperutil.state_str(state), isdelete, listonly, postupdate))
+ self.logger.debug("register object for flush: %s isdelete=%s listonly=%s postupdate=%s",
+ mapperutil.state_str(state), isdelete, listonly, postupdate)
mapper = _state_mapper(state)
tasks = self._sort_dependencies()
if self._should_log_info:
- self.logger.info("Task dump:\n" + self._dump(tasks))
+ self.logger.info("Task dump:\n%s", self._dump(tasks))
UOWExecutor().execute(self, tasks)
if self._should_log_info:
self.logger.info("Execute Complete")
- self.logger.debug("Dependent tuples:\n" + "\n".join(
- "(%s->%s)" % (d[0].class_.__name__, d[1].class_.__name__)
- for d in self.dependencies))
+ self.logger.debug("Dependent tuples:\n%s", "\n".join(
+ "(%s->%s)" % (d[0].class_.__name__, d[1].class_.__name__)
+ for d in self.dependencies))
- self.logger.debug("Dependency sort:\n"+ str(ret))
+ self.logger.debug("Dependency sort:\n%s", ret)
return ret
log.class_logger(UOWTransaction)
if hasattr(listener, 'checkin'):
self._on_checkin.append(listener)
- def log(self, msg):
- self.logger.info(msg)
+ def log(self, msg, *args):
+ self.logger.info(msg, *args)
class _ConnectionRecord(object):
def __init__(self, pool):
def close(self):
if self.connection is not None:
if self.__pool._should_log_info:
- self.__pool.log("Closing connection %r" % self.connection)
+ self.__pool.log("Closing connection %r", self.connection)
try:
self.connection.close()
except (SystemExit, KeyboardInterrupt):
raise
except:
if self.__pool._should_log_info:
- self.__pool.log("Exception closing connection %r" %
+ self.__pool.log("Exception closing connection %r",
self.connection)
def invalidate(self, e=None):
if self.__pool._should_log_info:
if e is not None:
- self.__pool.log("Invalidate connection %r (reason: %s:%s)" %
- (self.connection, e.__class__.__name__, e))
+ self.__pool.log("Invalidate connection %r (reason: %s:%s)",
+ self.connection, e.__class__.__name__, e)
else:
- self.__pool.log("Invalidate connection %r" % self.connection)
+ self.__pool.log("Invalidate connection %r", self.connection)
self.__close()
self.connection = None
l.connect(self.connection, self)
elif (self.__pool._recycle > -1 and time.time() - self.starttime > self.__pool._recycle):
if self.__pool._should_log_info:
- self.__pool.log("Connection %r exceeded timeout; recycling" %
+ self.__pool.log("Connection %r exceeded timeout; recycling",
self.connection)
self.__close()
self.connection = self.__connect()
def __close(self):
try:
if self.__pool._should_log_info:
- self.__pool.log("Closing connection %r" % self.connection)
+ self.__pool.log("Closing connection %r", self.connection)
self.connection.close()
except Exception, e:
if self.__pool._should_log_info:
self.starttime = time.time()
connection = self.__pool._creator()
if self.__pool._should_log_info:
- self.__pool.log("Created new connection %r" % connection)
+ self.__pool.log("Created new connection %r", connection)
return connection
except Exception, e:
if self.__pool._should_log_info:
- self.__pool.log("Error on connect(): %s" % e)
+ self.__pool.log("Error on connect(): %s", e)
raise
if connection_record is not None:
connection_record.fairy = None
if pool._should_log_info:
- pool.log("Connection %r being returned to pool" % connection)
+ pool.log("Connection %r being returned to pool", connection)
if pool._on_checkin:
for l in pool._on_checkin:
l.checkin(connection, connection_record)
except exc.DisconnectionError, e:
if self._pool._should_log_info:
self._pool.log(
- "Disconnection detected on checkout: %s" % e)
+ "Disconnection detected on checkout: %s", e)
self._connection_record.invalidate(e)
self.connection = self._connection_record.get_connection()
attempts -= 1
''.join(str(n) for n in self.children)
def __repr__(self):
- return "%s" % (str(self.item))
+ return str(self.item)
def all_deps(self):
"""Return a set of dependencies for this node and all its cycles."""