git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
use *args with log.debug()/log.info(), [ticket:1520]
author: Mike Bayer <mike_mp@zzzcomputing.com>
Wed, 26 Aug 2009 19:46:04 +0000 (19:46 +0000)
committer: Mike Bayer <mike_mp@zzzcomputing.com>
Wed, 26 Aug 2009 19:46:04 +0000 (19:46 +0000)
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/pool.py
lib/sqlalchemy/topological.py

index e126cec6818373b37578215bc99261e14304e080..70ee295db4acd4ef0841cebd05853eb5bba392fe 100644 (file)
@@ -1118,7 +1118,7 @@ class Connection(Connectable):
     def _cursor_execute(self, cursor, statement, parameters, context=None):
         if self.engine._should_log_info:
             self.engine.logger.info(statement)
-            self.engine.logger.info(repr(parameters))
+            self.engine.logger.info("%r", parameters)
         try:
             self.dialect.do_execute(cursor, statement, parameters, context=context)
         except Exception, e:
@@ -1128,7 +1128,7 @@ class Connection(Connectable):
     def _cursor_executemany(self, cursor, statement, parameters, context=None):
         if self.engine._should_log_info:
             self.engine.logger.info(statement)
-            self.engine.logger.info(repr(parameters))
+            self.engine.logger.info("%r", parameters)
         try:
             self.dialect.do_executemany(cursor, statement, parameters, context=context)
         except Exception, e:
@@ -1496,7 +1496,7 @@ class RowProxy(object):
         self.__parent = parent
         self.__row = row
         if self.__parent._echo:
-            self.__parent.context.engine.logger.debug("Row " + repr(row))
+            self.__parent.context.engine.logger.debug("Row %r", row)
 
     def close(self):
         """Close the parent ResultProxy."""
@@ -1711,7 +1711,7 @@ class ResultProxy(object):
 
         if self._echo:
             self.context.engine.logger.debug(
-                "Col " + repr(tuple(x[0] for x in metadata)))
+                "Col %r", tuple(x[0] for x in metadata))
 
     def __key_fallback(self):
         # create a closure without 'self' to avoid circular references
index 3489d81f2e02c8ac7789ced0009bfe0888e89b58..739b2c51945b452f15c728abccf57c35c2490214 100644 (file)
@@ -1000,12 +1000,12 @@ class RelationProperty(StrategizedProperty):
 
     def _post_init(self):
         if self._should_log_info:
-            self.logger.info(str(self) + " setup primary join %s" % self.primaryjoin)
-            self.logger.info(str(self) + " setup secondary join %s" % self.secondaryjoin)
-            self.logger.info(str(self) + " synchronize pairs [%s]" % ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs))
-            self.logger.info(str(self) + " secondary synchronize pairs [%s]" % ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])))
-            self.logger.info(str(self) + " local/remote pairs [%s]" % ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs))
-            self.logger.info(str(self) + " relation direction %s" % self.direction)
+            self.logger.info("%s setup primary join %s", self, self.primaryjoin)
+            self.logger.info("%s setup secondary join %s", self, self.secondaryjoin)
+            self.logger.info("%s synchronize pairs [%s]", self, ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs))
+            self.logger.info("%s secondary synchronize pairs [%s]", self, ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])))
+            self.logger.info("%s local/remote pairs [%s]", self, ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs))
+            self.logger.info("%s relation direction %s", self, self.direction)
 
         if self.uselist is None and self.direction is MANYTOONE:
             self.uselist = False
index 23114cdab2499a65f569fea873754de6dbe49ebe..a76eae0e0cc6ba7ddd23e93323948be27b9cf6c4 100644 (file)
@@ -133,7 +133,7 @@ class ColumnLoader(LoaderStrategy):
                 if isnew:
                     state.expire_attributes([key])
             if self._should_log_debug:
-                self.logger.debug("%s deferring load" % self)
+                self.logger.debug("%s deferring load", self)
             return (new_execute, None)
 
 log.class_logger(ColumnLoader)
@@ -143,7 +143,7 @@ class CompositeColumnLoader(ColumnLoader):
 
     def init_class_attribute(self, mapper):
         self.is_class_level = True
-        self.logger.info("%s register managed composite attribute" % self)
+        self.logger.info("%s register managed composite attribute", self)
 
         def copy(obj):
             if obj is None:
@@ -179,7 +179,7 @@ class CompositeColumnLoader(ColumnLoader):
                     if isnew:
                         state.expire_attributes([key])
                 if self._should_log_debug:
-                    self.logger.debug("%s deferring load" % self)
+                    self.logger.debug("%s deferring load", self)
                 return (new_execute, None)
         else:
             def new_execute(state, dict_, row, **flags):
@@ -367,7 +367,7 @@ class LazyLoader(AbstractRelationLoader):
         super(LazyLoader, self).init()
         (self.__lazywhere, self.__bind_to_col, self._equated_columns) = self._create_lazy_clause(self.parent_property)
         
-        self.logger.info("%s lazy loading clause %s" % (self, self.__lazywhere))
+        self.logger.info("%s lazy loading clause %s", self, self.__lazywhere)
 
         # determine if our "lazywhere" clause is the same as the mapper's
         # get() clause.  then we can just use mapper.get()
index bca6b4f463da07ac973ec50ae4232491bcfc3ab4..2047557bda80e725c2274afec27b65818e022dc7 100644 (file)
@@ -127,13 +127,13 @@ class UOWTransaction(object):
         # if object is not in the overall session, do nothing
         if not self.session._contains_state(state):
             if self._should_log_debug:
-                self.logger.debug("object %s not part of session, not registering for flush" % 
-                                        (mapperutil.state_str(state)))
+                self.logger.debug("object %s not part of session, not registering for flush",
+                                        mapperutil.state_str(state))
             return
 
         if self._should_log_debug:
-            self.logger.debug("register object for flush: %s isdelete=%s listonly=%s postupdate=%s"
-                                    % (mapperutil.state_str(state), isdelete, listonly, postupdate))
+            self.logger.debug("register object for flush: %s isdelete=%s listonly=%s postupdate=%s",
+                                    mapperutil.state_str(state), isdelete, listonly, postupdate)
 
         mapper = _state_mapper(state)
 
@@ -257,7 +257,7 @@ class UOWTransaction(object):
 
         tasks = self._sort_dependencies()
         if self._should_log_info:
-            self.logger.info("Task dump:\n" + self._dump(tasks))
+            self.logger.info("Task dump:\n%s", self._dump(tasks))
         UOWExecutor().execute(self, tasks)
         if self._should_log_info:
             self.logger.info("Execute Complete")
@@ -305,7 +305,7 @@ class UOWTransaction(object):
             self.logger.debug("Dependent tuples:\n" + "\n".join(
                     "(%s->%s)" % (d[0].class_.__name__, d[1].class_.__name__)
                     for d in self.dependencies))
-            self.logger.debug("Dependency sort:\n"+ str(ret))
+            self.logger.debug("Dependency sort:\n%s", ret)
         return ret
 
 log.class_logger(UOWTransaction)
index dabdc6e35345f0b339ad1dd611ee4fc8f5de30bd..6a67d78ebe4cc1f0be13c77639f0a058a76dad3f 100644 (file)
@@ -192,8 +192,8 @@ class Pool(object):
         if hasattr(listener, 'checkin'):
             self._on_checkin.append(listener)
 
-    def log(self, msg):
-        self.logger.info(msg)
+    def log(self, msg, *args):
+        self.logger.info(msg, *args)
 
 class _ConnectionRecord(object):
     def __init__(self, pool):
@@ -211,23 +211,23 @@ class _ConnectionRecord(object):
     def close(self):
         if self.connection is not None:
             if self.__pool._should_log_info:
-                self.__pool.log("Closing connection %r" % self.connection)
+                self.__pool.log("Closing connection %r", self.connection)
             try:
                 self.connection.close()
             except (SystemExit, KeyboardInterrupt):
                 raise
             except:
                 if self.__pool._should_log_info:
-                    self.__pool.log("Exception closing connection %r" %
+                    self.__pool.log("Exception closing connection %r",
                                     self.connection)
 
     def invalidate(self, e=None):
         if self.__pool._should_log_info:
             if e is not None:
-                self.__pool.log("Invalidate connection %r (reason: %s:%s)" %
-                                (self.connection, e.__class__.__name__, e))
+                self.__pool.log("Invalidate connection %r (reason: %s:%s)",
+                                self.connection, e.__class__.__name__, e)
             else:
-                self.__pool.log("Invalidate connection %r" % self.connection)
+                self.__pool.log("Invalidate connection %r", self.connection)
         self.__close()
         self.connection = None
 
@@ -240,7 +240,7 @@ class _ConnectionRecord(object):
                     l.connect(self.connection, self)
         elif (self.__pool._recycle > -1 and time.time() - self.starttime > self.__pool._recycle):
             if self.__pool._should_log_info:
-                self.__pool.log("Connection %r exceeded timeout; recycling" %
+                self.__pool.log("Connection %r exceeded timeout; recycling",
                                 self.connection)
             self.__close()
             self.connection = self.__connect()
@@ -253,7 +253,7 @@ class _ConnectionRecord(object):
     def __close(self):
         try:
             if self.__pool._should_log_info:
-                self.__pool.log("Closing connection %r" % self.connection)
+                self.__pool.log("Closing connection %r", self.connection)
             self.connection.close()
         except Exception, e:
             if self.__pool._should_log_info:
@@ -267,11 +267,11 @@ class _ConnectionRecord(object):
             self.starttime = time.time()
             connection = self.__pool._creator()
             if self.__pool._should_log_info:
-                self.__pool.log("Created new connection %r" % connection)
+                self.__pool.log("Created new connection %r", connection)
             return connection
         except Exception, e:
             if self.__pool._should_log_info:
-                self.__pool.log("Error on connect(): %s" % e)
+                self.__pool.log("Error on connect(): %s", e)
             raise
 
 
@@ -296,7 +296,7 @@ def _finalize_fairy(connection, connection_record, pool, ref=None):
     if connection_record is not None:
         connection_record.fairy = None
         if pool._should_log_info:
-            pool.log("Connection %r being returned to pool" % connection)
+            pool.log("Connection %r being returned to pool", connection)
         if pool._on_checkin:
             for l in pool._on_checkin:
                 l.checkin(connection, connection_record)
@@ -391,7 +391,7 @@ class _ConnectionFairy(object):
             except exc.DisconnectionError, e:
                 if self._pool._should_log_info:
                     self._pool.log(
-                    "Disconnection detected on checkout: %s" % e)
+                    "Disconnection detected on checkout: %s", e)
                 self._connection_record.invalidate(e)
                 self.connection = self._connection_record.get_connection()
                 attempts -= 1
index fbdb17963b8b899e479ca2c0d178437db659348f..6e4166faad5d5b4e1287c4eac3dae32a03831760 100644 (file)
@@ -79,7 +79,7 @@ class _Node(object):
             ''.join(str(n) for n in self.children)
 
     def __repr__(self):
-        return "%s" % (str(self.item))
+        return str(self.item)
 
     def all_deps(self):
         """Return a set of dependencies for this node and all its cycles."""