added some echo arguments for uow, pool, propagated from the engine
author Mike Bayer <mike_mp@zzzcomputing.com>
Sat, 21 Jan 2006 06:33:33 +0000 (06:33 +0000)
committer Mike Bayer <mike_mp@zzzcomputing.com>
Sat, 21 Jan 2006 06:33:33 +0000 (06:33 +0000)
lib/sqlalchemy/engine.py
lib/sqlalchemy/mapping/objectstore.py
lib/sqlalchemy/pool.py

lib/sqlalchemy/engine.py
index a1e57f6b3439f668c1ba3db9631ea0dbd1040e78..2d038478e3a2860dc2fb67f22204d5349933b8c0 100644 (file)
@@ -161,7 +161,7 @@ class SQLEngine(schema.SchemaEngine):
     SQLEngines are constructed via the create_engine() function inside this package.
     """
     
-    def __init__(self, pool=None, echo=False, logger=None, default_ordering=False, **params):
+    def __init__(self, pool=None, echo=False, logger=None, default_ordering=False, echo_pool=False, echo_uow=False, **params):
         """constructs a new SQLEngine.   SQLEngines should be constructed via the create_engine()
         function which will construct the appropriate subclass of SQLEngine."""
         # get a handle on the connection pool via the connect arguments
@@ -169,20 +169,21 @@ class SQLEngine(schema.SchemaEngine):
         # by direct usage of pool.manager(<module>).connect(*args, **params)
         (cargs, cparams) = self.connect_args()
         if pool is None:
+            params['echo'] = echo_pool
             self._pool = sqlalchemy.pool.manage(self.dbapi(), **params).get_pool(*cargs, **cparams)
         else:
             self._pool = pool
         self.default_ordering=default_ordering
         self.echo = echo
+        self.echo_uow = echo_uow
         self.context = util.ThreadLocal(raiseerror=False)
         self.tables = {}
-        self.notes = {}
         self._figure_paramstyle()
         if logger is None:
             self.logger = sys.stdout
         else:
             self.logger = logger
-    
+
     def dispose(self):
         """disposes of the underlying pool manager for this SQLEngine."""
         (cargs, cparams) = self.connect_args()
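
For context, a hedged usage sketch of the new flags, assuming create_engine() forwards its keyword arguments into SQLEngine.__init__ as the docstring above describes; the 'sqlite' connection arguments here are illustrative only and not taken from this commit:

import sqlalchemy

# echo      -> SQL statement logging on the engine (unchanged behaviour)
# echo_pool -> forwarded into the pool manager via params['echo'] above
# echo_uow  -> stored as engine.echo_uow and read during unit-of-work commit
engine = sqlalchemy.create_engine('sqlite', {'filename': 'test.db'},
                                  echo=False, echo_pool=True, echo_uow=True)
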
lib/sqlalchemy/mapping/objectstore.py
index 6dcd184577c1b402f74afd633d642b1453370eb2..8dfa785e8f452b469380eda3d15112a2b9a0cdb9 100644 (file)
@@ -21,7 +21,7 @@ import StringIO
 __all__ = ['get_id_key', 'get_row_key', 'commit', 'update', 'clear', 'delete', 
         'begin', 'has_key', 'has_instance', 'UnitOfWork']
 
-LOG = 1
+LOG = False
 
 def get_id_key(ident, class_, table):
     """returns an identity-map key for use in storing/retrieving an item from the identity
@@ -276,11 +276,13 @@ class UnitOfWork(object):
         for mapper in commit_context.mappers:
             for e in mapper.engines:
                 engines.append(e)
-                
+        
+        echo_commit = False        
         for e in engines:
+            echo_commit = echo_commit or e.echo_uow
             e.begin()
         try:
-            commit_context.execute()
+            commit_context.execute(echo=echo_commit)
         except:
             for e in engines:
                 e.rollback()
@@ -388,12 +390,12 @@ class UOWTransaction(object):
     def register_deleted_object(self, obj):
         self.deleted_objects.append(obj)
         
-    def execute(self):
+    def execute(self, echo=False):
         for task in self.tasks.values():
             task.mapper.register_dependencies(self)
 
         head = self._sort_dependencies()
-        if LOG:
+        if LOG or echo:
             print "Task dump:\n" + head.dump()
         if head is not None:
             head.execute(self)
@@ -693,7 +695,7 @@ class UOWTask(object):
         def _repr_task(task):
             if task.mapper is not None:
                 if task.mapper.__class__.__name__ == 'Mapper':
-                    name = task.mapper.class_.__name__ + "/" + task.mapper.primarytable.name
+                    name = task.mapper.class_.__name__ + "/" + task.mapper.primarytable.id
                 else:
                     name = repr(task.mapper)
             else:
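
A minimal standalone sketch of the propagation added above: commit() now ORs echo_uow across every engine taking part, so a single opted-in engine enables the task dump for the whole flush. The _FakeEngine class is a hypothetical stand-in used only for illustration:

class _FakeEngine(object):
    # hypothetical stand-in carrying only the attribute commit() reads
    def __init__(self, echo_uow=False):
        self.echo_uow = echo_uow

engines = [_FakeEngine(), _FakeEngine(echo_uow=True)]

echo_commit = False
for e in engines:
    echo_commit = echo_commit or e.echo_uow

assert echo_commit  # one engine opted in, so execute(echo=True) prints the task dump
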
lib/sqlalchemy/pool.py
index 699aadb042038972124d0fbd5912350f135b7052..e62ac709a434e252fa9d535ad8d3c960288c4505 100644 (file)
@@ -84,11 +84,25 @@ class Pool(object):
             agent = ConnectionFairy(self)
             self._threadconns[thread.get_ident()] = agent
             return agent
-            
+
+    def return_conn(self, conn):
+        if self._echo:
+            self.log("return connection to pool")
+        self.do_return_conn(conn)
+        
     def get(self):
+        if self._echo:
+            self.log("get connection from pool")
+            self.log(self.status())
+        return self.do_get()
+        
+    def do_get(self):
         raise NotImplementedError()
         
-    def return_conn(self, conn):
+    def do_return_conn(self, conn):
+        raise NotImplementedError()
+    
+    def status(self):
         raise NotImplementedError()
 
     def log(self, msg):
@@ -127,11 +141,14 @@ class SingletonThreadPool(Pool):
         Pool.__init__(self, **params)
         self._conns = {}
         self._creator = creator
-        
-    def return_conn(self, conn):
+
+    def status(self):
+        return "SingletonThreadPool size: %d" % len(self._conns)
+
+    def do_return_conn(self, conn):
         pass
         
-    def get(self):
+    def do_get(self):
         try:
             return self._conns[thread.get_ident()]
         except KeyError:
@@ -146,7 +163,7 @@ class QueuePool(Pool):
         self._overflow = 0 - pool_size
         self._max_overflow = max_overflow
     
-    def return_conn(self, conn):
+    def do_return_conn(self, conn):
         if self._echo:
             self.log("return connection to pool")
         try:
@@ -154,7 +171,7 @@ class QueuePool(Pool):
         except Queue.Full:
             self._overflow -= 1
 
-    def get(self):
+    def do_get(self):
         if self._echo:
             self.log("get connection from pool")
             self.log(self.status())
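
The pool.py changes split the public get()/return_conn() methods, which now carry the echo logging, from the do_get()/do_return_conn()/status() hooks that concrete pools override. A hedged sketch of a subclass written against that template-method contract; ListPool is hypothetical and not part of this commit:

class ListPool(Pool):
    """hypothetical pool keeping idle connections in a plain list."""
    def __init__(self, creator, **params):
        Pool.__init__(self, **params)
        self._creator = creator
        self._free = []

    def status(self):
        # surfaced by Pool.get() when pool echo is enabled
        return "ListPool free connections: %d" % len(self._free)

    def do_get(self):
        # Pool.get() has already logged; just produce a connection
        if self._free:
            return self._free.pop()
        return self._creator()

    def do_return_conn(self, conn):
        # Pool.return_conn() has already logged; just stash the connection
        self._free.append(conn)
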