]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
added "recreate()" argument to connection pool classes
authorMike Bayer <mike_mp@zzzcomputing.com>
Tue, 17 Apr 2007 00:21:25 +0000 (00:21 +0000)
committerMike Bayer <mike_mp@zzzcomputing.com>
Tue, 17 Apr 2007 00:21:25 +0000 (00:21 +0000)
This method is called when invalidate() occurs for a disconnect condition,
so that the entire pool is recreated, thereby avoiding repeat errors on the
remaining connections in the pool.
dispose() is called as well (also fixed up), but it can't guarantee that all connections are closed.

CHANGES
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/pool.py
test/engine/pool.py

diff --git a/CHANGES b/CHANGES
index d8875df0859719b0b7af57352b065ce19cf703b6..42c109d99efecd6591d52a3a41423c8d75aa5aa7 100644 (file)
--- a/CHANGES
+++ b/CHANGES
     - server side cursor support fully functional in postgres
       [ticket:514].
     - improved framework for auto-invalidation of connections that have
-      lost their underlying database - the error catching/invalidate
-      step is totally moved to the connection pool. #516
+      lost their underlying database, via dialect-specific detection
+      of exceptions corresponding to that database's disconnect
+      related error messages.  Additionally, when a "connection no 
+      longer open" condition is detected, the entire connection pool 
+      is discarded and replaced with a new instance.  #516
 - sql:
     - preliminary support for unicode table names, column names and 
       SQL statements added, for databases which can support them.
index 6f0ff029a081acb0c6406c64f536ed01c8fd4d0e..e7a3f8feb1549ab0fac4c74aa2fc4b4362b325d1 100644 (file)
@@ -577,6 +577,7 @@ class Connection(Connectable):
         except Exception, e:
             if self.dialect.is_disconnect(e):
                 self.__connection.invalidate(e=e)
+                self.engine.connection_provider.dispose()
             self._autorollback()
             if self.__close_with_result:
                 self.close()
@@ -588,6 +589,7 @@ class Connection(Connectable):
         except Exception, e:
             if self.dialect.is_disconnect(e):
                 self.__connection.invalidate(e=e)
+                self.engine.connection_provider.dispose()
             self._autorollback()
             if self.__close_with_result:
                 self.close()
index 969bde8d9dc44c60de7f8977fda94143becacb84..f1858acdc0913a3586a6264877f5b42f7a048282 100644 (file)
@@ -20,9 +20,8 @@ class PoolConnectionProvider(base.ConnectionProvider):
 
     def dispose(self):
         self._pool.dispose()
-        if hasattr(self, '_dbproxy'):
-            self._dbproxy.dispose()
-
+        self._pool = self._pool.recreate()
+        
 class DefaultDialect(base.Dialect):
     """Default implementation of Dialect"""
 
index a617f8fecd1cfadde87081cef79e809a58834f70..9a2cdad0e2d3ea0ba6993377b5e06e172363b5fc 100644 (file)
@@ -142,7 +142,20 @@ class Pool(object):
 
     def create_connection(self):
         return _ConnectionRecord(self)
+    
+    def recreate(self):
+        """return a new instance of this Pool's class with identical creation arguments."""
+        raise NotImplementedError()
 
+    def dispose(self):
+        """dispose of this pool.
+        
+        this method leaves the possibility of checked-out connections remaining opened,
+        so it is advised to not reuse the pool once dispose() is called, and to instead
+        use a new pool constructed by the recreate() method.
+        """
+        raise NotImplementedError()
+        
     def connect(self):
         if not self._use_threadlocal:
             return _ConnectionFairy(self).checkout()
@@ -172,17 +185,15 @@ class Pool(object):
     def log(self, msg):
         self.logger.info(msg)
 
-    def dispose(self):
-        raise NotImplementedError()
-
 class _ConnectionRecord(object):
     def __init__(self, pool):
         self.__pool = pool
         self.connection = self.__connect()
 
     def close(self):
-        self.__pool.log("Closing connection %s" % repr(self.connection))
-        self.connection.close()
+        if self.connection is not None:
+            self.__pool.log("Closing connection %s" % repr(self.connection))
+            self.connection.close()
 
     def invalidate(self, e=None):
         if e is not None:
@@ -348,7 +359,17 @@ class SingletonThreadPool(Pool):
         self._conns = {}
         self.size = pool_size
 
+    def recreate(self):
+        self.log("Pool recreating")
+        return SingletonThreadPool(self._creator, pool_size=self.size, recycle=self._recycle, echo=self.echo, use_threadlocal=self._use_threadlocal, auto_close_cursors=self.auto_close_cursors, disallow_open_cursors=self.disallow_open_cursors)
+        
     def dispose(self):
+        """dispose of this pool.
+        
+        this method leaves the possibility of checked-out connections remaining opened,
+        so it is advised to not reuse the pool once dispose() is called, and to instead
+        use a new pool constructed by the recreate() method.
+        """
         for key, conn in self._conns.items():
             try:
                 conn.close()
@@ -426,6 +447,10 @@ class QueuePool(Pool):
         self._max_overflow = max_overflow
         self._timeout = timeout
 
+    def recreate(self):
+        self.log("Pool recreating")
+        return QueuePool(self._creator, pool_size=self._pool.maxsize, max_overflow=self._max_overflow, timeout=self._timeout, recycle=self._recycle, echo=self.echo, use_threadlocal=self._use_threadlocal, auto_close_cursors=self.auto_close_cursors, disallow_open_cursors=self.disallow_open_cursors)
+
     def do_return_conn(self, conn):
         try:
             self._pool.put(conn, False)
index db97ea6f8db8505b460d692b3ec64aa1597a2285..a2f7f9f35a3f43c5708023764c43ff8c5a44e142 100644 (file)
@@ -180,6 +180,14 @@ class PoolTest(PersistTest):
         c1 = p.connect()
         assert c1.connection.id != c_id
 
+    def test_recreate(self):
+        dbapi = MockDBAPI()
+        p = pool.QueuePool(creator = lambda: dbapi.connect('foo.db'), pool_size = 1, max_overflow = 0, use_threadlocal = False)
+        p2 = p.recreate()
+        assert p2.size() == 1
+        assert p2._use_threadlocal is False
+        assert p2._max_overflow == 0
+        
     def test_reconnect(self):
         dbapi = MockDBAPI()
         p = pool.QueuePool(creator = lambda: dbapi.connect('foo.db'), pool_size = 1, max_overflow = 0, use_threadlocal = False)