adjustments to pool stemming from changes made for [ticket:224].
author     Mike Bayer <mike_mp@zzzcomputing.com>
           Fri, 4 Aug 2006 06:21:58 +0000 (06:21 +0000)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Fri, 4 Aug 2006 06:21:58 +0000 (06:21 +0000)
overflow counter should only be decremented if the connection actually
succeeded.  added a test script to attempt testing this.

CHANGES
lib/sqlalchemy/pool.py
test/perf/poolload.py [new file with mode: 0644]

diff --git a/CHANGES b/CHANGES
index 487b7c5ba2e54990ef922c5f9abd1dfedc86bced..da97fff3537e401f517a92a98e2876ebaafd95b4 100644
--- a/CHANGES
+++ b/CHANGES
@@ -14,6 +14,9 @@ better error message in PropertyLoader (i.e. relation()/backref()) for when
 the join condition can't be reasonably determined.
 - sqlite creates ForeignKeyConstraint objects properly upon table 
 reflection.
+- adjustments to pool stemming from changes made for [ticket:224]. 
+overflow counter should only be decremented if the connection actually
+succeeded.  added a test script to attempt testing this.
 
 0.2.6
 - big overhaul to schema to allow truly composite primary and foreign
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 7a88ac6f7487f2f199a03005f4ec4bf40e9ab82e..d71f645a6e72f06438b3170a2875d335b055711b 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -102,9 +102,9 @@ class Pool(object):
         self._purge_for_threadlocal()
         self.do_return_conn(agent.connection)
 
-    def return_invalid(self):
+    def return_invalid(self, agent):
         self._purge_for_threadlocal()
-        self.do_return_invalid()
+        self.do_return_invalid(agent.connection)
         
     def get(self):
         return self.do_get()
@@ -115,7 +115,7 @@ class Pool(object):
     def do_return_conn(self, conn):
         raise NotImplementedError()
         
-    def do_return_invalid(self):
+    def do_return_invalid(self, conn):
         raise NotImplementedError()
         
     def status(self):
@@ -141,7 +141,7 @@ class ConnectionFairy(object):
                 self.connection = pool.get()
             except:
                 self.connection = None
-                self.pool.return_invalid()
+                self.pool.return_invalid(self)
                 raise
         if self.pool.echo:
             self.pool.log("Connection %s checked out from pool" % repr(self.connection))
@@ -149,7 +149,7 @@ class ConnectionFairy(object):
         if self.pool.echo:
             self.pool.log("Invalidate connection %s" % repr(self.connection))
         self.connection = None
-        self.pool.return_invalid()
+        self.pool.return_invalid(self)
     def cursor(self, *args, **kwargs):
         return CursorFairy(self, self.connection.cursor(*args, **kwargs))
     def __getattr__(self, key):
@@ -204,7 +204,7 @@ class SingletonThreadPool(Pool):
     def do_return_conn(self, conn):
         pass
         
-    def do_return_invalid(self):
+    def do_return_invalid(self, conn):
         try:
             del self._conns[thread.get_ident()]
         except KeyError:
@@ -235,8 +235,9 @@ class QueuePool(Pool):
         except Queue.Full:
             self._overflow -= 1
 
-    def do_return_invalid(self):
-        self._overflow -= 1
+    def do_return_invalid(self, conn):
+        if conn is not None:
+            self._overflow -= 1
         
     def do_get(self):
         try:
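
To make the accounting above easier to follow, here is a small illustrative sketch of the same bookkeeping. It is not the real QueuePool/ConnectionFairy code; TinyQueuePool and checkout are invented names, and the exact ordering inside do_get() is an assumption, but it shows why the decrement has to be conditional: a checkout that fails or times out never obtained a connection, so handing an overflow slot back for it would let the counter drift and allow the pool to keep opening connections past max_overflow.

# illustrative sketch only (modern Python) -- not the SQLAlchemy classes
import queue

class TinyQueuePool:
    def __init__(self, creator, pool_size, max_overflow, timeout):
        self._creator = creator
        self._pool = queue.Queue(pool_size)
        self._overflow = 0 - pool_size       # same starting convention as QueuePool
        self._max_overflow = max_overflow
        self._timeout = timeout

    def do_get(self):
        try:
            # wait for a returned connection only when no overflow slot is left
            wait = self._overflow >= self._max_overflow
            return self._pool.get(wait, self._timeout)
        except queue.Empty:
            if self._overflow >= self._max_overflow:
                raise TimeoutError("pool limit reached, checkout timed out")
            conn = self._creator()           # may raise; nothing has been counted yet
            self._overflow += 1              # count the connection only once it exists
            return conn

    def do_return_invalid(self, conn):
        # the change in this commit: a failed checkout never produced a
        # connection, so there is no overflow slot to hand back
        if conn is not None:
            self._overflow -= 1

def checkout(pool):
    # mimics ConnectionFairy.__init__: on failure, report the (None)
    # connection back to the pool and re-raise
    conn = None
    try:
        conn = pool.do_get()
        return conn
    except Exception:
        pool.do_return_invalid(conn)         # conn is still None here
        raise

With the old unconditional decrement, every timed-out checkout pulled _overflow down by one, so later do_get() calls saw headroom that did not exist and kept creating connections; the conditional decrement keeps the counter honest.
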
diff --git a/test/perf/poolload.py b/test/perf/poolload.py
new file mode 100644
index 0000000..1b130f5
--- /dev/null
+++ b/test/perf/poolload.py
@@ -0,0 +1,36 @@
+# this program should open three connections.  then after five seconds, the remaining
+# 45 threads should receive a timeout error.  then the program will just stop until
+# ctrl-C is pressed.  it should *NOT* open a bunch of new connections.
+
+from sqlalchemy import *
+import sqlalchemy.pool as pool
+import psycopg2 as psycopg
+import thread,time
+psycopg = pool.manage(psycopg,pool_size=2,max_overflow=1, timeout=5, echo=True)
+print psycopg
+db = create_engine('postgres://scott:tiger@127.0.0.1/test',pool=psycopg,strategy='threadlocal')
+print db.connection_provider._pool
+metadata = BoundMetaData(db)
+
+users_table = Table('users', metadata,
+  Column('user_id', Integer, primary_key=True),
+  Column('user_name', String(40)),
+  Column('password', String(10)))
+metadata.create_all()
+
+class User(object):
+    pass
+usermapper = mapper(User, users_table)
+
+#Then i create loads of threads and in run() of each thread:
+def run():
+    session = create_session()
+    transaction = session.create_transaction()
+    query = session.query(User)
+    u1=query.select(User.c.user_id==3)
+    
+for x in range(0,50):
+    thread.start_new_thread(run, ())
+
+while True:
+    time.sleep(5)
\ No newline at end of file
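
For anyone who wants to exercise the same behaviour without a PostgreSQL server and psycopg2, the following is a sketch against the present-day sqlalchemy.pool.QueuePool API (which differs from the 0.2-era pool.manage() call used above); the creator, held and opened names are just bookkeeping for the illustration.

import sqlite3
import threading
import sqlalchemy.pool as pool

opened = []   # every DBAPI connection the pool ever created
held = []     # checked-out connections, kept referenced so they are never returned

def creator():
    conn = sqlite3.connect(":memory:", check_same_thread=False)
    opened.append(conn)
    return conn

p = pool.QueuePool(creator, pool_size=2, max_overflow=1, timeout=5)

def run():
    try:
        held.append(p.connect())       # three of these succeed ...
    except Exception as e:
        print("checkout failed:", e)   # ... the rest time out after five seconds

threads = [threading.Thread(target=run) for _ in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()

# with the fix, the pool never opens more than pool_size + max_overflow connections
print("connections actually opened:", len(opened))   # expected: 3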