git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- Eh, dummy_threading trick didn't actually work for no-op'ing overflow.
author: Jason Kirtland <jek@discorporate.us>
Sat, 16 Jun 2007 18:46:23 +0000 (18:46 +0000)
committer: Jason Kirtland <jek@discorporate.us>
Sat, 16 Jun 2007 18:46:23 +0000 (18:46 +0000)
  Works now, but less readable.
- Shaped the script from ticket #608 into a test case

lib/sqlalchemy/pool.py
test/engine/pool.py

index 075e5cbc4e4bcc0f52113bb5ff20d9ee96ec3488..1ae787c7c2a94e4cfb0a108c38fc586bb3e76283 100644 (file)
@@ -23,7 +23,6 @@ from sqlalchemy import exceptions, logging
 from sqlalchemy import queue as Queue
 
 try:
-    import dummy_threading
     import thread, threading
 except:
     import dummy_thread as thread
@@ -471,7 +470,7 @@ class QueuePool(Pool):
         self._overflow = 0 - pool_size
         self._max_overflow = max_overflow
         self._timeout = timeout
-        self._overflow_lock = max_overflow > 0 and threading.Lock() or dummy_threading.Lock()
+        self._overflow_lock = max_overflow > 0 and threading.Lock() or None
 
     def recreate(self):
         self.log("Pool recreating")
@@ -481,22 +480,27 @@ class QueuePool(Pool):
         try:
             self._pool.put(conn, False)
         except Queue.Full:
-            self._overflow_lock.acquire()
-            self._overflow -= 1
-            self._overflow_lock.release()
+            if not self._overflow_lock:
+                self._overflow -= 1
+            else:
+                self._overflow_lock.acquire()
+                self._overflow -= 1
+                self._overflow_lock.release()
 
     def do_get(self):
         try:
             return self._pool.get(self._max_overflow > -1 and self._overflow >= self._max_overflow, self._timeout)
         except Queue.Empty:
-            self._overflow_lock.acquire()
+            if self._overflow_lock:
+                self._overflow_lock.acquire()
             try:
                 if self._max_overflow > -1 and self._overflow >= self._max_overflow:
                     raise exceptions.TimeoutError("QueuePool limit of size %d overflow %d reached, connection timed out" % (self.size(), self.overflow()))
                 con = self.create_connection()
                 self._overflow += 1
             finally:
-                self._overflow_lock.release()
+                if self._overflow_lock:
+                    self._overflow_lock.release()
             return con
 
     def dispose(self):
index 315470e98b34c634cc57db21a094e610293c0bd6..7f881fb5564c98e716cb2033c9c10f91ac5f3838 100644 (file)
@@ -1,6 +1,7 @@
 import testbase
 from testbase import PersistTest
 import unittest, sys, os, time
+import threading
 
 import sqlalchemy.pool as pool
 import sqlalchemy.exceptions as exceptions
@@ -126,6 +127,36 @@ class PoolTest(PersistTest):
             assert False
         except exceptions.TimeoutError, e:
             assert int(time.time() - now) == 2
+
+    def _test_overflow(self, thread_count, max_overflow):
+        p = pool.QueuePool(creator=lambda: mock_dbapi.connect('foo.db'),
+                           pool_size=3, timeout=2,
+                           max_overflow=max_overflow)
+        peaks = []
+        def whammy():
+            for i in range(10):
+                try:
+                    con = p.connect()
+                    peaks.append(p.overflow())
+                    con.close()
+                    del con
+                except exceptions.TimeoutError:
+                    pass
+        threads = []
+        for i in xrange(thread_count):
+            th = threading.Thread(target=whammy)
+            th.start()
+            threads.append(th)
+        for th in threads:
+            th.join()
+
+        self.assert_(max(peaks) <= max_overflow)
+
+    def test_no_overflow(self):
+        self._test_overflow(20, 0)
+
+    def test_max_overflow(self):
+        self._test_overflow(20, 5)
         
     def test_mixed_close(self):
         p = pool.QueuePool(creator = lambda: mock_dbapi.connect('foo.db'), pool_size = 3, max_overflow = -1, use_threadlocal = True)