From: Mike Bayer
Date: Sun, 29 Jul 2007 00:42:49 +0000 (+0000)
Subject: - removed auto_close_cursors and disallow_open_cursors arguments from Pool;
X-Git-Tag: rel_0_4beta1~158
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=33fa7019fbb3ed5c2d3fbe22bb552988fdd4c61f;p=thirdparty%2Fsqlalchemy%2Fsqlalchemy.git

- removed auto_close_cursors and disallow_open_cursors arguments from Pool;
reduces overhead as cursors are normally closed by ResultProxy and Connection.
---

diff --git a/CHANGES b/CHANGES
index a92199a785..7969770320 100644
--- a/CHANGES
+++ b/CHANGES
@@ -210,6 +210,8 @@
     other logic at new each DBAPI connection, pool check-out and check-in.
   - Connections gain a .properties collection, with contents scoped to the
     lifetime of the underlying DBAPI connection
+  - removed auto_close_cursors and disallow_open_cursors arguments from Pool;
+    reduces overhead as cursors are normally closed by ResultProxy and Connection.
 - extensions
   - proxyengine is temporarily removed, pending an actually working
     replacement.
diff --git a/doc/build/content/pooling.txt b/doc/build/content/pooling.txt
index ea9df87517..0703ea9a20 100644
--- a/doc/build/content/pooling.txt
+++ b/doc/build/content/pooling.txt
@@ -48,11 +48,6 @@ Common options include:
 * recycle=-1 : if set to non -1, a number of seconds between connection recycling, which means
   upon checkout, if this timeout is surpassed the connection will be closed and replaced with a
   newly opened connection.
-* auto_close_cursors = True : cursors, returned by connection.cursor(), are tracked and are
-  automatically closed when the connection is returned to the pool.  some DBAPIs like MySQLDB
-  become unstable if cursors remain open.
-* disallow_open_cursors = False : if auto_close_cursors is False, and disallow_open_cursors is True,
-  will raise an exception if an open cursor is detected upon connection checkin.  If auto_close_cursors and disallow_open_cursors are both False, then no cursor processing occurs upon checkin.
 
 QueuePool options include:
 
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 3e89c44dd7..2d82e3342d 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -619,7 +619,6 @@ class Connection(Connectable):
                 self.__engine.dialect.do_rollback(self.connection)
             except Exception, e:
                 raise exceptions.SQLError(None, None, e)
-            self.__connection.close_open_cursors()
             self.__transaction = None
 
     def _commit_impl(self):
@@ -770,6 +769,7 @@ class Connection(Connectable):
             if self.dialect.is_disconnect(e):
                 self.__connection.invalidate(e=e)
                 self.engine.dispose()
+            context.cursor.close()
             self._autorollback()
             if self.__close_with_result:
                 self.close()
@@ -782,6 +782,7 @@ class Connection(Connectable):
             if self.dialect.is_disconnect(e):
                 self.__connection.invalidate(e=e)
                 self.engine.dispose()
+            context.cursor.close()
             self._autorollback()
             if self.__close_with_result:
                 self.close()
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 02f7b1527b..7166346537 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -116,31 +116,15 @@ class Pool(object):
          DBAPI connections are created, checked out and checked in to
          the pool.
 
-        auto_close_cursors
-          Cursors, returned by ``connection.cursor()``, are tracked and
-          are automatically closed when the connection is returned to the
-          pool.  Some DBAPIs like MySQLDB become unstable if cursors
-          remain open.  Defaults to True.
-
-        disallow_open_cursors
-          If `auto_close_cursors` is False, and `disallow_open_cursors` is
-          True, will raise an exception if an open cursor is detected upon
-          connection checkin.  Defaults to False.
-
-          If `auto_close_cursors` and `disallow_open_cursors` are both
-          False, then no cursor processing occurs upon checkin.
         """

    def __init__(self, creator, recycle=-1, echo=None, use_threadlocal=False,
-                 auto_close_cursors=True, disallow_open_cursors=False,
                 listeners=None):
        self.logger = logging.instance_logger(self)
        self._threadconns = weakref.WeakValueDictionary()
        self._creator = creator
        self._recycle = recycle
        self._use_threadlocal = use_threadlocal
-        self.auto_close_cursors = auto_close_cursors
-        self.disallow_open_cursors = disallow_open_cursors
        self.echo = echo
        self.listeners = []
        self._on_connect = []
@@ -277,7 +261,6 @@ class _ConnectionFairy(object):

    def __init__(self, pool):
        self._threadfairy = _ThreadFairy(self)
-        self._cursors = weakref.WeakKeyDictionary()
        self._pool = pool
        self.__counter = 0
        try:
@@ -320,7 +303,6 @@ class _ConnectionFairy(object):
        if self._connection_record is not None:
            self._connection_record.invalidate(e=e)
        self.connection = None
-        self._cursors = None
        self._close()

    def cursor(self, *args, **kwargs):
@@ -376,11 +358,6 @@ class _ConnectionFairy(object):
            self._connection_record.properties.copy()
        self._connection_record = None

-    def close_open_cursors(self):
-        if self._cursors is not None:
-            for c in list(self._cursors):
-                c.close()
-
    def close(self):
        self.__counter -=1
        if self.__counter == 0:
@@ -390,14 +367,6 @@
            self._close()

    def _close(self):
-        if self._cursors is not None:
-            # cursors should be closed before connection is returned to the pool. some dbapis like
-            # mysql have real issues if they are not.
-            if self._pool.auto_close_cursors:
-                self.close_open_cursors()
-            elif self._pool.disallow_open_cursors:
-                if len(self._cursors):
-                    raise exceptions.InvalidRequestError("This connection still has %d open cursors" % len(self._cursors))
        if self.connection is not None:
            try:
                self.connection.rollback()
@@ -417,24 +386,20 @@
        self.connection = None
        self._connection_record = None
        self._threadfairy = None
-        self._cursors = None

class _CursorFairy(object):
    def __init__(self, parent, cursor):
        self.__parent = parent
-        self.__parent._cursors[self] = True
        self.cursor = cursor

    def invalidate(self, e=None):
        self.__parent.invalidate(e=e)

    def close(self):
-        if self in self.__parent._cursors:
-            del self.__parent._cursors[self]
-            try:
-                self.cursor.close()
-            except Exception, e:
-                self.__parent._logger.warn("Error closing cursor: " + str(e))
+        try:
+            self.cursor.close()
+        except Exception, e:
+            self.__parent._logger.warn("Error closing cursor: " + str(e))

    def __getattr__(self, key):
        return getattr(self.cursor, key)
@@ -461,7 +426,7 @@ class SingletonThreadPool(Pool):

    def recreate(self):
        self.log("Pool recreating")
-        return SingletonThreadPool(self._creator, pool_size=self.size, recycle=self._recycle, echo=self.echo, use_threadlocal=self._use_threadlocal, auto_close_cursors=self.auto_close_cursors, disallow_open_cursors=self.disallow_open_cursors)
+        return SingletonThreadPool(self._creator, pool_size=self.size, recycle=self._recycle, echo=self.echo, use_threadlocal=self._use_threadlocal)

    def dispose(self):
        """dispose of this pool.
@@ -550,7 +515,7 @@ class QueuePool(Pool):

    def recreate(self):
        self.log("Pool recreating")
-        return QueuePool(self._creator, pool_size=self._pool.maxsize, max_overflow=self._max_overflow, timeout=self._timeout, recycle=self._recycle, echo=self.echo, use_threadlocal=self._use_threadlocal, auto_close_cursors=self.auto_close_cursors, disallow_open_cursors=self.disallow_open_cursors)
+        return QueuePool(self._creator, pool_size=self._pool.maxsize, max_overflow=self._max_overflow, timeout=self._timeout, recycle=self._recycle, echo=self.echo, use_threadlocal=self._use_threadlocal)

    def do_return_conn(self, conn):
        try:
diff --git a/test/engine/parseconnect.py b/test/engine/parseconnect.py
index 3e186275d5..fef3742cf8 100644
--- a/test/engine/parseconnect.py
+++ b/test/engine/parseconnect.py
@@ -136,10 +136,9 @@ class CreateEngineTest(PersistTest):

    def testpoolargs(self):
        """test that connection pool args make it thru"""
-        e = create_engine('postgres://', creator=None, pool_recycle=-1, echo_pool=None, auto_close_cursors=False, disallow_open_cursors=True, module=MockDBAPI())
-        assert e.pool.auto_close_cursors is False
-        assert e.pool.disallow_open_cursors is True
-
+        e = create_engine('postgres://', creator=None, pool_recycle=50, echo_pool=None, module=MockDBAPI())
+        assert e.pool._recycle == 50
+
        # these args work for QueuePool
        e = create_engine('postgres://', max_overflow=8, pool_timeout=60, poolclass=pool.QueuePool, module=MockDBAPI())
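
For reference, the pool arguments that survive this change can still be exercised directly
against sqlalchemy.pool.  The snippet below is a minimal sketch rather than part of the
patch; the getconn() helper and the use of sqlite3 as a stand-in DBAPI are illustrative
assumptions.  It also shows the division of labor this commit settles on: cursors are
closed by whoever opened them (normally ResultProxy and Connection), while the pool only
handles connection checkout and checkin.

    import sqlite3
    from sqlalchemy import pool

    def getconn():
        # "creator" callable: returns a brand-new DBAPI connection whenever
        # the pool needs to open one.
        return sqlite3.connect(':memory:')

    # recycle=-1 (the default) disables time-based recycling; a positive value
    # means a connection older than that many seconds is closed and reopened
    # at checkout.
    p = pool.QueuePool(getconn, pool_size=5, max_overflow=10, recycle=3600)

    conn = p.connect()          # check a connection out of the pool
    cursor = conn.cursor()      # DBAPI cursor on the pooled connection
    cursor.execute("select 1")
    print(cursor.fetchall())
    cursor.close()              # cursors are closed by whoever opened them
    conn.close()                # returns the connection to the pool

The QueuePool-specific arguments (pool_size, max_overflow, timeout) sit alongside the
generic Pool arguments (recycle, echo, use_threadlocal, listeners), which is the same
pass-through exercised by testpoolargs above via create_engine(pool_recycle=50).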