- for now, lastrowid-capable dialects will use pre-execute for any defaults that aren't the real "autoincrement";
currently this is letting us treat MSSQL the same as them but we may want to improve upon this
if self._enable_identity_insert:
self.cursor.execute("SET IDENTITY_INSERT %s OFF" % self.dialect.identifier_preparer.format_table(self.compiled.statement.table))
-
-
+
def get_lastrowid(self):
return self._lastrowid
pass
def get_result_proxy(self):
- return self._result_proxy or base.ResultProxy(self)
+ if self._result_proxy:
+ return self._result_proxy
+ else:
+ return base.ResultProxy(self)
class MSSQLCompiler(compiler.SQLCompiler):
if context.should_autocommit and not self.in_transaction():
self._commit_impl()
- return context.get_result_proxy()
+ return context.get_result_proxy()._autoclose()
def _handle_dbapi_exception(self, e, statement, parameters, cursor, context):
if getattr(self, '_reentrant_error', False):
self.connection = context.root_connection
self._echo = context.engine._should_log_info
self._init_metadata()
-
+
@util.memoized_property
def rowcount(self):
+ """Return the 'rowcount' for this result.
+
+ The 'rowcount' reports the number of rows affected
+ by an UPDATE or DELETE statement. It has *no* other
+ uses and is not intended to provide the number of rows
+ present from a SELECT.
+
+ Additionally, this value is only meaningful if the
+ dialect's supports_sane_rowcount flag is True for
+ single-parameter executions, or supports_sane_multi_rowcount
+ is True for multiple parameter executions - otherwise
+ results are undefined.
+
+ rowcount may not work at this time for a statement
+ that uses ``returning()``.
+
+ """
return self.context.rowcount
@property
This is a DBAPI specific method and is only functional
for those backends which support it, for statements
- where it is appropriate.
+ where it is appropriate. Its behavior is not
+ consistent across backends.
Usage of this method is normally unnecessary; the
last_inserted_ids() method provides a
return self.context.out_parameters
def _cursor_description(self):
- metadata = self.cursor.description
- if metadata is None:
- return
- else:
- return [(r[0], r[1]) for r in metadata]
+ return self.cursor.description
- def _init_metadata(self):
-
- metadata = self._cursor_description()
- if metadata is None:
+ def _autoclose(self):
+ if self._metadata is None:
# no results, get rowcount
# (which requires open cursor on some DB's such as firebird),
self.rowcount
self.close() # autoclose
+ elif self.context.isinsert and \
+ not self.context._is_explicit_returning:
+ # an insert, no explicit returning(), may need
+ # to fetch rows which were created via implicit
+ # returning, then close
+ self.context.last_inserted_ids(self)
+ self.close()
+
+ return self
+
+ def _init_metadata(self):
+ self._metadata = metadata = self._cursor_description()
+ if metadata is None:
return
self._props = util.populate_column_dict(None)
typemap = self.dialect.dbapi_type_map
- for i, (colname, coltype) in enumerate(metadata):
+ for i, (colname, coltype) in enumerate(m[0:2] for m in metadata):
if self.dialect.description_encoding:
colname = colname.decode(self.dialect.description_encoding)
"""Close this ResultProxy.
Closes the underlying DBAPI cursor corresponding to the execution.
+
+ Note that any data cached within this ResultProxy is still available.
+ For some types of results, this may include buffered rows.
If this ResultProxy was generated from an implicit execution,
the underlying Connection will also be closed (returns the
"""
def _init_metadata(self):
- self.__rowbuffer = self._buffer_rows()
super(FullyBufferedResultProxy, self)._init_metadata()
+ self.__rowbuffer = self._buffer_rows()
def _buffer_rows(self):
return self.cursor.fetchall()
self.statement = self.compiled = None
self.isinsert = self.isupdate = self.isdelete = self.executemany = self.should_autocommit = False
self.cursor = self.create_cursor()
+
+ @property
+ def _is_explicit_returning(self):
+ return self.compiled and \
+ getattr(self.compiled.statement, '_returning', False)
@property
def connection(self):
# then implicit_returning/supports sequence/doesnt
if c.primary_key and \
(
- self.dialect.preexecute_pk_sequences or
+ self.dialect.preexecute_pk_sequences or
+ c is not stmt.table._autoincrement_column or
implicit_returning
) and \
not self.inline and \
exclude('mysql', '<', (4, 1, 1), 'no subquery support'),
)
+def returning(fn):
+ return _chain_decorators_on(
+ fn,
+ no_support('access', 'not supported by database'),
+ no_support('sqlite', 'not supported by database'),
+ no_support('mysql', 'not supported by database'),
+ no_support('maxdb', 'not supported by database'),
+ no_support('sybase', 'not supported by database'),
+ no_support('informix', 'not supported by database'),
+ )
+
def two_phase_transactions(fn):
"""Target database must support two-phase transactions."""
return _chain_decorators_on(
)
metadata.create_all()
- try:
- test_items = [decimal.Decimal(d) for d in '1500000.00000000000000000000',
- '-1500000.00000000000000000000', '1500000',
- '0.0000000000000000002', '0.2', '-0.0000000000000000002', '-2E-2',
- '156666.458923543', '-156666.458923543', '1', '-1', '-1234', '1234',
- '2E-12', '4E8', '3E-6', '3E-7', '4.1', '1E-1', '1E-2', '1E-3',
- '1E-4', '1E-5', '1E-6', '1E-7', '1E-1', '1E-8', '0.2732E2', '-0.2432E2', '4.35656E2',
- '-02452E-2', '45125E-2',
- '1234.58965E-2', '1.521E+15', '-1E-25', '1E-25', '1254E-25', '-1203E-25',
- '0', '-0.00', '-0', '4585E12', '000000000000000000012', '000000000000.32E12',
- '00000000000000.1E+12', '000000000000.2E-32']
-
- for value in test_items:
- numeric_table.insert().execute(numericcol=value)
-
- for value in select([numeric_table.c.numericcol]).execute():
- assert value[0] in test_items, "%s not in test_items" % value[0]
-
- except Exception, e:
- raise e
+ test_items = [decimal.Decimal(d) for d in '1500000.00000000000000000000',
+ '-1500000.00000000000000000000', '1500000',
+ '0.0000000000000000002', '0.2', '-0.0000000000000000002', '-2E-2',
+ '156666.458923543', '-156666.458923543', '1', '-1', '-1234', '1234',
+ '2E-12', '4E8', '3E-6', '3E-7', '4.1', '1E-1', '1E-2', '1E-3',
+ '1E-4', '1E-5', '1E-6', '1E-7', '1E-1', '1E-8', '0.2732E2', '-0.2432E2', '4.35656E2',
+ '-02452E-2', '45125E-2',
+ '1234.58965E-2', '1.521E+15', '-1E-25', '1E-25', '1254E-25', '-1203E-25',
+ '0', '-0.00', '-0', '4585E12', '000000000000000000012', '000000000000.32E12',
+ '00000000000000.1E+12', '000000000000.2E-32']
+
+ for value in test_items:
+ numeric_table.insert().execute(numericcol=value)
+
+ for value in select([numeric_table.c.numericcol]).execute():
+ assert value[0] in test_items, "%s not in test_items" % value[0]
def test_float(self):
float_table = Table('float_table', metadata,
Column('parent', Integer, ForeignKey('base.id'))
)
- @testing.fails_on('mssql', 'FIXME: the flush still happens with the concurrency issue.')
@engines.close_open_connections
def test_save_update(self):
class Base(_fixtures.Base):
s2.subdata = 'sess2 subdata'
sess2.flush()
- @testing.fails_on('mssql', 'FIXME: the flush still happens with the concurrency issue.')
def test_delete(self):
class Base(_fixtures.Base):
pass
assert_raises_message(sa.exc.ArgumentError,
ex_msg,
sa.ColumnDefault, fn)
-
+
def test_arg_signature(self):
def fn1(): pass
def fn2(): pass
Column('id', Integer, primary_key=True,
default=sa.select([func.max(t2.c.nextid)]).as_scalar()),
Column('data', String(30)))
-
+
+ @testing.requires.returning
+ def test_with_implicit_returning(self):
+ self._test(True)
+
+ def test_regular(self):
+ self._test(False)
+
@testing.resolve_artifact_names
- def test_basic(self):
- t2.insert().execute(nextid=1)
- r = t1.insert().execute(data='hi')
+ def _test(self, returning):
+ if not returning and not testing.db.dialect.implicit_returning:
+ engine = testing.db
+ else:
+ engine = engines.testing_engine(options={'implicit_returning':returning})
+ engine.execute(t2.insert(), nextid=1)
+ r = engine.execute(t1.insert(), data='hi')
eq_([1], r.last_inserted_ids())
- t2.insert().execute(nextid=2)
- r = t1.insert().execute(data='there')
+ engine.execute(t2.insert(), nextid=2)
+ r = engine.execute(t1.insert(), data='there')
eq_([2], r.last_inserted_ids())
-
class PKIncrementTest(_base.TablesTest):
run_define_tables = 'each'
ret[c.key] = row[c]
return ret
- if testing.against('firebird', 'postgres', 'oracle', 'mssql'):
+ if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
test_engines = [
engines.testing_engine(options={'implicit_returning':False}),
engines.testing_engine(options={'implicit_returning':True}),
r = t6.insert().values(manual_id=id).execute()
eq_(r.last_inserted_ids(), [12, 1])
-
+ def test_autoclose_on_insert(self):
+ if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
+ test_engines = [
+ engines.testing_engine(options={'implicit_returning':False}),
+ engines.testing_engine(options={'implicit_returning':True}),
+ ]
+ else:
+ test_engines = [testing.db]
+
+ for engine in test_engines:
+
+ r = engine.execute(users.insert(),
+ {'user_name':'jack'},
+ )
+ assert r.closed
+
def test_row_iteration(self):
users.insert().execute(
{'user_id':7, 'user_name':'jack'},