--- /dev/null
+.. change::
+ :tags: bug, engine
+ :tickets: 11532
+
+ Fixed issue in "insertmanyvalues" feature where a particular call to
+ ``cursor.fetchall()`` was not wrapped in SQLAlchemy's exception wrapper;
+ this call can apparently raise a database exception during fetch when
+ using pyodbc.
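
For illustration, a minimal sketch of the wrapping pattern this fix applies;
the exception and handler names here are stand-ins, not SQLAlchemy's actual
internals::

    class DriverError(Exception):
        """Stand-in for an exception raised by the DBAPI driver."""


    class WrappedError(Exception):
        """Stand-in for SQLAlchemy's translated exception hierarchy."""


    def fetch_with_translation(fetch):
        # route any driver-level failure through a single translation
        # point, analogous to Connection._handle_dbapi_exception();
        # without this, an error raised mid-fetch escapes as a raw
        # DBAPI exception instead of a wrapped one
        try:
            return fetch()
        except DriverError as err:
            raise WrappedError(f"error during fetch: {err}") from err
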
rowcount = 0
for imv_batch in dialect._deliver_insertmanyvalues_batches(
+ self,
cursor,
str_statement,
effective_parameters,
generic_setinputsizes,
context,
):
sub_stmt = imv_batch.replaced_statement
from ..sql import dml
from ..sql import expression
from ..sql import type_api
+from ..sql import util as sql_util
from ..sql._typing import is_tuple_type
from ..sql.base import _NoArg
from ..sql.compiler import DDLCompiler
connection.execute(expression.ReleaseSavepointClause(name))
def _deliver_insertmanyvalues_batches(
- self, cursor, statement, parameters, generic_setinputsizes, context
+ self,
+ connection,
+ cursor,
+ statement,
+ parameters,
+ generic_setinputsizes,
+ context,
):
context = cast(DefaultExecutionContext, context)
compiled = cast(SQLCompiler, context.compiled)
if is_returning:
- rows = context.fetchall_for_returning(cursor)
+ try:
+ rows = context.fetchall_for_returning(cursor)
+ except BaseException as be:
+ connection._handle_dbapi_exception(
+ be,
+ sql_util._long_statement(imv_batch.replaced_statement),
+ imv_batch.replaced_parameters,
+ None,
+ context,
+ is_sub_exec=True,
+ )
# I would have thought "is_returning: Final[bool]"
# would have assured this but pylance thinks not
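
A note on the new error path above: the handler receives the rewritten
sub-batch statement (shortened via ``sql_util._long_statement()``) and that
batch's parameters, so the translated error reports the statement that
actually failed rather than the user's original construct; the
``is_sub_exec=True`` flag presumably marks the failure as originating in one
of the rewritten sub-executions. Catching ``BaseException`` matches how the
surrounding execution path already treats driver-level failures.
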
def _deliver_insertmanyvalues_batches(
self,
+ connection: Connection,
cursor: DBAPICursor,
statement: str,
parameters: _DBAPIMultiExecuteParams,
return rows
def _deliver_insertmanyvalues_batches(
- cursor, statement, parameters, generic_setinputsizes, context
+ connection,
+ cursor,
+ statement,
+ parameters,
+ generic_setinputsizes,
+ context,
):
if randomize_rows:
cursor = RandomCursor(cursor)
for batch in orig_dialect(
- cursor, statement, parameters, generic_setinputsizes, context
+ connection,
+ cursor,
+ statement,
+ parameters,
+ generic_setinputsizes,
+ context,
):
if warn_on_downgraded and batch.is_downgraded:
util.warn("Batches were downgraded for sorted INSERT")
Column("x_value", String(50)),
Column("y_value", String(50)),
)
+ Table(
+ "uniq_cons",
+ metadata,
+ Column("id", Integer, primary_key=True),
+ Column("data", String(50), unique=True),
+ )
+
+ @testing.variation("use_returning", [True, False])
+ def test_returning_integrity_error(self, connection, use_returning):
+ """test for #11532"""
+
+ stmt = self.tables.uniq_cons.insert()
+ if use_returning:
+ stmt = stmt.returning(self.tables.uniq_cons.c.id)
+
+ # pymssql thought it would be funny to use OperationalError for
+ # a unique key violation.
+ with expect_raises((exc.IntegrityError, exc.OperationalError)):
+ connection.execute(
+ stmt, [{"data": "the data"}, {"data": "the data"}]
+ )
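
For context, a runnable sketch of the contract the new test asserts, using
an in-memory SQLite database (a different driver than the pyodbc case from
the ticket, and one that raises at execute time rather than during the
fetch, so this demonstrates only the wrapped-exception contract)::

    from sqlalchemy import (
        Column,
        Integer,
        MetaData,
        String,
        Table,
        create_engine,
        exc,
    )

    metadata = MetaData()
    uniq_cons = Table(
        "uniq_cons",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String(50), unique=True),
    )

    engine = create_engine("sqlite://")
    metadata.create_all(engine)

    with engine.connect() as connection:
        try:
            # executemany with RETURNING goes through the
            # "insertmanyvalues" batching path on SQLAlchemy 2.0
            connection.execute(
                uniq_cons.insert().returning(uniq_cons.c.id),
                [{"data": "the data"}, {"data": "the data"}],
            )
        except exc.IntegrityError as err:
            # the duplicate unique value surfaces as a wrapped
            # SQLAlchemy error, not a raw DBAPI exception
            print("wrapped:", type(err).__name__)
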
def test_insert_unicode_keys(self, connection):
table = self.tables["Unitéble2"]