ORM executemany returning
authorMike Bayer <mike_mp@zzzcomputing.com>
Fri, 17 Apr 2020 14:55:08 +0000 (10:55 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Sun, 28 Jun 2020 01:30:37 +0000 (21:30 -0400)
Build on #5401 to allow the ORM to take advantage
of executemany INSERT + RETURNING.

Implemented the feature and updated tests.

To support "INSERT ... DEFAULT VALUES", a new compiler syntax was needed,
INSERT INTO table (anycol) VALUES (DEFAULT), which can then be
iterated out for executemany.
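
As a rough sketch (not part of this commit's diff), the rewrite looks like
this, using the statement shapes that appear in the test changes below::

    # Before: a row with no client-side values compiles to a form that cannot
    # be repeated per parameter set (default dialect form shown).
    single_row_form = "INSERT INTO a () VALUES ()"

    # After: with the for_executemany compiler flag, the first column is named
    # explicitly with a DEFAULT placeholder...
    executemany_form = "INSERT INTO a (id) VALUES (DEFAULT)"

    # ...which psycopg2's execute_values() can then repeat per parameter set
    # and combine with RETURNING, e.g. for three rows:
    expanded = "INSERT INTO a (id) VALUES (DEFAULT), (DEFAULT), (DEFAULT) RETURNING a.id"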

Added graceful degradation to plain executemany for PostgreSQL <= 8.2

Renamed EXECUTEMANY_DEFAULT to EXECUTEMANY_PLAIN

Fix issue where unicode identifiers or parameter names wouldn't
work with execute_values() under Py2K, because we have to
encode the statement and therefore have to encode the
insert_single_values_expr too.

Correct issue from #5401 to support executemany + return_defaults
for a PK that is explicitly pre-generated, meaning we aren't actually
getting RETURNING but need to return it from compiled_parameters.

Fixes: #5263
Change-Id: Id68e5c158c4f9ebc33b61c06a448907921c2a657

16 files changed:
doc/build/changelog/migration_14.rst
doc/build/changelog/unreleased_14/5263.rst [new file with mode: 0644]
lib/sqlalchemy/dialects/postgresql/psycopg2.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/sql/crud.py
lib/sqlalchemy/testing/assertions.py
lib/sqlalchemy/testing/assertsql.py
test/dialect/postgresql/test_dialect.py
test/orm/inheritance/test_basic.py
test/orm/test_bulk.py
test/orm/test_cycles.py
test/orm/test_defaults.py
test/orm/test_unitofwork.py
test/orm/test_unitofworkv2.py
test/sql/test_insert.py

index 1ee52b86ac62276a0b82a8aff70242c9480ed45c..dd1c3db5962d59c5f42962dbe0a9d1c30c1c9a39 100644 (file)
@@ -594,6 +594,75 @@ as was present previously.
 
 :ticket:`4826`
 
+.. _change_5263:
+
+ORM Batch inserts with psycopg2 now batch statements with RETURNING in most cases
+---------------------------------------------------------------------------------
+
+The change in :ref:`change_5401` adds support for "executemany" + "RETURNING"
+at the same time in Core, which is now enabled for the psycopg2 dialect
+by default using the psycopg2 ``execute_values()`` extension.   The ORM flush
+process now makes use of this feature such that the retrieval of newly generated
+primary key values and server defaults can be achieved while not losing the
+performance benefits of being able to batch INSERT statements together.  Additionally,
+psycopg2's ``execute_values()`` extension itself provides a five-fold performance
+improvement over psycopg2's default "executemany" implementation, by rewriting
+an INSERT statement to include many "VALUES" expressions in a single statement
+rather than invoking the same statement repeatedly; psycopg2 cannot PREPARE the
+statement ahead of time, which would normally be required for the
+repeated-invocation approach to be performant.
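
For illustration, a minimal standalone sketch of what ``execute_values()`` does
at the driver level, independent of SQLAlchemy (the DSN and table are
placeholders)::

    import psycopg2
    from psycopg2 import extras

    conn = psycopg2.connect("dbname=test user=scott password=tiger")
    with conn, conn.cursor() as cur:
        rows = [("data %d" % i,) for i in range(1, 11)]
        # execute_values() interpolates all parameter sets into a single
        # "VALUES (...), (...), ..." statement; fetch=True makes the
        # RETURNING rows available as the return value.
        ids = extras.execute_values(
            cur,
            "INSERT INTO a (data) VALUES %s RETURNING a.id",
            rows,
            page_size=1000,
            fetch=True,
        )
        print(ids)  # e.g. [(1,), (2,), ...]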
+
+SQLAlchemy includes a :ref:`performance suite <examples_performance>` within
+its examples, where we can compare the times generated for the "batch_inserts"
+runner against 1.3 and 1.4, revealing a 3x-5x speedup for most flavors
+of batch insert::
+
+    # 1.3
+    $ python -m examples.performance bulk_inserts --dburl postgresql://scott:tiger@localhost/test
+    test_flush_no_pk : (100000 iterations); total time 14.051527 sec
+    test_bulk_save_return_pks : (100000 iterations); total time 15.002470 sec
+    test_flush_pk_given : (100000 iterations); total time 7.863680 sec
+    test_bulk_save : (100000 iterations); total time 6.780378 sec
+    test_bulk_insert_mappings :  (100000 iterations); total time 5.363070 sec
+    test_core_insert : (100000 iterations); total time 5.362647 sec
+
+    # 1.4 with enhancement
+    $ python -m examples.performance bulk_inserts --dburl postgresql://scott:tiger@localhost/test
+    test_flush_no_pk : (100000 iterations); total time 3.820807 sec
+    test_bulk_save_return_pks : (100000 iterations); total time 3.176378 sec
+    test_flush_pk_given : (100000 iterations); total time 4.037789 sec
+    test_bulk_save : (100000 iterations); total time 2.604446 sec
+    test_bulk_insert_mappings : (100000 iterations); total time 1.204897 sec
+    test_core_insert : (100000 iterations); total time 0.958976 sec
+
+Note that the ``execute_values()`` extension modifies the INSERT statement in the psycopg2
+layer, **after** it's been logged by SQLAlchemy.  So with SQL logging, one will see the
+parameter sets batched together, but the joining of multiple "values" will not be visible
+on the application side::
+
+    2020-06-27 19:08:18,166 INFO sqlalchemy.engine.Engine INSERT INTO a (data) VALUES (%(data)s) RETURNING a.id
+    2020-06-27 19:08:18,166 INFO sqlalchemy.engine.Engine [generated in 0.00698s] ({'data': 'data 1'}, {'data': 'data 2'}, {'data': 'data 3'}, {'data': 'data 4'}, {'data': 'data 5'}, {'data': 'data 6'}, {'data': 'data 7'}, {'data': 'data 8'}  ... displaying 10 of 4999 total bound parameter sets ...  {'data': 'data 4998'}, {'data': 'data 4999'})
+    2020-06-27 19:08:18,254 INFO sqlalchemy.engine.Engine COMMIT
+
+The ultimate INSERT statement can be seen by enabling statement logging on the PostgreSQL side::
+
+    2020-06-27 19:08:18.169 EDT [26960] LOG:  statement: INSERT INTO a (data)
+    VALUES ('data 1'),('data 2'),('data 3'),('data 4'),('data 5'),('data 6'),('data
+    7'),('data 8'),('data 9'),('data 10'),('data 11'),('data 12'),
+    ... ('data 999'),('data 1000') RETURNING a.id
+
+    2020-06-27 19:08:18.175 EDT
+    [26960] LOG:  statement: INSERT INTO a (data) VALUES ('data 1001'),('data
+    1002'),('data 1003'),('data 1004'),('data 1005 '),('data 1006'),('data
+    1007'),('data 1008'),('data 1009'),('data 1010'),('data 1011'), ...
+
+The feature batches rows into groups of 1000 by default, which can be adjusted
+using the ``executemany_values_page_size`` argument documented at
+:ref:`psycopg2_executemany_mode`.
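
A sketch of setting this at engine configuration time (the DSN is a
placeholder; the keyword arguments are the dialect options described at
:ref:`psycopg2_executemany_mode`)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        executemany_mode="values_only",
        executemany_values_page_size=5000,  # default is 1000 rows per batch
    )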
+
+:ticket:`5263`
+
+
 .. _change_orm_update_returning_14:
 
 ORM Bulk Update and Delete use RETURNING for "fetch" strategy when available
@@ -1591,7 +1660,10 @@ psycopg2 dialect features "execute_values" with RETURNING for INSERT statements
 The first half of a significant performance enhancement for PostgreSQL when
 using both Core and ORM, the psycopg2 dialect now uses
 ``psycopg2.extras.execute_values()`` by default for compiled INSERT statements
-and also implements RETURNING support in this mode.
+and also implements RETURNING support in this mode.   The other half of this
+change is :ref:`change_5263` which allows the ORM to take advantage of
+RETURNING with executemany (i.e. batching of INSERT statements) so that ORM
+bulk inserts with psycopg2 are up to 400% faster depending on specifics.
 
 This extension method allows many rows to be INSERTed within a single
 statement, using an extended VALUES clause for the statement.  While
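
To illustrate the ORM side of this, a sketch of a flush that benefits from the
change (the model, DSN, and row count are illustrative only; the table mirrors
the ``a`` / ``data`` example shown in the SQL logging output above)::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class A(Base):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)
        data = Column(String)

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")
    Base.metadata.create_all(engine)

    with Session(engine) as session, session.begin():
        session.add_all([A(data="data %d" % i) for i in range(1, 5000)])
        # the flush at commit emits batched INSERT..RETURNING statements
        # (pages of 1000 rows by default) rather than one INSERT per row,
        # while still populating A.id on each instance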
diff --git a/doc/build/changelog/unreleased_14/5263.rst b/doc/build/changelog/unreleased_14/5263.rst
new file mode 100644 (file)
index 0000000..4b8af30
--- /dev/null
@@ -0,0 +1,18 @@
+.. change::
+    :tags: orm, performance, postgresql
+    :tickets: 5263
+
+    Implemented support for the psycopg2 ``execute_values()`` extension
+    within the ORM flush process via the enhancements to Core made
+    in :ticket:`5401`, so that this extension is used both as a strategy
+    to batch INSERT statements together and as a means of using RETURNING
+    across multiple parameter sets to retrieve primary key values back in
+    batch.   This allows nearly all INSERT statements emitted by the ORM
+    on behalf of PostgreSQL to be submitted in batch via the
+    ``execute_values()`` extension, which benches at five times faster
+    than plain executemany() for this particular backend.
+
+    .. seealso::
+
+        :ref:`change_5263`
index 6364838a62b28461de87a980368219a51c84f3fa..850e5717c6299d015672f2d4155b2cafe6f1761f 100644 (file)
@@ -643,7 +643,7 @@ class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
     pass
 
 
-EXECUTEMANY_DEFAULT = util.symbol("executemany_default", canonical=0)
+EXECUTEMANY_PLAIN = util.symbol("executemany_plain", canonical=0)
 EXECUTEMANY_BATCH = util.symbol("executemany_batch", canonical=1)
 EXECUTEMANY_VALUES = util.symbol("executemany_values", canonical=2)
 EXECUTEMANY_VALUES_PLUS_BATCH = util.symbol(
@@ -655,6 +655,12 @@ EXECUTEMANY_VALUES_PLUS_BATCH = util.symbol(
 class PGDialect_psycopg2(PGDialect):
     driver = "psycopg2"
     if util.py2k:
+        # turn off supports_unicode_statements for Python 2. psycopg2 supports
+        # unicode statements in Py2K. But!  it does not support unicode *bound
+        # parameter names* because it uses the Python "%" operator to
+        # interpolate these into the string, and this fails.   So for Py2K, we
+        # have to use full-on encoding for statements and parameters before
+        # passing to cursor.execute().
         supports_unicode_statements = False
 
     supports_server_side_cursors = True
@@ -714,7 +720,7 @@ class PGDialect_psycopg2(PGDialect):
         self.executemany_mode = util.symbol.parse_user_argument(
             executemany_mode,
             {
-                EXECUTEMANY_DEFAULT: [None],
+                EXECUTEMANY_PLAIN: [None],
                 EXECUTEMANY_BATCH: ["batch"],
                 EXECUTEMANY_VALUES: ["values_only"],
                 EXECUTEMANY_VALUES_PLUS_BATCH: ["values_plus_batch", "values"],
@@ -747,7 +753,12 @@ class PGDialect_psycopg2(PGDialect):
             and self._hstore_oids(connection.connection) is not None
         )
 
-        # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
+        # PGDialect.initialize() checks server version for <= 8.2 and sets
+        # this flag to False if so
+        if not self.full_returning:
+            self.insert_executemany_returning = False
+            self.executemany_mode = EXECUTEMANY_PLAIN
+
         self.supports_sane_multi_rowcount = not (
             self.executemany_mode & EXECUTEMANY_BATCH
         )
@@ -876,6 +887,9 @@ class PGDialect_psycopg2(PGDialect):
             executemany_values = (
                 "(%s)" % context.compiled.insert_single_values_expr
             )
+            if not self.supports_unicode_statements:
+                executemany_values = executemany_values.encode(self.encoding)
+
             # guard for statement that was altered via event hook or similar
             if executemany_values not in statement:
                 executemany_values = None
@@ -883,10 +897,6 @@ class PGDialect_psycopg2(PGDialect):
             executemany_values = None
 
         if executemany_values:
-            # Currently, SQLAlchemy does not pass "RETURNING" statements
-            # into executemany(), since no DBAPI has ever supported that
-            # until the introduction of psycopg2's executemany_values, so
-            # we are not yet using the fetch=True flag.
             statement = statement.replace(executemany_values, "%s")
             if self.executemany_values_page_size:
                 kwargs = {"page_size": self.executemany_values_page_size}
index 790f68de710209a9a49a5f05b711cfcf4abfe678..f2f30455ae355d880a6c1c1db283927bf6b4be80 100644 (file)
@@ -824,7 +824,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         if self.isinsert or self.isupdate or self.isdelete:
             self.is_crud = True
             self._is_explicit_returning = bool(compiled.statement._returning)
-            self._is_implicit_returning = (
+            self._is_implicit_returning = bool(
                 compiled.returning and not compiled.statement._returning
             )
 
@@ -1291,11 +1291,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         result.out_parameters = out_parameters
 
     def _setup_dml_or_text_result(self):
-        if self.isinsert and not self.executemany:
+        if self.isinsert:
             if (
                 not self._is_implicit_returning
                 and not self.compiled.inline
                 and self.dialect.postfetch_lastrowid
+                and not self.executemany
             ):
 
                 self._setup_ins_pk_from_lastrowid()
@@ -1375,7 +1376,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
         getter = self.compiled._inserted_primary_key_from_lastrowid_getter
 
         self.inserted_primary_key_rows = [
-            getter(None, self.compiled_parameters[0])
+            getter(None, param) for param in self.compiled_parameters
         ]
 
     def _setup_ins_pk_from_implicit_returning(self, result, rows):
index 88524dc49fa9d545048ffe6741b8a27c3c63b76a..cbe7bde33230e1ea07d32ecbeaed6b09b9ca90d9 100644 (file)
@@ -960,6 +960,7 @@ def _emit_update_statements(
                         c.context.compiled_parameters[0],
                         value_params,
                         True,
+                        c.returned_defaults,
                     )
                 rows += c.rowcount
                 check_rowcount = assert_singlerow
@@ -992,6 +993,7 @@ def _emit_update_statements(
                             c.context.compiled_parameters[0],
                             value_params,
                             True,
+                            c.returned_defaults,
                         )
                     rows += c.rowcount
             else:
@@ -1028,6 +1030,9 @@ def _emit_update_statements(
                             c.context.compiled_parameters[0],
                             value_params,
                             True,
+                            c.returned_defaults
+                            if not c.context.executemany
+                            else None,
                         )
 
         if check_rowcount:
@@ -1086,7 +1091,10 @@ def _emit_insert_statements(
             and has_all_pks
             and not hasvalue
         ):
-
+            # the "we don't need newly generated values back" section.
+            # here we have all the PKs, all the defaults or we don't want
+            # to fetch them, or the dialect doesn't support RETURNING at all
+            # so we have to post-fetch / use lastrowid anyway.
             records = list(records)
             multiparams = [rec[2] for rec in records]
 
@@ -1116,63 +1124,132 @@ def _emit_insert_statements(
                             last_inserted_params,
                             value_params,
                             False,
+                            c.returned_defaults
+                            if not c.context.executemany
+                            else None,
                         )
                     else:
                         _postfetch_bulk_save(mapper_rec, state_dict, table)
 
         else:
+            # here, we need defaults and/or pk values back.
+
+            records = list(records)
+            if (
+                not hasvalue
+                and connection.dialect.insert_executemany_returning
+                and len(records) > 1
+            ):
+                do_executemany = True
+            else:
+                do_executemany = False
+
             if not has_all_defaults and base_mapper.eager_defaults:
                 statement = statement.return_defaults()
             elif mapper.version_id_col is not None:
                 statement = statement.return_defaults(mapper.version_id_col)
+            elif do_executemany:
+                statement = statement.return_defaults(*table.primary_key)
 
-            for (
-                state,
-                state_dict,
-                params,
-                mapper_rec,
-                connection,
-                value_params,
-                has_all_pks,
-                has_all_defaults,
-            ) in records:
+            if do_executemany:
+                multiparams = [rec[2] for rec in records]
 
-                if value_params:
-                    result = connection.execute(
-                        statement.values(value_params), params
-                    )
-                else:
-                    result = cached_connections[connection].execute(
-                        statement, params
-                    )
+                c = cached_connections[connection].execute(
+                    statement, multiparams
+                )
+                if bookkeeping:
+                    for (
+                        (
+                            state,
+                            state_dict,
+                            params,
+                            mapper_rec,
+                            conn,
+                            value_params,
+                            has_all_pks,
+                            has_all_defaults,
+                        ),
+                        last_inserted_params,
+                        inserted_primary_key,
+                        returned_defaults,
+                    ) in util.zip_longest(
+                        records,
+                        c.context.compiled_parameters,
+                        c.inserted_primary_key_rows,
+                        c.returned_defaults_rows or (),
+                    ):
+                        for pk, col in zip(
+                            inserted_primary_key, mapper._pks_by_table[table],
+                        ):
+                            prop = mapper_rec._columntoproperty[col]
+                            if state_dict.get(prop.key) is None:
+                                state_dict[prop.key] = pk
+
+                        if state:
+                            _postfetch(
+                                mapper_rec,
+                                uowtransaction,
+                                table,
+                                state,
+                                state_dict,
+                                c,
+                                last_inserted_params,
+                                value_params,
+                                False,
+                                returned_defaults,
+                            )
+                        else:
+                            _postfetch_bulk_save(mapper_rec, state_dict, table)
+            else:
+                for (
+                    state,
+                    state_dict,
+                    params,
+                    mapper_rec,
+                    connection,
+                    value_params,
+                    has_all_pks,
+                    has_all_defaults,
+                ) in records:
+
+                    if value_params:
+                        result = connection.execute(
+                            statement.values(value_params), params
+                        )
+                    else:
+                        result = cached_connections[connection].execute(
+                            statement, params
+                        )
 
-                primary_key = result.inserted_primary_key
-                if primary_key is not None:
-                    # set primary key attributes
+                    primary_key = result.inserted_primary_key
+                    assert primary_key
                     for pk, col in zip(
                         primary_key, mapper._pks_by_table[table]
                     ):
                         prop = mapper_rec._columntoproperty[col]
-                        if pk is not None and (
+                        if (
                             col in value_params
                             or state_dict.get(prop.key) is None
                         ):
                             state_dict[prop.key] = pk
-                if bookkeeping:
-                    if state:
-                        _postfetch(
-                            mapper_rec,
-                            uowtransaction,
-                            table,
-                            state,
-                            state_dict,
-                            result,
-                            result.context.compiled_parameters[0],
-                            value_params,
-                            False,
-                        )
-                    else:
-                        _postfetch_bulk_save(mapper_rec, state_dict, table)
+                    if bookkeeping:
+                        if state:
+                            _postfetch(
+                                mapper_rec,
+                                uowtransaction,
+                                table,
+                                state,
+                                state_dict,
+                                result,
+                                result.context.compiled_parameters[0],
+                                value_params,
+                                False,
+                                result.returned_defaults
+                                if not result.context.executemany
+                                else None,
+                            )
+                        else:
+                            _postfetch_bulk_save(mapper_rec, state_dict, table)
 
 
 def _emit_post_update_statements(
@@ -1507,6 +1584,7 @@ def _postfetch(
     params,
     value_params,
     isupdate,
+    returned_defaults,
 ):
     """Expire attributes in need of newly persisted database state,
     after an INSERT or UPDATE statement has proceeded for that
@@ -1527,7 +1605,7 @@ def _postfetch(
         load_evt_attrs = []
 
     if returning_cols:
-        row = result.returned_defaults
+        row = returned_defaults
         if row is not None:
             for row_value, col in zip(row, returning_cols):
                 # pk cols returned from insert are handled
index c80d95a2c77c5a273c81344bb3b907c9f596dd81..85112f8506aaad2a5a46cfdcfe0cdbcc6ac683c0 100644 (file)
@@ -157,6 +157,12 @@ def _get_crud_params(compiler, stmt, compile_state, **kw):
         values = _extend_values_for_multiparams(
             compiler, stmt, compile_state, values, kw
         )
+    elif not values and compiler.for_executemany:
+        # convert an "INSERT DEFAULT VALUES"
+        # into INSERT (firstcol) VALUES (DEFAULT) which can be turned
+        # into an in-place multi values.  This supports
+        # insert_executemany_returning mode :)
+        values = [(stmt.table.columns[0], "DEFAULT")]
 
     return values
 
index 1ea366dacb74dce11e564672fca748bc50214164..998dde66bbeffa199b641b67996d62552c2e71ee 100644 (file)
@@ -343,6 +343,7 @@ class AssertsCompiledSQL(object):
         result,
         params=None,
         checkparams=None,
+        for_executemany=False,
         check_literal_execute=None,
         check_post_param=None,
         dialect=None,
@@ -391,6 +392,9 @@ class AssertsCompiledSQL(object):
         if render_postcompile:
             compile_kwargs["render_postcompile"] = True
 
+        if for_executemany:
+            kw["for_executemany"] = True
+
         if render_schema_translate:
             kw["render_schema_translate"] = True
 
index ef324635e0a1da719d32ba8be9242540f7207c6d..caf61a80616bfb606180263da4fac6df078c0135 100644 (file)
@@ -325,6 +325,14 @@ class EachOf(AssertRule):
             super(EachOf, self).no_more_statements()
 
 
+class Conditional(EachOf):
+    def __init__(self, condition, rules, else_rules):
+        if condition:
+            super(Conditional, self).__init__(*rules)
+        else:
+            super(Conditional, self).__init__(*else_rules)
+
+
 class Or(AllOf):
     def process_statement(self, execute_observed):
         for rule in self.rules:
index 1fbe870bae7b802a1a653c28931296e15d37651b..cc0f3b4dfbe65adce374cabfbf30ce75b8f5ed31 100644 (file)
@@ -1,5 +1,6 @@
 # coding: utf-8
 import datetime
+import itertools
 import logging
 import logging.handlers
 
@@ -26,10 +27,11 @@ from sqlalchemy import Table
 from sqlalchemy import testing
 from sqlalchemy import text
 from sqlalchemy import TypeDecorator
+from sqlalchemy import util
 from sqlalchemy.dialects.postgresql import base as postgresql
 from sqlalchemy.dialects.postgresql import psycopg2 as psycopg2_dialect
 from sqlalchemy.dialects.postgresql.psycopg2 import EXECUTEMANY_BATCH
-from sqlalchemy.dialects.postgresql.psycopg2 import EXECUTEMANY_DEFAULT
+from sqlalchemy.dialects.postgresql.psycopg2 import EXECUTEMANY_PLAIN
 from sqlalchemy.dialects.postgresql.psycopg2 import EXECUTEMANY_VALUES
 from sqlalchemy.engine import cursor as _cursor
 from sqlalchemy.engine import engine_from_config
@@ -45,6 +47,8 @@ from sqlalchemy.testing.assertions import AssertsExecutionResults
 from sqlalchemy.testing.assertions import eq_
 from sqlalchemy.testing.assertions import eq_regex
 from sqlalchemy.testing.assertions import ne_
+from sqlalchemy.util import u
+from sqlalchemy.util import ue
 from ...engine import test_execute
 
 if True:
@@ -160,6 +164,7 @@ class ExecuteManyMode(object):
     __backend__ = True
 
     run_create_tables = "each"
+    run_deletes = None
 
     options = None
 
@@ -174,6 +179,13 @@ class ExecuteManyMode(object):
             Column("z", Integer, server_default="5"),
         )
 
+        Table(
+            u("Unitéble2"),
+            metadata,
+            Column(u("méil"), Integer, primary_key=True),
+            Column(ue("\u6e2c\u8a66"), Integer),
+        )
+
     def setup(self):
         super(ExecuteManyMode, self).setup()
         self.engine = engines.testing_engine(options=self.options)
@@ -343,6 +355,22 @@ class ExecuteManyMode(object):
             ],
         )
 
+    def test_insert_unicode_keys(self, connection):
+        table = self.tables[u("Unitéble2")]
+
+        stmt = table.insert()
+
+        connection.execute(
+            stmt,
+            [
+                {u("méil"): 1, ue("\u6e2c\u8a66"): 1},
+                {u("méil"): 2, ue("\u6e2c\u8a66"): 2},
+                {u("méil"): 3, ue("\u6e2c\u8a66"): 3},
+            ],
+        )
+
+        eq_(connection.execute(table.select()).all(), [(1, 1), (2, 2), (3, 3)])
+
     def test_update_fallback(self):
         from psycopg2 import extras
 
@@ -423,57 +451,104 @@ class ExecutemanyBatchModeTest(ExecuteManyMode, fixtures.TablesTest):
 class ExecutemanyValuesInsertsTest(ExecuteManyMode, fixtures.TablesTest):
     options = {"executemany_mode": "values_only"}
 
-    def test_insert_returning_values(self):
+    def test_insert_returning_values(self, connection):
         """the psycopg2 dialect needs to assemble a fully buffered result
         with the return value of execute_values().
 
         """
         t = self.tables.data
 
-        with self.engine.connect() as conn:
-            page_size = conn.dialect.executemany_values_page_size or 100
-            data = [
-                {"x": "x%d" % i, "y": "y%d" % i}
-                for i in range(1, page_size * 5 + 27)
-            ]
-            result = conn.execute(t.insert().returning(t.c.x, t.c.y), data)
-
-            eq_([tup[0] for tup in result.cursor.description], ["x", "y"])
-            eq_(result.keys(), ["x", "y"])
-            assert t.c.x in result.keys()
-            assert t.c.id not in result.keys()
-            assert not result._soft_closed
-            assert isinstance(
-                result.cursor_strategy,
-                _cursor.FullyBufferedCursorFetchStrategy,
-            )
-            assert not result.cursor.closed
-            assert not result.closed
-            eq_(result.mappings().all(), data)
+        conn = connection
+        page_size = conn.dialect.executemany_values_page_size or 100
+        data = [
+            {"x": "x%d" % i, "y": "y%d" % i}
+            for i in range(1, page_size * 5 + 27)
+        ]
+        result = conn.execute(t.insert().returning(t.c.x, t.c.y), data)
+
+        eq_([tup[0] for tup in result.cursor.description], ["x", "y"])
+        eq_(result.keys(), ["x", "y"])
+        assert t.c.x in result.keys()
+        assert t.c.id not in result.keys()
+        assert not result._soft_closed
+        assert isinstance(
+            result.cursor_strategy, _cursor.FullyBufferedCursorFetchStrategy,
+        )
+        assert not result.cursor.closed
+        assert not result.closed
+        eq_(result.mappings().all(), data)
+
+        assert result._soft_closed
+        # assert result.closed
+        assert result.cursor is None
+
+    @testing.provide_metadata
+    def test_insert_returning_preexecute_pk(self, connection):
+        counter = itertools.count(1)
+
+        t = Table(
+            "t",
+            self.metadata,
+            Column(
+                "id",
+                Integer,
+                primary_key=True,
+                default=lambda: util.next(counter),
+            ),
+            Column("data", Integer),
+        )
+        self.metadata.create_all(connection)
+
+        result = connection.execute(
+            t.insert().return_defaults(),
+            [{"data": 1}, {"data": 2}, {"data": 3}],
+        )
 
-            assert result._soft_closed
-            # assert result.closed
-            assert result.cursor is None
+        eq_(result.inserted_primary_key_rows, [(1,), (2,), (3,)])
 
-    def test_insert_returning_defaults(self):
+    def test_insert_returning_defaults(self, connection):
         t = self.tables.data
 
-        with self.engine.connect() as conn:
+        conn = connection
 
-            result = conn.execute(t.insert(), {"x": "x0", "y": "y0"})
-            first_pk = result.inserted_primary_key[0]
+        result = conn.execute(t.insert(), {"x": "x0", "y": "y0"})
+        first_pk = result.inserted_primary_key[0]
 
-            page_size = conn.dialect.executemany_values_page_size or 100
-            total_rows = page_size * 5 + 27
-            data = [
-                {"x": "x%d" % i, "y": "y%d" % i} for i in range(1, total_rows)
-            ]
-            result = conn.execute(t.insert().returning(t.c.id, t.c.z), data)
+        page_size = conn.dialect.executemany_values_page_size or 100
+        total_rows = page_size * 5 + 27
+        data = [{"x": "x%d" % i, "y": "y%d" % i} for i in range(1, total_rows)]
+        result = conn.execute(t.insert().returning(t.c.id, t.c.z), data)
 
-            eq_(
-                result.all(),
-                [(pk, 5) for pk in range(1 + first_pk, total_rows + first_pk)],
-            )
+        eq_(
+            result.all(),
+            [(pk, 5) for pk in range(1 + first_pk, total_rows + first_pk)],
+        )
+
+    def test_insert_return_pks_default_values(self, connection):
+        """test sending multiple, empty rows into an INSERT and getting primary
+        key values back.
+
+        This has to use a format that indicates at least one DEFAULT in
+        multiple parameter sets, i.e. "INSERT INTO table (anycol) VALUES
+        (DEFAULT) (DEFAULT) (DEFAULT) ... RETURNING col"
+
+        """
+        t = self.tables.data
+
+        conn = connection
+
+        result = conn.execute(t.insert(), {"x": "x0", "y": "y0"})
+        first_pk = result.inserted_primary_key[0]
+
+        page_size = conn.dialect.executemany_values_page_size or 100
+        total_rows = page_size * 5 + 27
+        data = [{} for i in range(1, total_rows)]
+        result = conn.execute(t.insert().returning(t.c.id), data)
+
+        eq_(
+            result.all(),
+            [(pk,) for pk in range(1 + first_pk, total_rows + first_pk)],
+        )
 
     def test_insert_w_newlines(self):
         from psycopg2 import extras
@@ -611,7 +686,7 @@ class ExecutemanyFlagOptionsTest(fixtures.TablesTest):
 
     def test_executemany_correct_flag_options(self):
         for opt, expected in [
-            (None, EXECUTEMANY_DEFAULT),
+            (None, EXECUTEMANY_PLAIN),
             ("batch", EXECUTEMANY_BATCH),
             ("values_only", EXECUTEMANY_VALUES),
             ("values_plus_batch", EXECUTEMANY_VALUES_PLUS_BATCH),
index e38758ee2dbb9f2b303745d7d4837eba5f541734..b4a17bf3a15308af0bcd5bddfc136412df20e200 100644 (file)
@@ -40,6 +40,7 @@ from sqlalchemy.testing import is_
 from sqlalchemy.testing import mock
 from sqlalchemy.testing.assertsql import AllOf
 from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing.assertsql import Conditional
 from sqlalchemy.testing.assertsql import Or
 from sqlalchemy.testing.assertsql import RegexSQL
 from sqlalchemy.testing.schema import Column
@@ -1829,10 +1830,20 @@ class JoinedNoFKSortingTest(fixtures.MappedTest):
         self.assert_sql_execution(
             testing.db,
             sess.flush,
-            CompiledSQL("INSERT INTO a () VALUES ()", {}),
-            CompiledSQL("INSERT INTO a () VALUES ()", {}),
-            CompiledSQL("INSERT INTO a () VALUES ()", {}),
-            CompiledSQL("INSERT INTO a () VALUES ()", {}),
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO a (id) VALUES (DEFAULT)", [{}, {}, {}, {}]
+                    ),
+                ],
+                [
+                    CompiledSQL("INSERT INTO a () VALUES ()", {}),
+                    CompiledSQL("INSERT INTO a () VALUES ()", {}),
+                    CompiledSQL("INSERT INTO a () VALUES ()", {}),
+                    CompiledSQL("INSERT INTO a () VALUES ()", {}),
+                ],
+            ),
             AllOf(
                 CompiledSQL(
                     "INSERT INTO b (id) VALUES (:id)", [{"id": 1}, {"id": 3}]
index 79de19f68403a540112d9999189d9fd8f846194e..27b187342db2a90c7b87e2298e90be640127b8c6 100644 (file)
@@ -9,6 +9,7 @@ from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing import mock
 from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing.assertsql import Conditional
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 from test.orm import _fixtures
@@ -91,15 +92,29 @@ class BulkInsertUpdateTest(BulkTest, _fixtures.FixtureTest):
             s.bulk_save_objects(objects, return_defaults=True)
 
         asserter.assert_(
-            CompiledSQL(
-                "INSERT INTO users (name) VALUES (:name)", [{"name": "u1"}]
-            ),
-            CompiledSQL(
-                "INSERT INTO users (name) VALUES (:name)", [{"name": "u2"}]
-            ),
-            CompiledSQL(
-                "INSERT INTO users (name) VALUES (:name)", [{"name": "u3"}]
-            ),
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "u1"}, {"name": "u2"}, {"name": "u3"}],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "u1"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "u2"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "u3"}],
+                    ),
+                ],
+            )
         )
         eq_(objects[0].__dict__["id"], 1)
 
@@ -612,13 +627,30 @@ class BulkInheritanceTest(BulkTest, fixtures.MappedTest):
                 "VALUES (:person_id, :status, :manager_name)",
                 [{"person_id": 1, "status": "s1", "manager_name": "mn1"}],
             ),
-            CompiledSQL(
-                "INSERT INTO people (name, type) VALUES (:name, :type)",
-                [{"type": "engineer", "name": "e1"}],
-            ),
-            CompiledSQL(
-                "INSERT INTO people (name, type) VALUES (:name, :type)",
-                [{"type": "engineer", "name": "e2"}],
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO people (name, type) "
+                        "VALUES (:name, :type)",
+                        [
+                            {"type": "engineer", "name": "e1"},
+                            {"type": "engineer", "name": "e2"},
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO people (name, type) "
+                        "VALUES (:name, :type)",
+                        [{"type": "engineer", "name": "e1"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO people (name, type) "
+                        "VALUES (:name, :type)",
+                        [{"type": "engineer", "name": "e2"}],
+                    ),
+                ],
             ),
             CompiledSQL(
                 "INSERT INTO engineers (person_id, status, primary_language) "
@@ -762,14 +794,28 @@ class BulkInheritanceTest(BulkTest, fixtures.MappedTest):
             )
 
         asserter.assert_(
-            CompiledSQL(
-                "INSERT INTO people (name) VALUES (:name)", [{"name": "b1"}]
-            ),
-            CompiledSQL(
-                "INSERT INTO people (name) VALUES (:name)", [{"name": "b2"}]
-            ),
-            CompiledSQL(
-                "INSERT INTO people (name) VALUES (:name)", [{"name": "b3"}]
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO people (name) VALUES (:name)",
+                        [{"name": "b1"}, {"name": "b2"}, {"name": "b3"}],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO people (name) VALUES (:name)",
+                        [{"name": "b1"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO people (name) VALUES (:name)",
+                        [{"name": "b2"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO people (name) VALUES (:name)",
+                        [{"name": "b3"}],
+                    ),
+                ],
             ),
             CompiledSQL(
                 "INSERT INTO managers (person_id, status, manager_name) "
index 22a26e6178e6b6985a7262234361cb44748f2d05..5c61a6370ad998d8e9933f30e5ed8435d126f47f 100644 (file)
@@ -25,6 +25,7 @@ from sqlalchemy.testing import is_
 from sqlalchemy.testing import mock
 from sqlalchemy.testing.assertsql import AllOf
 from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing.assertsql import Conditional
 from sqlalchemy.testing.assertsql import RegexSQL
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
@@ -910,21 +911,37 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
             testing.db,
             sess.flush,
             RegexSQL("^INSERT INTO person", {"data": "some data"}),
-            RegexSQL(
-                "^INSERT INTO ball",
-                lambda c: {"person_id": p.id, "data": "some data"},
-            ),
-            RegexSQL(
-                "^INSERT INTO ball",
-                lambda c: {"person_id": p.id, "data": "some data"},
-            ),
-            RegexSQL(
-                "^INSERT INTO ball",
-                lambda c: {"person_id": p.id, "data": "some data"},
-            ),
-            RegexSQL(
-                "^INSERT INTO ball",
-                lambda c: {"person_id": p.id, "data": "some data"},
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    RegexSQL(
+                        "^INSERT INTO ball",
+                        lambda c: [
+                            {"person_id": p.id, "data": "some data"},
+                            {"person_id": p.id, "data": "some data"},
+                            {"person_id": p.id, "data": "some data"},
+                            {"person_id": p.id, "data": "some data"},
+                        ],
+                    )
+                ],
+                [
+                    RegexSQL(
+                        "^INSERT INTO ball",
+                        lambda c: {"person_id": p.id, "data": "some data"},
+                    ),
+                    RegexSQL(
+                        "^INSERT INTO ball",
+                        lambda c: {"person_id": p.id, "data": "some data"},
+                    ),
+                    RegexSQL(
+                        "^INSERT INTO ball",
+                        lambda c: {"person_id": p.id, "data": "some data"},
+                    ),
+                    RegexSQL(
+                        "^INSERT INTO ball",
+                        lambda c: {"person_id": p.id, "data": "some data"},
+                    ),
+                ],
             ),
             CompiledSQL(
                 "UPDATE person SET favorite_ball_id=:favorite_ball_id "
@@ -1054,25 +1071,42 @@ class OneToManyManyToOneTest(fixtures.MappedTest):
         self.assert_sql_execution(
             testing.db,
             sess.flush,
-            CompiledSQL(
-                "INSERT INTO ball (person_id, data) "
-                "VALUES (:person_id, :data)",
-                {"person_id": None, "data": "some data"},
-            ),
-            CompiledSQL(
-                "INSERT INTO ball (person_id, data) "
-                "VALUES (:person_id, :data)",
-                {"person_id": None, "data": "some data"},
-            ),
-            CompiledSQL(
-                "INSERT INTO ball (person_id, data) "
-                "VALUES (:person_id, :data)",
-                {"person_id": None, "data": "some data"},
-            ),
-            CompiledSQL(
-                "INSERT INTO ball (person_id, data) "
-                "VALUES (:person_id, :data)",
-                {"person_id": None, "data": "some data"},
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO ball (person_id, data) "
+                        "VALUES (:person_id, :data)",
+                        [
+                            {"person_id": None, "data": "some data"},
+                            {"person_id": None, "data": "some data"},
+                            {"person_id": None, "data": "some data"},
+                            {"person_id": None, "data": "some data"},
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO ball (person_id, data) "
+                        "VALUES (:person_id, :data)",
+                        {"person_id": None, "data": "some data"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO ball (person_id, data) "
+                        "VALUES (:person_id, :data)",
+                        {"person_id": None, "data": "some data"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO ball (person_id, data) "
+                        "VALUES (:person_id, :data)",
+                        {"person_id": None, "data": "some data"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO ball (person_id, data) "
+                        "VALUES (:person_id, :data)",
+                        {"person_id": None, "data": "some data"},
+                    ),
+                ],
             ),
             CompiledSQL(
                 "INSERT INTO person (favorite_ball_id, data) "
index 5cadea5ffc7e0075c067e5be5b152a57c99e5adb..94397e22aebc6ecb7ca6129b203f8caf85f23292 100644 (file)
@@ -11,6 +11,7 @@ from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing.assertsql import assert_engine
 from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing.assertsql import Conditional
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 
@@ -229,38 +230,57 @@ class ComputedDefaultsOnUpdateTest(fixtures.MappedTest):
             eq_(t1.bar, 5 + 42)
             eq_(t2.bar, 10 + 42)
 
-        if eager and testing.db.dialect.implicit_returning:
-            asserter.assert_(
-                CompiledSQL(
-                    "INSERT INTO test (id, foo) VALUES (%(id)s, %(foo)s) "
-                    "RETURNING test.bar",
-                    [{"foo": 5, "id": 1}],
-                    dialect="postgresql",
-                ),
-                CompiledSQL(
-                    "INSERT INTO test (id, foo) VALUES (%(id)s, %(foo)s) "
-                    "RETURNING test.bar",
-                    [{"foo": 10, "id": 2}],
-                    dialect="postgresql",
-                ),
-            )
-        else:
-            asserter.assert_(
-                CompiledSQL(
-                    "INSERT INTO test (id, foo) VALUES (:id, :foo)",
-                    [{"foo": 5, "id": 1}, {"foo": 10, "id": 2}],
-                ),
-                CompiledSQL(
-                    "SELECT test.bar AS test_bar FROM test "
-                    "WHERE test.id = :param_1",
-                    [{"param_1": 1}],
-                ),
-                CompiledSQL(
-                    "SELECT test.bar AS test_bar FROM test "
-                    "WHERE test.id = :param_1",
-                    [{"param_1": 2}],
-                ),
+        asserter.assert_(
+            Conditional(
+                eager and testing.db.dialect.implicit_returning,
+                [
+                    Conditional(
+                        testing.db.dialect.insert_executemany_returning,
+                        [
+                            CompiledSQL(
+                                "INSERT INTO test (id, foo) "
+                                "VALUES (%(id)s, %(foo)s) "
+                                "RETURNING test.bar",
+                                [{"foo": 5, "id": 1}, {"foo": 10, "id": 2}],
+                                dialect="postgresql",
+                            ),
+                        ],
+                        [
+                            CompiledSQL(
+                                "INSERT INTO test (id, foo) "
+                                "VALUES (%(id)s, %(foo)s) "
+                                "RETURNING test.bar",
+                                [{"foo": 5, "id": 1}],
+                                dialect="postgresql",
+                            ),
+                            CompiledSQL(
+                                "INSERT INTO test (id, foo) "
+                                "VALUES (%(id)s, %(foo)s) "
+                                "RETURNING test.bar",
+                                [{"foo": 10, "id": 2}],
+                                dialect="postgresql",
+                            ),
+                        ],
+                    )
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO test (id, foo) VALUES (:id, :foo)",
+                        [{"foo": 5, "id": 1}, {"foo": 10, "id": 2}],
+                    ),
+                    CompiledSQL(
+                        "SELECT test.bar AS test_bar FROM test "
+                        "WHERE test.id = :param_1",
+                        [{"param_1": 1}],
+                    ),
+                    CompiledSQL(
+                        "SELECT test.bar AS test_bar FROM test "
+                        "WHERE test.id = :param_1",
+                        [{"param_1": 2}],
+                    ),
+                ],
             )
+        )
 
     @testing.combinations(
         (
index 3a1594a61359b9b79a999f05b625a934d3a770d4..306fc0d2ad58d5e9f3c791dc2b0a32a3e4622009 100644 (file)
@@ -29,6 +29,7 @@ from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing.assertsql import AllOf
 from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing.assertsql import Conditional
 from sqlalchemy.testing.schema import Column
 from sqlalchemy.testing.schema import Table
 from sqlalchemy.util import OrderedDict
@@ -2789,21 +2790,42 @@ class SaveTest2(_fixtures.FixtureTest):
         self.assert_sql_execution(
             testing.db,
             session.flush,
-            CompiledSQL(
-                "INSERT INTO users (name) VALUES (:name)", {"name": "u1"}
-            ),
-            CompiledSQL(
-                "INSERT INTO users (name) VALUES (:name)", {"name": "u2"}
-            ),
-            CompiledSQL(
-                "INSERT INTO addresses (user_id, email_address) "
-                "VALUES (:user_id, :email_address)",
-                {"user_id": 1, "email_address": "a1"},
-            ),
-            CompiledSQL(
-                "INSERT INTO addresses (user_id, email_address) "
-                "VALUES (:user_id, :email_address)",
-                {"user_id": 2, "email_address": "a2"},
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        [{"name": "u1"}, {"name": "u2"}],
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        [
+                            {"user_id": 1, "email_address": "a1"},
+                            {"user_id": 2, "email_address": "a2"},
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        {"name": "u1"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO users (name) VALUES (:name)",
+                        {"name": "u2"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        {"user_id": 1, "email_address": "a1"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        {"user_id": 2, "email_address": "a2"},
+                    ),
+                ],
             ),
         )
 
index 2bd908c8f9ef8614024f05d203ad073e73ca3c65..08dd8c96634dc980986337a537ce298cc0cf1e45 100644 (file)
@@ -27,6 +27,7 @@ from sqlalchemy.testing import eq_
 from sqlalchemy.testing import fixtures
 from sqlalchemy.testing.assertsql import AllOf
 from sqlalchemy.testing.assertsql import CompiledSQL
+from sqlalchemy.testing.assertsql import Conditional
 from sqlalchemy.testing.mock import Mock
 from sqlalchemy.testing.mock import patch
 from sqlalchemy.testing.schema import Column
@@ -82,15 +83,30 @@ class RudimentaryFlushTest(UOWTest):
             CompiledSQL(
                 "INSERT INTO users (name) VALUES (:name)", {"name": "u1"}
             ),
-            CompiledSQL(
-                "INSERT INTO addresses (user_id, email_address) "
-                "VALUES (:user_id, :email_address)",
-                lambda ctx: {"email_address": "a1", "user_id": u1.id},
-            ),
-            CompiledSQL(
-                "INSERT INTO addresses (user_id, email_address) "
-                "VALUES (:user_id, :email_address)",
-                lambda ctx: {"email_address": "a2", "user_id": u1.id},
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        lambda ctx: [
+                            {"email_address": "a1", "user_id": u1.id},
+                            {"email_address": "a2", "user_id": u1.id},
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        lambda ctx: {"email_address": "a1", "user_id": u1.id},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        lambda ctx: {"email_address": "a2", "user_id": u1.id},
+                    ),
+                ],
             ),
         )
 
@@ -183,15 +199,30 @@ class RudimentaryFlushTest(UOWTest):
             CompiledSQL(
                 "INSERT INTO users (name) VALUES (:name)", {"name": "u1"}
             ),
-            CompiledSQL(
-                "INSERT INTO addresses (user_id, email_address) "
-                "VALUES (:user_id, :email_address)",
-                lambda ctx: {"email_address": "a1", "user_id": u1.id},
-            ),
-            CompiledSQL(
-                "INSERT INTO addresses (user_id, email_address) "
-                "VALUES (:user_id, :email_address)",
-                lambda ctx: {"email_address": "a2", "user_id": u1.id},
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        lambda ctx: [
+                            {"email_address": "a1", "user_id": u1.id},
+                            {"email_address": "a2", "user_id": u1.id},
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        lambda ctx: {"email_address": "a1", "user_id": u1.id},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO addresses (user_id, email_address) "
+                        "VALUES (:user_id, :email_address)",
+                        lambda ctx: {"email_address": "a2", "user_id": u1.id},
+                    ),
+                ],
             ),
         )
 
@@ -767,17 +798,32 @@ class SingleCycleTest(UOWTest):
                 "(:parent_id, :data)",
                 {"parent_id": None, "data": "n1"},
             ),
-            AllOf(
-                CompiledSQL(
-                    "INSERT INTO nodes (parent_id, data) VALUES "
-                    "(:parent_id, :data)",
-                    lambda ctx: {"parent_id": n1.id, "data": "n2"},
-                ),
-                CompiledSQL(
-                    "INSERT INTO nodes (parent_id, data) VALUES "
-                    "(:parent_id, :data)",
-                    lambda ctx: {"parent_id": n1.id, "data": "n3"},
-                ),
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: [
+                            {"parent_id": n1.id, "data": "n2"},
+                            {"parent_id": n1.id, "data": "n3"},
+                        ],
+                    ),
+                ],
+                [
+                    AllOf(
+                        CompiledSQL(
+                            "INSERT INTO nodes (parent_id, data) VALUES "
+                            "(:parent_id, :data)",
+                            lambda ctx: {"parent_id": n1.id, "data": "n2"},
+                        ),
+                        CompiledSQL(
+                            "INSERT INTO nodes (parent_id, data) VALUES "
+                            "(:parent_id, :data)",
+                            lambda ctx: {"parent_id": n1.id, "data": "n3"},
+                        ),
+                    ),
+                ],
             ),
         )
 
@@ -864,17 +910,32 @@ class SingleCycleTest(UOWTest):
                 "(:parent_id, :data)",
                 {"parent_id": None, "data": "n1"},
             ),
-            AllOf(
-                CompiledSQL(
-                    "INSERT INTO nodes (parent_id, data) VALUES "
-                    "(:parent_id, :data)",
-                    lambda ctx: {"parent_id": n1.id, "data": "n2"},
-                ),
-                CompiledSQL(
-                    "INSERT INTO nodes (parent_id, data) VALUES "
-                    "(:parent_id, :data)",
-                    lambda ctx: {"parent_id": n1.id, "data": "n3"},
-                ),
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: [
+                            {"parent_id": n1.id, "data": "n2"},
+                            {"parent_id": n1.id, "data": "n3"},
+                        ],
+                    ),
+                ],
+                [
+                    AllOf(
+                        CompiledSQL(
+                            "INSERT INTO nodes (parent_id, data) VALUES "
+                            "(:parent_id, :data)",
+                            lambda ctx: {"parent_id": n1.id, "data": "n2"},
+                        ),
+                        CompiledSQL(
+                            "INSERT INTO nodes (parent_id, data) VALUES "
+                            "(:parent_id, :data)",
+                            lambda ctx: {"parent_id": n1.id, "data": "n3"},
+                        ),
+                    ),
+                ],
             ),
         )
 
@@ -1009,35 +1070,67 @@ class SingleCycleTest(UOWTest):
                 "(:parent_id, :data)",
                 lambda ctx: {"parent_id": None, "data": "n1"},
             ),
-            CompiledSQL(
-                "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)",
-                lambda ctx: {"parent_id": n1.id, "data": "n11"},
-            ),
-            CompiledSQL(
-                "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)",
-                lambda ctx: {"parent_id": n1.id, "data": "n12"},
-            ),
-            CompiledSQL(
-                "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)",
-                lambda ctx: {"parent_id": n1.id, "data": "n13"},
-            ),
-            CompiledSQL(
-                "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)",
-                lambda ctx: {"parent_id": n12.id, "data": "n121"},
-            ),
-            CompiledSQL(
-                "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)",
-                lambda ctx: {"parent_id": n12.id, "data": "n122"},
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: [
+                            {"parent_id": n1.id, "data": "n11"},
+                            {"parent_id": n1.id, "data": "n12"},
+                            {"parent_id": n1.id, "data": "n13"},
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: {"parent_id": n1.id, "data": "n11"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: {"parent_id": n1.id, "data": "n12"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: {"parent_id": n1.id, "data": "n13"},
+                    ),
+                ],
             ),
-            CompiledSQL(
-                "INSERT INTO nodes (parent_id, data) VALUES "
-                "(:parent_id, :data)",
-                lambda ctx: {"parent_id": n12.id, "data": "n123"},
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: [
+                            {"parent_id": n12.id, "data": "n121"},
+                            {"parent_id": n12.id, "data": "n122"},
+                            {"parent_id": n12.id, "data": "n123"},
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: {"parent_id": n12.id, "data": "n121"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: {"parent_id": n12.id, "data": "n122"},
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO nodes (parent_id, data) VALUES "
+                        "(:parent_id, :data)",
+                        lambda ctx: {"parent_id": n12.id, "data": "n123"},
+                    ),
+                ],
             ),
         )
 
@@ -1907,8 +2000,23 @@ class BatchInsertsTest(fixtures.MappedTest, testing.AssertsExecutionResults):
         self.assert_sql_execution(
             testing.db,
             sess.flush,
-            CompiledSQL("INSERT INTO t (data) VALUES (:data)", {"data": "t1"}),
-            CompiledSQL("INSERT INTO t (data) VALUES (:data)", {"data": "t2"}),
+            Conditional(
+                testing.db.dialect.insert_executemany_returning,
+                [
+                    CompiledSQL(
+                        "INSERT INTO t (data) VALUES (:data)",
+                        [{"data": "t1"}, {"data": "t2"}],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO t (data) VALUES (:data)", {"data": "t1"}
+                    ),
+                    CompiledSQL(
+                        "INSERT INTO t (data) VALUES (:data)", {"data": "t2"}
+                    ),
+                ],
+            ),
             CompiledSQL(
                 "INSERT INTO t (id, data) VALUES (:id, :data)",
                 [
@@ -2256,40 +2364,56 @@ class EagerDefaultsTest(fixtures.MappedTest):
 
         s.add_all([t1, t2])
 
-        if testing.db.dialect.implicit_returning:
-            self.assert_sql_execution(
-                testing.db,
-                s.commit,
-                CompiledSQL(
-                    "INSERT INTO test (id) VALUES (%(id)s) RETURNING test.foo",
-                    [{"id": 1}],
-                    dialect="postgresql",
-                ),
-                CompiledSQL(
-                    "INSERT INTO test (id) VALUES (%(id)s) RETURNING test.foo",
-                    [{"id": 2}],
-                    dialect="postgresql",
-                ),
-            )
-        else:
-            self.assert_sql_execution(
-                testing.db,
-                s.commit,
-                CompiledSQL(
-                    "INSERT INTO test (id) VALUES (:id)",
-                    [{"id": 1}, {"id": 2}],
-                ),
-                CompiledSQL(
-                    "SELECT test.foo AS test_foo FROM test "
-                    "WHERE test.id = :param_1",
-                    [{"param_1": 1}],
-                ),
-                CompiledSQL(
-                    "SELECT test.foo AS test_foo FROM test "
-                    "WHERE test.id = :param_1",
-                    [{"param_1": 2}],
-                ),
-            )
+        self.assert_sql_execution(
+            testing.db,
+            s.commit,
+            Conditional(
+                testing.db.dialect.implicit_returning,
+                [
+                    Conditional(
+                        testing.db.dialect.insert_executemany_returning,
+                        [
+                            CompiledSQL(
+                                "INSERT INTO test (id) VALUES (%(id)s) "
+                                "RETURNING test.foo",
+                                [{"id": 1}, {"id": 2}],
+                                dialect="postgresql",
+                            ),
+                        ],
+                        [
+                            CompiledSQL(
+                                "INSERT INTO test (id) VALUES (%(id)s) "
+                                "RETURNING test.foo",
+                                [{"id": 1}],
+                                dialect="postgresql",
+                            ),
+                            CompiledSQL(
+                                "INSERT INTO test (id) VALUES (%(id)s) "
+                                "RETURNING test.foo",
+                                [{"id": 2}],
+                                dialect="postgresql",
+                            ),
+                        ],
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "INSERT INTO test (id) VALUES (:id)",
+                        [{"id": 1}, {"id": 2}],
+                    ),
+                    CompiledSQL(
+                        "SELECT test.foo AS test_foo FROM test "
+                        "WHERE test.id = :param_1",
+                        [{"param_1": 1}],
+                    ),
+                    CompiledSQL(
+                        "SELECT test.foo AS test_foo FROM test "
+                        "WHERE test.id = :param_1",
+                        [{"param_1": 2}],
+                    ),
+                ],
+            ),
+        )
 
     def test_update_defaults_nonpresent(self):
         Thing2 = self.classes.Thing2
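
In the EagerDefaultsTest changes the Conditional rules nest: the outer branch
checks dialect.implicit_returning and the inner one insert_executemany_returning,
so on psycopg2 (where both are normally true) the two single-row
"INSERT ... RETURNING test.foo" statements collapse into one batched statement,
while backends without RETURNING keep the post-flush SELECTs.  An illustrative
way to see which branch a given database will take, assuming a local PostgreSQL
instance (the URL is a placeholder):

    from sqlalchemy import create_engine

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    with engine.connect() as conn:
        # Both flags are determined when the dialect initializes on first
        # connect.  psycopg2 enables insert_executemany_returning through its
        # execute_values() extension; this commit also degrades gracefully to
        # plain executemany on PostgreSQL <= 8.2.
        print(conn.dialect.implicit_returning)            # True on modern PostgreSQL
        print(conn.dialect.insert_executemany_returning)  # True with psycopg2 defaults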
@@ -2312,70 +2436,71 @@ class EagerDefaultsTest(fixtures.MappedTest):
         t4.foo = 8
         t4.bar = 12
 
-        if testing.db.dialect.implicit_returning:
-            self.assert_sql_execution(
-                testing.db,
-                s.flush,
-                CompiledSQL(
-                    "UPDATE test2 SET foo=%(foo)s "
-                    "WHERE test2.id = %(test2_id)s "
-                    "RETURNING test2.bar",
-                    [{"foo": 5, "test2_id": 1}],
-                    dialect="postgresql",
-                ),
-                CompiledSQL(
-                    "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
-                    "WHERE test2.id = %(test2_id)s",
-                    [{"foo": 6, "bar": 10, "test2_id": 2}],
-                    dialect="postgresql",
-                ),
-                CompiledSQL(
-                    "UPDATE test2 SET foo=%(foo)s "
-                    "WHERE test2.id = %(test2_id)s "
-                    "RETURNING test2.bar",
-                    [{"foo": 7, "test2_id": 3}],
-                    dialect="postgresql",
-                ),
-                CompiledSQL(
-                    "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
-                    "WHERE test2.id = %(test2_id)s",
-                    [{"foo": 8, "bar": 12, "test2_id": 4}],
-                    dialect="postgresql",
-                ),
-            )
-        else:
-            self.assert_sql_execution(
-                testing.db,
-                s.flush,
-                CompiledSQL(
-                    "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
-                    [{"foo": 5, "test2_id": 1}],
-                ),
-                CompiledSQL(
-                    "UPDATE test2 SET foo=:foo, bar=:bar "
-                    "WHERE test2.id = :test2_id",
-                    [{"foo": 6, "bar": 10, "test2_id": 2}],
-                ),
-                CompiledSQL(
-                    "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
-                    [{"foo": 7, "test2_id": 3}],
-                ),
-                CompiledSQL(
-                    "UPDATE test2 SET foo=:foo, bar=:bar "
-                    "WHERE test2.id = :test2_id",
-                    [{"foo": 8, "bar": 12, "test2_id": 4}],
-                ),
-                CompiledSQL(
-                    "SELECT test2.bar AS test2_bar FROM test2 "
-                    "WHERE test2.id = :param_1",
-                    [{"param_1": 1}],
-                ),
-                CompiledSQL(
-                    "SELECT test2.bar AS test2_bar FROM test2 "
-                    "WHERE test2.id = :param_1",
-                    [{"param_1": 3}],
-                ),
-            )
+        self.assert_sql_execution(
+            testing.db,
+            s.flush,
+            Conditional(
+                testing.db.dialect.implicit_returning,
+                [
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=%(foo)s "
+                        "WHERE test2.id = %(test2_id)s "
+                        "RETURNING test2.bar",
+                        [{"foo": 5, "test2_id": 1}],
+                        dialect="postgresql",
+                    ),
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+                        "WHERE test2.id = %(test2_id)s",
+                        [{"foo": 6, "bar": 10, "test2_id": 2}],
+                        dialect="postgresql",
+                    ),
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=%(foo)s "
+                        "WHERE test2.id = %(test2_id)s "
+                        "RETURNING test2.bar",
+                        [{"foo": 7, "test2_id": 3}],
+                        dialect="postgresql",
+                    ),
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=%(foo)s, bar=%(bar)s "
+                        "WHERE test2.id = %(test2_id)s",
+                        [{"foo": 8, "bar": 12, "test2_id": 4}],
+                        dialect="postgresql",
+                    ),
+                ],
+                [
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+                        [{"foo": 5, "test2_id": 1}],
+                    ),
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=:foo, bar=:bar "
+                        "WHERE test2.id = :test2_id",
+                        [{"foo": 6, "bar": 10, "test2_id": 2}],
+                    ),
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=:foo WHERE test2.id = :test2_id",
+                        [{"foo": 7, "test2_id": 3}],
+                    ),
+                    CompiledSQL(
+                        "UPDATE test2 SET foo=:foo, bar=:bar "
+                        "WHERE test2.id = :test2_id",
+                        [{"foo": 8, "bar": 12, "test2_id": 4}],
+                    ),
+                    CompiledSQL(
+                        "SELECT test2.bar AS test2_bar FROM test2 "
+                        "WHERE test2.id = :param_1",
+                        [{"param_1": 1}],
+                    ),
+                    CompiledSQL(
+                        "SELECT test2.bar AS test2_bar FROM test2 "
+                        "WHERE test2.id = :param_1",
+                        [{"param_1": 3}],
+                    ),
+                ],
+            ),
+        )
 
         def go():
             eq_(t1.bar, 2)
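
Taken together, the unit-of-work test changes above all pin down the same
user-visible behavior: when the dialect reports insert_executemany_returning, a
flush of several pending rows for one table issues a single batched
INSERT ... RETURNING and still populates the new primary keys.  A minimal
end-to-end illustration, assuming a local PostgreSQL database and default
psycopg2 settings (the URL and model are placeholders, not part of this commit):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test", echo=True
    )
    Base.metadata.create_all(engine)

    users = [User(name="u1"), User(name="u2"), User(name="u3")]
    with Session(engine) as session:
        session.add_all(users)
        # The echo output shows one "INSERT INTO users (name) VALUES (%(name)s)
        # RETURNING users.id" with three parameter sets, sent via psycopg2
        # execute_values(), instead of three separate INSERT statements.
        session.flush()
        print([u.id for u in users])  # ids filled in from the batched RETURNING
        session.commit()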
index c7749e74c9900b19ee021f4beea9201fb10da9c8..e5f2fbe6d560542a20a5418861961d45caa9caa3 100644 (file)
@@ -936,11 +936,25 @@ class EmptyTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
         dialect = default.DefaultDialect()
         dialect.supports_empty_insert = dialect.supports_default_values = True
 
-        stmt = table1.insert().values({})  # hide from 2to3
+        stmt = table1.insert().values({})
         self.assert_compile(
             stmt, "INSERT INTO mytable DEFAULT VALUES", dialect=dialect
         )
 
+    def test_supports_empty_insert_true_executemany_mode(self):
+        table1 = self.tables.mytable
+
+        dialect = default.DefaultDialect()
+        dialect.supports_empty_insert = dialect.supports_default_values = True
+
+        stmt = table1.insert().values({})
+        self.assert_compile(
+            stmt,
+            "INSERT INTO mytable (myid) VALUES (DEFAULT)",
+            dialect=dialect,
+            for_executemany=True,
+        )
+
     def test_supports_empty_insert_false(self):
         table1 = self.tables.mytable
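
The new test_supports_empty_insert_true_executemany_mode test covers the
compiler-side piece of the feature: an INSERT with no values normally renders
as "INSERT INTO mytable DEFAULT VALUES", but that form cannot be expanded into
a multi-row VALUES list by psycopg2's execute_values(), so when compiled for
executemany the statement keeps one column and renders "VALUES (DEFAULT)",
which can then be repeated per parameter set.  Outside the test harness the two
forms can be compared directly; this is a sketch that assumes the new
for_executemany compiler flag can be passed through Core's compile():

    from sqlalchemy.sql import column, table
    from sqlalchemy.engine import default

    mytable = table("mytable", column("myid"))

    dialect = default.DefaultDialect()
    dialect.supports_empty_insert = dialect.supports_default_values = True

    stmt = mytable.insert().values({})

    # Single-execution form, unchanged by this commit:
    print(stmt.compile(dialect=dialect))
    # INSERT INTO mytable DEFAULT VALUES

    # Executemany form: one column is kept so the same string can be reused
    # for every parameter set in the batch.
    print(stmt.compile(dialect=dialect, for_executemany=True))
    # INSERT INTO mytable (myid) VALUES (DEFAULT)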