Turn off pyodbc setinputsizes() by default
author     Mike Bayer <mike_mp@zzzcomputing.com>
           Tue, 16 Mar 2021 15:23:07 +0000 (11:23 -0400)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Tue, 16 Mar 2021 17:03:17 +0000 (13:03 -0400)
Fixed regression where a new setinputsizes() API that's available for
pyodbc was enabled; this API is apparently incompatible with pyodbc's
fast_executemany() mode in the absence of more accurate typing information,
which is not yet fully implemented or tested. The pyodbc dialect and
connector have been modified so that setinputsizes() is not used at all
unless the parameter ``use_setinputsizes`` is passed to the dialect, e.g.
via :func:`_sa.create_engine`, at which point its behavior can be
customized using the :meth:`.DialectEvents.do_setinputsizes` hook.

Fixes: #6058
Change-Id: I99c2be3a5cd76fc3e490d10865292ed85ffc23ae
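
As a quick sketch of the behavior change described above (the connection
URL here is a placeholder, not taken from this commit), the pyodbc dialect
now skips setinputsizes() unless the flag is passed explicitly:

    from sqlalchemy import create_engine

    # default as of this change: the pyodbc dialect does not call setinputsizes()
    engine_default = create_engine("mssql+pyodbc://scott:tiger@some_dsn")

    # opting back in; the DialectEvents.do_setinputsizes hook then applies
    engine_opt_in = create_engine(
        "mssql+pyodbc://scott:tiger@some_dsn", use_setinputsizes=True
    )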

doc/build/changelog/unreleased_14/6058.rst [new file with mode: 0644]
lib/sqlalchemy/connectors/pyodbc.py
lib/sqlalchemy/dialects/mssql/pyodbc.py
lib/sqlalchemy/engine/events.py
test/dialect/mssql/test_engine.py

diff --git a/doc/build/changelog/unreleased_14/6058.rst b/doc/build/changelog/unreleased_14/6058.rst
new file mode 100644 (file)
index 0000000..c45efcd
--- /dev/null
@@ -0,0 +1,16 @@
+.. change::
+    :tags: bug, mssql
+    :tickets: 6058
+
+    Fixed regression where a new setinputsizes() API that's available for
+    pyodbc was enabled; this API is apparently incompatible with pyodbc's
+    fast_executemany() mode in the absence of more accurate typing
+    information, which is not yet fully implemented or tested. The pyodbc
+    dialect and connector have been modified so that setinputsizes() is not
+    used at all unless the parameter ``use_setinputsizes`` is passed to the
+    dialect, e.g. via :func:`_sa.create_engine`, at which point its behavior
+    can be customized using the :meth:`.DialectEvents.do_setinputsizes` hook.
+
+    .. seealso::
+
+      :ref:`mssql_pyodbc_setinputsizes`
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index aa14cd9aa480313b9d3f1b90194a38a94dc4da85..ddff62db65cab0df6e463111058f5b7c33b0654a 100644 (file)
@@ -24,16 +24,19 @@ class PyODBCConnector(Connector):
     supports_native_decimal = True
     default_paramstyle = "named"
 
-    use_setinputsizes = True
+    use_setinputsizes = False
 
     # for non-DSN connections, this *may* be used to
     # hold the desired driver name
     pyodbc_driver_name = None
 
-    def __init__(self, supports_unicode_binds=None, **kw):
+    def __init__(
+        self, supports_unicode_binds=None, use_setinputsizes=False, **kw
+    ):
         super(PyODBCConnector, self).__init__(**kw)
         if supports_unicode_binds is not None:
             self.supports_unicode_binds = supports_unicode_binds
+        self.use_setinputsizes = use_setinputsizes
 
     @classmethod
     def dbapi(cls):
@@ -163,6 +166,10 @@ class PyODBCConnector(Connector):
         # for the subsequent values if you don't pass a tuple which fails
         # for types such as pyodbc.SQL_WLONGVARCHAR, which is the datatype
         # that ticket #5649 is targeting.
+
+        # NOTE: as of #6058, this won't be called if the use_setinputsizes flag
+        # is False, or if no types were specified in list_of_tuples
+
         cursor.setinputsizes(
             [
                 (dbtype, None, None)
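
For context on the tuple form passed above (a sketch against the raw pyodbc
API; the connection string, table name, and sizes are illustrative), pyodbc's
cursor.setinputsizes() accepts a list with one entry per bound parameter, and
each entry may be a full (sql_type, size, decimal_digits) tuple rather than a
bare type code:

    import pyodbc

    # illustrative connection string; substitute real driver/server/credentials
    cnxn = pyodbc.connect(
        "DRIVER={ODBC Driver 17 for SQL Server};"
        "SERVER=myhost;DATABASE=mydb;UID=scott;PWD=tiger"
    )
    cursor = cnxn.cursor()

    # one entry per bound parameter: (sql_type, column_size, decimal_digits)
    cursor.setinputsizes(
        [
            (pyodbc.SQL_WVARCHAR, 50, 0),
            (pyodbc.SQL_DECIMAL, 19, 15),
        ]
    )
    cursor.execute("INSERT INTO t (name, obs) VALUES (?, ?)", "x", 2.5)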
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 96ac2dff72d53a8872334575cf8bc5de131381ad..ba4fc84ec8cf061c62991bc1cf492c7bae801fb2 100644 (file)
@@ -56,7 +56,7 @@ Other keywords interpreted by the Pyodbc dialect to be passed to
 ``authentication``.
 Note that in order for the dialect to recognize these keywords
 (including the ``driver`` keyword above) they must be all lowercase.
-Multiple additional keyword arguments must be separated by an 
+Multiple additional keyword arguments must be separated by an
 ampersand (``&``), not a semicolon::
 
     engine = create_engine(
@@ -158,6 +158,23 @@ driver in order to use this flag::
     `fast executemany <https://github.com/mkleehammer/pyodbc/wiki/Features-beyond-the-DB-API#fast_executemany>`_
     - on github
 
+.. _mssql_pyodbc_setinputsizes:
+
+Setinputsizes Support
+-----------------------
+
+The pyodbc ``cursor.setinputsizes()`` method can be used if necessary.  To
+enable this hook, pass ``use_setinputsizes=True`` to :func:`_sa.create_engine`::
+
+    engine = create_engine("mssql+pyodbc://...", use_setinputsizes=True)
+
+The behavior of the hook can then be customized, as may be necessary
+particularly if fast_executemany is in use, via the
+:meth:`.DialectEvents.do_setinputsizes` hook. See that method for usage
+examples.
+
+.. versionchanged:: 1.4.1  The pyodbc dialects will not use setinputsizes
+   unless ``use_setinputsizes=True`` is passed.
 
 """  # noqa
 
diff --git a/lib/sqlalchemy/engine/events.py b/lib/sqlalchemy/engine/events.py
index fb8e5aeb21c560a7e2176e006de2b5ea728bc854..c6e27c03c40f6b53ae4566bfde3599869f236496 100644 (file)
@@ -796,6 +796,17 @@ class DialectEvents(event.Events):
         the flag ``use_setinputsizes=True``.  Dialects which use this
         include cx_Oracle, pg8000, asyncpg, and pyodbc dialects.
 
+        .. note::
+
+            For use with pyodbc, the ``use_setinputsizes`` flag
+            must be passed to the dialect, e.g.::
+
+                create_engine("mssql+pyodbc://...", use_setinputsizes=True)
+
+            .. seealso::
+
+                  :ref:`mssql_pyodbc_setinputsizes`
+
         .. versionadded:: 1.2.9
 
         .. seealso::
diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py
index bfaa5f576492b7a55a17c93159484bdf0fb6f70b..5482e261670aa7654f2dec9d4e1cc753db4e0c59 100644 (file)
@@ -1,9 +1,13 @@
 # -*- encoding: utf-8
 
+from decimal import Decimal
+
 from sqlalchemy import Column
 from sqlalchemy import event
 from sqlalchemy import exc
 from sqlalchemy import Integer
+from sqlalchemy import Numeric
+from sqlalchemy import select
 from sqlalchemy import String
 from sqlalchemy import Table
 from sqlalchemy import testing
@@ -11,14 +15,17 @@ from sqlalchemy.dialects.mssql import base
 from sqlalchemy.dialects.mssql import pymssql
 from sqlalchemy.dialects.mssql import pyodbc
 from sqlalchemy.engine import url
+from sqlalchemy.exc import DBAPIError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.testing import assert_raises
 from sqlalchemy.testing import assert_raises_message
 from sqlalchemy.testing import assert_warnings
 from sqlalchemy.testing import engines
 from sqlalchemy.testing import eq_
+from sqlalchemy.testing import expect_raises
 from sqlalchemy.testing import expect_warnings
 from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import mock
 from sqlalchemy.testing.mock import Mock
 
 
@@ -389,6 +396,147 @@ class FastExecutemanyTest(fixtures.TestBase):
 
             conn.execute(t.insert(), {"id": 200, "data": "data_200"})
 
+    @testing.fixture
+    def fe_engine(self, testing_engine):
+        def go(use_fastexecutemany, apply_setinputsizes_flag):
+            engine = testing_engine(
+                options={
+                    "fast_executemany": use_fastexecutemany,
+                    "use_setinputsizes": apply_setinputsizes_flag,
+                }
+            )
+            return engine
+
+        return go
+
+    @testing.combinations(
+        (
+            "setinputsizeshook",
+            True,
+        ),
+        (
+            "nosetinputsizeshook",
+            False,
+        ),
+        argnames="include_setinputsizes",
+        id_="ia",
+    )
+    @testing.combinations(
+        (
+            "setinputsizesflag",
+            True,
+        ),
+        (
+            "nosetinputsizesflag",
+            False,
+        ),
+        argnames="apply_setinputsizes_flag",
+        id_="ia",
+    )
+    @testing.combinations(
+        (
+            "fastexecutemany",
+            True,
+        ),
+        (
+            "nofastexecutemany",
+            False,
+        ),
+        argnames="use_fastexecutemany",
+        id_="ia",
+    )
+    def test_insert_floats(
+        self,
+        metadata,
+        fe_engine,
+        include_setinputsizes,
+        use_fastexecutemany,
+        apply_setinputsizes_flag,
+    ):
+        expect_failure = (
+            apply_setinputsizes_flag
+            and not include_setinputsizes
+            and use_fastexecutemany
+        )
+
+        engine = fe_engine(use_fastexecutemany, apply_setinputsizes_flag)
+
+        observations = Table(
+            "Observations",
+            metadata,
+            Column("id", Integer, nullable=False, primary_key=True),
+            Column("obs1", Numeric(19, 15), nullable=True),
+            Column("obs2", Numeric(19, 15), nullable=True),
+            schema="test_schema",
+        )
+        with engine.begin() as conn:
+            metadata.create_all(conn)
+
+        records = [
+            {
+                "id": 1,
+                "obs1": Decimal("60.1722066045792"),
+                "obs2": Decimal("24.929289808227466"),
+            },
+            {
+                "id": 2,
+                "obs1": Decimal("60.16325715615476"),
+                "obs2": Decimal("24.93886459535008"),
+            },
+            {
+                "id": 3,
+                "obs1": Decimal("60.16445165123469"),
+                "obs2": Decimal("24.949856300109516"),
+            },
+        ]
+
+        if include_setinputsizes:
+            canary = mock.Mock()
+
+            @event.listens_for(engine, "do_setinputsizes")
+            def do_setinputsizes(
+                inputsizes, cursor, statement, parameters, context
+            ):
+                canary(list(inputsizes.values()))
+
+                for key in inputsizes:
+                    if isinstance(key.type, Numeric):
+                        inputsizes[key] = (
+                            engine.dialect.dbapi.SQL_DECIMAL,
+                            19,
+                            15,
+                        )
+
+        with engine.begin() as conn:
+
+            if expect_failure:
+                with expect_raises(DBAPIError):
+                    conn.execute(observations.insert(), records)
+            else:
+                conn.execute(observations.insert(), records)
+
+                eq_(
+                    conn.execute(
+                        select(observations).order_by(observations.c.id)
+                    )
+                    .mappings()
+                    .all(),
+                    records,
+                )
+
+        if include_setinputsizes:
+            if apply_setinputsizes_flag:
+                eq_(
+                    canary.mock_calls,
+                    [
+                        # float for int?  this seems wrong
+                        mock.call([float, float, float]),
+                        mock.call([]),
+                    ],
+                )
+            else:
+                eq_(canary.mock_calls, [])
+
 
 class VersionDetectionTest(fixtures.TestBase):
     @testing.fixture