Fix / consolidate for SQL Server BINARY, VARBINARY
author     Ben Shen <bshen@telesign.com>
           Fri, 20 Oct 2017 15:31:59 +0000 (11:31 -0400)
committer  Mike Bayer <mike_mp@zzzcomputing.com>
           Wed, 25 Oct 2017 14:50:54 +0000 (10:50 -0400)
Fixed bug where the sqltypes.BINARY and sqltypes.VARBINARY datatypes
would not include the correct bound-value handlers for pyodbc; these
handlers allow the pyodbc.NullParam value to be passed for binary
NULLs, which helps with FreeTDS.

Co-authored-by: Mike Bayer <mike_mp@zzzcomputing.com>
Change-Id: I6e3c16a69465b4fbc7b17a1927fb5e66acee93cb
Pull-request: https://github.com/zzzeek/sqlalchemy/pull/386
Fixes: #4121
doc/build/changelog/unreleased_12/4121.rst [new file with mode: 0644]
lib/sqlalchemy/dialects/mssql/base.py
lib/sqlalchemy/dialects/mssql/pyodbc.py
test/dialect/mssql/test_types.py
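
As a usage sketch of what this fix targets (the connection URL, DSN and table
names below are illustrative only, not part of the commit): with the
mssql+pyodbc dialect, inserting NULL into plain BINARY / VARBINARY columns
should now go through the pyodbc-specific bound-value handler rather than
being sent untyped.

    from sqlalchemy import (
        create_engine, MetaData, Table, Column, Integer, BINARY, VARBINARY
    )

    # hypothetical DSN; assumes pyodbc and a reachable SQL Server
    engine = create_engine("mssql+pyodbc://scott:tiger@mydsn")

    metadata = MetaData()
    blobs = Table(
        "blobs", metadata,
        Column("id", Integer, primary_key=True),
        Column("fixed", BINARY(16)),
        Column("variable", VARBINARY(100)),
    )
    metadata.create_all(engine)

    with engine.connect() as conn:
        # With the fix, None for a binary column is passed as the
        # pyodbc-specific NULL marker described above, which avoids the
        # FreeTDS implicit-conversion problem.
        conn.execute(blobs.insert(), {"id": 1, "fixed": None, "variable": None})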

diff --git a/doc/build/changelog/unreleased_12/4121.rst b/doc/build/changelog/unreleased_12/4121.rst
new file mode 100644 (file)
index 0000000..9256b00
--- /dev/null
@@ -0,0 +1,11 @@
+.. change::
+    :tags: bug, mssql
+    :tickets: 4121
+
+       Fixed bug where sqltypes.BINARY and sqltypes.VARBINARY datatypes
+       would not include correct bound-value handlers for pyodbc,
+       which allows the pyodbc.NullParam value to be passed that
+       helps with FreeTDS.
+
+
+
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index a7c5286e08753e905294810dddb35bab57e395a4..0c3688c9b9ec2062e2425a71ee25bda1b0870ba8 100644 (file)
@@ -618,7 +618,7 @@ from ... import engine
 from ...engine import reflection, default
 from ... import types as sqltypes
 from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \
-    FLOAT, DATETIME, DATE, BINARY,\
+    FLOAT, DATETIME, DATE, BINARY, \
     TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
 
 
@@ -874,10 +874,9 @@ class NTEXT(sqltypes.UnicodeText):
 class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary):
     """The MSSQL VARBINARY type.
 
-    This type extends both :class:`.types.VARBINARY` and
-    :class:`.types.LargeBinary`.   In "deprecate_large_types" mode,
-    the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)``
-    on SQL Server.
+    This type is present to support "deprecate_large_types" mode where
+    either ``VARBINARY(max)`` or IMAGE is rendered.   Otherwise, this type
+    object is redundant vs. :class:`.types.VARBINARY`.
 
     .. versionadded:: 1.0.0
 
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 6fd4fc5543877b2f27ad5c7bec182252aa1b989e..60f08a8b3f299e07f275e7c141b5a94bd82f3d9b 100644 (file)
@@ -87,7 +87,7 @@ versioning.
 
 """
 
-from .base import MSExecutionContext, MSDialect, VARBINARY
+from .base import MSExecutionContext, MSDialect, BINARY, VARBINARY
 from ...connectors.pyodbc import PyODBCConnector
 from ... import types as sqltypes, util, exc
 import decimal
@@ -167,7 +167,13 @@ class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
     pass
 
 
-class _VARBINARY_pyodbc(VARBINARY):
+class _ms_binary_pyodbc(object):
+    """Wraps binary values in dialect-specific Binary wrapper.
+    If the value is null, return a pyodbc-specific BinaryNull
+    object to prevent pyODBC [and FreeTDS] from defaulting binary
+    NULL types to SQLWCHAR and causing implicit conversion errors.
+    """
+
     def bind_processor(self, dialect):
         if dialect.dbapi is None:
             return None
@@ -183,6 +189,14 @@ class _VARBINARY_pyodbc(VARBINARY):
         return process
 
 
+class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY):
+    pass
+
+
+class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY):
+    pass
+
+
 class MSExecutionContext_pyodbc(MSExecutionContext):
     _embedded_scope_identity = False
 
@@ -240,7 +254,13 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
         {
             sqltypes.Numeric: _MSNumeric_pyodbc,
             sqltypes.Float: _MSFloat_pyodbc,
+            BINARY: _BINARY_pyodbc,
+
+            # SQL Server dialect has a VARBINARY that is just to support
+            # "deprecate_large_types" w/ VARBINARY(max), but also we must
+            # handle the usual SQL standard VARBINARY
             VARBINARY: _VARBINARY_pyodbc,
+            sqltypes.VARBINARY: _VARBINARY_pyodbc,
             sqltypes.LargeBinary: _VARBINARY_pyodbc,
         }
     )
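
(For readers following the hunk above: most of the bind_processor body falls
outside the diff context. The following is only a sketch written against the
_ms_binary_pyodbc docstring, and the BinaryNull name is taken from that
docstring rather than from the visible diff.)

    # illustrative sketch, not the committed body
    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                # wrap real values in the DBAPI's Binary wrapper
                return DBAPIBinary(value)
            else:
                # pyodbc-specific: send an explicit binary NULL so that
                # pyODBC / FreeTDS does not default the parameter to
                # SQLWCHAR and cause an implicit conversion error
                return dialect.dbapi.BinaryNull

        return process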
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index 00955b426d07f384278ab851b339b1b2e4c380b6..d7ea8f708f233229ca4b4d96565747fc01da1cf4 100644 (file)
@@ -898,7 +898,6 @@ class BinaryTest(fixtures.TestBase):
     __requires__ = "non_broken_binary",
     __backend__ = True
 
-
     def test_character_binary(self):
         self._test_round_trip(
             mssql.MSVarBinary(800), b("some normal data")
@@ -920,7 +919,7 @@ class BinaryTest(fixtures.TestBase):
             Column('id', Integer, primary_key=True),
             Column('data', type_)
         )
-        binary_table.create()
+        binary_table.create(engine)
 
         if expected is None:
             expected = data
@@ -1011,17 +1010,51 @@ class BinaryTest(fixtures.TestBase):
             deprecate_large_types=False
         )
 
-    def test_slice_one(self):
+    def test_mssql_varbinary_max(self):
+        stream1 = self._load_stream('binary_data_one.dat')
+        self._test_round_trip(
+            mssql.VARBINARY("max"),
+            stream1
+        )
+
+    def test_mssql_legacy_varbinary_max(self):
+        stream1 = self._load_stream('binary_data_one.dat')
+        self._test_round_trip(
+            mssql.VARBINARY("max"),
+            stream1,
+            deprecate_large_types=False
+        )
+
+    def test_binary_slice(self):
+        self._test_var_slice(types.BINARY)
+
+    def test_binary_slice_zeropadding(self):
+        self._test_var_slice_zeropadding(types.BINARY, True)
+
+    def test_varbinary_slice(self):
+        self._test_var_slice(types.VARBINARY)
+
+    def test_varbinary_slice_zeropadding(self):
+        self._test_var_slice_zeropadding(types.VARBINARY, False)
+
+    def test_mssql_varbinary_slice(self):
+        self._test_var_slice(mssql.VARBINARY)
+
+    def test_mssql_varbinary_slice_zeropadding(self):
+        self._test_var_slice_zeropadding(mssql.VARBINARY, False)
+
+    def _test_var_slice(self, type_):
         stream1 = self._load_stream('binary_data_one.dat')
 
         data = stream1[0:100]
 
         self._test_round_trip(
-            types.BINARY(100),
+            type_(100),
             data
         )
 
-    def test_slice_zeropadding(self):
+    def _test_var_slice_zeropadding(
+            self, type_, pad, deprecate_large_types=True):
         stream2 = self._load_stream('binary_data_two.dat')
 
         data = stream2[0:99]
@@ -1029,10 +1062,13 @@ class BinaryTest(fixtures.TestBase):
         # the type we used here is 100 bytes
         # so we will get 100 bytes zero-padded
 
-        paddedstream = stream2[0:99] + b'\x00'
+        if pad:
+            paddedstream = stream2[0:99] + b'\x00'
+        else:
+            paddedstream = stream2[0:99]
 
         self._test_round_trip(
-            types.BINARY(100),
+            type_(100),
             data, expected=paddedstream
         )
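
The zero-padding flag threaded through the tests above reflects server-side
behavior: a fixed-width BINARY(n) column pads shorter values with trailing
zero bytes, while VARBINARY(n) returns them unchanged. A quick sketch of that
expectation (placeholder URL; assumes pyodbc and a reachable SQL Server):

    from sqlalchemy import (
        create_engine, MetaData, Table, Column, Integer, types
    )

    engine = create_engine("mssql+pyodbc://scott:tiger@mydsn")  # placeholder
    metadata = MetaData()
    t = Table(
        "pad_demo", metadata,
        Column("id", Integer, primary_key=True),
        Column("fixed", types.BINARY(10)),
        Column("variable", types.VARBINARY(10)),
    )
    metadata.create_all(engine)

    with engine.connect() as conn:
        conn.execute(t.insert(), {"id": 1, "fixed": b"abc", "variable": b"abc"})
        row = conn.execute(t.select()).first()
        # expected, per the tests above:
        #   row.fixed    == b"abc" + b"\x00" * 7   (zero-padded to 10 bytes)
        #   row.variable == b"abc"                 (stored as-is)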