git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Render bind cast in json and jsonb in PG
author: Federico Caselli <cfederico87@gmail.com>
Sun, 13 Oct 2024 16:32:46 +0000 (18:32 +0200)
committer: Federico Caselli <cfederico87@gmail.com>
Sun, 13 Oct 2024 16:33:04 +0000 (18:33 +0200)
Render bind cast for ``JSON`` and ``JSONB`` datatype on every dialect.
Previously this was only enabled in a subset of dialects.
Fixes: #11994
Change-Id: Ib085deb3e84034dac9e4f4057d32f055d5533e52
(cherry picked from commit b2648e69f2375f7257cbe04b16f663d97795db19)

doc/build/changelog/unreleased_20/11994.rst [new file with mode: 0644]
lib/sqlalchemy/dialects/postgresql/asyncpg.py
lib/sqlalchemy/dialects/postgresql/json.py
lib/sqlalchemy/dialects/postgresql/psycopg.py
test/dialect/postgresql/test_query.py
test/dialect/postgresql/test_types.py

diff --git a/doc/build/changelog/unreleased_20/11994.rst b/doc/build/changelog/unreleased_20/11994.rst
new file mode 100644 (file)
index 0000000..efcb8e9
--- /dev/null
@@ -0,0 +1,6 @@
+.. change::
+    :tags: postgresql, usecase
+    :tickets: 11994
+
+    Render bind cast for ``JSON`` and ``JSONB`` datatype on every dialect.
+    Previously this was only enabled in a subset of dialects.
index 53ebe60398de3b354f539d7b0c0506acc3971f88..27bd07ab07792dd66bf64890f83b980269335ebb 100644 (file)
@@ -263,15 +263,11 @@ class AsyncpgBigInteger(sqltypes.BigInteger):
 
 
 class AsyncpgJSON(json.JSON):
-    render_bind_cast = True
-
     def result_processor(self, dialect, coltype):
         return None
 
 
 class AsyncpgJSONB(json.JSONB):
-    render_bind_cast = True
-
     def result_processor(self, dialect, coltype):
         return None
 
index 1cdafbd03d91beb461fe775683fdb351172273a8..914d8423d4b18afe56fdfa5f7946fd48807f7511 100644 (file)
@@ -144,6 +144,7 @@ class JSON(sqltypes.JSON):
 
     """  # noqa
 
+    render_bind_cast = True
     astext_type = sqltypes.Text()
 
     def __init__(self, none_as_null=False, astext_type=None):
index b8c0087dd491dcd761c6c8970c715bcb1c52e51e..66a2c7746231f076189d18d606328568e54072b9 100644 (file)
@@ -126,8 +126,6 @@ class _PGREGCONFIG(REGCONFIG):
 
 
 class _PGJSON(JSON):
-    render_bind_cast = True
-
     def bind_processor(self, dialect):
         return self._make_bind_processor(None, dialect._psycopg_Json)
 
@@ -136,8 +134,6 @@ class _PGJSON(JSON):
 
 
 class _PGJSONB(JSONB):
-    render_bind_cast = True
-
     def bind_processor(self, dialect):
         return self._make_bind_processor(None, dialect._psycopg_Jsonb)
 
index a737381760e77351c7d315176b8516372810efbc..9198fb96aea6ecefe7a530ad3bf011d3c03e8dc8 100644 (file)
@@ -26,6 +26,8 @@ from sqlalchemy import text
 from sqlalchemy import Time
 from sqlalchemy import true
 from sqlalchemy import tuple_
+from sqlalchemy import Uuid
+from sqlalchemy import values
 from sqlalchemy.dialects import postgresql
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.dialects.postgresql import REGCONFIG
@@ -1791,3 +1793,59 @@ class TableValuedRoundTripTest(fixtures.TestBase):
         stmt = select(fn.c.CaseSensitive, fn.c["the % value"])
 
         eq_(connection.execute(stmt).all(), [(1, "foo"), (2, "bar")])
+
+
+class RequiresCastTest(fixtures.TablesTest):
+    __only_on__ = "postgresql"
+    __backend__ = True
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table(
+            "t",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("uuid", Uuid),
+            Column("j", JSON),
+            Column("jb", JSONB),
+        )
+
+    @classmethod
+    def insert_data(cls, connection):
+        connection.execute(
+            cls.tables["t"].insert(),
+            [
+                {"id": 1, "uuid": "d24587a1-06d9-41df-b1c3-3f423b97a755"},
+                {"id": 2, "uuid": "4b07e1c8-d60c-4ea8-9d01-d7cd01362224"},
+            ],
+        )
+
+    def test_update_values(self, connection):
+        value = values(
+            Column("id", Integer),
+            Column("uuid", Uuid),
+            Column("j", JSON),
+            Column("jb", JSONB),
+            name="update_data",
+        ).data(
+            [
+                (
+                    1,
+                    "8b6ec1ec-b979-4d0b-b2ce-9acc6e4c2943",
+                    {"foo": 1},
+                    {"foo_jb": 1},
+                ),
+                (
+                    2,
+                    "a2123bcb-7ea3-420a-8284-1db4b2759d79",
+                    {"bar": 2},
+                    {"bar_jb": 2},
+                ),
+            ]
+        )
+        connection.execute(
+            self.tables["t"]
+            .update()
+            .values(uuid=value.c.uuid, j=value.c.j, jb=value.c.jb)
+            .where(self.tables["t"].c.id == value.c.id)
+        )
index 65c5fdbf7f64c5e2fde37b6a109a8db59de5b636..25237656735762858999d3273c215a895cc5cc76 100644 (file)
@@ -6195,7 +6195,7 @@ class JSONBTest(JSONTest):
             lambda self: self.jsoncol.has_all(
                 {"name": "r1", "data": {"k1": "r1v1", "k2": "r1v2"}}
             ),
-            "test_table.test_column ?& %(test_column_1)s",
+            "test_table.test_column ?& %(test_column_1)s::JSONB",
         ),
         (
             lambda self: self.jsoncol.has_all(self.any_),
@@ -6213,7 +6213,7 @@ class JSONBTest(JSONTest):
         ),
         (
             lambda self: self.jsoncol.contains({"k1": "r1v1"}),
-            "test_table.test_column @> %(test_column_1)s",
+            "test_table.test_column @> %(test_column_1)s::JSONB",
         ),
         (
             lambda self: self.jsoncol.contains(self.any_),
@@ -6221,7 +6221,7 @@ class JSONBTest(JSONTest):
         ),
         (
             lambda self: self.jsoncol.contained_by({"foo": "1", "bar": None}),
-            "test_table.test_column <@ %(test_column_1)s",
+            "test_table.test_column <@ %(test_column_1)s::JSONB",
         ),
         (
             lambda self: self.jsoncol.contained_by(self.any_),