  (:ticket:`#503`).
- Fix "filedescriptor out of range" error when using a large number of open
  files in the Python implementation (:ticket:`#532`).
+- Allow JSON dumpers to be registered on `!dict` or any other object, as was
+  possible in psycopg2 (:ticket:`#541`).
- Fix canceling running queries on process interruption in async connections
  (:ticket:`#543`).
- Fix loading ROW values with different types in the same query using the
        super().__init__(cls, context)
        self.dumps = self.__class__._dumps

-    def dump(self, obj: _JsonWrapper) -> bytes:
-        dumps = obj.dumps or self.dumps
-        return dumps(obj.obj).encode()
+    def dump(self, obj: Any) -> bytes:
+        if isinstance(obj, _JsonWrapper):
+            dumps = obj.dumps or self.dumps
+            obj = obj.obj
+        else:
+            dumps = self.dumps
+        return dumps(obj).encode()
class JsonbDumper(_JsonDumper):
    oid = postgres.types["jsonb"].oid


class JsonbBinaryDumper(JsonbDumper):
    format = Format.BINARY

-    def dump(self, obj: _JsonWrapper) -> bytes:
-        dumps = obj.dumps or self.dumps
-        return b"\x01" + dumps(obj.obj).encode()
+    def dump(self, obj: Any) -> bytes:
+        return b"\x01" + super().dump(obj)

class _JsonLoader(Loader):
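With the new dispatch in `dump()`, a JSON dumper can be registered directly for `dict` (or any other type), so plain Python objects are adapted to json/jsonb without wrapping them in `Json`/`Jsonb`. Below is a minimal usage sketch, not part of the patch: it assumes connection parameters are available in the environment and picks `JsonbBinaryDumper` arbitrarily; any of the four dumpers exercised by the new test behaves the same way.

import psycopg
from psycopg.types.json import JsonbBinaryDumper

# Connection parameters taken from the environment (PGHOST, PGDATABASE, ...).
with psycopg.connect() as conn:
    # Register the jsonb binary dumper for plain dicts on this connection.
    conn.adapters.register_dumper(dict, JsonbBinaryDumper)

    # The dict is dumped as jsonb; no Jsonb() wrapper needed.
    cur = conn.execute("select %b", ({"foo": "bar"},))
    assert cur.fetchone()[0] == {"foo": "bar"}

Registering the dumper on `psycopg.adapters` instead would make it the default for all connections created afterwards.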
    assert cur.fetchone()[0] is True


+@pytest.mark.parametrize(
+    "fmt_in, type, dumper_name",
+    [
+        ("t", "json", "JsonDumper"),
+        ("b", "json", "JsonBinaryDumper"),
+        ("t", "jsonb", "JsonbDumper"),
+        ("b", "jsonb", "JsonbBinaryDumper"),
+    ],
+)
+def test_dump_dict(conn, fmt_in, type, dumper_name):
+    obj = {"foo": "bar"}
+    cur = conn.cursor()
+    cur.adapters.register_dumper(dict, getattr(psycopg.types.json, dumper_name))
+    cur.execute(f"select %{fmt_in}", (obj,))
+    assert cur.fetchone()[0] == obj
+    assert cur.description[0].type_code == conn.adapters.types[type].oid
+
+
@pytest.mark.crdb_skip("json array")
@pytest.mark.parametrize("val", samples)
@pytest.mark.parametrize("wrapper", ["Json", "Jsonb"])