git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
Optimise the cython path around the use of row_maker
author: Daniele Varrazzo <daniele.varrazzo@gmail.com>
Wed, 24 Feb 2021 15:50:00 +0000 (16:50 +0100)
committer: Daniele Varrazzo <daniele.varrazzo@gmail.com>
Wed, 24 Feb 2021 15:50:00 +0000 (16:50 +0100)
psycopg3_c/psycopg3_c/_psycopg3/transform.pyx
tests/test_cursor.py
tests/test_cursor_async.py

index 1aff27eb8c6e308f42ab0b6ba92c6a0b2ec03fd5..dc6956ac0f9b180939f3cba2810614246da0f038 100644 (file)
@@ -9,7 +9,7 @@ too many temporary Python objects and performing less memory copying.
 # Copyright (C) 2020-2021 The Psycopg Team
 
 cimport cython
-from cpython.ref cimport Py_INCREF
+from cpython.ref cimport Py_INCREF, Py_DECREF
 from cpython.set cimport PySet_Add, PySet_Contains
 from cpython.dict cimport PyDict_GetItem, PyDict_SetItem
 from cpython.list cimport (
@@ -336,8 +336,13 @@ cdef class Transformer:
 
         cdef object make_row = self.make_row
         if make_row is not tuple:
-            for i in range(len(records)):
-                records[i] = make_row(records[i])
+            for i in range(row1 - row0):
+                brecord = PyList_GET_ITEM(records, i)
+                record = PyObject_CallFunctionObjArgs(
+                    make_row, <PyObject *>brecord, NULL)
+                Py_INCREF(record)
+                PyList_SET_ITEM(records, i, record)
+                Py_DECREF(<object>brecord)
         return records
 
     def load_row(self, int row) -> Optional[Row]:
@@ -381,7 +386,8 @@ cdef class Transformer:
 
         cdef object make_row = self.make_row
         if make_row is not tuple:
-            record = make_row(record)
+            record = PyObject_CallFunctionObjArgs(
+                make_row, <PyObject *>record, NULL)
         return record
 
     cpdef object load_sequence(self, record: Sequence[Optional[bytes]]):
index 60924c32c6274f951dbb2d5817da461664e543e9..5cd21bb49216e082d0ac340278364d2710651f53 100644 (file)
@@ -6,9 +6,8 @@ import datetime as dt
 import pytest
 
 import psycopg3
-from psycopg3 import sql
+from psycopg3 import sql, rows
 from psycopg3.oids import postgres_types as builtins
-from psycopg3.rows import dict_row
 from psycopg3.adapt import Format
 
 
@@ -299,7 +298,7 @@ def test_row_factory(conn):
     assert cur.fetchall() == [["Yy", "Zz"]]
 
     cur.scroll(-1)
-    cur.row_factory = dict_row
+    cur.row_factory = rows.dict_row
     assert cur.fetchone() == {"y": "y", "z": "z"}
 
 
@@ -530,15 +529,21 @@ def test_str(conn):
 @pytest.mark.slow
 @pytest.mark.parametrize("fmt", [Format.AUTO, Format.TEXT, Format.BINARY])
 @pytest.mark.parametrize("fetch", ["one", "many", "all", "iter"])
-def test_leak(dsn, faker, fmt, fetch):
+@pytest.mark.parametrize(
+    "row_factory", ["tuple_row", "dict_row", "namedtuple_row"]
+)
+def test_leak(dsn, faker, fmt, fetch, row_factory):
     faker.format = fmt
     faker.choose_schema(ncols=5)
     faker.make_records(10)
+    row_factory = getattr(rows, row_factory)
 
     n = []
     for i in range(3):
         with psycopg3.connect(dsn) as conn:
-            with conn.cursor(binary=Format.as_pq(fmt)) as cur:
+            with conn.cursor(
+                binary=Format.as_pq(fmt), row_factory=row_factory
+            ) as cur:
                 cur.execute(faker.drop_stmt)
                 cur.execute(faker.create_stmt)
                 cur.executemany(faker.insert_stmt, faker.records)
index 7cf689a40da20aad6c0516cca68a6db795dd432f..1159c8ea574427660252b8ce7dd37b4cb598fe0a 100644 (file)
@@ -4,8 +4,7 @@ import weakref
 import datetime as dt
 
 import psycopg3
-from psycopg3 import sql
-from psycopg3.rows import dict_row
+from psycopg3 import sql, rows
 from psycopg3.adapt import Format
 from .test_cursor import my_row_factory
 
@@ -305,7 +304,7 @@ async def test_row_factory(aconn):
     assert await cur.fetchall() == [["Yy", "Zz"]]
 
     await cur.scroll(-1)
-    cur.row_factory = dict_row
+    cur.row_factory = rows.dict_row
     assert await cur.fetchone() == {"y": "y", "z": "z"}
 
 
@@ -444,15 +443,21 @@ async def test_str(aconn):
 @pytest.mark.slow
 @pytest.mark.parametrize("fmt", [Format.AUTO, Format.TEXT, Format.BINARY])
 @pytest.mark.parametrize("fetch", ["one", "many", "all", "iter"])
-async def test_leak(dsn, faker, fmt, fetch):
+@pytest.mark.parametrize(
+    "row_factory", ["tuple_row", "dict_row", "namedtuple_row"]
+)
+async def test_leak(dsn, faker, fmt, fetch, row_factory):
     faker.format = fmt
     faker.choose_schema(ncols=5)
     faker.make_records(10)
+    row_factory = getattr(rows, row_factory)
 
     n = []
     for i in range(3):
         async with await psycopg3.AsyncConnection.connect(dsn) as conn:
-            async with conn.cursor(binary=Format.as_pq(fmt)) as cur:
+            async with conn.cursor(
+                binary=Format.as_pq(fmt), row_factory=row_factory
+            ) as cur:
                 await cur.execute(faker.drop_stmt)
                 await cur.execute(faker.create_stmt)
                 await cur.executemany(faker.insert_stmt, faker.records)