From: Daniele Varrazzo
Date: Wed, 24 Feb 2021 15:50:00 +0000 (+0100)
Subject: Optimise the cython path around the use of row_maker
X-Git-Tag: 3.0.dev0~106^2~3
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=060b8810cba321c86cae7addb075bf3321855f30;p=thirdparty%2Fpsycopg.git

Optimise the cython path around the use of row_maker
---
diff --git a/psycopg3_c/psycopg3_c/_psycopg3/transform.pyx b/psycopg3_c/psycopg3_c/_psycopg3/transform.pyx
index 1aff27eb8..dc6956ac0 100644
--- a/psycopg3_c/psycopg3_c/_psycopg3/transform.pyx
+++ b/psycopg3_c/psycopg3_c/_psycopg3/transform.pyx
@@ -9,7 +9,7 @@ too many temporary Python objects and performing less memory copying.
 # Copyright (C) 2020-2021 The Psycopg Team
 
 cimport cython
-from cpython.ref cimport Py_INCREF
+from cpython.ref cimport Py_INCREF, Py_DECREF
 from cpython.set cimport PySet_Add, PySet_Contains
 from cpython.dict cimport PyDict_GetItem, PyDict_SetItem
 from cpython.list cimport (
@@ -336,8 +336,13 @@ cdef class Transformer:
 
         cdef object make_row = self.make_row
         if make_row is not tuple:
-            for i in range(len(records)):
-                records[i] = make_row(records[i])
+            for i in range(row1 - row0):
+                brecord = PyList_GET_ITEM(records, i)
+                record = PyObject_CallFunctionObjArgs(
+                    make_row, brecord, NULL)
+                Py_INCREF(record)
+                PyList_SET_ITEM(records, i, record)
+                Py_DECREF(brecord)
         return records
 
     def load_row(self, int row) -> Optional[Row]:
@@ -381,7 +386,8 @@ cdef class Transformer:
 
         cdef object make_row = self.make_row
         if make_row is not tuple:
-            record = make_row(record)
+            record = PyObject_CallFunctionObjArgs(
+                make_row, record, NULL)
         return record
 
     cpdef object load_sequence(self, record: Sequence[Optional[bytes]]):
diff --git a/tests/test_cursor.py b/tests/test_cursor.py
index 60924c32c..5cd21bb49 100644
--- a/tests/test_cursor.py
+++ b/tests/test_cursor.py
@@ -6,9 +6,8 @@ import datetime as dt
 import pytest
 
 import psycopg3
-from psycopg3 import sql
+from psycopg3 import sql, rows
 from psycopg3.oids import postgres_types as builtins
-from psycopg3.rows import dict_row
 from psycopg3.adapt import Format
 
 
@@ -299,7 +298,7 @@ def test_row_factory(conn):
 
     assert cur.fetchall() == [["Yy", "Zz"]]
     cur.scroll(-1)
-    cur.row_factory = dict_row
+    cur.row_factory = rows.dict_row
     assert cur.fetchone() == {"y": "y", "z": "z"}
 
 
@@ -530,15 +529,21 @@ def test_str(conn):
 @pytest.mark.slow
 @pytest.mark.parametrize("fmt", [Format.AUTO, Format.TEXT, Format.BINARY])
 @pytest.mark.parametrize("fetch", ["one", "many", "all", "iter"])
-def test_leak(dsn, faker, fmt, fetch):
+@pytest.mark.parametrize(
+    "row_factory", ["tuple_row", "dict_row", "namedtuple_row"]
+)
+def test_leak(dsn, faker, fmt, fetch, row_factory):
     faker.format = fmt
     faker.choose_schema(ncols=5)
     faker.make_records(10)
+    row_factory = getattr(rows, row_factory)
 
     n = []
     for i in range(3):
         with psycopg3.connect(dsn) as conn:
-            with conn.cursor(binary=Format.as_pq(fmt)) as cur:
+            with conn.cursor(
+                binary=Format.as_pq(fmt), row_factory=row_factory
+            ) as cur:
                 cur.execute(faker.drop_stmt)
                 cur.execute(faker.create_stmt)
                 cur.executemany(faker.insert_stmt, faker.records)
diff --git a/tests/test_cursor_async.py b/tests/test_cursor_async.py
index 7cf689a40..1159c8ea5 100644
--- a/tests/test_cursor_async.py
+++ b/tests/test_cursor_async.py
@@ -4,8 +4,7 @@ import weakref
 import datetime as dt
 
 import psycopg3
-from psycopg3 import sql
-from psycopg3.rows import dict_row
+from psycopg3 import sql, rows
 from psycopg3.adapt import Format
 
 from .test_cursor import my_row_factory
@@ -305,7 +304,7 @@ async def test_row_factory(aconn):
 
     assert await cur.fetchall() == [["Yy", "Zz"]]
     await cur.scroll(-1)
-    cur.row_factory = dict_row
+    cur.row_factory = rows.dict_row
    assert await cur.fetchone() == {"y": "y", "z": "z"}
 
 
@@ -444,15 +443,21 @@ async def test_str(aconn):
 @pytest.mark.slow
 @pytest.mark.parametrize("fmt", [Format.AUTO, Format.TEXT, Format.BINARY])
 @pytest.mark.parametrize("fetch", ["one", "many", "all", "iter"])
-async def test_leak(dsn, faker, fmt, fetch):
+@pytest.mark.parametrize(
+    "row_factory", ["tuple_row", "dict_row", "namedtuple_row"]
+)
+async def test_leak(dsn, faker, fmt, fetch, row_factory):
     faker.format = fmt
     faker.choose_schema(ncols=5)
     faker.make_records(10)
+    row_factory = getattr(rows, row_factory)
 
     n = []
     for i in range(3):
         async with await psycopg3.AsyncConnection.connect(dsn) as conn:
-            async with conn.cursor(binary=Format.as_pq(fmt)) as cur:
+            async with conn.cursor(
+                binary=Format.as_pq(fmt), row_factory=row_factory
+            ) as cur:
                 await cur.execute(faker.drop_stmt)
                 await cur.execute(faker.create_stmt)
                 await cur.executemany(faker.insert_stmt, faker.records)
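
A note on the second transform.pyx hunk above. The optimised per-row loop keeps the semantics of the Python-level code it replaces: each tuple built for the fetched window [row0, row1) is passed through the make_row callable produced by the cursor's row factory, and the result is stored back into the same slot of the records list. The gain comes from calling the factory via PyObject_CallFunctionObjArgs and storing via PyList_SET_ITEM rather than the generic Python call and item-assignment paths; since PyList_SET_ITEM steals a reference to the new item and does not release the old one, the explicit Py_INCREF/Py_DECREF pair keeps the reference counts balanced. The plain-Python sketch below only illustrates what the loop computes; apply_row_maker is a hypothetical helper, not part of the psycopg3 API:

# Plain-Python equivalent of the optimised Cython loop.
# apply_row_maker is an illustrative helper, not a psycopg3 API.
# records: list of tuples built for rows row0..row1 of the result;
# make_row: the row factory's per-row callable (tuple means "leave as tuple").
def apply_row_maker(records, make_row, row0, row1):
    if make_row is tuple:
        # Rows are already plain tuples: nothing to convert.
        return records
    for i in range(row1 - row0):
        # Replace each tuple, in place, with the factory's row object.
        records[i] = make_row(records[i])
    return records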