git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Performance improvement in Row
authorFederico Caselli <cfederico87@gmail.com>
Wed, 19 Apr 2023 22:39:18 +0000 (18:39 -0400)
committermike bayer <mike_mp@zzzcomputing.com>
Wed, 26 Apr 2023 19:48:00 +0000 (19:48 +0000)
Various performance improvements to Row instantiation
- avoid passing processors if they are all None
- improve processor logic in cython
- improve tuplegetter using slices when contiguous indexes are used

Some timings follow.

In particular [base_]row_new_proc that tests using processors has
a 25% improvement compared to before in cython.
Looking at the [b]row_new_proc_none tests, which use a list of processors
that are all None, this patch gives a 50% improvement in cython when the
all-None list is passed. However, in this patch that case would usually be
disabled by passing None instead, so the performance gain is actually 90%,
since it would run the case [base_]row_new.

Tuplegetter is a bit faster in the single-item get and when getting
sequential indexes (like indexes 1, 2, 3, 4), at the cost of a somewhat
longer creation time in python; cython is mostly the same.

Current times
                    | python      | cython      | cy / py     |
base_row_new        | 0.639817400 | 0.118265500 | 0.184842582 |
row_new             | 0.680355100 | 0.129714600 | 0.190657202 |
base_row_new_proc   | 3.076538900 | 1.488428600 | 0.483799701 |
row_new_proc        | 3.119700100 | 1.532197500 | 0.491136151 |
brow_new_proc_none  | 1.917702300 | 0.475511500 | 0.247958977 |
row_new_proc_none   | 1.956253300 | 0.497803100 | 0.254467609 |

tuplegetter_one     | 0.152512600 | 0.148523900 | 0.973846751 |
tuplegetter_many    | 0.184394100 | 0.184511500 | 1.000636680 |
tuplegetter_seq     | 0.154832800 | 0.156270100 | 1.009282917 |
tuplegetter_new_one | 0.523730000 | 0.343402200 | 0.655685563 |
tuplegetter_new_many| 0.738924400 | 0.420961400 | 0.569694816 |
tuplegetter_new_seq | 1.062036900 | 0.495462000 | 0.466520514 |

Parent commit times
                    | python      | cython      | cy / py     |
base_row_new        | 0.643890800 | 0.113548300 | 0.176347138 |
row_new             | 0.674885900 | 0.124391800 | 0.184315304 |
base_row_new_proc   | 3.072020400 | 2.017367000 | 0.656690626 |
row_new_proc        | 3.109943400 | 2.048359400 | 0.658648450 |
brow_new_proc_none  | 1.967133700 | 1.006326000 | 0.511569702 |
row_new_proc_none   | 1.960814900 | 1.025217800 | 0.522852922 |

tuplegetter_one     | 0.197359900 | 0.205999000 | 1.043773330 |
tuplegetter_many    | 0.196575900 | 0.194888500 | 0.991416038 |
tuplegetter_seq     | 0.192723900 | 0.205635000 | 1.066992729 |
tuplegetter_new_one | 0.534644500 | 0.414311700 | 0.774929322 |
tuplegetter_new_many| 0.479376500 | 0.417448100 | 0.870814694 |
tuplegetter_new_seq | 0.481580200 | 0.412697900 | 0.856966088 |

Change-Id: I2ca1f49dca2beff625c283f1363c29c8ccc0c3f7

lib/sqlalchemy/cyextension/resultproxy.pyx
lib/sqlalchemy/engine/_py_row.py
lib/sqlalchemy/engine/cursor.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/row.py
test/base/test_result.py
test/perf/compiled_extensions.py
test/profiles.txt

index c358f043f00fe0e480042c313bd7301cacf0bce4..0d7eeece93cdc62bf1fab31c07eabc8a9b4f6d18 100644 (file)
@@ -13,12 +13,7 @@ cdef class BaseRow:
         self._key_to_index = key_to_index
 
         if processors:
-            self._data = tuple(
-                [
-                    proc(value) if proc else value
-                    for proc, value in zip(processors, data)
-                ]
-            )
+            self._data = _apply_processors(processors, data)
         else:
             self._data = tuple(data)
 
@@ -64,6 +59,20 @@ cdef class BaseRow:
     def __getattr__(self, name):
         return self._get_by_key_impl(name, 1)
 
+    def _to_tuple_instance(self):
+        return self._data
+
+
+cdef tuple _apply_processors(proc, data):
+    res = []
+    for i in range(len(proc)):
+        p = proc[i]
+        if p is None:
+            res.append(data[i])
+        else:
+            res.append(p(data[i]))
+    return tuple(res)
+
 
 def rowproxy_reconstructor(cls, state):
     obj = cls.__new__(cls)
@@ -71,10 +80,17 @@ def rowproxy_reconstructor(cls, state):
     return obj
 
 
-def tuplegetter(*indexes):
-    it = operator.itemgetter(*indexes)
+cdef int is_contiguous(tuple indexes):
+    cdef int i
+    for i in range(1, len(indexes)):
+        if indexes[i-1] != indexes[i] -1:
+            return 0
+    return 1
+
 
-    if len(indexes) > 1:
-        return it
+def tuplegetter(*indexes):
+    if len(indexes) == 1 or is_contiguous(indexes) != 0:
+        # slice form is faster but returns a list if input is list
+        return operator.itemgetter(slice(indexes[0], indexes[-1] + 1))
     else:
-        return lambda row: (it(row),)
+        return operator.itemgetter(*indexes)
index 4a9acec9bb09a517e1abde308d4b7171b84754b9..3358abd7848cb91866a9822b4b139aa55beb1c33 100644 (file)
@@ -99,6 +99,9 @@ class BaseRow:
             pass
         self._parent._key_not_found(name, True)
 
+    def _to_tuple_instance(self) -> Tuple[Any, ...]:
+        return self._data
+
 
 # This reconstructor is necessary so that pickles with the Cy extension or
 # without use the same Binary format.
@@ -111,9 +114,9 @@ def rowproxy_reconstructor(
 
 
 def tuplegetter(*indexes: int) -> _TupleGetterType:
-    it = operator.itemgetter(*indexes)
-
-    if len(indexes) > 1:
-        return it
-    else:
-        return lambda row: (it(row),)
+    if len(indexes) != 1:
+        for i in range(1, len(indexes)):
+            if indexes[i - 1] != indexes[i] - 1:
+                return operator.itemgetter(*indexes)
+    # slice form is faster but returns a list if input is list
+    return operator.itemgetter(slice(indexes[0], indexes[-1] + 1))
index bd46f30ac34f7da801eab4bd147be20cbbdceb64..7491afc3e2f5f6c8fbdced4bdcb02ad4a253bfd3 100644 (file)
@@ -14,6 +14,7 @@ from __future__ import annotations
 
 import collections
 import functools
+import operator
 import typing
 from typing import Any
 from typing import cast
@@ -1440,39 +1441,46 @@ class CursorResult(Result[_T]):
             # getter assuming no transformations will be called as this
             # is the most common case
 
-            if echo:
-                log = self.context.connection._log_debug
-
-                def _log_row(row):
-                    log("Row %r", sql_util._repr_row(row))
-                    return row
-
-                self._row_logging_fn = log_row = _log_row
-            else:
-                log_row = None
-
             metadata = self._init_metadata(context, cursor_description)
 
             _make_row = functools.partial(
                 Row,
                 metadata,
-                metadata._processors,
+                metadata._effective_processors,
                 metadata._key_to_index,
             )
-            if log_row:
+
+            if context._num_sentinel_cols:
+                sentinel_filter = operator.itemgetter(
+                    slice(-context._num_sentinel_cols)
+                )
+
+                def _sliced_row(raw_data):
+                    return _make_row(sentinel_filter(raw_data))
+
+                sliced_row = _sliced_row
+            else:
+                sliced_row = _make_row
+
+            if echo:
+                log = self.context.connection._log_debug
+
+                def _log_row(row):
+                    log("Row %r", sql_util._repr_row(row))
+                    return row
+
+                self._row_logging_fn = _log_row
 
                 def _make_row_2(row):
-                    made_row = _make_row(row)
-                    assert log_row is not None
-                    log_row(made_row)
-                    return made_row
+                    return _log_row(sliced_row(row))
 
                 make_row = _make_row_2
             else:
-                make_row = _make_row
+                make_row = sliced_row
             self._set_memoized_attribute("_row_getter", make_row)
 
         else:
+            assert context._num_sentinel_cols == 0
             self._metadata = self._no_result_metadata
 
     def _init_metadata(self, context, cursor_description):
index 8992334ee6bc112f8db973b5d76e4f6185bafa0f..0b884d621bc8c84f20ccedfb351eb6e0bdf8a191 100644 (file)
@@ -1881,11 +1881,11 @@ class DefaultExecutionContext(ExecutionContext):
             strategy = _cursor._NO_CURSOR_DML
         elif self._num_sentinel_cols:
             assert self.execute_style is ExecuteStyle.INSERTMANYVALUES
-            if cursor_description:
-                # strip out the sentinel columns from cursor description
-                cursor_description = cursor_description[
-                    0 : -(self._num_sentinel_cols)
-                ]
+            # strip out the sentinel columns from cursor description
+            # a similar logic is done to the rows only in CursorResult
+            cursor_description = cursor_description[
+                0 : -self._num_sentinel_cols
+            ]
 
         result: _cursor.CursorResult[Any] = _cursor.CursorResult(
             self, strategy, cursor_description
index cc6d26c881b5fdb511fbe4e39270be117191befa..cf34c195afdd1ba075b744d33db2f71d6ac98590 100644 (file)
@@ -41,6 +41,7 @@ from ..sql.base import _generative
 from ..sql.base import HasMemoized
 from ..sql.base import InPlaceGenerative
 from ..util import HasMemoized_ro_memoized_attribute
+from ..util import NONE_SET
 from ..util._has_cy import HAS_CYEXTENSION
 from ..util.typing import Literal
 from ..util.typing import Self
@@ -84,7 +85,7 @@ across all the result types
 _InterimSupportsScalarsRowType = Union[Row, Any]
 
 _ProcessorsType = Sequence[Optional["_ResultProcessorType[Any]"]]
-_TupleGetterType = Callable[[Sequence[Any]], Tuple[Any, ...]]
+_TupleGetterType = Callable[[Sequence[Any]], Sequence[Any]]
 _UniqueFilterType = Callable[[Any], Any]
 _UniqueFilterStateType = Tuple[Set[Any], Optional[_UniqueFilterType]]
 
@@ -205,6 +206,13 @@ class ResultMetaData:
             else:
                 self._key_fallback(key, None)
 
+    @property
+    def _effective_processors(self) -> Optional[_ProcessorsType]:
+        if not self._processors or NONE_SET.issuperset(self._processors):
+            return None
+        else:
+            return self._processors
+
 
 class RMKeyView(typing.KeysView[Any]):
     __slots__ = ("_parent", "_keys")
@@ -390,7 +398,7 @@ def result_tuple(
 ) -> Callable[[Iterable[Any]], Row[Any]]:
     parent = SimpleResultMetaData(fields, extra)
     return functools.partial(
-        Row, parent, parent._processors, parent._key_to_index
+        Row, parent, parent._effective_processors, parent._key_to_index
     )
 
 
@@ -454,7 +462,7 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
 
                 def process_row(  # type: ignore
                     metadata: ResultMetaData,
-                    processors: _ProcessorsType,
+                    processors: Optional[_ProcessorsType],
                     key_to_index: Mapping[_KeyType, int],
                     scalar_obj: Any,
                 ) -> Row[Any]:
@@ -468,7 +476,7 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
         metadata = self._metadata
 
         key_to_index = metadata._key_to_index
-        processors = metadata._processors
+        processors = metadata._effective_processors
         tf = metadata._tuplefilter
 
         if tf and not real_result._source_supports_scalars:
@@ -489,21 +497,12 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
                 process_row, metadata, processors, key_to_index
             )
 
-        fns: Tuple[Any, ...] = ()
-
         if real_result._row_logging_fn:
-            fns = (real_result._row_logging_fn,)
-        else:
-            fns = ()
-
-        if fns:
+            _log_row = real_result._row_logging_fn
             _make_row = make_row
 
             def make_row(row: _InterimRowType[Row[Any]]) -> _R:
-                interim_row = _make_row(row)
-                for fn in fns:
-                    interim_row = fn(interim_row)
-                return interim_row  # type: ignore
+                return _log_row(_make_row(row))  # type: ignore
 
         return make_row
 
index 4b767da098c3462007e40b3af4ad5830cdc8b8fa..da781334adc6ffe1defc11462308b966480a4d4d 100644 (file)
@@ -40,7 +40,7 @@ else:
 if TYPE_CHECKING:
     from .result import _KeyType
     from .result import RMKeyView
-    from ..sql.type_api import _ResultProcessorType
+    from .result import _ProcessorsType
 
 _T = TypeVar("_T", bound=Any)
 _TP = TypeVar("_TP", bound=Tuple[Any, ...])
@@ -131,9 +131,9 @@ class Row(BaseRow, Sequence[Any], Generic[_TP]):
         return RowMapping(self._parent, None, self._key_to_index, self._data)
 
     def _filter_on_values(
-        self, filters: Optional[Sequence[Optional[_ResultProcessorType[Any]]]]
+        self, processor: Optional[_ProcessorsType]
     ) -> Row[Any]:
-        return Row(self._parent, filters, self._key_to_index, self._data)
+        return Row(self._parent, processor, self._key_to_index, self._data)
 
     if not TYPE_CHECKING:
 
@@ -163,9 +163,9 @@ class Row(BaseRow, Sequence[Any], Generic[_TP]):
 
     def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool:
         return (
-            op(tuple(self), tuple(other))
+            op(self._to_tuple_instance(), other._to_tuple_instance())
             if isinstance(other, Row)
-            else op(tuple(self), other)
+            else op(self._to_tuple_instance(), other)
         )
 
     __hash__ = BaseRow.__hash__
index 05f922d671560abe92fda852bedc1d6fb88c7e9c..83017c16528f23b0317c36776e50d2d9a38be675 100644 (file)
@@ -236,6 +236,30 @@ class ResultTupleTest(fixtures.TestBase):
             row3 = loads(state2)
             is_true(isinstance(row3, dump_cls))
 
+    def test_processors(self):
+        parent = result.SimpleResultMetaData(["a", "b", "c", "d"])
+        data = (1, 99, "42", "foo")
+        row_none = result.Row(parent, None, parent._key_to_index, data)
+        eq_(row_none._to_tuple_instance(), data)
+        row_all_p = result.Row(
+            parent, [str, float, int, str.upper], parent._key_to_index, data
+        )
+        eq_(row_all_p._to_tuple_instance(), ("1", 99.0, 42, "FOO"))
+        row_some_p = result.Row(
+            parent, [None, str, None, str.upper], parent._key_to_index, data
+        )
+        eq_(row_some_p._to_tuple_instance(), (1, "99", "42", "FOO"))
+        row_shorter = result.Row(
+            parent, [None, str], parent._key_to_index, data
+        )
+        eq_(row_shorter._to_tuple_instance(), (1, "99"))
+
+    def test_tuplegetter(self):
+        data = list(range(10, 20))
+        eq_(result.tuplegetter(1)(data), [11])
+        eq_(result.tuplegetter(1, 9, 3)(data), (11, 19, 13))
+        eq_(result.tuplegetter(2, 3, 4)(data), [12, 13, 14])
+
 
 class ResultTest(fixtures.TestBase):
     def _fixture(
index 14bb4e4ab78f28e360a586fb1ef42b844acc5030..872165d0720d9ededebbd25a3c6176d3331ffe2d 100644 (file)
@@ -787,6 +787,8 @@ class OrderedSet(IdentitySet):
 
 
 class TupleGetter(Case):
+    NUMBER = 2_000_000
+
     @staticmethod
     def python():
         from sqlalchemy.engine._py_row import tuplegetter
@@ -817,20 +819,7 @@ class TupleGetter(Case):
         self.tuple = tuple(range(1000))
         self.tg_inst = self.impl_tg(42)
         self.tg_inst_m = self.impl_tg(42, 420, 99, 9, 1)
-
-        class MockRow:
-            def __init__(self, data):
-                self.data = data
-
-            def __getitem__(self, index):
-                # called by python
-                return self.data[index]
-
-            def _get_by_key_impl_mapping(self, index):
-                # called by c
-                return self.data[index]
-
-        self.row = MockRow(self.tuple)
+        self.tg_inst_seq = self.impl_tg(*range(70, 75))
 
     @classmethod
     def update_results(cls, results):
@@ -846,6 +835,10 @@ class TupleGetter(Case):
     def tuplegetter_many(self):
         self.tg_inst_m(self.tuple)
 
+    @test_case
+    def tuplegetter_seq(self):
+        self.tg_inst_seq(self.tuple)
+
     @test_case
     def tuplegetter_new_one(self):
         self.impl_tg(42)(self.tuple)
@@ -854,6 +847,10 @@ class TupleGetter(Case):
     def tuplegetter_new_many(self):
         self.impl_tg(42, 420, 99, 9, 1)(self.tuple)
 
+    @test_case
+    def tuplegetter_new_seq(self):
+        self.impl_tg(40, 41, 42, 43, 44)(self.tuple)
+
 
 class BaseRow(Case):
     @staticmethod
@@ -911,6 +908,30 @@ class BaseRow(Case):
         self.row_state = self.row.__getstate__()
         self.row_long_state = self.row_long.__getstate__()
 
+        assert len(ascii_letters) == 52
+        self.parent_proc = SimpleResultMetaData(
+            tuple(ascii_letters),
+            _processors=[None, int, float, None, str] * 10,  # cut the last 2
+        )
+        self.row_proc_args = (
+            self.parent_proc,
+            self.parent_proc._processors,
+            self.parent_proc._key_to_index,
+            tuple(range(len(ascii_letters))),
+        )
+
+        self.parent_proc_none = SimpleResultMetaData(
+            tuple(ascii_letters), _processors=[None] * 52
+        )
+        self.row_proc_none_args = (
+            self.parent_proc_none,
+            # NOTE: usually the code calls _effective_processors that returns
+            # None for this case of all None.
+            self.parent_proc_none._processors,
+            self.parent_proc_none._key_to_index,
+            tuple(range(len(ascii_letters))),
+        )
+
     @classmethod
     def update_results(cls, results):
         cls._divide_results(results, "c", "python", "c / py")
@@ -927,6 +948,22 @@ class BaseRow(Case):
         self.Row(*self.row_args)
         self.Row(*self.row_long_args)
 
+    @test_case
+    def base_row_new_proc(self):
+        self.impl(*self.row_proc_args)
+
+    @test_case
+    def row_new_proc(self):
+        self.Row(*self.row_proc_args)
+
+    @test_case
+    def brow_new_proc_none(self):
+        self.impl(*self.row_proc_none_args)
+
+    @test_case
+    def row_new_proc_none(self):
+        self.Row(*self.row_proc_none_args)
+
     @test_case
     def row_dumps(self):
         self.row.__getstate__()
index 7379f5f3c0cb18f66c40c1826b34ff92b8c621d7..fd229ed03641fe705ec9d70a19a1dafb2c674b99 100644 (file)
@@ -376,7 +376,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_6
 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 2592
 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 25595
 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 2539
-test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15600
+test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 14614
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0]
 
@@ -406,7 +406,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 14
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 16
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2]
 
@@ -421,7 +421,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 14
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 16
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-2] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1]
 
@@ -436,7 +436,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 17
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 19
 test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 17
-test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 19
+test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[True-1] x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 18
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string
 
@@ -451,7 +451,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpy
 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 291
 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 6291
 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 257
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 6257
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5277
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode
 
@@ -466,7 +466,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cp
 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 291
 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 6291
 test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 257
-test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 6257
+test.aaa_profiling.test_resultset.ResultSetTest.test_raw_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5277
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
 
@@ -481,7 +481,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython
 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 585
 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 6589
 test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 532
-test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 6536
+test.aaa_profiling.test_resultset.ResultSetTest.test_string x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5605
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
 
@@ -496,4 +496,4 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpytho
 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 585
 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 6589
 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 532
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 6536
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 5605