]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
something got faster, something slower
authorAlbert N <anamaev263@gmail.com>
Thu, 16 Oct 2025 16:41:17 +0000 (16:41 +0000)
committerAlbert N <anamaev263@gmail.com>
Thu, 16 Oct 2025 16:41:17 +0000 (16:41 +0000)
Running case BaseRow
Running python     .................. Done
Running cython     .................. Done
                    | python  | cython  | cy / py |
base_row_new        | 1.30845 | 0.16830 | 0.12862 |
row_new             | 1.41965 | 0.20726 | 0.14599 |
base_row_new_proc   | 4.42746 | 2.43170 | 0.54923 |
row_new_proc        | 4.33764 | 2.52859 | 0.58294 |
brow_new_proc_none  | 1.82780 | 0.34398 | 0.18819 |
row_new_proc_none   | 1.91148 | 0.36864 | 0.19285 |
row_dumps           | 0.27072 | 0.48488 | 1.79106 |
row_loads           | 1.45612 | 0.82574 | 0.56708 |
row_values_impl     | 0.28092 | 0.47103 | 1.67672 |
row_iter            | 0.70641 | 0.39112 | 0.55367 |
row_len             | 0.22251 | 0.07515 | 0.33776 |
row_hash            | 0.43154 | 0.24742 | 0.57335 |
getitem             | 0.29614 | 0.17589 | 0.59395 |
getitem_slice       | 0.59998 | 0.32846 | 0.54745 |
get_by_key          | 0.48669 | 0.83138 | 1.70821 |
getattr             | 0.71569 | 0.45048 | 0.62944 |
get_by_key_recreate | 1.95081 | 2.55506 | 1.30974 |
getattr_recreate    | 0.85241 | 0.73593 | 0.86334 |
> mean of values    | —       | —       | 0.71887 |

lib/sqlalchemy/engine/_row_cy.py
lib/sqlalchemy/engine/row.py
setup.py

index e550710821f5c6e39341db032fd751d2c37544d3..7e821c35faf73007c2b8cbed77b34e8d95d1333f 100644 (file)
@@ -11,6 +11,7 @@ from typing import Any
 from typing import Dict
 from typing import Iterator
 from typing import List
+from typing import NoReturn
 from typing import Optional
 from typing import Sequence
 from typing import Tuple
@@ -50,6 +51,29 @@ if not cython.compiled:
     def PyTuple_SET_ITEM(tup, idx, item):  # type: ignore
         tup[idx] = item
 
+    def _getstate_impl(cls: object) -> dict:
+        return {"_parent": cls._parent, "_data": cls._data}
+
+    def _apply_processors(
+        proc: _ProcessorsType, data: Sequence[Any]
+    ) -> Tuple[Any, ...]:
+        res: List[Any] = list(data)
+        proc_size = len(proc)
+        # TODO: would be nice to do this only on the first row
+        assert len(res) == proc_size
+        for i in range(proc_size):
+            p = proc[i]
+            if p is not None:
+                res[i] = p(res[i])
+        return tuple(res)
+
+    def rowproxy_reconstructor(
+        cls: Type[BaseRow], state: Dict[str, Any]
+    ) -> BaseRow:
+        obj = cls.__new__(cls)
+        obj.__setstate__(state)
+        return obj
+
     PySequence_Fast_GET_SIZE = len
     Py_INCREF = cython._no_op
 else:
@@ -58,11 +82,40 @@ else:
     from cython.cimports.cpython import PyTuple_SET_ITEM
     from cython.cimports.cpython import PySequence_Fast_GET_SIZE
 
+    obj_getattr = object.__getattribute__
+
+    @cython.inline
+    @cython.cfunc
+    @cython.wraparound(False)
+    @cython.boundscheck(False)
+    @cython.returns(tuple)
+    @cython.locals(res=tuple, proc_size=cython.Py_ssize_t, p=object)
+    def _apply_processors(proc: object, data: object) -> Tuple[Any, ...]:
+        proc_size = PySequence_Fast_GET_SIZE(proc)
+        # TODO: would be nice to do this only on the first row
+        assert PySequence_Fast_GET_SIZE(data) == proc_size
+        res = PyTuple_New(proc_size)
+        for i in range(proc_size):
+            p = proc[i]
+            if p is not None:
+                PyTuple_SET_ITEM(res, i, Py_INCREF(p(data[i])))
+            else:
+                PyTuple_SET_ITEM(res, i, Py_INCREF(data[i]))
+        return res
+
+    @cython.inline
+    @cython.cfunc
+    def rowproxy_reconstructor(
+        cls: Type[BaseRow], state: Dict[str, Any]
+    ) -> BaseRow:
+        obj = cls.__new__(cls)
+        obj.__setstate__(state)
+        return obj
+
 
 @cython.cclass
 class BaseRow:
-    if not cython.compiled:
-        __slots__ = ("_parent", "_data", "_key_to_index")
+    __slots__ = ("_parent", "_data", "_key_to_index")
 
     if cython.compiled:
         _parent: ResultMetaData = cython.declare(object, visibility="readonly")
@@ -71,6 +124,11 @@ class BaseRow:
         )
         _data: Tuple[Any, ...] = cython.declare(tuple, visibility="readonly")
 
+        @cython.inline
+        @cython.cfunc
+        def _getstate_impl(self) -> dict:
+            return {"_parent": self._parent, "_data": self._data}
+
     def __init__(
         self,
         parent: ResultMetaData,
@@ -86,7 +144,7 @@ class BaseRow:
             (
                 _apply_processors(processors, data)
                 if processors is not None
-                else tuple(data)
+                else (data if isinstance(data, tuple) else tuple(data))
             ),
         )
 
@@ -112,16 +170,25 @@ class BaseRow:
     def __reduce__(self) -> Tuple[Any, Any]:
         return (
             rowproxy_reconstructor,
-            (self.__class__, self.__getstate__()),
+            (self.__class__, self._getstate_impl()),
         )
 
-    def __getstate__(self) -> Dict[str, Any]:
-        return {"_parent": self._parent, "_data": self._data}
+    if cython.compiled:
+
+        def __getstate__(self) -> Dict[str, Any]:
+            return self._getstate_impl()
+
+    else:
+
+        def __getstate__(self) -> Dict[str, Any]:
+            return {"_parent": self._parent, "_data": self._data}
 
     def __setstate__(self, state: Dict[str, Any]) -> None:
         parent = state["_parent"]
         self._set_attrs(parent, parent._key_to_index, state["_data"])
 
+    @cython.wraparound(False)
+    @cython.boundscheck(False)
     def _values_impl(self) -> List[Any]:
         return list(self._data)
 
@@ -134,76 +201,47 @@ class BaseRow:
     def __hash__(self) -> int:
         return hash(self._data)
 
-    if not TYPE_CHECKING or cython.compiled:
+    if not TYPE_CHECKING:
 
         def __getitem__(self, key: Any) -> Any:
             return self._data[key]
 
-    def _get_by_key_impl_mapping(self, key: _KeyType) -> Any:
+    def _get_by_key_impl_mapping(self, key: _KeyType) -> object:
         return self._get_by_key_impl(key, False)
 
     @cython.cfunc
     @cython.inline
+    @cython.locals(index=cython.Py_ssize_t)
     def _get_by_key_impl(self, key: _KeyType, attr_err: cython.bint) -> object:
-        index: Optional[int] = self._key_to_index.get(key)
-        if index is not None:
+        index = self._key_to_index.get(key, -1)
+        if index != -1:
             return self._data[index]
         self._parent._key_not_found(key, attr_err)
 
     def __getattr__(self, name: str) -> Any:
         return self._get_by_key_impl(name, True)
 
-    def _to_tuple_instance(self) -> Tuple[Any, ...]:
-        return self._data
-
+    def __setattr__(self, name: str, value: Any) -> NoReturn:
+        raise AttributeError("can't set attribute")
 
-if cython.compiled:
+    def __delattr__(self, name: str) -> NoReturn:
+        raise AttributeError("can't delete attribute")
 
-    @cython.inline
-    @cython.cfunc
-    @cython.wraparound(False)
-    @cython.boundscheck(False)
-    @cython.returns(tuple)
-    @cython.locals(res=tuple, proc_size=cython.Py_ssize_t, p=object)
-    def _apply_processors(proc: object, data: object) -> Tuple[Any, ...]:
-        proc_size = PySequence_Fast_GET_SIZE(proc)
-        # TODO: would be nice to do this only on the fist row
-        assert PySequence_Fast_GET_SIZE(data) == proc_size
-        res = PyTuple_New(proc_size)
-        for i in range(proc_size):
-            p = proc[i]
-            if p is not None:
-                PyTuple_SET_ITEM(res, i, Py_INCREF(p(data[i])))
-            else:
-                PyTuple_SET_ITEM(res, i, Py_INCREF(data[i]))
-        return res
-
-else:
+    if cython.compiled:
 
-    def _apply_processors(
-        proc: _ProcessorsType, data: Sequence[Any]
-    ) -> Tuple[Any, ...]:
-        res: List[Any] = list(data)
-        proc_size = len(proc)
-        # TODO: would be nice to do this only on the fist row
-        assert len(res) == proc_size
-        for i in range(proc_size):
-            p = proc[i]
-            if p is not None:
-                res[i] = p(res[i])
-        return tuple(res)
+        def __getattribute__(self, name: str) -> object:
+            if name == "_data":
+                return self._data
+            if name == "_key_to_index":
+                return self._key_to_index
+            if name == "_parent":
+                return self._parent
+            if name[0] != "_" and name[-1] != "_":
+                return self._get_by_key_impl(name, True)
+            return obj_getattr(self, name)
 
+    def _to_tuple_instance(self) -> Tuple[Any, ...]:
+        return self._data
 
-# This reconstructor is necessary so that pickles with the Cy extension or
-# without use the same Binary format.
-# Turn off annotation typing so the compiled version accepts the python
-# class too.
-# @cython.annotation_typing(False)
-@cython.inline
-@cython.cfunc
-def rowproxy_reconstructor(
-    cls: Type[BaseRow], state: Dict[str, Any]
-) -> BaseRow:
-    obj = cls.__new__(cls)
-    obj.__setstate__(state)
-    return obj
+    def __contains__(self, key: Any) -> cython.bint:
+        return key in self._data
index 6c5db5b49d86fbdd41b78479422a661e0d2e5c7b..6a834a898579618fe55ac098c48238d4a4118799 100644 (file)
@@ -18,9 +18,7 @@ from typing import Callable
 from typing import Dict
 from typing import Generic
 from typing import Iterator
-from typing import List
 from typing import Mapping
-from typing import NoReturn
 from typing import Optional
 from typing import Sequence
 from typing import Tuple
@@ -75,12 +73,6 @@ class Row(BaseRow, _RowBase[Unpack[_Ts]], Generic[Unpack[_Ts]]):
 
     __slots__ = ()
 
-    def __setattr__(self, name: str, value: Any) -> NoReturn:
-        raise AttributeError("can't set attribute")
-
-    def __delattr__(self, name: str) -> NoReturn:
-        raise AttributeError("can't delete attribute")
-
     @deprecated(
         "2.1.0",
         "The :meth:`.Row._tuple` method is deprecated, :class:`.Row` "
@@ -222,9 +214,6 @@ class Row(BaseRow, _RowBase[Unpack[_Ts]], Generic[Unpack[_Ts]]):
         count = _special_name_accessor("count")
         index = _special_name_accessor("index")
 
-    def __contains__(self, key: Any) -> bool:
-        return key in self._data
-
     def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool:
         return (
             op(self._to_tuple_instance(), other._to_tuple_instance())
@@ -274,7 +263,7 @@ class Row(BaseRow, _RowBase[Unpack[_Ts]], Generic[Unpack[_Ts]]):
             :attr:`.Row._mapping`
 
         """
-        return tuple([k for k in self._parent.keys if k is not None])
+        return tuple(k for k in self._parent.keys if k is not None)
 
     def _asdict(self) -> Dict[str, Any]:
         """Return a new dict which maps field names to their corresponding
@@ -374,15 +363,9 @@ class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]):
     else:
         __getitem__ = BaseRow._get_by_key_impl_mapping
 
-    def _values_impl(self) -> List[Any]:
-        return list(self._data)
-
     def __iter__(self) -> Iterator[str]:
         return (k for k in self._parent.keys if k is not None)
 
-    def __len__(self) -> int:
-        return len(self._data)
-
     def __contains__(self, key: object) -> bool:
         return self._parent._has_key(key)
 
index 4f73c4e58be8d2eb7820c6321b0ee71beba89f96..9d2e5fb5c1a1208be373cb58772a751dca400de2 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -46,9 +46,15 @@ CYTHON_MODULES = (
 if HAS_CYTHON and IS_CPYTHON and not DISABLE_EXTENSION:
     assert _cy_Extension is not None
     assert _cy_build_ext is not None
+    from Cython.Compiler import Options
+
+    Options.docstrings = False
+    Options.lookup_module_cpdef = True
+    Options.clear_to_none = False
 
     cython_directives: Dict[str, Any] = {
         "language_level": "3",
+        "initializedcheck": False,
     }
 
     if sys.version_info >= (3, 13):