git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Prebuild the row string-to-position lookup for Rows
author J. Nick Koston <nick@koston.org>
Wed, 19 Apr 2023 22:39:18 +0000 (18:39 -0400)
committer Federico Caselli <cfederico87@gmail.com>
Wed, 26 Apr 2023 18:19:17 +0000 (20:19 +0200)
Improved :class:`_engine.Row` implementation to optimize
``__getattr__`` performance.
The serialization of a :class:`_engine.Row` to pickle has changed:
pickles saved by older SQLAlchemy versions can still be loaded,
but pickles saved by this version cannot be loaded by older ones.

Fixes: #9678
Closes: #9668
Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/9668
Pull-request-sha: 86b8ccd1959dbd91b1208f7a648a91f217e1f866

Change-Id: Ia85c26a59e1a57ba2bf0d65578c6168f82a559f2

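The gist of the optimization, as a minimal illustrative sketch (DemoMetaData
and DemoRow are made-up names, not the actual SQLAlchemy classes): the result
metadata now prebuilds a plain key-to-position dict once per statement, so an
attribute or mapping access on a Row becomes a single dict lookup plus a tuple
index, replacing the per-access keymap record unpacking and key-style check.

    # illustrative sketch only -- simplified stand-ins for the real classes
    class DemoMetaData:
        def __init__(self, keys):
            # built once, analogous to ResultMetaData._key_to_index
            self._key_to_index = {key: idx for idx, key in enumerate(keys)}

    class DemoRow:
        def __init__(self, parent, data):
            self._parent = parent
            self._key_to_index = parent._key_to_index
            self._data = tuple(data)

        def __getattr__(self, name):
            # hot path: one dict lookup, one tuple index
            try:
                return self._data[self._key_to_index[name]]
            except KeyError:
                raise AttributeError(name) from None

    metadata = DemoMetaData(["user_id", "user_name"])
    row = DemoRow(metadata, (7, "jack"))
    assert row.user_name == "jack"
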
12 files changed:
doc/build/changelog/unreleased_20/9678.rst [new file with mode: 0644]
lib/sqlalchemy/cyextension/collections.pyx
lib/sqlalchemy/cyextension/resultproxy.pyx
lib/sqlalchemy/engine/_py_row.py
lib/sqlalchemy/engine/cursor.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/row.py
test/aaa_profiling/test_resultset.py
test/base/test_result.py
test/perf/compiled_extensions.py
test/profiles.txt
test/sql/test_resultset.py

diff --git a/doc/build/changelog/unreleased_20/9678.rst b/doc/build/changelog/unreleased_20/9678.rst
new file mode 100644 (file)
index 0000000..6ea1525
--- /dev/null
+++ b/doc/build/changelog/unreleased_20/9678.rst
@@ -0,0 +1,10 @@
+.. change::
+    :tags: engine, performance
+    :tickets: 9678
+
+    Improved :class:`_engine.Row` implementation to optimize
+    ``__getattr__`` performance.
+    The serialization of a :class:`_engine.Row` to pickle has changed:
+    pickles saved by older SQLAlchemy versions can still be loaded,
+    but pickles saved by this version cannot be loaded by older ones.
+    Pull request courtesy of J. Nick Koston.
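
Why the pickle compatibility is one-way, sketched with hypothetical state
dicts (based on the __getstate__/__setstate__ hunks in _py_row.py and
resultproxy.pyx below): the row now pickles only its parent metadata and its
data tuple and rebuilds the key-to-index lookup from the parent on load, while
older releases require a "_key_style" entry that the new state no longer
contains.

    # hedged illustration -- the dict keys mirror the diff, the values are fake
    new_state = {"_parent": "metadata", "_data": (7, "jack")}
    old_state = {"_parent": "metadata", "_data": (7, "jack"), "_key_style": 0}

    def old_setstate(state):
        # older releases read _key_style directly from the state dict
        return state["_parent"], state["_data"], state["_key_style"]

    def new_setstate(state):
        # this release ignores extra keys and rebuilds _key_to_index
        # from the parent metadata instead
        return state["_parent"], state["_data"]

    new_setstate(old_state)       # old pickle, new code: loads fine
    try:
        old_setstate(new_state)   # new pickle, old code: missing key
    except KeyError as missing:
        print("older versions fail with", repr(missing))
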
diff --git a/lib/sqlalchemy/cyextension/collections.pyx b/lib/sqlalchemy/cyextension/collections.pyx
index d08fa3aab24911a48210e18d3a06a6e7f124e261..4d134ccf3022713047b42af3908694a1bc91a6d1 100644 (file)
@@ -1,5 +1,4 @@
 cimport cython
-from cpython.dict cimport PyDict_Merge, PyDict_Update
 from cpython.long cimport PyLong_FromLongLong
 from cpython.set cimport PySet_Add
 
diff --git a/lib/sqlalchemy/cyextension/resultproxy.pyx b/lib/sqlalchemy/cyextension/resultproxy.pyx
index 96a028d93383481f96682581c9ccde534a8deb9c..c358f043f00fe0e480042c313bd7301cacf0bce4 100644 (file)
@@ -1,24 +1,17 @@
-# TODO: this is mostly just copied over from the python implementation
-# more improvements are likely possible
 import operator
 
-cdef int MD_INDEX = 0  # integer index in cursor.description
-cdef int _KEY_OBJECTS_ONLY = 1
-
-KEY_INTEGER_ONLY = 0
-KEY_OBJECTS_ONLY = _KEY_OBJECTS_ONLY
-
 cdef class BaseRow:
     cdef readonly object _parent
+    cdef readonly dict _key_to_index
     cdef readonly tuple _data
-    cdef readonly dict _keymap
-    cdef readonly int _key_style
 
-    def __init__(self, object parent, object processors, dict keymap, int key_style, object data):
+    def __init__(self, object parent, object processors, dict key_to_index, object data):
         """Row objects are constructed by CursorResult objects."""
 
         self._parent = parent
 
+        self._key_to_index = key_to_index
+
         if processors:
             self._data = tuple(
                 [
@@ -29,10 +22,6 @@ cdef class BaseRow:
         else:
             self._data = tuple(data)
 
-        self._keymap = keymap
-
-        self._key_style = key_style
-
     def __reduce__(self):
         return (
             rowproxy_reconstructor,
@@ -40,17 +29,13 @@ cdef class BaseRow:
         )
 
     def __getstate__(self):
-        return {
-            "_parent": self._parent,
-            "_data": self._data,
-            "_key_style": self._key_style,
-        }
+        return {"_parent": self._parent, "_data": self._data}
 
     def __setstate__(self, dict state):
-        self._parent = state["_parent"]
+        parent = state["_parent"]
+        self._parent = parent
         self._data = state["_data"]
-        self._keymap = self._parent._keymap
-        self._key_style = state["_key_style"]
+        self._key_to_index = parent._key_to_index
 
     def _values_impl(self):
         return list(self)
@@ -67,28 +52,17 @@ cdef class BaseRow:
     def __getitem__(self, index):
         return self._data[index]
 
-    cpdef _get_by_key_impl_mapping(self, key):
-        try:
-            rec = self._keymap[key]
-        except KeyError as ke:
-            rec = self._parent._key_fallback(key, ke)
-
-        mdindex = rec[MD_INDEX]
-        if mdindex is None:
-            self._parent._raise_for_ambiguous_column_name(rec)
-        elif (
-            self._key_style == _KEY_OBJECTS_ONLY
-            and isinstance(key, int)
-        ):
-            raise KeyError(key)
+    def _get_by_key_impl_mapping(self, key):
+        return self._get_by_key_impl(key, 0)
 
-        return self._data[mdindex]
+    cdef _get_by_key_impl(self, object key, int attr_err):
+        index = self._key_to_index.get(key)
+        if index is not None:
+            return self._data[<int>index]
+        self._parent._key_not_found(key, attr_err != 0)
 
     def __getattr__(self, name):
-        try:
-            return self._get_by_key_impl_mapping(name)
-        except KeyError as e:
-           raise AttributeError(e.args[0]) from e
+        return self._get_by_key_impl(name, 1)
 
 
 def rowproxy_reconstructor(cls, state):
diff --git a/lib/sqlalchemy/engine/_py_row.py b/lib/sqlalchemy/engine/_py_row.py
index 1b952fe4c167feb68e9a4b3b6603b9dc2946d726..4a9acec9bb09a517e1abde308d4b7171b84754b9 100644 (file)
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import enum
 import operator
 import typing
 from typing import Any
@@ -8,13 +7,12 @@ from typing import Callable
 from typing import Dict
 from typing import Iterator
 from typing import List
+from typing import Mapping
 from typing import Optional
 from typing import Tuple
 from typing import Type
-from typing import Union
 
 if typing.TYPE_CHECKING:
-    from .result import _KeyMapType
     from .result import _KeyType
     from .result import _ProcessorsType
     from .result import _RawRowType
@@ -24,38 +22,25 @@ if typing.TYPE_CHECKING:
 MD_INDEX = 0  # integer index in cursor.description
 
 
-class _KeyStyle(enum.IntEnum):
-    KEY_INTEGER_ONLY = 0
-    """__getitem__ only allows integer values and slices, raises TypeError
-    otherwise"""
-
-    KEY_OBJECTS_ONLY = 1
-    """__getitem__ only allows string/object values, raises TypeError
-    otherwise"""
-
-
-KEY_INTEGER_ONLY, KEY_OBJECTS_ONLY = list(_KeyStyle)
-
-
 class BaseRow:
-    __slots__ = ("_parent", "_data", "_keymap", "_key_style")
+    __slots__ = ("_parent", "_data", "_key_to_index")
 
     _parent: ResultMetaData
+    _key_to_index: Mapping[_KeyType, int]
     _data: _RawRowType
-    _keymap: _KeyMapType
-    _key_style: _KeyStyle
 
     def __init__(
         self,
         parent: ResultMetaData,
         processors: Optional[_ProcessorsType],
-        keymap: _KeyMapType,
-        key_style: _KeyStyle,
+        key_to_index: Mapping[_KeyType, int],
         data: _RawRowType,
     ):
         """Row objects are constructed by CursorResult objects."""
         object.__setattr__(self, "_parent", parent)
 
+        object.__setattr__(self, "_key_to_index", key_to_index)
+
         if processors:
             object.__setattr__(
                 self,
@@ -70,10 +55,6 @@ class BaseRow:
         else:
             object.__setattr__(self, "_data", tuple(data))
 
-        object.__setattr__(self, "_keymap", keymap)
-
-        object.__setattr__(self, "_key_style", key_style)
-
     def __reduce__(self) -> Tuple[Callable[..., BaseRow], Tuple[Any, ...]]:
         return (
             rowproxy_reconstructor,
@@ -81,18 +62,13 @@ class BaseRow:
         )
 
     def __getstate__(self) -> Dict[str, Any]:
-        return {
-            "_parent": self._parent,
-            "_data": self._data,
-            "_key_style": self._key_style,
-        }
+        return {"_parent": self._parent, "_data": self._data}
 
     def __setstate__(self, state: Dict[str, Any]) -> None:
         parent = state["_parent"]
         object.__setattr__(self, "_parent", parent)
         object.__setattr__(self, "_data", state["_data"])
-        object.__setattr__(self, "_keymap", parent._keymap)
-        object.__setattr__(self, "_key_style", state["_key_style"])
+        object.__setattr__(self, "_key_to_index", parent._key_to_index)
 
     def _values_impl(self) -> List[Any]:
         return list(self)
@@ -106,34 +82,22 @@ class BaseRow:
     def __hash__(self) -> int:
         return hash(self._data)
 
-    def _get_by_int_impl(self, key: Union[int, slice]) -> Any:
+    def __getitem__(self, key: Any) -> Any:
         return self._data[key]
 
-    if not typing.TYPE_CHECKING:
-        __getitem__ = _get_by_int_impl
-
-    def _get_by_key_impl_mapping(self, key: _KeyType) -> Any:
+    def _get_by_key_impl_mapping(self, key: str) -> Any:
         try:
-            rec = self._keymap[key]
-        except KeyError as ke:
-            rec = self._parent._key_fallback(key, ke)
-
-        mdindex = rec[MD_INDEX]
-        if mdindex is None:
-            self._parent._raise_for_ambiguous_column_name(rec)
-        # NOTE: keep "== KEY_OBJECTS_ONLY" instead of "is KEY_OBJECTS_ONLY"
-        # since deserializing the class from cython will load an int in
-        # _key_style, not an instance of _KeyStyle
-        elif self._key_style == KEY_OBJECTS_ONLY and isinstance(key, int):
-            raise KeyError(key)
-
-        return self._data[mdindex]
+            return self._data[self._key_to_index[key]]
+        except KeyError:
+            pass
+        self._parent._key_not_found(key, False)
 
     def __getattr__(self, name: str) -> Any:
         try:
-            return self._get_by_key_impl_mapping(name)
-        except KeyError as e:
-            raise AttributeError(e.args[0]) from e
+            return self._data[self._key_to_index[name]]
+        except KeyError:
+            pass
+        self._parent._key_not_found(name, True)
 
 
 # This reconstructor is necessary so that pickles with the Cy extension or
diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py
index aaf2c1918e5f060dd90a490d7f706d937ebb35a9..bd46f30ac34f7da801eab4bd147be20cbbdceb64 100644 (file)
@@ -21,9 +21,9 @@ from typing import ClassVar
 from typing import Dict
 from typing import Iterator
 from typing import List
+from typing import Mapping
 from typing import NoReturn
 from typing import Optional
-from typing import overload
 from typing import Sequence
 from typing import Tuple
 from typing import TYPE_CHECKING
@@ -123,7 +123,7 @@ _CursorKeyMapRecType = Tuple[
     Optional[str],  # MD_UNTRANSLATED
 ]
 
-_CursorKeyMapType = Dict["_KeyType", _CursorKeyMapRecType]
+_CursorKeyMapType = Mapping["_KeyType", _CursorKeyMapRecType]
 
 # same as _CursorKeyMapRecType except the MD_INDEX value is definitely
 # not None
@@ -149,7 +149,8 @@ class CursorResultMetaData(ResultMetaData):
         "_tuplefilter",
         "_translated_indexes",
         "_safe_for_cache",
-        "_unpickled"
+        "_unpickled",
+        "_key_to_index"
         # don't need _unique_filters support here for now.  Can be added
         # if a need arises.
     )
@@ -193,6 +194,7 @@ class CursorResultMetaData(ResultMetaData):
         new_obj._translated_indexes = translated_indexes
         new_obj._safe_for_cache = safe_for_cache
         new_obj._keymap_by_result_column_idx = keymap_by_result_column_idx
+        new_obj._key_to_index = self._make_key_to_index(keymap, MD_INDEX)
         return new_obj
 
     def _remove_processors(self) -> CursorResultMetaData:
@@ -217,7 +219,7 @@ class CursorResultMetaData(ResultMetaData):
 
         assert not self._tuplefilter
 
-        keymap = self._keymap.copy()
+        keymap = dict(self._keymap)
         offset = len(self._keys)
         keymap.update(
             {
@@ -232,7 +234,6 @@ class CursorResultMetaData(ResultMetaData):
                 for key, value in other._keymap.items()
             }
         )
-
         return self._make_new_metadata(
             unpickled=self._unpickled,
             processors=self._processors + other._processors,  # type: ignore
@@ -258,7 +259,7 @@ class CursorResultMetaData(ResultMetaData):
         tup = tuplegetter(*indexes)
         new_recs = [(index,) + rec[1:] for index, rec in enumerate(recs)]
 
-        keymap: _KeyMapType = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
+        keymap = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
         # TODO: need unit test for:
         # result = connection.execute("raw sql, no columns").scalars()
         # without the "or ()" it's failing because MD_OBJECTS is None
@@ -274,7 +275,7 @@ class CursorResultMetaData(ResultMetaData):
             keys=new_keys,
             tuplefilter=tup,
             translated_indexes=indexes,
-            keymap=keymap,
+            keymap=keymap,  # type: ignore[arg-type]
             safe_for_cache=self._safe_for_cache,
             keymap_by_result_column_idx=self._keymap_by_result_column_idx,
         )
@@ -491,6 +492,8 @@ class CursorResultMetaData(ResultMetaData):
                 }
             )
 
+        self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
+
     def _merge_cursor_description(
         self,
         context,
@@ -807,41 +810,25 @@ class CursorResultMetaData(ResultMetaData):
                 untranslated,
             )
 
-    @overload
-    def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: Literal[True] = ...
-    ) -> NoReturn:
-        ...
+    if not TYPE_CHECKING:
 
-    @overload
-    def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: Literal[False] = ...
-    ) -> None:
-        ...
-
-    @overload
-    def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: bool = ...
-    ) -> Optional[NoReturn]:
-        ...
-
-    def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: bool = True
-    ) -> Optional[NoReturn]:
-
-        if raiseerr:
-            if self._unpickled and isinstance(key, elements.ColumnElement):
-                raise exc.NoSuchColumnError(
-                    "Row was unpickled; lookup by ColumnElement "
-                    "is unsupported"
-                ) from err
+        def _key_fallback(
+            self, key: Any, err: Optional[Exception], raiseerr: bool = True
+        ) -> Optional[NoReturn]:
+
+            if raiseerr:
+                if self._unpickled and isinstance(key, elements.ColumnElement):
+                    raise exc.NoSuchColumnError(
+                        "Row was unpickled; lookup by ColumnElement "
+                        "is unsupported"
+                    ) from err
+                else:
+                    raise exc.NoSuchColumnError(
+                        "Could not locate column in row for column '%s'"
+                        % util.string_or_unprintable(key)
+                    ) from err
             else:
-                raise exc.NoSuchColumnError(
-                    "Could not locate column in row for column '%s'"
-                    % util.string_or_unprintable(key)
-                ) from err
-        else:
-            return None
+                return None
 
     def _raise_for_ambiguous_column_name(self, rec):
         raise exc.InvalidRequestError(
@@ -919,8 +906,8 @@ class CursorResultMetaData(ResultMetaData):
     def __setstate__(self, state):
         self._processors = [None for _ in range(len(state["_keys"]))]
         self._keymap = state["_keymap"]
-
         self._keymap_by_result_column_idx = None
+        self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
         self._keys = state["_keys"]
         self._unpickled = True
         if state["_translated_indexes"]:
@@ -1370,6 +1357,14 @@ class _NoResultMetaData(ResultMetaData):
     def _keymap(self):
         self._we_dont_return_rows()
 
+    @property
+    def _key_to_index(self):
+        self._we_dont_return_rows()
+
+    @property
+    def _processors(self):
+        self._we_dont_return_rows()
+
     @property
     def keys(self):
         self._we_dont_return_rows()
@@ -1458,12 +1453,11 @@ class CursorResult(Result[_T]):
 
             metadata = self._init_metadata(context, cursor_description)
 
-            keymap = metadata._keymap
-            processors = metadata._processors
-            process_row = Row
-            key_style = process_row._default_key_style
             _make_row = functools.partial(
-                process_row, metadata, processors, keymap, key_style
+                Row,
+                metadata,
+                metadata._processors,
+                metadata._key_to_index,
             )
             if log_row:
 
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index d5b8057efe0fef9dc8815e0400ed3f21fee274d2..cc6d26c881b5fdb511fbe4e39270be117191befa 100644 (file)
@@ -22,6 +22,7 @@ from typing import Generic
 from typing import Iterable
 from typing import Iterator
 from typing import List
+from typing import Mapping
 from typing import NoReturn
 from typing import Optional
 from typing import overload
@@ -59,7 +60,7 @@ _KeyIndexType = Union[str, "Column[Any]", int]
 # is overridden in cursor using _CursorKeyMapRecType
 _KeyMapRecType = Any
 
-_KeyMapType = Dict[_KeyType, _KeyMapRecType]
+_KeyMapType = Mapping[_KeyType, _KeyMapRecType]
 
 
 _RowData = Union[Row, RowMapping, Any]
@@ -99,6 +100,7 @@ class ResultMetaData:
     _keymap: _KeyMapType
     _keys: Sequence[str]
     _processors: Optional[_ProcessorsType]
+    _key_to_index: Mapping[_KeyType, int]
 
     @property
     def keys(self) -> RMKeyView:
@@ -112,24 +114,27 @@ class ResultMetaData:
 
     @overload
     def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: Literal[True] = ...
+        self, key: Any, err: Optional[Exception], raiseerr: Literal[True] = ...
     ) -> NoReturn:
         ...
 
     @overload
     def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: Literal[False] = ...
+        self,
+        key: Any,
+        err: Optional[Exception],
+        raiseerr: Literal[False] = ...,
     ) -> None:
         ...
 
     @overload
     def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: bool = ...
+        self, key: Any, err: Optional[Exception], raiseerr: bool = ...
     ) -> Optional[NoReturn]:
         ...
 
     def _key_fallback(
-        self, key: Any, err: Exception, raiseerr: bool = True
+        self, key: Any, err: Optional[Exception], raiseerr: bool = True
     ) -> Optional[NoReturn]:
         assert raiseerr
         raise KeyError(key) from err
@@ -177,6 +182,29 @@ class ResultMetaData:
         indexes = self._indexes_for_keys(keys)
         return tuplegetter(*indexes)
 
+    def _make_key_to_index(
+        self, keymap: Mapping[_KeyType, Sequence[Any]], index: int
+    ) -> Mapping[_KeyType, int]:
+        return {
+            key: rec[index]
+            for key, rec in keymap.items()
+            if rec[index] is not None
+        }
+
+    def _key_not_found(self, key: Any, attr_error: bool) -> NoReturn:
+        if key in self._keymap:
+            # the index must be None in this case
+            self._raise_for_ambiguous_column_name(self._keymap[key])
+        else:
+            # unknown key
+            if attr_error:
+                try:
+                    self._key_fallback(key, None)
+                except KeyError as ke:
+                    raise AttributeError(ke.args[0]) from ke
+            else:
+                self._key_fallback(key, None)
+
 
 class RMKeyView(typing.KeysView[Any]):
     __slots__ = ("_parent", "_keys")
@@ -222,6 +250,7 @@ class SimpleResultMetaData(ResultMetaData):
         "_tuplefilter",
         "_translated_indexes",
         "_unique_filters",
+        "_key_to_index",
     )
 
     _keys: Sequence[str]
@@ -257,6 +286,8 @@ class SimpleResultMetaData(ResultMetaData):
 
         self._processors = _processors
 
+        self._key_to_index = self._make_key_to_index(self._keymap, 0)
+
     def _has_key(self, key: object) -> bool:
         return key in self._keymap
 
@@ -359,7 +390,7 @@ def result_tuple(
 ) -> Callable[[Iterable[Any]], Row[Any]]:
     parent = SimpleResultMetaData(fields, extra)
     return functools.partial(
-        Row, parent, parent._processors, parent._keymap, Row._default_key_style
+        Row, parent, parent._processors, parent._key_to_index
     )
 
 
@@ -424,21 +455,19 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
                 def process_row(  # type: ignore
                     metadata: ResultMetaData,
                     processors: _ProcessorsType,
-                    keymap: _KeyMapType,
-                    key_style: Any,
+                    key_to_index: Mapping[_KeyType, int],
                     scalar_obj: Any,
                 ) -> Row[Any]:
                     return _proc(
-                        metadata, processors, keymap, key_style, (scalar_obj,)
+                        metadata, processors, key_to_index, (scalar_obj,)
                     )
 
         else:
             process_row = Row  # type: ignore
 
-        key_style = Row._default_key_style
         metadata = self._metadata
 
-        keymap = metadata._keymap
+        key_to_index = metadata._key_to_index
         processors = metadata._processors
         tf = metadata._tuplefilter
 
@@ -447,7 +476,7 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
                 processors = tf(processors)
 
             _make_row_orig: Callable[..., _R] = functools.partial(  # type: ignore  # noqa E501
-                process_row, metadata, processors, keymap, key_style
+                process_row, metadata, processors, key_to_index
             )
 
             fixed_tf = tf
@@ -457,7 +486,7 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
 
         else:
             make_row = functools.partial(  # type: ignore
-                process_row, metadata, processors, keymap, key_style
+                process_row, metadata, processors, key_to_index
             )
 
         fns: Tuple[Any, ...] = ()
index e15ea7b176763e9827c012e85c28720a3575c096..4b767da098c3462007e40b3af4ad5830cdc8b8fa 100644 (file)
@@ -34,12 +34,8 @@ from ..util._has_cy import HAS_CYEXTENSION
 
 if TYPE_CHECKING or not HAS_CYEXTENSION:
     from ._py_row import BaseRow as BaseRow
-    from ._py_row import KEY_INTEGER_ONLY
-    from ._py_row import KEY_OBJECTS_ONLY
 else:
     from sqlalchemy.cyextension.resultproxy import BaseRow as BaseRow
-    from sqlalchemy.cyextension.resultproxy import KEY_INTEGER_ONLY
-    from sqlalchemy.cyextension.resultproxy import KEY_OBJECTS_ONLY
 
 if TYPE_CHECKING:
     from .result import _KeyType
@@ -80,8 +76,6 @@ class Row(BaseRow, Sequence[Any], Generic[_TP]):
 
     __slots__ = ()
 
-    _default_key_style = KEY_INTEGER_ONLY
-
     def __setattr__(self, name: str, value: Any) -> NoReturn:
         raise AttributeError("can't set attribute")
 
@@ -134,24 +128,12 @@ class Row(BaseRow, Sequence[Any], Generic[_TP]):
         .. versionadded:: 1.4
 
         """
-        return RowMapping(
-            self._parent,
-            None,
-            self._keymap,
-            RowMapping._default_key_style,
-            self._data,
-        )
+        return RowMapping(self._parent, None, self._key_to_index, self._data)
 
     def _filter_on_values(
         self, filters: Optional[Sequence[Optional[_ResultProcessorType[Any]]]]
     ) -> Row[Any]:
-        return Row(
-            self._parent,
-            filters,
-            self._keymap,
-            self._key_style,
-            self._data,
-        )
+        return Row(self._parent, filters, self._key_to_index, self._data)
 
     if not TYPE_CHECKING:
 
@@ -198,9 +180,7 @@ class Row(BaseRow, Sequence[Any], Generic[_TP]):
         def __getitem__(self, index: slice) -> Sequence[Any]:
             ...
 
-        def __getitem__(
-            self, index: Union[int, slice]
-        ) -> Union[Any, Sequence[Any]]:
+        def __getitem__(self, index: Union[int, slice]) -> Any:
             ...
 
     def __lt__(self, other: Any) -> bool:
@@ -337,8 +317,6 @@ class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]):
 
     __slots__ = ()
 
-    _default_key_style = KEY_OBJECTS_ONLY
-
     if TYPE_CHECKING:
 
         def __getitem__(self, key: _KeyType) -> Any:
diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py
index f712b729cf9981585434698c21a556447225e50d..049e4a45e50064f352a21f57eab7322132e11103 100644 (file)
@@ -211,9 +211,6 @@ class RowTest(fixtures.TestBase):
 
     def _rowproxy_fixture(self, keys, processors, row, row_cls):
         class MockMeta:
-            def __init__(self):
-                pass
-
             def _warn_for_nonint(self, arg):
                 pass
 
@@ -224,9 +221,9 @@ class RowTest(fixtures.TestBase):
             for key in keyobjs:
                 keymap[key] = (index, key)
             keymap[index] = (index, key)
-        return row_cls(
-            metadata, processors, keymap, row_cls._default_key_style, row
-        )
+
+        key_to_index = {key: rec[0] for key, rec in keymap.items()}
+        return row_cls(metadata, processors, key_to_index, row)
 
     def _test_getitem_value_refcounts_new(self, seq_factory):
         col1, col2 = object(), object()
diff --git a/test/base/test_result.py b/test/base/test_result.py
index 6e7e53c21e7cb8197bf36d3dd5d31a7f6c12c3fd..05f922d671560abe92fda852bedc1d6fb88c7e9c 100644 (file)
@@ -206,11 +206,9 @@ class ResultTupleTest(fixtures.TestBase):
 
         if direction.py_to_cy:
             dump_cls = _py_row.BaseRow
-            num = _py_row.KEY_INTEGER_ONLY
             load_cls = _cy_row.BaseRow
         elif direction.cy_to_py:
             dump_cls = _cy_row.BaseRow
-            num = _cy_row.KEY_INTEGER_ONLY
             load_cls = _py_row.BaseRow
         else:
             direction.fail()
@@ -220,7 +218,7 @@ class ResultTupleTest(fixtures.TestBase):
             class Row(dump_cls):
                 pass
 
-            row = Row(p, p._processors, p._keymap, num, (1, 2, 3))
+            row = Row(p, p._processors, p._key_to_index, (1, 2, 3))
 
             state = dumps(row)
 
diff --git a/test/perf/compiled_extensions.py b/test/perf/compiled_extensions.py
index 1f79d460be1f645705240000d852624d2a2af185..14bb4e4ab78f28e360a586fb1ef42b844acc5030 100644 (file)
@@ -10,9 +10,17 @@ from sqlalchemy import bindparam
 from sqlalchemy import column
 
 
-def test_case(fn):
-    fn.__test_case__ = True
-    return fn
+def test_case(fn=None, *, number=None):
+    def wrap(fn):
+        fn.__test_case__ = True
+        if number is not None:
+            fn.__number__ = number
+        return fn
+
+    if fn is None:
+        return wrap
+    else:
+        return wrap(fn)
 
 
 class Case:
@@ -90,7 +98,11 @@ class Case:
             for m in methods:
                 call = getattr(impl_case, m)
                 try:
-                    value = timeit(call, number=number)
+                    t_num = number
+                    fn_num = getattr(call, "__number__", None)
+                    if fn_num is not None:
+                        t_num = max(1, int(fn_num * factor))
+                    value = timeit(call, number=t_num)
                     print(".", end="", flush=True)
                 except Exception as e:
                     fails.append(f"{name}::{m} error: {e}")
@@ -810,7 +822,7 @@ class TupleGetter(Case):
             def __init__(self, data):
                 self.data = data
 
-            def _get_by_int_impl(self, index):
+            def __getitem__(self, index):
                 # called by python
                 return self.data[index]
 
@@ -876,16 +888,14 @@ class BaseRow(Case):
         self.row_args = (
             self.parent,
             self.parent._processors,
-            self.parent._keymap,
-            0,
+            self.parent._key_to_index,
             (1, 2, 3),
         )
         self.parent_long = SimpleResultMetaData(tuple(ascii_letters))
         self.row_long_args = (
             self.parent_long,
             self.parent_long._processors,
-            self.parent_long._keymap,
-            0,
+            self.parent_long._key_to_index,
             tuple(range(len(ascii_letters))),
         )
         self.row = self.impl(*self.row_args)
@@ -927,11 +937,6 @@ class BaseRow(Case):
         self.impl.__new__(self.impl).__setstate__(self.row_state)
         self.impl.__new__(self.impl).__setstate__(self.row_long_state)
 
-    @test_case
-    def row_filter(self):
-        self.row._filter_on_values(None)
-        self.row_long._filter_on_values(None)
-
     @test_case
     def row_values_impl(self):
         self.row._values_impl()
@@ -968,26 +973,12 @@ class BaseRow(Case):
         self.row_long[0:1]
         self.row_long[1:-1]
 
-    @test_case
-    def get_by_int(self):
-        self.row._get_by_int_impl(0)
-        self.row._get_by_int_impl(1)
-        self.row_long._get_by_int_impl(0)
-        self.row_long._get_by_int_impl(1)
-
     @test_case
     def get_by_key(self):
-        self.row._get_by_key_impl(0)
-        self.row._get_by_key_impl(1)
-        self.row_long._get_by_key_impl(0)
-        self.row_long._get_by_key_impl(1)
-
-    @test_case
-    def get_by_key_slice(self):
-        self.row._get_by_key_impl(slice(0, 1))
-        self.row._get_by_key_impl(slice(1, -1))
-        self.row_long._get_by_key_impl(slice(0, 1))
-        self.row_long._get_by_key_impl(slice(1, -1))
+        self.row._get_by_key_impl_mapping("a")
+        self.row._get_by_key_impl_mapping("b")
+        self.row_long._get_by_key_impl_mapping("s")
+        self.row_long._get_by_key_impl_mapping("a")
 
     @test_case
     def getattr(self):
@@ -996,6 +987,40 @@ class BaseRow(Case):
         self.row_long.x
         self.row_long.y
 
+    @test_case(number=50_000)
+    def get_by_key_recreate(self):
+        self.init_objects()
+        row = self.row
+        for _ in range(25):
+            row._get_by_key_impl_mapping("a")
+        l_row = self.row_long
+        for _ in range(25):
+            l_row._get_by_key_impl_mapping("f")
+            l_row._get_by_key_impl_mapping("o")
+            l_row._get_by_key_impl_mapping("r")
+            l_row._get_by_key_impl_mapping("t")
+            l_row._get_by_key_impl_mapping("y")
+            l_row._get_by_key_impl_mapping("t")
+            l_row._get_by_key_impl_mapping("w")
+            l_row._get_by_key_impl_mapping("o")
+
+    @test_case(number=50_000)
+    def getattr_recreate(self):
+        self.init_objects()
+        row = self.row
+        for _ in range(25):
+            row.a
+        l_row = self.row_long
+        for _ in range(25):
+            l_row.f
+            l_row.o
+            l_row.r
+            l_row.t
+            l_row.y
+            l_row.t
+            l_row.w
+            l_row.o
+
 
 class CacheAnonMap(Case):
     @staticmethod
diff --git a/test/profiles.txt b/test/profiles.txt
index 6fb9c162e27768ba6da9d945d4ef01d3c1423442..7379f5f3c0cb18f66c40c1826b34ff92b8c621d7 100644 (file)
@@ -320,18 +320,18 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect x86_64_linux_cpyt
 
 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
 
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 50
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 53
 
 # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
 
@@ -376,7 +376,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_6
 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 2592
 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 25595
 test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 2539
-test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 25543
+test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15600
 
 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0]
 
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
index e382a7fb66104bb801de13149af7cb4db8571fa8..bf5e30e3cad9552baf0219d8c52ad77ba967873d 100644 (file)
@@ -38,7 +38,6 @@ from sqlalchemy.engine import cursor as _cursor
 from sqlalchemy.engine import default
 from sqlalchemy.engine import Row
 from sqlalchemy.engine.result import SimpleResultMetaData
-from sqlalchemy.engine.row import KEY_INTEGER_ONLY
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.sql import ColumnElement
 from sqlalchemy.sql import expression
@@ -201,7 +200,6 @@ class CursorResultTest(fixtures.TablesTest):
             rows[0].user_id
 
     def test_keys_no_rows(self, connection):
-
         for i in range(2):
             r = connection.execute(
                 text("update users set user_name='new' where user_id=10")
@@ -378,7 +376,6 @@ class CursorResultTest(fixtures.TablesTest):
                 operator.ge,
                 operator.le,
             ]:
-
                 try:
                     control = op(equal, compare)
                 except TypeError:
@@ -994,18 +991,14 @@ class CursorResultTest(fixtures.TablesTest):
 
     def test_column_accessor_err(self, connection):
         r = connection.execute(select(1)).first()
-        assert_raises_message(
-            AttributeError,
-            "Could not locate column in row for column 'foo'",
-            getattr,
-            r,
-            "foo",
-        )
-        assert_raises_message(
-            KeyError,
-            "Could not locate column in row for column 'foo'",
-            lambda: r._mapping["foo"],
-        )
+        with expect_raises_message(
+            AttributeError, "Could not locate column in row for column 'foo'"
+        ):
+            r.foo
+        with expect_raises_message(
+            KeyError, "Could not locate column in row for column 'foo'"
+        ):
+            r._mapping["foo"]
 
     def test_graceful_fetch_on_non_rows(self):
         """test that calling fetchone() etc. on a result that doesn't
@@ -1069,9 +1062,9 @@ class CursorResultTest(fixtures.TablesTest):
 
         eq_(list(row._fields), ["case_insensitive", "CaseSensitive"])
 
-        in_("case_insensitive", row._keymap)
-        in_("CaseSensitive", row._keymap)
-        not_in("casesensitive", row._keymap)
+        in_("case_insensitive", row._parent._keymap)
+        in_("CaseSensitive", row._parent._keymap)
+        not_in("casesensitive", row._parent._keymap)
 
         eq_(row._mapping["case_insensitive"], 1)
         eq_(row._mapping["CaseSensitive"], 2)
@@ -1094,9 +1087,9 @@ class CursorResultTest(fixtures.TablesTest):
                 ["case_insensitive", "CaseSensitive", "screw_up_the_cols"],
             )
 
-            in_("case_insensitive", row._keymap)
-            in_("CaseSensitive", row._keymap)
-            not_in("casesensitive", row._keymap)
+            in_("case_insensitive", row._parent._keymap)
+            in_("CaseSensitive", row._parent._keymap)
+            not_in("casesensitive", row._parent._keymap)
 
             eq_(row._mapping["case_insensitive"], 1)
             eq_(row._mapping["CaseSensitive"], 2)
@@ -1718,15 +1711,11 @@ class CursorResultTest(fixtures.TablesTest):
             def __getitem__(self, i):
                 return list.__getitem__(self.internal_list, i)
 
-        proxy = Row(
-            object(),
-            [None],
-            {"key": (0, None, "key"), 0: (0, None, "key")},
-            Row._default_key_style,
-            MyList(["value"]),
-        )
+        parent = SimpleResultMetaData(["key"])
+        proxy = Row(parent, [None], parent._key_to_index, MyList(["value"]))
         eq_(list(proxy), ["value"])
         eq_(proxy[0], "value")
+        eq_(proxy.key, "value")
         eq_(proxy._mapping["key"], "value")
 
     def test_no_rowcount_on_selects_inserts(self, metadata, testing_engine):
@@ -1769,14 +1758,7 @@ class CursorResultTest(fixtures.TablesTest):
                 eq_(len(mock_rowcount.__get__.mock_calls), 2)
 
     def test_row_is_sequence(self):
-
-        row = Row(
-            object(),
-            [None],
-            {"key": (None, 0), 0: (None, 0)},
-            Row._default_key_style,
-            ["value"],
-        )
+        row = Row(object(), [None], {}, ["value"])
         is_true(isinstance(row, collections_abc.Sequence))
 
     def test_row_special_names(self):
@@ -1784,8 +1766,7 @@ class CursorResultTest(fixtures.TablesTest):
         row = Row(
             metadata,
             [None, None, None, None],
-            metadata._keymap,
-            Row._default_key_style,
+            metadata._key_to_index,
             ["kv", "cv", "iv", "f"],
         )
         is_true(isinstance(row, collections_abc.Sequence))
@@ -1803,8 +1784,7 @@ class CursorResultTest(fixtures.TablesTest):
         row = Row(
             metadata,
             [None, None, None],
-            metadata._keymap,
-            Row._default_key_style,
+            metadata._key_to_index,
             ["kv", "cv", "iv"],
         )
         is_true(isinstance(row, collections_abc.Sequence))
@@ -1818,18 +1798,11 @@ class CursorResultTest(fixtures.TablesTest):
 
     def test_new_row_no_dict_behaviors(self):
         """This mode is not used currently but will be once we are in 2.0."""
-        metadata = SimpleResultMetaData(
-            [
-                "a",
-                "b",
-                "count",
-            ]
-        )
+        metadata = SimpleResultMetaData(["a", "b", "count"])
         row = Row(
             metadata,
             [None, None, None],
-            metadata._keymap,
-            KEY_INTEGER_ONLY,
+            metadata._key_to_index,
             ["av", "bv", "cv"],
         )
 
@@ -1850,14 +1823,7 @@ class CursorResultTest(fixtures.TablesTest):
         eq_(list(row._mapping), ["a", "b", "count"])
 
     def test_row_is_hashable(self):
-
-        row = Row(
-            object(),
-            [None, None, None],
-            {"key": (None, 0), 0: (None, 0)},
-            Row._default_key_style,
-            (1, "value", "foo"),
-        )
+        row = Row(object(), [None, None, None], {}, (1, "value", "foo"))
         eq_(hash(row), hash((1, "value", "foo")))
 
     @testing.provide_metadata
@@ -3459,7 +3425,6 @@ class AlternateCursorResultTest(fixtures.TablesTest):
                 r = conn.execute(select(self.table))
                 assert isinstance(r.cursor_strategy, strategy_cls)
                 with mock.patch.object(r, "cursor", cursor()):
-
                     with testing.expect_raises_message(
                         IOError, "random non-DBAPI"
                     ):
@@ -3523,7 +3488,6 @@ class MergeCursorResultTest(fixtures.TablesTest):
         users = self.tables.users
 
         def results(connection):
-
             r1 = connection.execute(
                 users.select()
                 .where(users.c.user_id.in_([7, 8]))