--- /dev/null
+.. change::
+ :tags: engine, performance
+ :tickets: 9678
+
+ Improved :class:`_engine.Row` implementation to optimize
+ ``__getattr__`` performance.
+ The serialization of a :class:`_engine.Row` to pickle has changed as part
+ of this change. Pickles saved by older SQLAlchemy versions can still be
+ loaded, but pickles saved by this version cannot be loaded by older ones.
+ Pull request courtesy of J. Nick Koston.
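The gist of the change, for readers of the diff below: result metadata now precomputes a plain dict mapping each column key straight to its position in the row tuple, replacing the per-access walk through _keymap records and _key_style checks. A minimal sketch of the shape (illustrative stand-in only, not the shipped classes):

class SketchRow:
    """Illustrative stand-in for BaseRow; names mirror the patch."""

    __slots__ = ("_parent", "_data", "_key_to_index")

    def __init__(self, parent, data, key_to_index):
        self._parent = parent
        self._data = tuple(data)
        # precomputed once per result metadata: key -> index into _data
        self._key_to_index = key_to_index

    def __getattr__(self, name):
        # hot path is one dict lookup plus one tuple index
        index = self._key_to_index.get(name)
        if index is not None:
            return self._data[index]
        raise AttributeError(name)

row = SketchRow(None, (7, "jack"), {"user_id": 0, "user_name": 1})
assert row.user_id == 7 and row.user_name == "jack"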
cimport cython
-from cpython.dict cimport PyDict_Merge, PyDict_Update
from cpython.long cimport PyLong_FromLongLong
from cpython.set cimport PySet_Add
-# TODO: this is mostly just copied over from the python implementation
-# more improvements are likely possible
import operator
-cdef int MD_INDEX = 0 # integer index in cursor.description
-cdef int _KEY_OBJECTS_ONLY = 1
-
-KEY_INTEGER_ONLY = 0
-KEY_OBJECTS_ONLY = _KEY_OBJECTS_ONLY
-
cdef class BaseRow:
cdef readonly object _parent
+ cdef readonly dict _key_to_index
cdef readonly tuple _data
- cdef readonly dict _keymap
- cdef readonly int _key_style
- def __init__(self, object parent, object processors, dict keymap, int key_style, object data):
+ def __init__(self, object parent, object processors, dict key_to_index, object data):
"""Row objects are constructed by CursorResult objects."""
self._parent = parent
+ self._key_to_index = key_to_index
+
if processors:
self._data = tuple(
[
else:
self._data = tuple(data)
- self._keymap = keymap
-
- self._key_style = key_style
-
def __reduce__(self):
return (
rowproxy_reconstructor,
)
def __getstate__(self):
- return {
- "_parent": self._parent,
- "_data": self._data,
- "_key_style": self._key_style,
- }
+ return {"_parent": self._parent, "_data": self._data}
def __setstate__(self, dict state):
- self._parent = state["_parent"]
+ parent = state["_parent"]
+ self._parent = parent
self._data = state["_data"]
- self._keymap = self._parent._keymap
- self._key_style = state["_key_style"]
+ self._key_to_index = parent._key_to_index
def _values_impl(self):
return list(self)
def __getitem__(self, index):
return self._data[index]
- cpdef _get_by_key_impl_mapping(self, key):
- try:
- rec = self._keymap[key]
- except KeyError as ke:
- rec = self._parent._key_fallback(key, ke)
-
- mdindex = rec[MD_INDEX]
- if mdindex is None:
- self._parent._raise_for_ambiguous_column_name(rec)
- elif (
- self._key_style == _KEY_OBJECTS_ONLY
- and isinstance(key, int)
- ):
- raise KeyError(key)
+ def _get_by_key_impl_mapping(self, key):
+ return self._get_by_key_impl(key, 0)
- return self._data[mdindex]
+ cdef _get_by_key_impl(self, object key, int attr_err):
+ index = self._key_to_index.get(key)
+ if index is not None:
+ return self._data[<int>index]
+ self._parent._key_not_found(key, attr_err != 0)
def __getattr__(self, name):
- try:
- return self._get_by_key_impl_mapping(name)
- except KeyError as e:
- raise AttributeError(e.args[0]) from e
+ return self._get_by_key_impl(name, 1)
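The attr_err flag only matters on the miss path: it tells the parent metadata whether the failed lookup came from attribute access, so the caller sees the conventional exception type. A behavior sketch, assuming a row with a column named "a" (the error messages are exercised by the tests further down):

row.a                    # fast path: dict lookup + tuple index
row._mapping["a"]        # same path via _get_by_key_impl_mapping
row.missing              # AttributeError      (attr_err nonzero)
row._mapping["missing"]  # KeyError subclass   (attr_err zero)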
def rowproxy_reconstructor(cls, state):
from __future__ import annotations
-import enum
import operator
import typing
from typing import Any
from typing import Dict
from typing import Iterator
from typing import List
+from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Type
-from typing import Union
if typing.TYPE_CHECKING:
- from .result import _KeyMapType
from .result import _KeyType
from .result import _ProcessorsType
from .result import _RawRowType
MD_INDEX = 0 # integer index in cursor.description
-class _KeyStyle(enum.IntEnum):
- KEY_INTEGER_ONLY = 0
- """__getitem__ only allows integer values and slices, raises TypeError
- otherwise"""
-
- KEY_OBJECTS_ONLY = 1
- """__getitem__ only allows string/object values, raises TypeError
- otherwise"""
-
-
-KEY_INTEGER_ONLY, KEY_OBJECTS_ONLY = list(_KeyStyle)
-
-
class BaseRow:
- __slots__ = ("_parent", "_data", "_keymap", "_key_style")
+ __slots__ = ("_parent", "_data", "_key_to_index")
_parent: ResultMetaData
+ _key_to_index: Mapping[_KeyType, int]
_data: _RawRowType
- _keymap: _KeyMapType
- _key_style: _KeyStyle
def __init__(
self,
parent: ResultMetaData,
processors: Optional[_ProcessorsType],
- keymap: _KeyMapType,
- key_style: _KeyStyle,
+ key_to_index: Mapping[_KeyType, int],
data: _RawRowType,
):
"""Row objects are constructed by CursorResult objects."""
object.__setattr__(self, "_parent", parent)
+ object.__setattr__(self, "_key_to_index", key_to_index)
+
if processors:
object.__setattr__(
self,
else:
object.__setattr__(self, "_data", tuple(data))
- object.__setattr__(self, "_keymap", keymap)
-
- object.__setattr__(self, "_key_style", key_style)
-
def __reduce__(self) -> Tuple[Callable[..., BaseRow], Tuple[Any, ...]]:
return (
rowproxy_reconstructor,
)
def __getstate__(self) -> Dict[str, Any]:
- return {
- "_parent": self._parent,
- "_data": self._data,
- "_key_style": self._key_style,
- }
+ return {"_parent": self._parent, "_data": self._data}
def __setstate__(self, state: Dict[str, Any]) -> None:
parent = state["_parent"]
object.__setattr__(self, "_parent", parent)
object.__setattr__(self, "_data", state["_data"])
- object.__setattr__(self, "_keymap", parent._keymap)
- object.__setattr__(self, "_key_style", state["_key_style"])
+ object.__setattr__(self, "_key_to_index", parent._key_to_index)
def _values_impl(self) -> List[Any]:
return list(self)
def __hash__(self) -> int:
return hash(self._data)
- def _get_by_int_impl(self, key: Union[int, slice]) -> Any:
+ def __getitem__(self, key: Any) -> Any:
return self._data[key]
- if not typing.TYPE_CHECKING:
- __getitem__ = _get_by_int_impl
-
- def _get_by_key_impl_mapping(self, key: _KeyType) -> Any:
+ def _get_by_key_impl_mapping(self, key: str) -> Any:
try:
- rec = self._keymap[key]
- except KeyError as ke:
- rec = self._parent._key_fallback(key, ke)
-
- mdindex = rec[MD_INDEX]
- if mdindex is None:
- self._parent._raise_for_ambiguous_column_name(rec)
- # NOTE: keep "== KEY_OBJECTS_ONLY" instead of "is KEY_OBJECTS_ONLY"
- # since deserializing the class from cython will load an int in
- # _key_style, not an instance of _KeyStyle
- elif self._key_style == KEY_OBJECTS_ONLY and isinstance(key, int):
- raise KeyError(key)
-
- return self._data[mdindex]
+ return self._data[self._key_to_index[key]]
+ except KeyError:
+ pass
+ self._parent._key_not_found(key, False)
def __getattr__(self, name: str) -> Any:
try:
- return self._get_by_key_impl_mapping(name)
- except KeyError as e:
- raise AttributeError(e.args[0]) from e
+ return self._data[self._key_to_index[name]]
+ except KeyError:
+ pass
+ self._parent._key_not_found(name, True)
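This pair of state methods is also where the one-way pickle compatibility noted in the changelog comes from: states written by older versions carry an extra _key_style entry that the new __setstate__ simply never reads, while states written by this version omit it, and an older __setstate__ would fail on the missing key. Sketched with hypothetical state dicts:

# state written by this version: _key_to_index is rebuilt from the
# parent on load, so only two entries are pickled
new_state = {"_parent": parent, "_data": (1, 2, 3)}

# state written by older versions; the extra entry is now ignored,
# so old pickles still load
old_state = {"_parent": parent, "_data": (1, 2, 3), "_key_style": 0}

# an older __setstate__ does state["_key_style"], which raises KeyError
# for new_state -- hence new pickles cannot be loaded by old versions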
# This reconstructor is necessary so that pickles with the Cy extension or
from typing import Dict
from typing import Iterator
from typing import List
+from typing import Mapping
from typing import NoReturn
from typing import Optional
-from typing import overload
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
Optional[str], # MD_UNTRANSLATED
]
-_CursorKeyMapType = Dict["_KeyType", _CursorKeyMapRecType]
+_CursorKeyMapType = Mapping["_KeyType", _CursorKeyMapRecType]
# same as _CursorKeyMapRecType except the MD_INDEX value is definitely
# not None
"_tuplefilter",
"_translated_indexes",
"_safe_for_cache",
- "_unpickled"
+ "_unpickled",
+ "_key_to_index"
# don't need _unique_filters support here for now. Can be added
# if a need arises.
)
new_obj._translated_indexes = translated_indexes
new_obj._safe_for_cache = safe_for_cache
new_obj._keymap_by_result_column_idx = keymap_by_result_column_idx
+ new_obj._key_to_index = self._make_key_to_index(keymap, MD_INDEX)
return new_obj
def _remove_processors(self) -> CursorResultMetaData:
assert not self._tuplefilter
- keymap = self._keymap.copy()
+ keymap = dict(self._keymap)
offset = len(self._keys)
keymap.update(
{
for key, value in other._keymap.items()
}
)
-
return self._make_new_metadata(
unpickled=self._unpickled,
processors=self._processors + other._processors, # type: ignore
tup = tuplegetter(*indexes)
new_recs = [(index,) + rec[1:] for index, rec in enumerate(recs)]
- keymap: _KeyMapType = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
+ keymap = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
# TODO: need unit test for:
# result = connection.execute("raw sql, no columns").scalars()
# without the "or ()" it's failing because MD_OBJECTS is None
keys=new_keys,
tuplefilter=tup,
translated_indexes=indexes,
- keymap=keymap,
+ keymap=keymap, # type: ignore[arg-type]
safe_for_cache=self._safe_for_cache,
keymap_by_result_column_idx=self._keymap_by_result_column_idx,
)
}
)
+ self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
+
def _merge_cursor_description(
self,
context,
untranslated,
)
- @overload
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: Literal[True] = ...
- ) -> NoReturn:
- ...
+ if not TYPE_CHECKING:
- @overload
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: Literal[False] = ...
- ) -> None:
- ...
-
- @overload
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: bool = ...
- ) -> Optional[NoReturn]:
- ...
-
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: bool = True
- ) -> Optional[NoReturn]:
-
- if raiseerr:
- if self._unpickled and isinstance(key, elements.ColumnElement):
- raise exc.NoSuchColumnError(
- "Row was unpickled; lookup by ColumnElement "
- "is unsupported"
- ) from err
+ def _key_fallback(
+ self, key: Any, err: Optional[Exception], raiseerr: bool = True
+ ) -> Optional[NoReturn]:
+
+ if raiseerr:
+ if self._unpickled and isinstance(key, elements.ColumnElement):
+ raise exc.NoSuchColumnError(
+ "Row was unpickled; lookup by ColumnElement "
+ "is unsupported"
+ ) from err
+ else:
+ raise exc.NoSuchColumnError(
+ "Could not locate column in row for column '%s'"
+ % util.string_or_unprintable(key)
+ ) from err
else:
- raise exc.NoSuchColumnError(
- "Could not locate column in row for column '%s'"
- % util.string_or_unprintable(key)
- ) from err
- else:
- return None
+ return None
def _raise_for_ambiguous_column_name(self, rec):
raise exc.InvalidRequestError(
def __setstate__(self, state):
self._processors = [None for _ in range(len(state["_keys"]))]
self._keymap = state["_keymap"]
-
self._keymap_by_result_column_idx = None
+ self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
self._keys = state["_keys"]
self._unpickled = True
if state["_translated_indexes"]:
def _keymap(self):
self._we_dont_return_rows()
+ @property
+ def _key_to_index(self):
+ self._we_dont_return_rows()
+
+ @property
+ def _processors(self):
+ self._we_dont_return_rows()
+
@property
def keys(self):
self._we_dont_return_rows()
metadata = self._init_metadata(context, cursor_description)
- keymap = metadata._keymap
- processors = metadata._processors
- process_row = Row
- key_style = process_row._default_key_style
_make_row = functools.partial(
- process_row, metadata, processors, keymap, key_style
+ Row,
+ metadata,
+ metadata._processors,
+ metadata._key_to_index,
)
if log_row:
from typing import Iterable
from typing import Iterator
from typing import List
+from typing import Mapping
from typing import NoReturn
from typing import Optional
from typing import overload
# is overridden in cursor using _CursorKeyMapRecType
_KeyMapRecType = Any
-_KeyMapType = Dict[_KeyType, _KeyMapRecType]
+_KeyMapType = Mapping[_KeyType, _KeyMapRecType]
_RowData = Union[Row, RowMapping, Any]
_keymap: _KeyMapType
_keys: Sequence[str]
_processors: Optional[_ProcessorsType]
+ _key_to_index: Mapping[_KeyType, int]
@property
def keys(self) -> RMKeyView:
@overload
def _key_fallback(
- self, key: Any, err: Exception, raiseerr: Literal[True] = ...
+ self, key: Any, err: Optional[Exception], raiseerr: Literal[True] = ...
) -> NoReturn:
...
@overload
def _key_fallback(
- self, key: Any, err: Exception, raiseerr: Literal[False] = ...
+ self,
+ key: Any,
+ err: Optional[Exception],
+ raiseerr: Literal[False] = ...,
) -> None:
...
@overload
def _key_fallback(
- self, key: Any, err: Exception, raiseerr: bool = ...
+ self, key: Any, err: Optional[Exception], raiseerr: bool = ...
) -> Optional[NoReturn]:
...
def _key_fallback(
- self, key: Any, err: Exception, raiseerr: bool = True
+ self, key: Any, err: Optional[Exception], raiseerr: bool = True
) -> Optional[NoReturn]:
assert raiseerr
raise KeyError(key) from err
indexes = self._indexes_for_keys(keys)
return tuplegetter(*indexes)
+ def _make_key_to_index(
+ self, keymap: Mapping[_KeyType, Sequence[Any]], index: int
+ ) -> Mapping[_KeyType, int]:
+ return {
+ key: rec[index]
+ for key, rec in keymap.items()
+ if rec[index] is not None
+ }
+
+ def _key_not_found(self, key: Any, attr_error: bool) -> NoReturn:
+ if key in self._keymap:
+ # the index must be None in this case
+ self._raise_for_ambiguous_column_name(self._keymap[key])
+ else:
+ # unknown key
+ if attr_error:
+ try:
+ self._key_fallback(key, None)
+ except KeyError as ke:
+ raise AttributeError(ke.args[0]) from ke
+ else:
+ self._key_fallback(key, None)
+
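Note the interplay of these two helpers: _make_key_to_index deliberately drops keymap records whose index slot is None (ambiguous column names), so those keys miss the fast-path dict and land in _key_not_found, which re-checks _keymap in order to raise the dedicated ambiguity error. A small sketch, with keymap records abbreviated to bare index tuples:

keymap = {"id": (0,), "name": (1,), "ambiguous": (None,)}
key_to_index = {
    key: rec[0] for key, rec in keymap.items() if rec[0] is not None
}
assert key_to_index == {"id": 0, "name": 1}
# "ambiguous" is absent from the dict, so row.ambiguous falls through
# to _key_not_found and on to _raise_for_ambiguous_column_name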
class RMKeyView(typing.KeysView[Any]):
__slots__ = ("_parent", "_keys")
"_tuplefilter",
"_translated_indexes",
"_unique_filters",
+ "_key_to_index",
)
_keys: Sequence[str]
self._processors = _processors
+ self._key_to_index = self._make_key_to_index(self._keymap, 0)
+
def _has_key(self, key: object) -> bool:
return key in self._keymap
) -> Callable[[Iterable[Any]], Row[Any]]:
parent = SimpleResultMetaData(fields, extra)
return functools.partial(
- Row, parent, parent._processors, parent._keymap, Row._default_key_style
+ Row, parent, parent._processors, parent._key_to_index
)
def process_row( # type: ignore
metadata: ResultMetaData,
processors: _ProcessorsType,
- keymap: _KeyMapType,
- key_style: Any,
+ key_to_index: Mapping[_KeyType, int],
scalar_obj: Any,
) -> Row[Any]:
return _proc(
- metadata, processors, keymap, key_style, (scalar_obj,)
+ metadata, processors, key_to_index, (scalar_obj,)
)
else:
process_row = Row # type: ignore
- key_style = Row._default_key_style
metadata = self._metadata
- keymap = metadata._keymap
+ key_to_index = metadata._key_to_index
processors = metadata._processors
tf = metadata._tuplefilter
processors = tf(processors)
_make_row_orig: Callable[..., _R] = functools.partial( # type: ignore # noqa E501
- process_row, metadata, processors, keymap, key_style
+ process_row, metadata, processors, key_to_index
)
fixed_tf = tf
else:
make_row = functools.partial( # type: ignore
- process_row, metadata, processors, keymap, key_style
+ process_row, metadata, processors, key_to_index
)
fns: Tuple[Any, ...] = ()
if TYPE_CHECKING or not HAS_CYEXTENSION:
from ._py_row import BaseRow as BaseRow
- from ._py_row import KEY_INTEGER_ONLY
- from ._py_row import KEY_OBJECTS_ONLY
else:
from sqlalchemy.cyextension.resultproxy import BaseRow as BaseRow
- from sqlalchemy.cyextension.resultproxy import KEY_INTEGER_ONLY
- from sqlalchemy.cyextension.resultproxy import KEY_OBJECTS_ONLY
if TYPE_CHECKING:
from .result import _KeyType
__slots__ = ()
- _default_key_style = KEY_INTEGER_ONLY
-
def __setattr__(self, name: str, value: Any) -> NoReturn:
raise AttributeError("can't set attribute")
.. versionadded:: 1.4
"""
- return RowMapping(
- self._parent,
- None,
- self._keymap,
- RowMapping._default_key_style,
- self._data,
- )
+ return RowMapping(self._parent, None, self._key_to_index, self._data)
def _filter_on_values(
self, filters: Optional[Sequence[Optional[_ResultProcessorType[Any]]]]
) -> Row[Any]:
- return Row(
- self._parent,
- filters,
- self._keymap,
- self._key_style,
- self._data,
- )
+ return Row(self._parent, filters, self._key_to_index, self._data)
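Both constructors now thread the same precomputed dict through, so a RowMapping or a filtered Row shares the parent's lookup table rather than a keymap copy. Assuming a row whose first column is named "key":

mapping = row._mapping
assert mapping["key"] == row.key == row[0]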
if not TYPE_CHECKING:
def __getitem__(self, index: slice) -> Sequence[Any]:
...
- def __getitem__(
- self, index: Union[int, slice]
- ) -> Union[Any, Sequence[Any]]:
+ def __getitem__(self, index: Union[int, slice]) -> Any:
...
def __lt__(self, other: Any) -> bool:
__slots__ = ()
- _default_key_style = KEY_OBJECTS_ONLY
-
if TYPE_CHECKING:
def __getitem__(self, key: _KeyType) -> Any:
def _rowproxy_fixture(self, keys, processors, row, row_cls):
class MockMeta:
- def __init__(self):
- pass
-
def _warn_for_nonint(self, arg):
pass
for key in keyobjs:
keymap[key] = (index, key)
keymap[index] = (index, key)
- return row_cls(
- metadata, processors, keymap, row_cls._default_key_style, row
- )
+
+ key_to_index = {key: rec[0] for key, rec in keymap.items()}
+ return row_cls(metadata, processors, key_to_index, row)
def _test_getitem_value_refcounts_new(self, seq_factory):
col1, col2 = object(), object()
if direction.py_to_cy:
dump_cls = _py_row.BaseRow
- num = _py_row.KEY_INTEGER_ONLY
load_cls = _cy_row.BaseRow
elif direction.cy_to_py:
dump_cls = _cy_row.BaseRow
- num = _cy_row.KEY_INTEGER_ONLY
load_cls = _py_row.BaseRow
else:
direction.fail()
class Row(dump_cls):
pass
- row = Row(p, p._processors, p._keymap, num, (1, 2, 3))
+ row = Row(p, p._processors, p._key_to_index, (1, 2, 3))
state = dumps(row)
from sqlalchemy import column
-def test_case(fn):
- fn.__test_case__ = True
- return fn
+def test_case(fn=None, *, number=None):
+ def wrap(fn):
+ fn.__test_case__ = True
+ if number is not None:
+ fn.__number__ = number
+ return fn
+
+ if fn is None:
+ return wrap
+ else:
+ return wrap(fn)
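The decorator is now usable both bare and parameterized: a case can pin its own iteration count, which the runner below scales by the suite-level factor. Usage sketch (case names here are illustrative):

@test_case                 # unchanged bare form
def row_getattr(self):
    ...

@test_case(number=50_000)  # pinned; runner uses max(1, int(50_000 * factor))
def getattr_recreate(self):
    ...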
class Case:
for m in methods:
call = getattr(impl_case, m)
try:
- value = timeit(call, number=number)
+ t_num = number
+ fn_num = getattr(call, "__number__", None)
+ if fn_num is not None:
+ t_num = max(1, int(fn_num * factor))
+ value = timeit(call, number=t_num)
print(".", end="", flush=True)
except Exception as e:
fails.append(f"{name}::{m} error: {e}")
def __init__(self, data):
self.data = data
- def _get_by_int_impl(self, index):
+ def __getitem__(self, index):
# called by python
return self.data[index]
self.row_args = (
self.parent,
self.parent._processors,
- self.parent._keymap,
- 0,
+ self.parent._key_to_index,
(1, 2, 3),
)
self.parent_long = SimpleResultMetaData(tuple(ascii_letters))
self.row_long_args = (
self.parent_long,
self.parent_long._processors,
- self.parent_long._keymap,
- 0,
+ self.parent_long._key_to_index,
tuple(range(len(ascii_letters))),
)
self.row = self.impl(*self.row_args)
self.impl.__new__(self.impl).__setstate__(self.row_state)
self.impl.__new__(self.impl).__setstate__(self.row_long_state)
- @test_case
- def row_filter(self):
- self.row._filter_on_values(None)
- self.row_long._filter_on_values(None)
-
@test_case
def row_values_impl(self):
self.row._values_impl()
self.row_long[0:1]
self.row_long[1:-1]
- @test_case
- def get_by_int(self):
- self.row._get_by_int_impl(0)
- self.row._get_by_int_impl(1)
- self.row_long._get_by_int_impl(0)
- self.row_long._get_by_int_impl(1)
-
@test_case
def get_by_key(self):
- self.row._get_by_key_impl(0)
- self.row._get_by_key_impl(1)
- self.row_long._get_by_key_impl(0)
- self.row_long._get_by_key_impl(1)
-
- @test_case
- def get_by_key_slice(self):
- self.row._get_by_key_impl(slice(0, 1))
- self.row._get_by_key_impl(slice(1, -1))
- self.row_long._get_by_key_impl(slice(0, 1))
- self.row_long._get_by_key_impl(slice(1, -1))
+ self.row._get_by_key_impl_mapping("a")
+ self.row._get_by_key_impl_mapping("b")
+ self.row_long._get_by_key_impl_mapping("s")
+ self.row_long._get_by_key_impl_mapping("a")
@test_case
def getattr(self):
self.row_long.x
self.row_long.y
+ @test_case(number=50_000)
+ def get_by_key_recreate(self):
+ self.init_objects()
+ row = self.row
+ for _ in range(25):
+ row._get_by_key_impl_mapping("a")
+ l_row = self.row_long
+ for _ in range(25):
+ l_row._get_by_key_impl_mapping("f")
+ l_row._get_by_key_impl_mapping("o")
+ l_row._get_by_key_impl_mapping("r")
+ l_row._get_by_key_impl_mapping("t")
+ l_row._get_by_key_impl_mapping("y")
+ l_row._get_by_key_impl_mapping("t")
+ l_row._get_by_key_impl_mapping("w")
+ l_row._get_by_key_impl_mapping("o")
+
+ @test_case(number=50_000)
+ def getattr_recreate(self):
+ self.init_objects()
+ row = self.row
+ for _ in range(25):
+ row.a
+ l_row = self.row_long
+ for _ in range(25):
+ l_row.f
+ l_row.o
+ l_row.r
+ l_row.t
+ l_row.y
+ l_row.t
+ l_row.w
+ l_row.o
+
class CacheAnonMap(Case):
@staticmethod
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 50
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 50
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_mysqldb_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mariadb_pymysql_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_mssql_pyodbc_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_oracle_cx_oracle_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 53
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 53
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_cextensions 2592
test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_postgresql_psycopg2_dbapiunicode_nocextensions 25595
test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_cextensions 2539
-test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 25543
+test.aaa_profiling.test_resultset.ResultSetTest.test_fetch_by_key_mappings x86_64_linux_cpython_3.10_sqlite_pysqlite_dbapiunicode_nocextensions 15600
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_one_or_none[False-0]
from sqlalchemy.engine import default
from sqlalchemy.engine import Row
from sqlalchemy.engine.result import SimpleResultMetaData
-from sqlalchemy.engine.row import KEY_INTEGER_ONLY
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import ColumnElement
from sqlalchemy.sql import expression
rows[0].user_id
def test_keys_no_rows(self, connection):
-
for i in range(2):
r = connection.execute(
text("update users set user_name='new' where user_id=10")
operator.ge,
operator.le,
]:
-
try:
control = op(equal, compare)
except TypeError:
def test_column_accessor_err(self, connection):
r = connection.execute(select(1)).first()
- assert_raises_message(
- AttributeError,
- "Could not locate column in row for column 'foo'",
- getattr,
- r,
- "foo",
- )
- assert_raises_message(
- KeyError,
- "Could not locate column in row for column 'foo'",
- lambda: r._mapping["foo"],
- )
+ with expect_raises_message(
+ AttributeError, "Could not locate column in row for column 'foo'"
+ ):
+ r.foo
+ with expect_raises_message(
+ KeyError, "Could not locate column in row for column 'foo'"
+ ):
+ r._mapping["foo"]
def test_graceful_fetch_on_non_rows(self):
"""test that calling fetchone() etc. on a result that doesn't
eq_(list(row._fields), ["case_insensitive", "CaseSensitive"])
- in_("case_insensitive", row._keymap)
- in_("CaseSensitive", row._keymap)
- not_in("casesensitive", row._keymap)
+ in_("case_insensitive", row._parent._keymap)
+ in_("CaseSensitive", row._parent._keymap)
+ not_in("casesensitive", row._parent._keymap)
eq_(row._mapping["case_insensitive"], 1)
eq_(row._mapping["CaseSensitive"], 2)
["case_insensitive", "CaseSensitive", "screw_up_the_cols"],
)
- in_("case_insensitive", row._keymap)
- in_("CaseSensitive", row._keymap)
- not_in("casesensitive", row._keymap)
+ in_("case_insensitive", row._parent._keymap)
+ in_("CaseSensitive", row._parent._keymap)
+ not_in("casesensitive", row._parent._keymap)
eq_(row._mapping["case_insensitive"], 1)
eq_(row._mapping["CaseSensitive"], 2)
def __getitem__(self, i):
return list.__getitem__(self.internal_list, i)
- proxy = Row(
- object(),
- [None],
- {"key": (0, None, "key"), 0: (0, None, "key")},
- Row._default_key_style,
- MyList(["value"]),
- )
+ parent = SimpleResultMetaData(["key"])
+ proxy = Row(parent, [None], parent._key_to_index, MyList(["value"]))
eq_(list(proxy), ["value"])
eq_(proxy[0], "value")
+ eq_(proxy.key, "value")
eq_(proxy._mapping["key"], "value")
def test_no_rowcount_on_selects_inserts(self, metadata, testing_engine):
eq_(len(mock_rowcount.__get__.mock_calls), 2)
def test_row_is_sequence(self):
-
- row = Row(
- object(),
- [None],
- {"key": (None, 0), 0: (None, 0)},
- Row._default_key_style,
- ["value"],
- )
+ row = Row(object(), [None], {}, ["value"])
is_true(isinstance(row, collections_abc.Sequence))
def test_row_special_names(self):
row = Row(
metadata,
[None, None, None, None],
- metadata._keymap,
- Row._default_key_style,
+ metadata._key_to_index,
["kv", "cv", "iv", "f"],
)
is_true(isinstance(row, collections_abc.Sequence))
row = Row(
metadata,
[None, None, None],
- metadata._keymap,
- Row._default_key_style,
+ metadata._key_to_index,
["kv", "cv", "iv"],
)
is_true(isinstance(row, collections_abc.Sequence))
def test_new_row_no_dict_behaviors(self):
"""This mode is not used currently but will be once we are in 2.0."""
- metadata = SimpleResultMetaData(
- [
- "a",
- "b",
- "count",
- ]
- )
+ metadata = SimpleResultMetaData(["a", "b", "count"])
row = Row(
metadata,
[None, None, None],
- metadata._keymap,
- KEY_INTEGER_ONLY,
+ metadata._key_to_index,
["av", "bv", "cv"],
)
eq_(list(row._mapping), ["a", "b", "count"])
def test_row_is_hashable(self):
-
- row = Row(
- object(),
- [None, None, None],
- {"key": (None, 0), 0: (None, 0)},
- Row._default_key_style,
- (1, "value", "foo"),
- )
+ row = Row(object(), [None, None, None], {}, (1, "value", "foo"))
eq_(hash(row), hash((1, "value", "foo")))
@testing.provide_metadata
r = conn.execute(select(self.table))
assert isinstance(r.cursor_strategy, strategy_cls)
with mock.patch.object(r, "cursor", cursor()):
-
with testing.expect_raises_message(
IOError, "random non-DBAPI"
):
users = self.tables.users
def results(connection):
-
r1 = connection.execute(
users.select()
.where(users.c.user_id.in_([7, 8]))