git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Complete type annotations of sqlalchemy.engine.cursor module
author Denis Laxalde <denis@laxalde.org>
Sat, 4 Oct 2025 19:13:29 +0000 (15:13 -0400)
committer Federico Caselli <cfederico87@gmail.com>
Thu, 9 Oct 2025 20:16:18 +0000 (22:16 +0200)
References: #6810
Closes: #12760
Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/12760
Pull-request-sha: 2804f49e1922d3cdc67065b733d0dd6e06554905

Change-Id: I974f966b2cd8f984b872d0aa27bbbdab78c6c1d7
(cherry picked from commit 692dee952499bc9ed5abe31692c011b6e4847486)

lib/sqlalchemy/engine/cursor.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/engine/interfaces.py
lib/sqlalchemy/sql/base.py

index 0dbc8e625b7aaf755655e99f1dbf7d9cf2cf2694..26d8750bf073f03329622d479fa3161033c59775 100644 (file)
@@ -4,7 +4,6 @@
 #
 # This module is part of SQLAlchemy and is released under
 # the MIT License: https://www.opensource.org/licenses/mit-license.php
-# mypy: allow-untyped-defs, allow-untyped-calls
 
 """Define cursor-specific result set constructs including
 :class:`.CursorResult`."""
@@ -19,6 +18,7 @@ import typing
 from typing import Any
 from typing import cast
 from typing import ClassVar
+from typing import Deque
 from typing import Dict
 from typing import Iterable
 from typing import Iterator
@@ -61,7 +61,10 @@ if typing.TYPE_CHECKING:
     from .base import Connection
     from .default import DefaultExecutionContext
     from .interfaces import _DBAPICursorDescription
+    from .interfaces import _MutableCoreSingleExecuteParams
+    from .interfaces import CoreExecuteOptionsParameter
     from .interfaces import DBAPICursor
+    from .interfaces import DBAPIType
     from .interfaces import Dialect
     from .interfaces import ExecutionContext
     from .result import _KeyIndexType
@@ -70,11 +73,12 @@ if typing.TYPE_CHECKING:
     from .result import _KeyType
     from .result import _ProcessorsType
     from .result import _TupleGetterType
+    from ..sql.schema import Column
     from ..sql.type_api import _ResultProcessorType
 
 
 _T = TypeVar("_T", bound=Any)
-
+TupleAny = Tuple[Any, ...]
 
 # metadata entry tuple indexes.
 # using raw tuple is faster than namedtuple.
@@ -119,7 +123,7 @@ MD_UNTRANSLATED: Final[Literal[6]] = 6
 _CursorKeyMapRecType = Tuple[
     Optional[int],  # MD_INDEX, None means the record is ambiguously named
     int,  # MD_RESULT_MAP_INDEX, -1 if MD_INDEX is None
-    List[Any],  # MD_OBJECTS
+    TupleAny,  # MD_OBJECTS
     str,  # MD_LOOKUP_KEY
     str,  # MD_RENDERED_NAME
     Optional["_ResultProcessorType[Any]"],  # MD_PROCESSOR
@@ -140,6 +144,16 @@ _NonAmbigCursorKeyMapRecType = Tuple[
     str,
 ]
 
+_MergeColTuple = Tuple[
+    int,
+    Optional[int],
+    str,
+    TypeEngine[Any],
+    "DBAPIType",
+    Optional[TupleAny],
+    Optional[str],
+]
+
 
 class CursorResultMetaData(ResultMetaData):
     """Result metadata for DBAPI cursors."""
@@ -398,7 +412,7 @@ class CursorResultMetaData(ResultMetaData):
         # column keys and other names
         if num_ctx_cols:
             # keymap by primary string...
-            by_key: Dict[str, _CursorKeyMapRecType] = {
+            by_key: Dict[_KeyType, _CursorKeyMapRecType] = {
                 metadata_entry[MD_LOOKUP_KEY]: metadata_entry
                 for metadata_entry in raw
             }
@@ -444,7 +458,7 @@ class CursorResultMetaData(ResultMetaData):
                 # record into by_key.
                 by_key.update(
                     {
-                        key: (None, -1, [], key, key, None, None)
+                        key: (None, -1, (), key, key, None, None)
                         for key in dupes
                     }
                 )
@@ -493,15 +507,15 @@ class CursorResultMetaData(ResultMetaData):
 
     def _merge_cursor_description(
         self,
-        context,
-        cursor_description,
-        result_columns,
-        num_ctx_cols,
-        cols_are_ordered,
-        textual_ordered,
-        ad_hoc_textual,
-        loose_column_name_matching,
-    ):
+        context: DefaultExecutionContext,
+        cursor_description: _DBAPICursorDescription,
+        result_columns: Sequence[ResultColumnsEntry],
+        num_ctx_cols: int,
+        cols_are_ordered: bool,
+        textual_ordered: bool,
+        ad_hoc_textual: bool,
+        loose_column_name_matching: bool,
+    ) -> List[_CursorKeyMapRecType]:
         """Merge a cursor.description with compiled result column information.
 
         There are at least four separate strategies used here, selected
@@ -631,7 +645,7 @@ class CursorResultMetaData(ResultMetaData):
                         mapped_type, cursor_colname, coltype
                     ),
                     untranslated,
-                )
+                )  # type: ignore[misc]
                 for (
                     idx,
                     ridx,
@@ -643,7 +657,11 @@ class CursorResultMetaData(ResultMetaData):
                 ) in raw_iterator
             ]
 
-    def _colnames_from_description(self, context, cursor_description):
+    def _colnames_from_description(
+        self,
+        context: DefaultExecutionContext,
+        cursor_description: _DBAPICursorDescription,
+    ) -> Iterator[Tuple[int, str, Optional[str], DBAPIType]]:
         """Extract column names and data types from a cursor.description.
 
         Applies unicode decoding, column translation, "normalization",
@@ -675,8 +693,11 @@ class CursorResultMetaData(ResultMetaData):
             yield idx, colname, untranslated, coltype
 
     def _merge_textual_cols_by_position(
-        self, context, cursor_description, result_columns
-    ):
+        self,
+        context: DefaultExecutionContext,
+        cursor_description: _DBAPICursorDescription,
+        result_columns: Sequence[ResultColumnsEntry],
+    ) -> Iterator[_MergeColTuple]:
         num_ctx_cols = len(result_columns)
 
         if num_ctx_cols > len(cursor_description):
@@ -712,11 +733,11 @@ class CursorResultMetaData(ResultMetaData):
 
     def _merge_cols_by_name(
         self,
-        context,
-        cursor_description,
-        result_columns,
-        loose_column_name_matching,
-    ):
+        context: DefaultExecutionContext,
+        cursor_description: _DBAPICursorDescription,
+        result_columns: Sequence[ResultColumnsEntry],
+        loose_column_name_matching: bool,
+    ) -> Iterator[_MergeColTuple]:
         match_map = self._create_description_match_map(
             result_columns, loose_column_name_matching
         )
@@ -751,11 +772,9 @@ class CursorResultMetaData(ResultMetaData):
     @classmethod
     def _create_description_match_map(
         cls,
-        result_columns: List[ResultColumnsEntry],
+        result_columns: Sequence[ResultColumnsEntry],
         loose_column_name_matching: bool = False,
-    ) -> Dict[
-        Union[str, object], Tuple[str, Tuple[Any, ...], TypeEngine[Any], int]
-    ]:
+    ) -> Dict[Union[str, object], Tuple[str, TupleAny, TypeEngine[Any], int]]:
         """when matching cursor.description to a set of names that are present
         in a Compiled object, as is the case with TextualSelect, get all the
         names we expect might match those in cursor.description.
@@ -763,7 +782,7 @@ class CursorResultMetaData(ResultMetaData):
 
         d: Dict[
             Union[str, object],
-            Tuple[str, Tuple[Any, ...], TypeEngine[Any], int],
+            Tuple[str, TupleAny, TypeEngine[Any], int],
         ] = {}
         for ridx, elem in enumerate(result_columns):
             key = elem[RM_RENDERED_NAME]
@@ -790,7 +809,13 @@ class CursorResultMetaData(ResultMetaData):
                     )
         return d
 
-    def _merge_cols_by_none(self, context, cursor_description):
+    def _merge_cols_by_none(
+        self,
+        context: DefaultExecutionContext,
+        cursor_description: _DBAPICursorDescription,
+    ) -> Iterator[_MergeColTuple]:
+        self._keys = []
+
         for (
             idx,
             colname,
@@ -826,13 +851,17 @@ class CursorResultMetaData(ResultMetaData):
             else:
                 return None
 
-    def _raise_for_ambiguous_column_name(self, rec):
+    def _raise_for_ambiguous_column_name(
+        self, rec: _KeyMapRecType
+    ) -> NoReturn:
         raise exc.InvalidRequestError(
             "Ambiguous column name '%s' in "
             "result set column descriptions" % rec[MD_LOOKUP_KEY]
         )
 
-    def _index_for_key(self, key: Any, raiseerr: bool = True) -> Optional[int]:
+    def _index_for_key(
+        self, key: _KeyIndexType, raiseerr: bool = True
+    ) -> Optional[int]:
         # TODO: can consider pre-loading ints and negative ints
         # into _keymap - also no coverage here
         if isinstance(key, int):
@@ -851,22 +880,24 @@ class CursorResultMetaData(ResultMetaData):
             self._raise_for_ambiguous_column_name(rec)
         return index
 
-    def _indexes_for_keys(self, keys):
+    def _indexes_for_keys(
+        self, keys: Sequence[_KeyIndexType]
+    ) -> Sequence[int]:
         try:
-            return [self._keymap[key][0] for key in keys]
+            return [self._keymap[key][0] for key in keys]  # type: ignore[index,misc]  # noqa: E501
         except KeyError as ke:
             # ensure it raises
             CursorResultMetaData._key_fallback(self, ke.args[0], ke)
 
     def _metadata_for_keys(
-        self, keys: Sequence[Any]
+        self, keys: Sequence[_KeyIndexType]
     ) -> Iterator[_NonAmbigCursorKeyMapRecType]:
         for key in keys:
             if int in key.__class__.__mro__:
-                key = self._keys[key]
+                key = self._keys[key]  # type: ignore[index]
 
             try:
-                rec = self._keymap[key]
+                rec = self._keymap[key]  # type: ignore[index]
             except KeyError as ke:
                 # ensure it raises
                 CursorResultMetaData._key_fallback(self, ke.args[0], ke)
@@ -878,7 +909,7 @@ class CursorResultMetaData(ResultMetaData):
 
             yield cast(_NonAmbigCursorKeyMapRecType, rec)
 
-    def __getstate__(self):
+    def __getstate__(self) -> Dict[str, Any]:
         # TODO: consider serializing this as SimpleResultMetaData
         return {
             "_keymap": {
@@ -898,7 +929,7 @@ class CursorResultMetaData(ResultMetaData):
             "_translated_indexes": self._translated_indexes,
         }
 
-    def __setstate__(self, state):
+    def __setstate__(self, state: Dict[str, Any]) -> None:
         self._processors = [None for _ in range(len(state["_keys"]))]
         self._keymap = state["_keymap"]
         self._keymap_by_result_column_idx = None
@@ -939,7 +970,7 @@ class ResultFetchStrategy:
     def yield_per(
         self,
         result: CursorResult[Any],
-        dbapi_cursor: Optional[DBAPICursor],
+        dbapi_cursor: DBAPICursor,
         num: int,
     ) -> None:
         return
@@ -987,22 +1018,47 @@ class NoCursorFetchStrategy(ResultFetchStrategy):
 
     __slots__ = ()
 
-    def soft_close(self, result, dbapi_cursor):
+    def soft_close(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+    ) -> None:
         pass
 
-    def hard_close(self, result, dbapi_cursor):
+    def hard_close(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: Optional[DBAPICursor],
+    ) -> None:
         pass
 
-    def fetchone(self, result, dbapi_cursor, hard_close=False):
+    def fetchone(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        hard_close: bool = False,
+    ) -> Any:
         return self._non_result(result, None)
 
-    def fetchmany(self, result, dbapi_cursor, size=None):
+    def fetchmany(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        size: Optional[int] = None,
+    ) -> Any:
         return self._non_result(result, [])
 
-    def fetchall(self, result, dbapi_cursor):
+    def fetchall(
+        self, result: CursorResult[Any], dbapi_cursor: DBAPICursor
+    ) -> Any:
         return self._non_result(result, [])
 
-    def _non_result(self, result, default, err=None):
+    def _non_result(
+        self,
+        result: CursorResult[Any],
+        default: Any,
+        err: Optional[BaseException] = None,
+    ) -> Any:
         raise NotImplementedError()
 
 
@@ -1019,7 +1075,12 @@ class NoCursorDQLFetchStrategy(NoCursorFetchStrategy):
 
     __slots__ = ()
 
-    def _non_result(self, result, default, err=None):
+    def _non_result(
+        self,
+        result: CursorResult[Any],
+        default: Any,
+        err: Optional[BaseException] = None,
+    ) -> Any:
         if result.closed:
             raise exc.ResourceClosedError(
                 "This result object is closed."
@@ -1041,10 +1102,15 @@ class NoCursorDMLFetchStrategy(NoCursorFetchStrategy):
 
     __slots__ = ()
 
-    def _non_result(self, result, default, err=None):
+    def _non_result(
+        self,
+        result: CursorResult[Any],
+        default: Any,
+        err: Optional[BaseException] = None,
+    ) -> Any:
         # we only expect to have a _NoResultMetaData() here right now.
         assert not result._metadata.returns_rows
-        result._metadata._we_dont_return_rows(err)
+        result._metadata._we_dont_return_rows(err)  # type: ignore[union-attr]
 
 
 _NO_CURSOR_DML = NoCursorDMLFetchStrategy()
@@ -1081,10 +1147,7 @@ class CursorFetchStrategy(ResultFetchStrategy):
         )
 
     def yield_per(
-        self,
-        result: CursorResult[Any],
-        dbapi_cursor: Optional[DBAPICursor],
-        num: int,
+        self, result: CursorResult[Any], dbapi_cursor: DBAPICursor, num: int
     ) -> None:
         result.cursor_strategy = BufferedRowCursorFetchStrategy(
             dbapi_cursor,
@@ -1173,11 +1236,11 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
 
     def __init__(
         self,
-        dbapi_cursor,
-        execution_options,
-        growth_factor=5,
-        initial_buffer=None,
-    ):
+        dbapi_cursor: DBAPICursor,
+        execution_options: CoreExecuteOptionsParameter,
+        growth_factor: int = 5,
+        initial_buffer: Optional[Deque[Any]] = None,
+    ) -> None:
         self._max_row_buffer = execution_options.get("max_row_buffer", 1000)
 
         if initial_buffer is not None:
@@ -1192,13 +1255,17 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
             self._bufsize = self._max_row_buffer
 
     @classmethod
-    def create(cls, result):
+    def create(
+        cls, result: CursorResult[Any]
+    ) -> BufferedRowCursorFetchStrategy:
         return BufferedRowCursorFetchStrategy(
             result.cursor,
             result.context.execution_options,
         )
 
-    def _buffer_rows(self, result, dbapi_cursor):
+    def _buffer_rows(
+        self, result: CursorResult[Any], dbapi_cursor: DBAPICursor
+    ) -> None:
         """this is currently used only by fetchone()."""
 
         size = self._bufsize
@@ -1218,19 +1285,30 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
                 self._max_row_buffer, size * self._growth_factor
             )
 
-    def yield_per(self, result, dbapi_cursor, num):
+    def yield_per(
+        self, result: CursorResult[Any], dbapi_cursor: DBAPICursor, num: int
+    ) -> None:
         self._growth_factor = 0
         self._max_row_buffer = self._bufsize = num
 
-    def soft_close(self, result, dbapi_cursor):
+    def soft_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
         self._rowbuffer.clear()
         super().soft_close(result, dbapi_cursor)
 
-    def hard_close(self, result, dbapi_cursor):
+    def hard_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
         self._rowbuffer.clear()
         super().hard_close(result, dbapi_cursor)
 
-    def fetchone(self, result, dbapi_cursor, hard_close=False):
+    def fetchone(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        hard_close: bool = False,
+    ) -> Any:
         if not self._rowbuffer:
             self._buffer_rows(result, dbapi_cursor)
             if not self._rowbuffer:
@@ -1241,7 +1319,12 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
                 return None
         return self._rowbuffer.popleft()
 
-    def fetchmany(self, result, dbapi_cursor, size=None):
+    def fetchmany(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        size: Optional[int] = None,
+    ) -> Any:
         if size is None:
             return self.fetchall(result, dbapi_cursor)
 
@@ -1265,7 +1348,9 @@ class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
             result._soft_close()
         return res
 
-    def fetchall(self, result, dbapi_cursor):
+    def fetchall(
+        self, result: CursorResult[Any], dbapi_cursor: DBAPICursor
+    ) -> Any:
         try:
             ret = list(self._rowbuffer) + list(dbapi_cursor.fetchall())
             self._rowbuffer.clear()
@@ -1299,25 +1384,41 @@ class FullyBufferedCursorFetchStrategy(CursorFetchStrategy):
             assert dbapi_cursor is not None
             self._rowbuffer = collections.deque(dbapi_cursor.fetchall())
 
-    def yield_per(self, result, dbapi_cursor, num):
+    def yield_per(
+        self, result: CursorResult[Any], dbapi_cursor: DBAPICursor, num: int
+    ) -> Any:
         pass
 
-    def soft_close(self, result, dbapi_cursor):
+    def soft_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
         self._rowbuffer.clear()
         super().soft_close(result, dbapi_cursor)
 
-    def hard_close(self, result, dbapi_cursor):
+    def hard_close(
+        self, result: CursorResult[Any], dbapi_cursor: Optional[DBAPICursor]
+    ) -> None:
         self._rowbuffer.clear()
         super().hard_close(result, dbapi_cursor)
 
-    def fetchone(self, result, dbapi_cursor, hard_close=False):
+    def fetchone(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        hard_close: bool = False,
+    ) -> Any:
         if self._rowbuffer:
             return self._rowbuffer.popleft()
         else:
             result._soft_close(hard=hard_close)
             return None
 
-    def fetchmany(self, result, dbapi_cursor, size=None):
+    def fetchmany(
+        self,
+        result: CursorResult[Any],
+        dbapi_cursor: DBAPICursor,
+        size: Optional[int] = None,
+    ) -> Any:
         if size is None:
             return self.fetchall(result, dbapi_cursor)
 
@@ -1327,7 +1428,9 @@ class FullyBufferedCursorFetchStrategy(CursorFetchStrategy):
             result._soft_close()
         return rows
 
-    def fetchall(self, result, dbapi_cursor):
+    def fetchall(
+        self, result: CursorResult[Any], dbapi_cursor: DBAPICursor
+    ) -> Any:
         ret = self._rowbuffer
         self._rowbuffer = collections.deque()
         result._soft_close()
@@ -1339,35 +1442,37 @@ class _NoResultMetaData(ResultMetaData):
 
     returns_rows = False
 
-    def _we_dont_return_rows(self, err=None):
+    def _we_dont_return_rows(
+        self, err: Optional[BaseException] = None
+    ) -> NoReturn:
         raise exc.ResourceClosedError(
             "This result object does not return rows. "
             "It has been closed automatically."
         ) from err
 
-    def _index_for_key(self, keys, raiseerr):
+    def _index_for_key(self, keys: _KeyIndexType, raiseerr: bool) -> NoReturn:
         self._we_dont_return_rows()
 
-    def _metadata_for_keys(self, key):
+    def _metadata_for_keys(self, keys: Sequence[_KeyIndexType]) -> NoReturn:
         self._we_dont_return_rows()
 
-    def _reduce(self, keys):
+    def _reduce(self, keys: Sequence[_KeyIndexType]) -> NoReturn:
         self._we_dont_return_rows()
 
     @property
-    def _keymap(self):  # type: ignore[override]
+    def _keymap(self) -> NoReturn:  # type: ignore[override]
         self._we_dont_return_rows()
 
     @property
-    def _key_to_index(self):  # type: ignore[override]
+    def _key_to_index(self) -> NoReturn:  # type: ignore[override]
         self._we_dont_return_rows()
 
     @property
-    def _processors(self):  # type: ignore[override]
+    def _processors(self) -> NoReturn:  # type: ignore[override]
         self._we_dont_return_rows()
 
     @property
-    def keys(self):
+    def keys(self) -> NoReturn:
         self._we_dont_return_rows()
 
 
@@ -1456,7 +1561,7 @@ class CursorResult(Result[_T]):
                     slice(-context._num_sentinel_cols)
                 )
 
-                def _sliced_row(raw_data):
+                def _sliced_row(raw_data: Any) -> Any:
                     return _make_row(sentinel_filter(raw_data))
 
                 sliced_row = _sliced_row
@@ -1466,25 +1571,30 @@ class CursorResult(Result[_T]):
             if echo:
                 log = self.context.connection._log_debug
 
-                def _log_row(row):
+                def _log_row(row: Any) -> Any:
                     log("Row %r", sql_util._repr_row(row))
                     return row
 
                 self._row_logging_fn = _log_row
 
-                def _make_row_2(row):
+                def _make_row_2(row: Any) -> Any:
                     return _log_row(sliced_row(row))
 
                 make_row = _make_row_2
             else:
-                make_row = sliced_row
+                make_row = sliced_row  # type: ignore[assignment]
             self._set_memoized_attribute("_row_getter", make_row)
 
         else:
             assert context._num_sentinel_cols == 0
             self._metadata = self._no_result_metadata
 
-    def _init_metadata(self, context, cursor_description):
+    def _init_metadata(
+        self,
+        context: DefaultExecutionContext,
+        cursor_description: _DBAPICursorDescription,
+    ) -> CursorResultMetaData:
+
         if context.compiled:
             compiled = context.compiled
 
@@ -1515,9 +1625,9 @@ class CursorResult(Result[_T]):
                 )
                 and compiled._result_columns
                 and context.cache_hit is context.dialect.CACHE_HIT
-                and compiled.statement is not context.invoked_statement
+                and compiled.statement is not context.invoked_statement  # type: ignore[comparison-overlap] # noqa: E501
             ):
-                metadata = metadata._adapt_to_context(context)
+                metadata = metadata._adapt_to_context(context)  # type: ignore[assignment] # noqa: E501
 
             self._metadata = metadata
 
@@ -1531,7 +1641,7 @@ class CursorResult(Result[_T]):
             )
         return metadata
 
-    def _soft_close(self, hard=False):
+    def _soft_close(self, hard: bool = False) -> None:
         """Soft close this :class:`_engine.CursorResult`.
 
         This releases all DBAPI cursor resources, but leaves the
@@ -1569,7 +1679,7 @@ class CursorResult(Result[_T]):
             self._soft_closed = True
 
     @property
-    def inserted_primary_key_rows(self):
+    def inserted_primary_key_rows(self) -> List[Optional[Any]]:
         """Return the value of
         :attr:`_engine.CursorResult.inserted_primary_key`
         as a row contained within a list; some dialects may support a
@@ -1628,10 +1738,10 @@ class CursorResult(Result[_T]):
                 "when returning() "
                 "is used."
             )
-        return self.context.inserted_primary_key_rows
+        return self.context.inserted_primary_key_rows  # type: ignore[no-any-return]  # noqa: E501
 
     @property
-    def inserted_primary_key(self):
+    def inserted_primary_key(self) -> Optional[Any]:
         """Return the primary key for the row just inserted.
 
         The return value is a :class:`_result.Row` object representing
@@ -1676,7 +1786,11 @@ class CursorResult(Result[_T]):
         else:
             return None
 
-    def last_updated_params(self):
+    def last_updated_params(
+        self,
+    ) -> Union[
+        List[_MutableCoreSingleExecuteParams], _MutableCoreSingleExecuteParams
+    ]:
         """Return the collection of updated parameters from this
         execution.
 
@@ -1698,7 +1812,11 @@ class CursorResult(Result[_T]):
         else:
             return self.context.compiled_parameters[0]
 
-    def last_inserted_params(self):
+    def last_inserted_params(
+        self,
+    ) -> Union[
+        List[_MutableCoreSingleExecuteParams], _MutableCoreSingleExecuteParams
+    ]:
         """Return the collection of inserted parameters from this
         execution.
 
@@ -1721,7 +1839,9 @@ class CursorResult(Result[_T]):
             return self.context.compiled_parameters[0]
 
     @property
-    def returned_defaults_rows(self):
+    def returned_defaults_rows(
+        self,
+    ) -> Optional[Sequence[Row[Any]]]:
         """Return a list of rows each containing the values of default
         columns that were fetched using
         the :meth:`.ValuesBase.return_defaults` feature.
@@ -1733,7 +1853,7 @@ class CursorResult(Result[_T]):
         """
         return self.context.returned_default_rows
 
-    def splice_horizontally(self, other):
+    def splice_horizontally(self, other: CursorResult[Any]) -> Self:
         """Return a new :class:`.CursorResult` that "horizontally splices"
         together the rows of this :class:`.CursorResult` with that of another
         :class:`.CursorResult`.
@@ -1797,7 +1917,7 @@ class CursorResult(Result[_T]):
             )
         ]
 
-        clone._metadata = clone._metadata._splice_horizontally(other._metadata)
+        clone._metadata = clone._metadata._splice_horizontally(other._metadata)  # type: ignore[union-attr, arg-type] # noqa: E501
 
         clone.cursor_strategy = FullyBufferedCursorFetchStrategy(
             None,
@@ -1806,7 +1926,7 @@ class CursorResult(Result[_T]):
         clone._reset_memoizations()
         return clone
 
-    def splice_vertically(self, other):
+    def splice_vertically(self, other: CursorResult[Any]) -> Self:
         """Return a new :class:`.CursorResult` that "vertically splices",
         i.e. "extends", the rows of this :class:`.CursorResult` with that of
         another :class:`.CursorResult`.
@@ -1838,7 +1958,7 @@ class CursorResult(Result[_T]):
         clone._reset_memoizations()
         return clone
 
-    def _rewind(self, rows):
+    def _rewind(self, rows: Any) -> Self:
         """rewind this result back to the given rowset.
 
         this is used internally for the case where an :class:`.Insert`
@@ -1871,7 +1991,7 @@ class CursorResult(Result[_T]):
         return self
 
     @property
-    def returned_defaults(self):
+    def returned_defaults(self) -> Optional[Row[Any]]:
         """Return the values of default columns that were fetched using
         the :meth:`.ValuesBase.return_defaults` feature.
 
@@ -1897,7 +2017,7 @@ class CursorResult(Result[_T]):
         else:
             return None
 
-    def lastrow_has_defaults(self):
+    def lastrow_has_defaults(self) -> bool:
         """Return ``lastrow_has_defaults()`` from the underlying
         :class:`.ExecutionContext`.
 
@@ -1907,7 +2027,7 @@ class CursorResult(Result[_T]):
 
         return self.context.lastrow_has_defaults()
 
-    def postfetch_cols(self):
+    def postfetch_cols(self) -> Optional[Sequence[Column[Any]]]:
         """Return ``postfetch_cols()`` from the underlying
         :class:`.ExecutionContext`.
 
@@ -1930,7 +2050,7 @@ class CursorResult(Result[_T]):
             )
         return self.context.postfetch_cols
 
-    def prefetch_cols(self):
+    def prefetch_cols(self) -> Optional[Sequence[Column[Any]]]:
         """Return ``prefetch_cols()`` from the underlying
         :class:`.ExecutionContext`.
 
@@ -1953,7 +2073,7 @@ class CursorResult(Result[_T]):
             )
         return self.context.prefetch_cols
 
-    def supports_sane_rowcount(self):
+    def supports_sane_rowcount(self) -> bool:
         """Return ``supports_sane_rowcount`` from the dialect.
 
         See :attr:`_engine.CursorResult.rowcount` for background.
@@ -1962,7 +2082,7 @@ class CursorResult(Result[_T]):
 
         return self.dialect.supports_sane_rowcount
 
-    def supports_sane_multi_rowcount(self):
+    def supports_sane_multi_rowcount(self) -> bool:
         """Return ``supports_sane_multi_rowcount`` from the dialect.
 
         See :attr:`_engine.CursorResult.rowcount` for background.
@@ -2054,7 +2174,7 @@ class CursorResult(Result[_T]):
             raise  # not called
 
     @property
-    def lastrowid(self):
+    def lastrowid(self) -> int:
         """Return the 'lastrowid' accessor on the DBAPI cursor.
 
         This is a DBAPI specific method and is only functional
@@ -2075,7 +2195,7 @@ class CursorResult(Result[_T]):
             self.cursor_strategy.handle_exception(self, self.cursor, e)
 
     @property
-    def returns_rows(self):
+    def returns_rows(self) -> bool:
         """True if this :class:`_engine.CursorResult` returns zero or more
         rows.
 
@@ -2103,7 +2223,7 @@ class CursorResult(Result[_T]):
         return self._metadata.returns_rows
 
     @property
-    def is_insert(self):
+    def is_insert(self) -> bool:
         """True if this :class:`_engine.CursorResult` is the result
         of a executing an expression language compiled
         :func:`_expression.insert` construct.
@@ -2116,7 +2236,7 @@ class CursorResult(Result[_T]):
         """
         return self.context.isinsert
 
-    def _fetchiter_impl(self):
+    def _fetchiter_impl(self) -> Iterator[Any]:
         fetchone = self.cursor_strategy.fetchone
 
         while True:
@@ -2125,16 +2245,16 @@ class CursorResult(Result[_T]):
                 break
             yield row
 
-    def _fetchone_impl(self, hard_close=False):
+    def _fetchone_impl(self, hard_close: bool = False) -> Any:
         return self.cursor_strategy.fetchone(self, self.cursor, hard_close)
 
-    def _fetchall_impl(self):
+    def _fetchall_impl(self) -> Any:
         return self.cursor_strategy.fetchall(self, self.cursor)
 
-    def _fetchmany_impl(self, size=None):
+    def _fetchmany_impl(self, size: Optional[int] = None) -> Any:
         return self.cursor_strategy.fetchmany(self, self.cursor, size)
 
-    def _raw_row_iterator(self):
+    def _raw_row_iterator(self) -> Any:
         return self._fetchiter_impl()
 
     def merge(self, *others: Result[Any]) -> MergedResult[Any]:
@@ -2146,7 +2266,7 @@ class CursorResult(Result[_T]):
             )
         return merged_result
 
-    def close(self) -> Any:
+    def close(self) -> None:
         """Close this :class:`_engine.CursorResult`.
 
         This closes out the underlying DBAPI cursor corresponding to the
index a241e901159f0f6742b2dfa13588e17a40355c3d..84ac65a592e1e2485fc3bd72735f60029da6eadf 100644 (file)
@@ -84,6 +84,7 @@ if typing.TYPE_CHECKING:
     from .interfaces import ConnectArgsType
     from .interfaces import DBAPIConnection
     from .interfaces import DBAPIModule
+    from .interfaces import DBAPIType
     from .interfaces import IsolationLevel
     from .row import Row
     from .url import URL
@@ -1237,7 +1238,9 @@ class DefaultExecutionContext(ExecutionContext):
     # a hook for SQLite's translation of
     # result column names
     # NOTE: pyhive is using this hook, can't remove it :(
-    _translate_colname: Optional[Callable[[str], str]] = None
+    _translate_colname: Optional[
+        Callable[[str], Tuple[str, Optional[str]]]
+    ] = None
 
     _expanded_parameters: Mapping[str, List[str]] = util.immutabledict()
     """used by set_input_sizes().
@@ -1793,7 +1796,9 @@ class DefaultExecutionContext(ExecutionContext):
     def post_exec(self):
         pass
 
-    def get_result_processor(self, type_, colname, coltype):
+    def get_result_processor(
+        self, type_: TypeEngine[Any], colname: str, coltype: DBAPIType
+    ) -> Optional[_ResultProcessorType[Any]]:
         """Return a 'result processor' for a given type as present in
         cursor.description.
 
@@ -1803,7 +1808,7 @@ class DefaultExecutionContext(ExecutionContext):
         """
         return type_._cached_result_processor(self.dialect, coltype)
 
-    def get_lastrowid(self):
+    def get_lastrowid(self) -> int:
         """return self.cursor.lastrowid, or equivalent, after an INSERT.
 
         This may involve calling special cursor functions, issuing a new SELECT
@@ -2060,7 +2065,7 @@ class DefaultExecutionContext(ExecutionContext):
             getter(row, param) for row, param in zip(rows, compiled_params)
         ]
 
-    def lastrow_has_defaults(self):
+    def lastrow_has_defaults(self) -> bool:
         return (self.isinsert or self.isupdate) and bool(
             cast(SQLCompiler, self.compiled).postfetch
         )
index 37093e80627e06b046e0d3f0f13668e828deefea..c722c53d34b849b321c804da1acfc919ab4f478b 100644 (file)
@@ -1225,6 +1225,13 @@ class Dialect(EventTarget):
     tuple_in_values: bool
     """target database supports tuple IN, i.e. (x, y) IN ((q, p), (r, z))"""
 
+    requires_name_normalize: bool
+    """Indicates symbol names are returned by the database in
+    UPPERCASED if they are case insensitive within the database.
+    If this is True, the methods normalize_name()
+    and denormalize_name() must be provided.
+    """
+
     _bind_typing_render_casts: bool
 
     _type_memos: MutableMapping[TypeEngine[Any], _TypeMemoDict]
index 21c220140e1b306a2bf96234a92073d397b1b168..ed9e9c74bd148c7e5387fcb4f990e90164033a12 100644 (file)
@@ -771,7 +771,7 @@ class InPlaceGenerative(HasMemoized):
 
     __slots__ = ()
 
-    def _generate(self):
+    def _generate(self) -> Self:
         skip = self._memoized_keys
         # note __dict__ needs to be in __slots__ if this is used
         for k in skip: