git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
remove unused type ignores
author    Federico Caselli <cfederico87@gmail.com>
Tue, 12 Sep 2023 19:56:15 +0000 (21:56 +0200)
committer Federico Caselli <cfederico87@gmail.com>
Tue, 12 Sep 2023 19:56:15 +0000 (21:56 +0200)
Change-Id: Id212e8b4ff7427872ead8fd409a68408813f7d76
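
The change follows from the pyproject.toml hunk near the end of this diff, which flips warn_unused_ignores from false to true for the "sqlalchemy.*" mypy override: once that flag is on, every # type: ignore comment that no longer suppresses a real error is itself reported, so the stale ones are deleted and a few are narrowed to a specific error code instead (for example method-assign in lib/sqlalchemy/log.py). A minimal sketch of that behavior, using a hypothetical example.py rather than anything from this repository:

    # example.py (hypothetical)
    # With warn_unused_ignores = true, mypy reports something like:
    #   example.py:6: error: Unused "type: ignore" comment
    def add(a: int, b: int) -> int:
        # the ignore below suppresses nothing, so mypy flags it as unused
        return a + b  # type: ignore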

56 files changed:
lib/sqlalchemy/connectors/pyodbc.py
lib/sqlalchemy/dialects/__init__.py
lib/sqlalchemy/dialects/postgresql/ranges.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/engine/events.py
lib/sqlalchemy/engine/mock.py
lib/sqlalchemy/engine/reflection.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/util.py
lib/sqlalchemy/event/base.py
lib/sqlalchemy/event/registry.py
lib/sqlalchemy/ext/associationproxy.py
lib/sqlalchemy/ext/asyncio/base.py
lib/sqlalchemy/ext/asyncio/result.py
lib/sqlalchemy/ext/hybrid.py
lib/sqlalchemy/ext/mypy/names.py
lib/sqlalchemy/ext/mypy/util.py
lib/sqlalchemy/log.py
lib/sqlalchemy/orm/_typing.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/decl_api.py
lib/sqlalchemy/orm/decl_base.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/path_registry.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/pool/base.py
lib/sqlalchemy/pool/events.py
lib/sqlalchemy/pool/impl.py
lib/sqlalchemy/sql/_typing.py
lib/sqlalchemy/sql/annotation.py
lib/sqlalchemy/sql/base.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/crud.py
lib/sqlalchemy/sql/ddl.py
lib/sqlalchemy/sql/elements.py
lib/sqlalchemy/sql/lambdas.py
lib/sqlalchemy/sql/operators.py
lib/sqlalchemy/sql/schema.py
lib/sqlalchemy/sql/selectable.py
lib/sqlalchemy/sql/traversals.py
lib/sqlalchemy/sql/type_api.py
lib/sqlalchemy/sql/util.py
lib/sqlalchemy/util/_collections.py
lib/sqlalchemy/util/_concurrency_py3k.py
lib/sqlalchemy/util/_py_collections.py
lib/sqlalchemy/util/deprecations.py
lib/sqlalchemy/util/langhelpers.py
pyproject.toml
test/typing/plain_files/orm/mapped_assign_expression.py

index 669a8393e4bbb94b8d201979a05c4f27472b5a14..49712a57c4150155f755235f3bcfbf317e78d4b3 100644 (file)
@@ -227,7 +227,7 @@ class PyODBCConnector(Connector):
     def get_isolation_level_values(
         self, dbapi_connection: interfaces.DBAPIConnection
     ) -> List[IsolationLevel]:
-        return super().get_isolation_level_values(dbapi_connection) + [  # type: ignore  # noqa: E501
+        return super().get_isolation_level_values(dbapi_connection) + [
             "AUTOCOMMIT"
         ]
 
index 76b2263063ac576f2fc0de3053a9997a11b34549..055d087cf2458e7a88d95c58989f5586ef2a0171 100644 (file)
@@ -51,7 +51,7 @@ def _auto_fn(name: str) -> Optional[Callable[[], Type[Dialect]]]:
 
     if hasattr(module, driver):
         module = getattr(module, driver)
-        return lambda: module.dialect  # type: ignore
+        return lambda: module.dialect
     else:
         return None
 
index 2cd1552a73d1a4382ea746ebda450dc5b925aba8..f1c29897d013a371431868dd187266900d8e98aa 100644 (file)
@@ -293,7 +293,7 @@ class Range(Generic[_T]):
             else:
                 return 0
 
-    def __eq__(self, other: Any) -> bool:  # type: ignore[override]  # noqa: E501
+    def __eq__(self, other: Any) -> bool:
         """Compare this range to the `other` taking into account
         bounds inclusivity, returning ``True`` if they are equal.
         """
index 4b8dd8797a860be5861d3cb7dcca8102d026a8aa..e9f1efbe637a890fb3b0187535bb40cc291bfab4 100644 (file)
@@ -135,7 +135,7 @@ class DefaultDialect(Dialect):
 
     # most DBAPIs happy with this for execute().
     # not cx_oracle.
-    execute_sequence_format = tuple  # type: ignore
+    execute_sequence_format = tuple
 
     supports_schemas = True
     supports_views = True
@@ -2232,7 +2232,7 @@ class DefaultExecutionContext(ExecutionContext):
             and compile_state._has_multi_parameters
         ):
             if column._is_multiparam_column:
-                index = column.index + 1  # type: ignore
+                index = column.index + 1
                 d = {column.original.key: parameters[column.key]}
             else:
                 d = {column.key: parameters[column.key]}
@@ -2304,7 +2304,7 @@ class DefaultExecutionContext(ExecutionContext):
                     param[param_key] = arg
                 elif is_callable:
                     self.current_column = c
-                    param[param_key] = arg(self)  # type: ignore
+                    param[param_key] = arg(self)
                 else:
                     val = fallback(c)
                     if val is not None:
index 848f39733aa2f4f49b7cc50f8f3209654e54a01e..aac756d18a22e606a462ac3f3e396f632935710d 100644 (file)
@@ -131,7 +131,7 @@ class ConnectionEvents(event.Events[ConnectionEventsTarget]):
         if default_dispatch is None and hasattr(
             target, "_no_async_engine_events"
         ):
-            target._no_async_engine_events()  # type: ignore
+            target._no_async_engine_events()
 
         return default_dispatch
 
@@ -640,7 +640,7 @@ class DialectEvents(event.Events[Dialect]):
     _dispatch_target = Dialect
 
     @classmethod
-    def _listen(  # type: ignore
+    def _listen(
         cls,
         event_key: event._EventKey[Dialect],
         *,
index b80eab516fe7a3dd285def0d6882930d52e82c83..618ea1d85efaa8e59ba3fb3bffb81c27ebc8abee 100644 (file)
@@ -126,6 +126,6 @@ def create_mock_engine(
             dialect_args[k] = kw.pop(k)
 
     # create dialect
-    dialect = dialect_cls(**dialect_args)  # type: ignore
+    dialect = dialect_cls(**dialect_args)
 
     return MockConnection(dialect, executor)
index 4035901aed68bde0051371f095168379a26d3376..6d2a8a29fd8e1ac449f89b76e9c35cf9e0c6bd58 100644 (file)
@@ -230,7 +230,7 @@ class Inspector(inspection.Inspectable["Inspector"]):
         cls, init: Callable[..., Any], bind: Union[Engine, Connection]
     ) -> Inspector:
         if hasattr(bind.dialect, "inspector"):
-            cls = bind.dialect.inspector  # type: ignore[attr-defined]
+            cls = bind.dialect.inspector
 
         self = cls.__new__(cls)
         init(self, bind)
@@ -240,7 +240,7 @@ class Inspector(inspection.Inspectable["Inspector"]):
         if hasattr(bind, "exec_driver_sql"):
             self._init_connection(bind)  # type: ignore[arg-type]
         else:
-            self._init_engine(bind)  # type: ignore[arg-type]
+            self._init_engine(bind)
 
     def _init_engine(self, engine: Engine) -> None:
         self.bind = self.engine = engine
@@ -1627,9 +1627,7 @@ class Inspector(inspection.Inspectable["Inspector"]):
         orig_name = col_d["name"]
 
         table.metadata.dispatch.column_reflect(self, table, col_d)
-        table.dispatch.column_reflect(  # type: ignore[attr-defined]
-            self, table, col_d
-        )
+        table.dispatch.column_reflect(self, table, col_d)
 
         # fetch name again as column_reflect is allowed to
         # change it
@@ -2038,7 +2036,7 @@ class ReflectionDefaults:
 
     @classmethod
     def pk_constraint(cls) -> ReflectedPrimaryKeyConstraint:
-        return {  # type: ignore  # pep-655 not supported
+        return {
             "name": None,
             "constrained_columns": [],
         }
index 1c9cc504b1162cc5b7336f775fa270d7e125510f..132ae88b660ebdd596ef396cd679c6a2760f0796 100644 (file)
@@ -373,7 +373,7 @@ class SimpleResultMetaData(ResultMetaData):
         indexes: Sequence[int]
         new_keys: Sequence[str]
         extra: Sequence[Any]
-        indexes, new_keys, extra = zip(*metadata_for_keys)  # type: ignore
+        indexes, new_keys, extra = zip(*metadata_for_keys)
 
         if self._translated_indexes:
             indexes = [self._translated_indexes[idx] for idx in indexes]
@@ -459,7 +459,7 @@ class ResultInternal(InPlaceGenerative, Generic[_R]):
             else:
                 _proc = Row
 
-                def process_row(  # type: ignore
+                def process_row(
                     metadata: ResultMetaData,
                     processors: Optional[_ProcessorsType],
                     key_to_index: Mapping[_KeyType, int],
index b0a54f97e5201645b260d4ef09f3d4befe157f2b..9b147a7014b97ac9a04827585e9974aec535e673 100644 (file)
@@ -48,7 +48,7 @@ def connection_memoize(key: str) -> Callable[[_C], _C]:
             connection.info[key] = val = fn(self, connection)
             return val
 
-    return decorated  # type: ignore
+    return decorated
 
 
 class _TConsSubject(Protocol):
index 2c32b043e9ab9f84f235f1b5de95383e84b2e5f5..f92b2ede3cdc0a16b5eb20352d83d8534360bdac 100644 (file)
@@ -301,7 +301,7 @@ class _HasEventsDispatch(Generic[_ET]):
             "Type[_Dispatch[_ET]]",
             type(
                 "%sDispatch" % classname,
-                (dispatch_base,),  # type: ignore
+                (dispatch_base,),
                 {"__slots__": event_names},
             ),
         )
@@ -323,7 +323,7 @@ class _HasEventsDispatch(Generic[_ET]):
             assert dispatch_target_cls is not None
             if (
                 hasattr(dispatch_target_cls, "__slots__")
-                and "_slots_dispatch" in dispatch_target_cls.__slots__  # type: ignore  # noqa: E501
+                and "_slots_dispatch" in dispatch_target_cls.__slots__
             ):
                 dispatch_target_cls.dispatch = slots_dispatcher(cls)
             else:
index 8e4a26615436e1c3eb573926415a0e3fd5001884..fb2fed815f1f7f3ddc014eaa7a89c78c70913884 100644 (file)
@@ -241,7 +241,7 @@ class _EventKey(Generic[_ET]):
     ):
         self.target = target
         self.identifier = identifier
-        self.fn = fn  # type: ignore[assignment]
+        self.fn = fn
         if isinstance(fn, types.MethodType):
             self.fn_key = id(fn.__func__), id(fn.__self__)
         else:
index 38755c8fa3e2ec1dff1e4ef7145e5e864fca543a..31df13453487f46f5879aab8c69f4533d4baa0c4 100644 (file)
@@ -1047,7 +1047,7 @@ class AssociationProxyInstance(SQLORMOperations[_T]):
 
         target_assoc = self._unwrap_target_assoc_proxy
         if target_assoc is not None:
-            inner = target_assoc._criterion_exists(  # type: ignore
+            inner = target_assoc._criterion_exists(
                 criterion=criterion, **kwargs
             )
             return self._comparator._criterion_exists(inner)
@@ -1961,7 +1961,7 @@ class _AssociationSet(_AssociationSingleItem[_T], MutableSet[_T]):
         return set(self).symmetric_difference(__s)
 
     def __xor__(self, s: AbstractSet[_S]) -> MutableSet[Union[_T, _S]]:
-        return self.symmetric_difference(s)  # type: ignore
+        return self.symmetric_difference(s)
 
     def symmetric_difference_update(self, other: Iterable[Any]) -> None:
         want, have = self.symmetric_difference(other), set(self)
index 1fecf60abd7701bad4115ba9e8a2dc720d07224e..1c8707c780ad396958991ec3f0eeaa1f2c0c687d 100644 (file)
@@ -58,9 +58,7 @@ class ReversibleProxy(Generic[_PT]):
             )
             proxy_ref = weakref.ref(
                 self,
-                functools.partial(  # type: ignore
-                    ReversibleProxy._target_gced, target_ref
-                ),
+                functools.partial(ReversibleProxy._target_gced, target_ref),
             )
             ReversibleProxy._proxy_objects[target_ref] = proxy_ref
 
@@ -124,7 +122,7 @@ class StartableContext(Awaitable[_T_co], abc.ABC):
         return self.start().__await__()
 
     async def __aenter__(self) -> _T_co:
-        return await self.start(is_ctxmanager=True)  # type: ignore
+        return await self.start(is_ctxmanager=True)
 
     @abc.abstractmethod
     async def __aexit__(
index 3dcb1cfd088c7190d2147ba78d7ab362ac34a297..a13e106ff31a7738ed907a69833e98723c95f82f 100644 (file)
@@ -60,7 +60,7 @@ class AsyncCommon(FilterResult[_R]):
         .. versionadded:: 2.0.0b3
 
         """
-        return self._real_result.closed  # type: ignore
+        return self._real_result.closed
 
 
 class AsyncResult(_WithKeys, AsyncCommon[Row[_TP]]):
index 1ac6fafc11a461f1b72902dac7488e9aa6a3304f..722c9bd563ceed77421137564994225ed4fc4bfa 100644 (file)
@@ -1516,7 +1516,7 @@ class ExprComparator(Comparator[_T]):
     def operate(
         self, op: OperatorType, *other: Any, **kwargs: Any
     ) -> ColumnElement[Any]:
-        return op(self.expression, *other, **kwargs)  # type: ignore
+        return op(self.expression, *other, **kwargs)
 
     def reverse_operate(
         self, op: OperatorType, other: Any, **kwargs: Any
index 989f25592397c5408ffb95ee63a66ea4ba26f35c..ae55ca47b01df975043dab901ece79012440afdd 100644 (file)
@@ -34,24 +34,23 @@ from mypy.types import UnboundType
 
 from ... import util
 
-COLUMN: int = util.symbol("COLUMN")  # type: ignore
-RELATIONSHIP: int = util.symbol("RELATIONSHIP")  # type: ignore
-REGISTRY: int = util.symbol("REGISTRY")  # type: ignore
-COLUMN_PROPERTY: int = util.symbol("COLUMN_PROPERTY")  # type: ignore
-TYPEENGINE: int = util.symbol("TYPEENGNE")  # type: ignore
-MAPPED: int = util.symbol("MAPPED")  # type: ignore
-DECLARATIVE_BASE: int = util.symbol("DECLARATIVE_BASE")  # type: ignore
-DECLARATIVE_META: int = util.symbol("DECLARATIVE_META")  # type: ignore
-MAPPED_DECORATOR: int = util.symbol("MAPPED_DECORATOR")  # type: ignore
-COLUMN_PROPERTY: int = util.symbol("COLUMN_PROPERTY")  # type: ignore
-SYNONYM_PROPERTY: int = util.symbol("SYNONYM_PROPERTY")  # type: ignore
-COMPOSITE_PROPERTY: int = util.symbol("COMPOSITE_PROPERTY")  # type: ignore
-DECLARED_ATTR: int = util.symbol("DECLARED_ATTR")  # type: ignore
-MAPPER_PROPERTY: int = util.symbol("MAPPER_PROPERTY")  # type: ignore
-AS_DECLARATIVE: int = util.symbol("AS_DECLARATIVE")  # type: ignore
-AS_DECLARATIVE_BASE: int = util.symbol("AS_DECLARATIVE_BASE")  # type: ignore
-DECLARATIVE_MIXIN: int = util.symbol("DECLARATIVE_MIXIN")  # type: ignore
-QUERY_EXPRESSION: int = util.symbol("QUERY_EXPRESSION")  # type: ignore
+COLUMN: int = util.symbol("COLUMN")
+RELATIONSHIP: int = util.symbol("RELATIONSHIP")
+REGISTRY: int = util.symbol("REGISTRY")
+COLUMN_PROPERTY: int = util.symbol("COLUMN_PROPERTY")
+TYPEENGINE: int = util.symbol("TYPEENGNE")
+MAPPED: int = util.symbol("MAPPED")
+DECLARATIVE_BASE: int = util.symbol("DECLARATIVE_BASE")
+DECLARATIVE_META: int = util.symbol("DECLARATIVE_META")
+MAPPED_DECORATOR: int = util.symbol("MAPPED_DECORATOR")
+SYNONYM_PROPERTY: int = util.symbol("SYNONYM_PROPERTY")
+COMPOSITE_PROPERTY: int = util.symbol("COMPOSITE_PROPERTY")
+DECLARED_ATTR: int = util.symbol("DECLARED_ATTR")
+MAPPER_PROPERTY: int = util.symbol("MAPPER_PROPERTY")
+AS_DECLARATIVE: int = util.symbol("AS_DECLARATIVE")
+AS_DECLARATIVE_BASE: int = util.symbol("AS_DECLARATIVE_BASE")
+DECLARATIVE_MIXIN: int = util.symbol("DECLARATIVE_MIXIN")
+QUERY_EXPRESSION: int = util.symbol("QUERY_EXPRESSION")
 
 # names that must succeed with mypy.api.named_type
 NAMED_TYPE_BUILTINS_OBJECT = "builtins.object"
index fe4402123633aa2d84b8e0130d326c6eee231e73..238c82a54f2e48768b45faa8c7bcca215709527e 100644 (file)
@@ -173,7 +173,7 @@ def get_mapped_attributes(
 
 def format_type(typ_: Type, options: Options) -> str:
     if mypy_14:
-        return _mypy_format_type(typ_, options)  # type: ignore
+        return _mypy_format_type(typ_, options)
     else:
         return _mypy_format_type(typ_)  # type: ignore
 
index f1e2cf12c79ab3aa2db149a68617e800d21c6488..8de6d188ceeff6e9cec186837db534fb9df78015 100644 (file)
@@ -75,10 +75,10 @@ def _qual_logger_name_for_cls(cls: Type[Identified]) -> str:
 
 def class_logger(cls: Type[_IT]) -> Type[_IT]:
     logger = logging.getLogger(_qual_logger_name_for_cls(cls))
-    cls._should_log_debug = lambda self: logger.isEnabledFor(  # type: ignore[assignment]  # noqa: E501
+    cls._should_log_debug = lambda self: logger.isEnabledFor(  # type: ignore[method-assign]  # noqa: E501
         logging.DEBUG
     )
-    cls._should_log_info = lambda self: logger.isEnabledFor(  # type: ignore[assignment]  # noqa: E501
+    cls._should_log_info = lambda self: logger.isEnabledFor(  # type: ignore[method-assign]  # noqa: E501
         logging.INFO
     )
     cls.logger = logger
index cc4233be1113f469bfa2329056d1a652f66ca0a6..60664283dae03eac7167489f3ce9976dc4958dc9 100644 (file)
@@ -119,7 +119,7 @@ class _LoaderCallable(Protocol):
 def is_orm_option(
     opt: ExecutableOption,
 ) -> TypeGuard[ORMOption]:
-    return not opt._is_core  # type: ignore
+    return not opt._is_core
 
 
 def is_user_defined_option(
index b1bda2281945f4e0a63da3d833c87b842c2446a9..00d3f50792b108ea8766b1ca20c0695227c88c97 100644 (file)
@@ -335,7 +335,7 @@ class QueryableAttribute(
         entity_namespace = self._entity_namespace
         assert isinstance(entity_namespace, HasCacheKey)
 
-        if self.key is _UNKNOWN_ATTR_KEY:  # type: ignore[comparison-overlap]
+        if self.key is _UNKNOWN_ATTR_KEY:
             annotations = {"entity_namespace": entity_namespace}
         else:
             annotations = {
@@ -450,12 +450,12 @@ class QueryableAttribute(
     def operate(
         self, op: OperatorType, *other: Any, **kwargs: Any
     ) -> ColumnElement[Any]:
-        return op(self.comparator, *other, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+        return op(self.comparator, *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
     def reverse_operate(
         self, op: OperatorType, other: Any, **kwargs: Any
     ) -> ColumnElement[Any]:
-        return op(other, self.comparator, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+        return op(other, self.comparator, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
     def hasparent(
         self, state: InstanceState[Any], optimistic: bool = False
@@ -521,16 +521,16 @@ class InstrumentedAttribute(QueryableAttribute[_T]):
     # InstrumentedAttribute, while still keeping classlevel
     # __doc__ correct
 
-    @util.rw_hybridproperty  # type: ignore
-    def __doc__(self) -> Optional[str]:  # type: ignore
+    @util.rw_hybridproperty
+    def __doc__(self) -> Optional[str]:
         return self._doc
 
     @__doc__.setter  # type: ignore
-    def __doc__(self, value: Optional[str]) -> None:  # type: ignore
+    def __doc__(self, value: Optional[str]) -> None:
         self._doc = value
 
     @__doc__.classlevel  # type: ignore
-    def __doc__(cls) -> Optional[str]:  # type: ignore
+    def __doc__(cls) -> Optional[str]:
         return super().__doc__
 
     def __set__(self, instance: object, value: Any) -> None:
@@ -1941,7 +1941,7 @@ class CollectionAttributeImpl(HasCollectionAdapter, AttributeImpl):
                         and "None"
                         or iterable.__class__.__name__
                     )
-                    wanted = self._duck_typed_as.__name__  # type: ignore
+                    wanted = self._duck_typed_as.__name__
                     raise TypeError(
                         "Incompatible collection type: %s is not %s-like"
                         % (given, wanted)
index 1b378f3c8d7f0ba46bd2bc89244fa7eb627edcc1..80c85f13ad3ecbda56059b714401e16ae33179ad 100644 (file)
@@ -253,7 +253,7 @@ class _declared_attr_common:
         # which seems to help typing tools interpret the fn as a classmethod
         # for situations where needed
         if isinstance(fn, classmethod):
-            fn = fn.__func__  # type: ignore
+            fn = fn.__func__
 
         self.fget = fn
         self._cascading = cascading
@@ -281,11 +281,11 @@ class _declared_attr_common:
                     "Unmanaged access of declarative attribute %s from "
                     "non-mapped class %s" % (self.fget.__name__, cls.__name__)
                 )
-            return self.fget(cls)  # type: ignore
+            return self.fget(cls)
         elif manager.is_mapped:
             # the class is mapped, which means we're outside of the declarative
             # scan setup, just run the function.
-            return self.fget(cls)  # type: ignore
+            return self.fget(cls)
 
         # here, we are inside of the declarative scan.  use the registry
         # that is tracking the values of these attributes.
@@ -297,10 +297,10 @@ class _declared_attr_common:
         reg = declarative_scan.declared_attr_reg
 
         if self in reg:
-            return reg[self]  # type: ignore
+            return reg[self]
         else:
             reg[self] = obj = self.fget(cls)
-            return obj  # type: ignore
+            return obj
 
 
 class _declared_directive(_declared_attr_common, Generic[_T]):
@@ -558,12 +558,12 @@ def _setup_declarative_base(cls: Type[Any]) -> None:
         reg = registry(
             metadata=metadata, type_annotation_map=type_annotation_map
         )
-        cls.registry = reg  # type: ignore
+        cls.registry = reg
 
-    cls._sa_registry = reg  # type: ignore
+    cls._sa_registry = reg
 
     if "metadata" not in cls.__dict__:
-        cls.metadata = cls.registry.metadata  # type: ignore
+        cls.metadata = cls.registry.metadata
 
     if getattr(cls, "__init__", object.__init__) is object.__init__:
         cls.__init__ = cls.registry.constructor
@@ -609,7 +609,7 @@ class MappedAsDataclass(metaclass=DCTransformDeclarative):
         current_transforms: _DataclassArguments
 
         if hasattr(cls, "_sa_apply_dc_transforms"):
-            current = cls._sa_apply_dc_transforms  # type: ignore[attr-defined]
+            current = cls._sa_apply_dc_transforms
 
             _ClassScanMapperConfig._assert_dc_arguments(current)
 
@@ -1274,7 +1274,7 @@ class registry:
                 sql_type = sqltypes._type_map_get(pt)  # type: ignore  # noqa: E501
 
             if sql_type is not None:
-                sql_type_inst = sqltypes.to_instance(sql_type)  # type: ignore
+                sql_type_inst = sqltypes.to_instance(sql_type)
 
                 # ... this additional step will reject most
                 # type -> supertype matches, such as if we had
index 816c7a8fd4bb9b21917d9fadfcc17791ebc6e281..9d10599499a4c6024ef1387641966e5cccb0e612 100644 (file)
@@ -1609,7 +1609,7 @@ class _ClassScanMapperConfig(_MapperConfig):
                         setattr(cls, k, value)
                         continue
 
-            our_stuff[k] = value  # type: ignore
+            our_stuff[k] = value
 
     def _extract_declared_columns(self) -> None:
         our_stuff = self.properties
@@ -1979,7 +1979,7 @@ class _DeferredMapperConfig(_ClassScanMapperConfig):
 
     # mypy disallows plain property override of variable
     @property  # type: ignore
-    def cls(self) -> Type[Any]:  # type: ignore
+    def cls(self) -> Type[Any]:
         return self._cls()  # type: ignore
 
     @cls.setter
@@ -1999,7 +1999,7 @@ class _DeferredMapperConfig(_ClassScanMapperConfig):
     @classmethod
     def raise_unmapped_for_cls(cls, class_: Type[Any]) -> NoReturn:
         if hasattr(class_, "_sa_raise_deferred_config"):
-            class_._sa_raise_deferred_config()  # type: ignore
+            class_._sa_raise_deferred_config()
 
         raise orm_exc.UnmappedClassError(
             class_,
index e7c9de2a62a669c8f7f373a127b6acd4ee7767f4..c1fe9de85cab3cee69531498c0dd52f7a1544f07 100644 (file)
@@ -425,7 +425,7 @@ class CompositeProperty(
             elif hasattr(self.composite_class, "__composite_values__"):
                 _composite_getters[
                     self.composite_class
-                ] = lambda obj: obj.__composite_values__()  # type: ignore
+                ] = lambda obj: obj.__composite_values__()
 
     @util.preload_module("sqlalchemy.orm.properties")
     @util.preload_module("sqlalchemy.orm.decl_base")
@@ -628,7 +628,7 @@ class CompositeProperty(
 
         proxy_attr = self.parent.class_manager[self.key]
         proxy_attr.impl.dispatch = proxy_attr.dispatch  # type: ignore
-        proxy_attr.impl.dispatch._active_history = self.active_history  # type: ignore  # noqa: E501
+        proxy_attr.impl.dispatch._active_history = self.active_history
 
         # TODO: need a deserialize hook here
 
@@ -806,16 +806,16 @@ class CompositeProperty(
         def __ne__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
             return self._compare(operators.ne, other)
 
-        def __lt__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+        def __lt__(self, other: Any) -> ColumnElement[bool]:
             return self._compare(operators.lt, other)
 
-        def __gt__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+        def __gt__(self, other: Any) -> ColumnElement[bool]:
             return self._compare(operators.gt, other)
 
-        def __le__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+        def __le__(self, other: Any) -> ColumnElement[bool]:
             return self._compare(operators.le, other)
 
-        def __ge__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
+        def __ge__(self, other: Any) -> ColumnElement[bool]:
             return self._compare(operators.ge, other)
 
         # what might be interesting would be if we create
@@ -839,8 +839,8 @@ class CompositeProperty(
             ]
             if self._adapt_to_entity:
                 assert self.adapter is not None
-                comparisons = [self.adapter(x) for x in comparisons]  # type: ignore  # noqa: E501
-            return sql.and_(*comparisons)  # type: ignore
+                comparisons = [self.adapter(x) for x in comparisons]
+            return sql.and_(*comparisons)
 
     def __str__(self) -> str:
         return str(self.parent.class_.__name__) + "." + self.key
index 1b755a27abeeee48a085ed3fb17573cd98bd9fbf..b12d80ac4f7d382397307d1e449301c05981ded1 100644 (file)
@@ -138,7 +138,7 @@ class ClassManager(
     def deferred_scalar_loader(self):
         return self.expired_attribute_loader
 
-    @deferred_scalar_loader.setter  # type: ignore[no-redef]
+    @deferred_scalar_loader.setter
     @util.deprecated(
         "1.4",
         message="The ClassManager.deferred_scalar_loader attribute is now "
@@ -204,7 +204,7 @@ class ClassManager(
         init_method: Optional[Callable[..., None]] = None,
     ) -> None:
         if mapper:
-            self.mapper = mapper  # type: ignore[assignment]
+            self.mapper = mapper
         if registry:
             registry._add_manager(self)
         if declarative_scan:
@@ -428,7 +428,7 @@ class ClassManager(
         for key in list(self.originals):
             self.uninstall_member(key)
 
-        self.mapper = None  # type: ignore
+        self.mapper = None
         self.dispatch = None  # type: ignore
         self.new_init = None
         self.info.clear()
@@ -506,11 +506,11 @@ class ClassManager(
         # so that mypy sees that __new__ is present.   currently
         # it's bound to Any as there were other problems not having
         # it that way but these can be revisited
-        instance = self.class_.__new__(self.class_)  # type: ignore
+        instance = self.class_.__new__(self.class_)
         if state is None:
             state = self._state_constructor(instance, self)
         self._state_setter(instance, state)
-        return instance  # type: ignore[no-any-return]
+        return instance
 
     def setup_instance(
         self, instance: _O, state: Optional[InstanceState[_O]] = None
index daba973cb3a4e4d18624a50bd0d4524f4a3c0ef9..a118b2aa85467cabc4fba0e5f79c5453fab2ed4f 100644 (file)
@@ -923,9 +923,7 @@ class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators):
 
         """
 
-        return self.operate(  # type: ignore
-            PropComparator.any_op, criterion, **kwargs
-        )
+        return self.operate(PropComparator.any_op, criterion, **kwargs)
 
     def has(
         self,
@@ -947,9 +945,7 @@ class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators):
 
         """
 
-        return self.operate(  # type: ignore
-            PropComparator.has_op, criterion, **kwargs
-        )
+        return self.operate(PropComparator.has_op, criterion, **kwargs)
 
 
 class StrategizedProperty(MapperProperty[_T]):
index 06671a5e12f8e16931cd59a7f474716885b34d8c..a83c70043a34f76c36e4785e47858f9a6bcacef2 100644 (file)
@@ -786,7 +786,7 @@ class Mapper(
 
         # interim - polymorphic_on is further refined in
         # _configure_polymorphic_setter
-        self.polymorphic_on = (  # type: ignore
+        self.polymorphic_on = (
             coercions.expect(  # type: ignore
                 roles.ColumnArgumentOrKeyRole,
                 polymorphic_on,
@@ -1400,7 +1400,7 @@ class Mapper(
             self.with_polymorphic = None
 
         if self.with_polymorphic and self.with_polymorphic[1] is not None:
-            self.with_polymorphic = (  # type: ignore
+            self.with_polymorphic = (
                 self.with_polymorphic[0],
                 coercions.expect(
                     roles.StrictFromClauseRole,
@@ -1504,7 +1504,7 @@ class Mapper(
         manager = instrumentation.register_class(
             self.class_,
             mapper=self,
-            expired_attribute_loader=util.partial(  # type: ignore
+            expired_attribute_loader=util.partial(
                 loading.load_scalar_attributes, self
             ),
             # finalize flag means instrument the __init__ method
@@ -1610,7 +1610,7 @@ class Mapper(
                 if isinstance(c, str)
                 else c
                 for c in (
-                    coercions.expect(  # type: ignore
+                    coercions.expect(
                         roles.DDLConstraintColumnRole,
                         coerce_pk,
                         argname="primary_key",
index 2cd8a1412c4d2e1b91b913970088b8e839b0e702..41ca328e1cf9d49e0ab90cbcc5e78146ec56bad0 100644 (file)
@@ -619,7 +619,7 @@ class PropRegistry(PathRegistry):
 
         self._wildcard_path_loader_key = (
             "loader",
-            parent.natural_path + self.prop._wildcard_token,  # type: ignore
+            parent.natural_path + self.prop._wildcard_token,
         )
         self._default_path_loader_key = self.prop._default_path_loader_key
         self._loader_key = ("loader", self.natural_path)
index 4df5175d07ec0cdcc2638aff45aadae358a015b2..606cebc40c7271f6518b0694a35f93ac5fac317c 100644 (file)
@@ -446,7 +446,7 @@ class ColumnProperty(
             try:
                 return ce.info  # type: ignore
             except AttributeError:
-                return self.prop.info  # type: ignore
+                return self.prop.info
 
         def _memoized_attr_expressions(self) -> Sequence[NamedColumn[Any]]:
             """The full sequence of columns referenced by this
@@ -475,13 +475,13 @@ class ColumnProperty(
         def operate(
             self, op: OperatorType, *other: Any, **kwargs: Any
         ) -> ColumnElement[Any]:
-            return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+            return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
         def reverse_operate(
             self, op: OperatorType, other: Any, **kwargs: Any
         ) -> ColumnElement[Any]:
             col = self.__clause_element__()
-            return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+            return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
     def __str__(self) -> str:
         if not self.parent or not self.key:
@@ -639,13 +639,13 @@ class MappedColumn(
     def operate(
         self, op: OperatorType, *other: Any, **kwargs: Any
     ) -> ColumnElement[Any]:
-        return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+        return op(self.__clause_element__(), *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
     def reverse_operate(
         self, op: OperatorType, other: Any, **kwargs: Any
     ) -> ColumnElement[Any]:
         col = self.__clause_element__()
-        return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+        return op(col._bind_param(op, other), col, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
     def found_in_pep593_annotated(self) -> Any:
         # return a blank mapped_column().  This mapped_column()'s
index 14e75fab94857ba0c5a51b7ae0118dba4f18411d..b1678bce18e513102391253b342efe2255c5a5bb 100644 (file)
@@ -274,7 +274,7 @@ class Query(
         self._set_entities(entities)
 
     def _set_propagate_attrs(self, values: Mapping[str, Any]) -> Self:
-        self._propagate_attrs = util.immutabledict(values)  # type: ignore
+        self._propagate_attrs = util.immutabledict(values)
         return self
 
     def _set_entities(
@@ -478,7 +478,7 @@ class Query(
         return self
 
     def _clone(self, **kw: Any) -> Self:
-        return self._generate()  # type: ignore
+        return self._generate()
 
     def _get_select_statement_only(self) -> Select[_T]:
         if self._statement is not None:
@@ -1450,7 +1450,7 @@ class Query(
         q._set_entities(columns)
         if not q.load_options._yield_per:
             q.load_options += {"_yield_per": 10}
-        return iter(q)  # type: ignore
+        return iter(q)
 
     @util.deprecated(
         "1.4",
index d3a8da042a4a953c2fec8ad4606e48798d4ff4e4..191ace48dd1ab41bb4bbdcb779d419e97b3c30e1 100644 (file)
@@ -1764,7 +1764,7 @@ class RelationshipProperty(
         argument = de_optionalize_union_types(argument)
 
         if hasattr(argument, "__origin__"):
-            arg_origin = argument.__origin__  # type: ignore
+            arg_origin = argument.__origin__
             if isinstance(arg_origin, type) and issubclass(
                 arg_origin, abc.Collection
             ):
@@ -1786,7 +1786,7 @@ class RelationshipProperty(
 
             if argument.__args__:  # type: ignore
                 if isinstance(arg_origin, type) and issubclass(
-                    arg_origin, typing.Mapping  # type: ignore
+                    arg_origin, typing.Mapping
                 ):
                     type_arg = argument.__args__[-1]  # type: ignore
                 else:
@@ -1804,7 +1804,7 @@ class RelationshipProperty(
                     f"Generic alias {argument} requires an argument"
                 )
         elif hasattr(argument, "__forward_arg__"):
-            argument = argument.__forward_arg__  # type: ignore
+            argument = argument.__forward_arg__
 
             argument = resolve_name_to_real_class_name(
                 argument, originating_module
@@ -1874,7 +1874,7 @@ class RelationshipProperty(
                     % (self.key, type(resolved_argument))
                 )
 
-        self.entity = entity  # type: ignore
+        self.entity = entity
         self.target = self.entity.persist_selectable
 
     def _setup_join_conditions(self) -> None:
index b1ae198592fe48c5fc6baff0266a310f1cb4a0bf..d9e1f854d77e8bba58ee4f962ebe9cda4c9aba63 100644 (file)
@@ -617,8 +617,8 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
             self.class_ = state_dict["class_"]
 
         self.committed_state = state_dict.get("committed_state", {})
-        self._pending_mutations = state_dict.get("_pending_mutations", {})  # type: ignore  # noqa E501
-        self.parents = state_dict.get("parents", {})  # type: ignore
+        self._pending_mutations = state_dict.get("_pending_mutations", {})
+        self.parents = state_dict.get("parents", {})
         self.modified = state_dict.get("modified", False)
         self.expired = state_dict.get("expired", False)
         if "info" in state_dict:
index 4371e6116f8fa9df0541fbed0e4679aaddc07551..005155dd0a5114c2eb3dbfd8498865a67f90f303 100644 (file)
@@ -1204,7 +1204,7 @@ class AliasedInsp(
         # IMO mypy should see this one also as returning the same type
         # we put into it, but it's not
         return (
-            self._adapter.traverse(expr)  # type: ignore
+            self._adapter.traverse(expr)
             ._annotate(d)
             ._set_propagate_attrs(
                 {"compile_state_plugin": "orm", "plugin_subject": self}
@@ -1397,7 +1397,7 @@ class LoaderCriteriaOption(CriteriaOption):
 
             self.deferred_where_criteria = True
             self.where_criteria = lambdas.DeferredLambdaElement(
-                where_criteria,  # type: ignore
+                where_criteria,
                 roles.WhereHavingRole,
                 lambda_args=(_WrapUserEntity(wrap_entity),),
                 opts=lambdas.LambdaOptions(
@@ -2169,9 +2169,9 @@ def _getitem(iterable_query: Query[Any], item: Any) -> Any:
 
         res = iterable_query.slice(start, stop)
         if step is not None:
-            return list(res)[None : None : item.step]  # type: ignore
+            return list(res)[None : None : item.step]
         else:
-            return list(res)  # type: ignore
+            return list(res)
     else:
         if item == -1:
             _no_negative_indexes()
@@ -2380,9 +2380,9 @@ def _extract_mapped_subtype(
             else:
                 return annotated, None
 
-        if len(annotated.__args__) != 1:  # type: ignore
+        if len(annotated.__args__) != 1:
             raise sa_exc.ArgumentError(
                 "Expected sub-type for Mapped[] annotation"
             )
 
-        return annotated.__args__[0], annotated.__origin__  # type: ignore
+        return annotated.__args__[0], annotated.__origin__
index 7f542ae013aa8a973ed256c1f5f1abf8f199a95b..915dc400b97931ffbd44f0d1b6a5f6e67fa9a22e 100644 (file)
@@ -323,13 +323,13 @@ class Pool(log.Identified, event.EventTarget):
 
         # mypy seems to get super confused assigning functions to
         # attributes
-        self._invoke_creator = self._should_wrap_creator(creator)  # type: ignore  # noqa: E501
+        self._invoke_creator = self._should_wrap_creator(creator)
 
     @_creator.deleter
     def _creator(self) -> None:
         # needed for mock testing
         del self._creator_arg
-        del self._invoke_creator  # type: ignore[misc]
+        del self._invoke_creator
 
     def _should_wrap_creator(
         self, creator: Union[_CreatorFnType, _CreatorWRecFnType]
@@ -835,7 +835,7 @@ class _ConnectionRecord(ConnectionPoolEntry):
         # time and invalidation for the logic below to work reliably.
 
         if self.dbapi_connection is None:
-            self.info.clear()  # type: ignore  # our info is always present
+            self.info.clear()
             self.__connect()
         elif (
             self.__pool._recycle > -1
@@ -863,7 +863,7 @@ class _ConnectionRecord(ConnectionPoolEntry):
 
         if recycle:
             self.__close(terminate=True)
-            self.info.clear()  # type: ignore  # our info is always present
+            self.info.clear()
 
             self.__connect()
 
index 8366b7bd2a47635dfdb4a5b07c0f89da687a1cd3..762418b14f20b63e4f498f02694a6639dc3a5797 100644 (file)
@@ -82,7 +82,7 @@ class PoolEvents(event.Events[Pool]):
             return None
 
     @classmethod
-    def _listen(  # type: ignore[override]   # would rather keep **kw
+    def _listen(
         cls,
         event_key: event._EventKey[Pool],
         **kw: Any,
index 84849edc29806ab20d079306d19edee2115b35db..af4f788e27d114f925a4511207117fb23e7d2f24 100644 (file)
@@ -386,7 +386,7 @@ class SingletonThreadPool(Pool):
 
     def _do_return_conn(self, record: ConnectionPoolEntry) -> None:
         try:
-            del self._fairy.current  # type: ignore
+            del self._fairy.current
         except AttributeError:
             pass
 
index a08a770945df2de522f8baddcff8dc7b15121e88..c9e183058e6ab23973cedaf47ca28d6a7b5cf69d 100644 (file)
@@ -411,7 +411,7 @@ def Nullable(
 
     .. versionadded:: 2.0.20
     """
-    return val  # type: ignore
+    return val
 
 
 @overload
index 4ccde591a9ac92b979b97578c19818e29daf0ec0..08ff47d3d64e48944e95c40edb0c18e949413d93 100644 (file)
@@ -300,7 +300,7 @@ class Annotated(SupportsAnnotations):
 
     def _annotate(self, values: _AnnotationDict) -> Self:
         _values = self._annotations.union(values)
-        new = self._with_annotations(_values)  # type: ignore
+        new = self._with_annotations(_values)
         return new
 
     def _with_annotations(self, values: _AnnotationDict) -> Self:
index 913ab4300d0d04b128658badf38657f9d893040d..a1d16f44768d04f7a095846920666369c461d455 100644 (file)
@@ -273,7 +273,7 @@ def _generative(fn: _Fn) -> _Fn:
 
     """
 
-    @util.decorator  # type: ignore
+    @util.decorator
     def _generative(
         fn: _Fn, self: _SelfGenerativeType, *args: Any, **kw: Any
     ) -> _SelfGenerativeType:
@@ -299,7 +299,7 @@ def _exclusive_against(*names: str, **kw: Any) -> Callable[[_Fn], _Fn]:
         for name in names
     ]
 
-    @util.decorator  # type: ignore
+    @util.decorator
     def check(fn, *args, **kw):
         # make pylance happy by not including "self" in the argument
         # list
@@ -315,7 +315,7 @@ def _exclusive_against(*names: str, **kw: Any) -> Callable[[_Fn], _Fn]:
                 raise exc.InvalidRequestError(msg)
         return fn(self, *args, **kw)
 
-    return check  # type: ignore
+    return check
 
 
 def _clone(element, **kw):
index 314cbe2167d31b35e47685f4a39006c0a45adc06..171dd1f1bd00d265334bec4bbb07d07e8dc5f787 100644 (file)
@@ -4089,7 +4089,7 @@ class SQLCompiler(Compiled):
                 from_linter.froms[cte._de_clone()] = cte_name
 
             if not is_new_cte and embedded_in_current_named_cte:
-                return self.preparer.format_alias(cte, cte_name)  # type: ignore[no-any-return]  # noqa: E501
+                return self.preparer.format_alias(cte, cte_name)
 
             if cte_pre_alias_name:
                 text = self.preparer.format_alias(cte, cte_pre_alias_name)
index 544f6771a28835215da02d4a1142422037acfaa0..298c50ec0f3856a8504215228ea64558404f4d9b 100644 (file)
@@ -491,10 +491,10 @@ def _key_getters_for_crud_column(
             key: Union[ColumnClause[Any], str]
         ) -> Union[str, Tuple[str, str]]:
             str_key = c_key_role(key)
-            if hasattr(key, "table") and key.table in _et:  # type: ignore
+            if hasattr(key, "table") and key.table in _et:
                 return (key.table.name, str_key)  # type: ignore
             else:
-                return str_key  # type: ignore
+                return str_key
 
         def _getattr_col_key(
             col: ColumnClause[Any],
@@ -513,7 +513,7 @@ def _key_getters_for_crud_column(
                 return col.key
 
     else:
-        _column_as_key = functools.partial(  # type: ignore
+        _column_as_key = functools.partial(
             coercions.expect_as_key, roles.DMLColumnRole
         )
         _getattr_col_key = _col_bind_name = operator.attrgetter("key")  # type: ignore  # noqa: E501
index 09cc54d5466ccea832ca393efff36ca85a9f76f7..51d2cdcf97070d2c3c5c348d9a270de7a49df14d 100644 (file)
@@ -470,7 +470,7 @@ class CreateSchema(_CreateBase):
 
     __visit_name__ = "create_schema"
 
-    stringify_dialect = "default"  # type: ignore
+    stringify_dialect = "default"
 
     def __init__(
         self,
@@ -491,7 +491,7 @@ class DropSchema(_DropBase):
 
     __visit_name__ = "drop_schema"
 
-    stringify_dialect = "default"  # type: ignore
+    stringify_dialect = "default"
 
     def __init__(
         self,
index 3917b5f02397d90403c49091b0fa10c9ec58e95c..7f7329f419d609d8f4cd01854cf35cb410fcfc27 100644 (file)
@@ -1606,12 +1606,12 @@ class ColumnElement(
         *other: Any,
         **kwargs: Any,
     ) -> ColumnElement[Any]:
-        return op(self.comparator, *other, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+        return op(self.comparator, *other, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
     def reverse_operate(
         self, op: operators.OperatorType, other: Any, **kwargs: Any
     ) -> ColumnElement[Any]:
-        return op(other, self.comparator, **kwargs)  # type: ignore[return-value,no-any-return]  # noqa: E501
+        return op(other, self.comparator, **kwargs)  # type: ignore[no-any-return]  # noqa: E501
 
     def _bind_param(
         self,
@@ -3125,7 +3125,7 @@ class BooleanClauseList(ExpressionClauseList[bool]):
                 }, *args)'.""",
                 version="1.4",
             )
-            return cls._construct_raw(operator)  # type: ignore[no-any-return]
+            return cls._construct_raw(operator)
 
         lcc, convert_clauses = cls._process_clauses_for_boolean(
             operator,
@@ -3155,7 +3155,7 @@ class BooleanClauseList(ExpressionClauseList[bool]):
             assert lcc
             # just one element.  return it as a single boolean element,
             # not a list and discard the operator.
-            return convert_clauses[0]  # type: ignore[no-any-return] # noqa: E501
+            return convert_clauses[0]
 
     @classmethod
     def _construct_for_whereclause(
index 455649cb96571a10a9c5fb113a83e1a8a6365a80..7aef605ac7219725bb1b5e20149f437a4ed93025 100644 (file)
@@ -718,7 +718,7 @@ class LinkedLambdaElement(StatementLambdaElement):
         opts: Union[Type[LambdaOptions], LambdaOptions],
     ):
         self.opts = opts
-        self.fn = fn  # type: ignore[assignment]
+        self.fn = fn
         self.parent_lambda = parent_lambda
 
         self.tracker_key = parent_lambda.tracker_key + (fn.__code__,)
index 6ec150424b8d6de4f89027bb5a26fa00f61a9f5f..cd878a5957cbf4eba5d5b83c9df78f297960e13a 100644 (file)
@@ -307,7 +307,7 @@ class Operators:
         )
 
         def against(other: Any) -> Operators:
-            return operator(self, other)  # type: ignore
+            return operator(self, other)
 
         return against
 
@@ -570,7 +570,7 @@ class ColumnOperators(Operators):
         return self.operate(le, other)
 
     # TODO: not sure why we have this
-    __hash__ = Operators.__hash__  # type: ignore
+    __hash__ = Operators.__hash__
 
     def __eq__(self, other: Any) -> ColumnOperators:  # type: ignore[override]
         """Implement the ``==`` operator.
index 8baf2de6a4260b161bed2572fd88ad5a1ff903ba..de6507145d72bcc04167f2544aded56b8c13a758 100644 (file)
@@ -1432,7 +1432,7 @@ class Table(
         elif schema is None:
             actual_schema = metadata.schema
         else:
-            actual_schema = schema  # type: ignore
+            actual_schema = schema
         key = _get_table_key(name, actual_schema)
         if key in metadata.tables:
             util.warn(
@@ -2451,14 +2451,8 @@ class Column(DialectKWArgs, SchemaItem, ColumnClause[_T]):
 
         # Constraint objects plus non-constraint-bound ForeignKey objects
         args: List[SchemaItem] = [
-            c._copy(**kw)
-            for c in self.constraints
-            if not c._type_bound  # type: ignore
-        ] + [
-            c._copy(**kw)  # type: ignore
-            for c in self.foreign_keys
-            if not c.constraint
-        ]
+            c._copy(**kw) for c in self.constraints if not c._type_bound
+        ] + [c._copy(**kw) for c in self.foreign_keys if not c.constraint]
 
         # ticket #5276
         column_kwargs = {}
@@ -3972,7 +3966,7 @@ class FetchedValue(SchemaEventTarget):
         if for_update == self.for_update:
             return self
         else:
-            return self._clone(for_update)  # type: ignore
+            return self._clone(for_update)
 
     def _copy(self) -> FetchedValue:
         return FetchedValue(self.for_update)
@@ -4150,7 +4144,7 @@ class Constraint(DialectKWArgs, HasConditionalDDL, SchemaItem):
         "and will be removed in a future release.",
     )
     def copy(self, **kw: Any) -> Self:
-        return self._copy(**kw)  # type: ignore
+        return self._copy(**kw)
 
     def _copy(self, **kw: Any) -> Self:
         raise NotImplementedError()
@@ -5309,7 +5303,7 @@ _NamingSchemaParameter = Union[
 
 
 DEFAULT_NAMING_CONVENTION: _NamingSchemaParameter = util.immutabledict(
-    {"ix": "ix_%(column_0_label)s"}  # type: ignore[arg-type]
+    {"ix": "ix_%(column_0_label)s"}
 )
 
 
index 71fca7e1f2b095a002bc0b70f08907503dcd2670..c1a47b0cecbd534d5a71d9b695656d6211b2fb4d 100644 (file)
@@ -323,9 +323,7 @@ class Selectable(ReturnsRows):
         object, returning a copy of this :class:`_expression.FromClause`.
 
         """
-        return util.preloaded.sql_util.ClauseAdapter(alias).traverse(  # type: ignore  # noqa: E501
-            self
-        )
+        return util.preloaded.sql_util.ClauseAdapter(alias).traverse(self)
 
     def corresponding_column(
         self, column: KeyedColumnElement[Any], require_embedded: bool = False
@@ -1420,7 +1418,7 @@ class Join(roles.DMLTableRole, FromClause):
                 continue
             for fk in sorted(
                 b.foreign_keys,
-                key=lambda fk: fk.parent._creation_order,  # type: ignore
+                key=lambda fk: fk.parent._creation_order,
             ):
                 if (
                     consider_as_foreign_keys is not None
@@ -1441,7 +1439,7 @@ class Join(roles.DMLTableRole, FromClause):
             if left is not b:
                 for fk in sorted(
                     left.foreign_keys,
-                    key=lambda fk: fk.parent._creation_order,  # type: ignore
+                    key=lambda fk: fk.parent._creation_order,
                 ):
                     if (
                         consider_as_foreign_keys is not None
@@ -4752,7 +4750,7 @@ class SelectState(util.MemoizedSlots, CompileState):
         Dict[str, ColumnElement[Any]],
     ]:
         with_cols: Dict[str, ColumnElement[Any]] = {
-            c._tq_label or c.key: c  # type: ignore
+            c._tq_label or c.key: c
             for c in self.statement._all_selected_columns
             if c._allow_label_resolve
         }
@@ -5020,7 +5018,7 @@ class _MemoizedSelectEntities(
         c.__dict__ = {k: v for k, v in self.__dict__.items()}
 
         c._is_clone_of = self.__dict__.get("_is_clone_of", self)
-        return c  # type: ignore
+        return c
 
     @classmethod
     def _generate_for_statement(cls, select_stmt: Select[Any]) -> None:
index 5c782f1db647e534ed735ada689e73201596f738..5758dff3c430ab1148665ad43502c7a448104ad9 100644 (file)
@@ -56,15 +56,15 @@ def _preconfigure_traversals(target_hierarchy: Type[Any]) -> None:
         if hasattr(cls, "_generate_cache_attrs") and hasattr(
             cls, "_traverse_internals"
         ):
-            cls._generate_cache_attrs()  # type: ignore
+            cls._generate_cache_attrs()
             _copy_internals.generate_dispatch(
-                cls,  # type: ignore
-                cls._traverse_internals,  # type: ignore
+                cls,
+                cls._traverse_internals,
                 "_generated_copy_internals_traversal",
             )
             _get_children.generate_dispatch(
-                cls,  # type: ignore
-                cls._traverse_internals,  # type: ignore
+                cls,
+                cls._traverse_internals,
                 "_generated_get_children_traversal",
             )
 
index 2be397288ec437886a8c7c54deacda3e379f8cc4..f839cf57d82801eb585af3aef4efeb3b667e67f5 100644 (file)
@@ -191,7 +191,7 @@ class TypeEngine(Visitable, Generic[_T]):
             op_fn, addtl_kw = default_comparator.operator_lookup[op.__name__]
             if kwargs:
                 addtl_kw = addtl_kw.union(kwargs)
-            return op_fn(self.expr, op, *other, **addtl_kw)  # type: ignore
+            return op_fn(self.expr, op, *other, **addtl_kw)
 
         @util.preload_module("sqlalchemy.sql.default_comparator")
         def reverse_operate(
@@ -201,7 +201,7 @@ class TypeEngine(Visitable, Generic[_T]):
             op_fn, addtl_kw = default_comparator.operator_lookup[op.__name__]
             if kwargs:
                 addtl_kw = addtl_kw.union(kwargs)
-            return op_fn(self.expr, op, other, reverse=True, **addtl_kw)  # type: ignore  # noqa: E501
+            return op_fn(self.expr, op, other, reverse=True, **addtl_kw)
 
         def _adapt_expression(
             self,
@@ -816,7 +816,7 @@ class TypeEngine(Visitable, Generic[_T]):
         best_uppercase = None
 
         if not isinstance(self, TypeEngine):
-            return self.__class__  # type: ignore  # mypy bug?
+            return self.__class__
 
         for t in self.__class__.__mro__:
             if (
@@ -2323,7 +2323,7 @@ def to_instance(
         return NULLTYPE
 
     if callable(typeobj):
-        return typeobj(*arg, **kw)  # type: ignore  # for pyright
+        return typeobj(*arg, **kw)
     else:
         return typeobj
 
index 0a50197a0d4a4493ee5dbed5b2d7f239dd878644..28480a5d437f0a89cb8c3c524ba03d3721760f7e 100644 (file)
@@ -1440,7 +1440,7 @@ def _offset_or_limit_clause_asint_if_possible(
     if clause is None:
         return None
     if hasattr(clause, "_limit_offset_value"):
-        value = clause._limit_offset_value  # type: ignore
+        value = clause._limit_offset_value
         return util.asint(value)
     else:
         return clause
@@ -1489,13 +1489,11 @@ def _make_slice(
             offset_clause = 0
 
         if start != 0:
-            offset_clause = offset_clause + start  # type: ignore
+            offset_clause = offset_clause + start
 
         if offset_clause == 0:
             offset_clause = None
         else:
-            offset_clause = _offset_or_limit_clause(
-                offset_clause  # type: ignore
-            )
+            offset_clause = _offset_or_limit_clause(offset_clause)
 
-    return limit_clause, offset_clause  # type: ignore
+    return limit_clause, offset_clause
index 2e793e862b79e515021bc31ca57b46ebcc4fffa3..c3f3c2b9f87a3054b179cb4775d580445fcd99b8 100644 (file)
@@ -189,7 +189,7 @@ class Properties(Generic[_T]):
         return dir(super()) + [str(k) for k in self._data.keys()]
 
     def __add__(self, other: Properties[_F]) -> List[Union[_T, _F]]:
-        return list(self) + list(other)  # type: ignore
+        return list(self) + list(other)
 
     def __setitem__(self, key: str, obj: _T) -> None:
         self._data[key] = obj
@@ -393,16 +393,16 @@ class UniqueAppender(Generic[_T]):
         self.data = data
         self._unique = {}
         if via:
-            self._data_appender = getattr(data, via)  # type: ignore[assignment]  # noqa: E501
+            self._data_appender = getattr(data, via)
         elif hasattr(data, "append"):
-            self._data_appender = cast("List[_T]", data).append  # type: ignore[assignment]  # noqa: E501
+            self._data_appender = cast("List[_T]", data).append
         elif hasattr(data, "add"):
-            self._data_appender = cast("Set[_T]", data).add  # type: ignore[assignment]  # noqa: E501
+            self._data_appender = cast("Set[_T]", data).add
 
     def append(self, item: _T) -> None:
         id_ = id(item)
         if id_ not in self._unique:
-            self._data_appender(item)  # type: ignore[call-arg]
+            self._data_appender(item)
             self._unique[id_] = True
 
     def __iter__(self) -> Iterator[_T]:
@@ -677,7 +677,7 @@ class ThreadLocalRegistry(ScopedRegistry[_T]):
             return self.registry.value  # type: ignore[no-any-return]
         except AttributeError:
             val = self.registry.value = self.createfunc()
-            return val  # type: ignore[no-any-return]
+            return val
 
     def has(self) -> bool:
         return hasattr(self.registry, "value")
index 2b6ae8750a0912212fd08accfec40611593d8e56..71d10a68579902689dc3ff75c0976c36d02bd99e 100644 (file)
@@ -69,7 +69,7 @@ def is_exit_exception(e: BaseException) -> bool:
 # Issue for context: https://github.com/python-greenlet/greenlet/issues/173
 
 
-class _AsyncIoGreenlet(greenlet):  # type: ignore
+class _AsyncIoGreenlet(greenlet):
     dead: bool
 
     def __init__(self, fn: Callable[..., Any], driver: greenlet):
@@ -147,7 +147,7 @@ def await_fallback(awaitable: Awaitable[_T]) -> _T:
                 "loop is already running; can't call await_fallback() here. "
                 "Was IO attempted in an unexpected place?"
             )
-        return loop.run_until_complete(awaitable)  # type: ignore[no-any-return]  # noqa: E501
+        return loop.run_until_complete(awaitable)
 
     return current.driver.switch(awaitable)  # type: ignore[no-any-return]
 
index 9962493b5cb375083ee0c6f0e0894cd63105b391..4f52d3bce67615edddd428fa35b26460ef7bc4da 100644 (file)
@@ -227,11 +227,11 @@ class OrderedSet(Set[_T]):
                     super().add(e)
 
     def __ior__(self, other: AbstractSet[_S]) -> OrderedSet[Union[_T, _S]]:
-        self.update(other)  # type: ignore
-        return self  # type: ignore
+        self.update(other)
+        return self
 
     def union(self, *other: Iterable[_S]) -> OrderedSet[Union[_T, _S]]:
-        result: OrderedSet[Union[_T, _S]] = self.copy()  # type: ignore
+        result: OrderedSet[Union[_T, _S]] = self.copy()
         result.update(*other)
         return result
 
index dd5851cb3cec4e858bae9ee8462f911a10d7394e..26d9924898be6a84a5752c545f534c309a545b21 100644 (file)
@@ -246,7 +246,7 @@ def deprecated_params(**specs: Tuple[str, str]) -> Callable[[_F], _F]:
 
         # latest mypy has opinions here, not sure if they implemented
         # Concatenate or something
-        @decorator  # type: ignore
+        @decorator
         def warned(fn: _F, *args: Any, **kwargs: Any) -> _F:
             for m in check_defaults:
                 if (defaults[m] is None and kwargs[m] is not None) or (
@@ -290,9 +290,9 @@ def deprecated_params(**specs: Tuple[str, str]) -> Callable[[_F], _F]:
                     for param, (version, message) in specs.items()
                 },
             )
-        decorated = warned(fn)  # type: ignore
+        decorated = warned(fn)
         decorated.__doc__ = doc
-        return decorated  # type: ignore[no-any-return]
+        return decorated
 
     return decorate
 
@@ -334,7 +334,7 @@ def _decorate_cls_with_warning(
             clsdict["__doc__"] = doc
             clsdict.pop("__dict__", None)
             clsdict.pop("__weakref__", None)
-            cls = type(cls.__name__, cls.__bases__, clsdict)  # type: ignore
+            cls = type(cls.__name__, cls.__bases__, clsdict)
             if constructor is not None:
                 constructor_fn = clsdict[constructor]
 
@@ -376,7 +376,7 @@ def _decorate_with_warning(
     else:
         doc_only = ""
 
-    @decorator  # type: ignore
+    @decorator
     def warned(fn: _F, *args: Any, **kwargs: Any) -> _F:
         skip_warning = not enable_warnings or kwargs.pop(
             "_sa_skip_warning", False
@@ -393,9 +393,9 @@ def _decorate_with_warning(
 
         doc = inject_docstring_text(doc, docstring_header, 1)
 
-    decorated = warned(func)  # type: ignore
+    decorated = warned(func)
     decorated.__doc__ = doc
     decorated._sa_warn = lambda: _warn_with_version(  # type: ignore
         message, version, wtype, stacklevel=3
     )
-    return decorated  # type: ignore[no-any-return]
+    return decorated
index 6c9afb5df62f11e08e4fea031d5cef4a705c7195..33d617e80c077c8c0019dab03d67971eb714bf37 100644 (file)
@@ -526,12 +526,10 @@ def get_callable_argspec(
             fn.__init__, no_self=no_self, _is_init=True
         )
     elif hasattr(fn, "__func__"):
-        return compat.inspect_getfullargspec(fn.__func__)  # type: ignore[attr-defined] # noqa: E501
+        return compat.inspect_getfullargspec(fn.__func__)
     elif hasattr(fn, "__call__"):
-        if inspect.ismethod(fn.__call__):  # type: ignore [operator]
-            return get_callable_argspec(
-                fn.__call__, no_self=no_self  # type: ignore [operator]
-            )
+        if inspect.ismethod(fn.__call__):
+            return get_callable_argspec(fn.__call__, no_self=no_self)
         else:
             raise TypeError("Can't inspect callable: %s" % fn)
     else:
@@ -1082,7 +1080,7 @@ class generic_fn_descriptor(Generic[_T_co]):
     __name__: str
 
     def __init__(self, fget: Callable[..., _T_co], doc: Optional[str] = None):
-        self.fget = fget  # type: ignore[assignment]
+        self.fget = fget
         self.__doc__ = doc or fget.__doc__
         self.__name__ = fget.__name__
 
@@ -1237,12 +1235,11 @@ class HasMemoized:
         __name__: str
 
         def __init__(self, fget: Callable[..., _T], doc: Optional[str] = None):
-            # https://github.com/python/mypy/issues/708
-            self.fget = fget  # type: ignore
+            self.fget = fget
             self.__doc__ = doc or fget.__doc__
             self.__name__ = fget.__name__
 
-        @overload  # type: ignore[override]
+        @overload
         def __get__(self: _MA, obj: None, cls: Any) -> _MA:
             ...
 
@@ -1476,7 +1473,7 @@ def assert_arg_type(
         if isinstance(argtype, tuple):
             raise exc.ArgumentError(
                 "Argument '%s' is expected to be one of type %s, got '%s'"
-                % (name, " or ".join("'%s'" % a for a in argtype), type(arg))  # type: ignore  # noqa: E501
+                % (name, " or ".join("'%s'" % a for a in argtype), type(arg))
             )
         else:
             raise exc.ArgumentError(
@@ -1527,7 +1524,7 @@ class classproperty(property):
         self.__doc__ = fget.__doc__
 
     def __get__(self, obj: Any, cls: Optional[type] = None) -> Any:
-        return self.fget(cls)  # type: ignore
+        return self.fget(cls)
 
 
 class hybridproperty(Generic[_T]):
index 6e2a97d20bb3d99c368e5eca8b4a28223de7bbda..3cdf49301f79b6b6b3e3678d34fbd0cabdec3afd 100644 (file)
@@ -66,14 +66,13 @@ mypy_path = "./lib/"
 show_error_codes = true
 incremental = true
 
-
 [[tool.mypy.overrides]]
 
 module = [
     "sqlalchemy.*"
 ]
 
-warn_unused_ignores = false
+warn_unused_ignores = true
 strict = true
 
 
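The pyproject.toml change above is what drives the rest of the diff: with warn_unused_ignores = true in effect for the sqlalchemy.* override (which already runs strict = true), a superfluous # type: ignore becomes an error in its own right. Suppressions that are still required stay in place, in some cases with a more precise error code, as in the log.py hunk earlier in this commit.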
index e68b4b44a736188fd35280d0b9bfbbc431cae573..f6289d19346a9daf2aef11e49f9e1a4b07f57fee 100644 (file)
@@ -2,10 +2,10 @@ from datetime import datetime
 
 from sqlalchemy import create_engine
 from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
 from sqlalchemy.orm import registry
 from sqlalchemy.orm import Session
 from sqlalchemy.sql.functions import now
-from sqlalchemy.testing.schema import mapped_column
 
 mapper_registry: registry = registry()
 e = create_engine("sqlite:///database.db", echo=True)