From: Daniele Varrazzo
Date: Thu, 30 May 2024 01:23:56 +0000 (+0200)
Subject: refactor: drop use of typing.Type
X-Git-Tag: 3.2.0~19^2~5
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=2a63d6094c94b0fa1d47fc33db3699299f443042;p=thirdparty%2Fpsycopg.git

refactor: drop use of typing.Type
---

diff --git a/psycopg/psycopg/_adapters_map.py b/psycopg/psycopg/_adapters_map.py
index e4e72242f..55a3cf1e4 100644
--- a/psycopg/psycopg/_adapters_map.py
+++ b/psycopg/psycopg/_adapters_map.py
@@ -6,7 +6,7 @@ Mapping from types/oids to Dumpers/Loaders

 from __future__ import annotations

-from typing import Any, Type, cast, TYPE_CHECKING
+from typing import Any, cast, TYPE_CHECKING

 from . import pq
 from . import errors as e
@@ -61,9 +61,9 @@ class AdaptersMap:

     types: TypesRegistry

-    _dumpers: dict[PyFormat, dict[type | str, Type[Dumper]]]
-    _dumpers_by_oid: list[dict[int, Type[Dumper]]]
-    _loaders: list[dict[int, Type[Loader]]]
+    _dumpers: dict[PyFormat, dict[type | str, type[Dumper]]]
+    _dumpers_by_oid: list[dict[int, type[Dumper]]]
+    _loaders: list[dict[int, type[Loader]]]

     # Record if a dumper or loader has an optimised version.
     _optimised: dict[type, type] = {}
@@ -109,7 +109,7 @@ class AdaptersMap:
     def connection(self) -> "BaseConnection[Any]" | None:
         return None

-    def register_dumper(self, cls: type | str | None, dumper: Type[Dumper]) -> None:
+    def register_dumper(self, cls: type | str | None, dumper: type[Dumper]) -> None:
         """
         Configure the context to use `!dumper` to convert objects of type `!cls`.

@@ -160,7 +160,7 @@ class AdaptersMap:

         self._dumpers_by_oid[dumper.format][dumper.oid] = dumper

-    def register_loader(self, oid: int | str, loader: Type["Loader"]) -> None:
+    def register_loader(self, oid: int | str, loader: type[Loader]) -> None:
         """
         Configure the context to use `!loader` to convert data of oid `!oid`.

@@ -186,7 +186,7 @@ class AdaptersMap:

         self._loaders[fmt][oid] = loader

-    def get_dumper(self, cls: type, format: PyFormat) -> Type["Dumper"]:
+    def get_dumper(self, cls: type, format: PyFormat) -> type[Dumper]:
         """
         Return the dumper class for the given type and format.

@@ -225,7 +225,7 @@ class AdaptersMap:
             f" (format: {format.name})"
         )

-    def get_dumper_by_oid(self, oid: int, format: pq.Format) -> Type["Dumper"]:
+    def get_dumper_by_oid(self, oid: int, format: pq.Format) -> type[Dumper]:
         """
         Return the dumper class for the given oid and format.

@@ -255,7 +255,7 @@ class AdaptersMap:
         )
         raise e.ProgrammingError(msg)

-    def get_loader(self, oid: int, format: pq.Format) -> Type["Loader"] | None:
+    def get_loader(self, oid: int, format: pq.Format) -> type[Loader] | None:
         """
         Return the loader class for the given oid and format.

@@ -267,7 +267,7 @@ class AdaptersMap:
         return self._loaders[format].get(oid)

     @classmethod
-    def _get_optimised(self, cls: Type[RV]) -> Type[RV]:
+    def _get_optimised(self, cls: type[RV]) -> type[RV]:
         """Return the optimised version of a Dumper or Loader class.

         Return the input class itself if there is no optimised version.
@@ -282,7 +282,7 @@ class AdaptersMap:
         from psycopg import types

         if cls.__module__.startswith(types.__name__):
-            new = cast(Type[RV], getattr(_psycopg, cls.__name__, None))
+            new = cast(type[RV], getattr(_psycopg, cls.__name__, None))
             if new:
                 self._optimised[cls] = new
                 return new
diff --git a/psycopg/psycopg/_copy.py b/psycopg/psycopg/_copy.py
index 8d779cc59..7cbd98853 100644
--- a/psycopg/psycopg/_copy.py
+++ b/psycopg/psycopg/_copy.py
@@ -11,7 +11,7 @@ from __future__ import annotations

 from abc import ABC, abstractmethod
 from types import TracebackType
-from typing import Any, Iterator, Type, Tuple, Sequence, TYPE_CHECKING
+from typing import Any, Iterator, Tuple, Sequence, TYPE_CHECKING

 from . import pq
 from . import errors as e
@@ -71,7 +71,7 @@ class Copy(BaseCopy["Connection[Any]"]):

     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg/psycopg/_copy_async.py b/psycopg/psycopg/_copy_async.py
index 4be61bf82..8f3900542 100644
--- a/psycopg/psycopg/_copy_async.py
+++ b/psycopg/psycopg/_copy_async.py
@@ -8,7 +8,7 @@ from __future__ import annotations

 from abc import ABC, abstractmethod
 from types import TracebackType
-from typing import Any, AsyncIterator, Type, Tuple, Sequence, TYPE_CHECKING
+from typing import Any, AsyncIterator, Tuple, Sequence, TYPE_CHECKING

 from . import pq
 from . import errors as e
@@ -68,7 +68,7 @@ class AsyncCopy(BaseCopy["AsyncConnection[Any]"]):

     async def __aexit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg/psycopg/_cursor_base.py b/psycopg/psycopg/_cursor_base.py
index 6e02586bd..3da9ce5b4 100644
--- a/psycopg/psycopg/_cursor_base.py
+++ b/psycopg/psycopg/_cursor_base.py
@@ -7,7 +7,7 @@ Psycopg BaseCursor object
 from __future__ import annotations

 from functools import partial
-from typing import Any, Generic, Iterable, NoReturn, Sequence, Tuple, Type
+from typing import Any, Generic, Iterable, NoReturn, Sequence, Tuple
 from typing import TYPE_CHECKING

 from . import pq
@@ -55,7 +55,7 @@ class BaseCursor(Generic[ConnectionType, Row]):
     _tx: "Transformer"
     _make_row: RowMaker[Row]
     _pgconn: "PGconn"
-    _query_cls: Type[PostgresQuery] = PostgresQuery
+    _query_cls: type[PostgresQuery] = PostgresQuery

     def __init__(self, connection: ConnectionType):
         self._conn = connection
diff --git a/psycopg/psycopg/_pipeline.py b/psycopg/psycopg/_pipeline.py
index 09be343bb..3f4ad769d 100644
--- a/psycopg/psycopg/_pipeline.py
+++ b/psycopg/psycopg/_pipeline.py
@@ -8,7 +8,7 @@ from __future__ import annotations

 import logging
 from types import TracebackType
-from typing import Any, Tuple, Type, TYPE_CHECKING
+from typing import Any, Tuple, TYPE_CHECKING

 from . import pq
 from . import errors as e
@@ -214,7 +214,7 @@ class Pipeline(BasePipeline):

     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
@@ -254,7 +254,7 @@ class AsyncPipeline(BasePipeline):

     async def __aexit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg/psycopg/_transformer.py b/psycopg/psycopg/_transformer.py
index fe21d2b23..d5a0aacdb 100644
--- a/psycopg/psycopg/_transformer.py
+++ b/psycopg/psycopg/_transformer.py
@@ -6,12 +6,12 @@ This module exports the requested implementation to the rest of the package.

 # Copyright (C) 2023 The Psycopg Team

-from typing import Type
+from __future__ import annotations

 from . import abc
 from ._cmodule import _psycopg

-Transformer: Type[abc.Transformer]
+Transformer: type[abc.Transformer]

 if _psycopg:
     Transformer = _psycopg.Transformer
diff --git a/psycopg/psycopg/connection.py b/psycopg/psycopg/connection.py
index 3ad98bffa..c285add9e 100644
--- a/psycopg/psycopg/connection.py
+++ b/psycopg/psycopg/connection.py
@@ -12,8 +12,7 @@ from __future__ import annotations
 import logging
 from time import monotonic
 from types import TracebackType
-from typing import Any, Generator, Iterator
-from typing import Type, cast, overload, TYPE_CHECKING
+from typing import Any, Generator, Iterator, cast, overload, TYPE_CHECKING
 from contextlib import contextmanager

 from . import pq
@@ -62,8 +61,8 @@ class Connection(BaseConnection[Row]):

     __module__ = "psycopg"

-    cursor_factory: Type[Cursor[Row]]
-    server_cursor_factory: Type[ServerCursor[Row]]
+    cursor_factory: type[Cursor[Row]]
+    server_cursor_factory: type[ServerCursor[Row]]
     row_factory: RowFactory[Row]
     _pipeline: Pipeline | None

@@ -87,7 +86,7 @@ class Connection(BaseConnection[Row]):
         prepare_threshold: int | None = 5,
         context: AdaptContext | None = None,
         row_factory: RowFactory[Row] | None = None,
-        cursor_factory: Type[Cursor[Row]] | None = None,
+        cursor_factory: type[Cursor[Row]] | None = None,
         **kwargs: ConnParam,
     ) -> Self:
         """
@@ -135,7 +134,7 @@ class Connection(BaseConnection[Row]):

     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg/psycopg/connection_async.py b/psycopg/psycopg/connection_async.py
index 5d4422963..12de50a54 100644
--- a/psycopg/psycopg/connection_async.py
+++ b/psycopg/psycopg/connection_async.py
@@ -9,8 +9,7 @@ from __future__ import annotations
 import logging
 from time import monotonic
 from types import TracebackType
-from typing import Any, AsyncGenerator, AsyncIterator
-from typing import Type, cast, overload, TYPE_CHECKING
+from typing import Any, AsyncGenerator, AsyncIterator, cast, overload, TYPE_CHECKING
 from contextlib import asynccontextmanager

 from . import pq
@@ -68,8 +67,8 @@ class AsyncConnection(BaseConnection[Row]):

     __module__ = "psycopg"

-    cursor_factory: Type[AsyncCursor[Row]]
-    server_cursor_factory: Type[AsyncServerCursor[Row]]
+    cursor_factory: type[AsyncCursor[Row]]
+    server_cursor_factory: type[AsyncServerCursor[Row]]
     row_factory: AsyncRowFactory[Row]
     _pipeline: AsyncPipeline | None

@@ -93,7 +92,7 @@ class AsyncConnection(BaseConnection[Row]):
         prepare_threshold: int | None = 5,
         context: AdaptContext | None = None,
         row_factory: AsyncRowFactory[Row] | None = None,
-        cursor_factory: Type[AsyncCursor[Row]] | None = None,
+        cursor_factory: type[AsyncCursor[Row]] | None = None,
         **kwargs: ConnParam,
     ) -> Self:
         """
@@ -151,7 +150,7 @@ class AsyncConnection(BaseConnection[Row]):

     async def __aexit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg/psycopg/cursor.py b/psycopg/psycopg/cursor.py
index 52415e78c..6d1ddf019 100644
--- a/psycopg/psycopg/cursor.py
+++ b/psycopg/psycopg/cursor.py
@@ -10,7 +10,7 @@ Psycopg Cursor object.
 from __future__ import annotations

 from types import TracebackType
-from typing import Any, Iterator, Iterable, Type, TYPE_CHECKING, overload
+from typing import Any, Iterator, Iterable, TYPE_CHECKING, overload
 from contextlib import contextmanager

 from . import pq
@@ -51,7 +51,7 @@ class Cursor(BaseCursor["Connection[Any]", Row]):

     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg/psycopg/cursor_async.py b/psycopg/psycopg/cursor_async.py
index 658da8788..b708d5d6c 100644
--- a/psycopg/psycopg/cursor_async.py
+++ b/psycopg/psycopg/cursor_async.py
@@ -7,7 +7,7 @@ Psycopg AsyncCursor object.
 from __future__ import annotations

 from types import TracebackType
-from typing import Any, AsyncIterator, Iterable, Type, TYPE_CHECKING, overload
+from typing import Any, AsyncIterator, Iterable, TYPE_CHECKING, overload
 from contextlib import asynccontextmanager

 from . import pq
@@ -51,7 +51,7 @@ class AsyncCursor(BaseCursor["AsyncConnection[Any]", Row]):

     async def __aexit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg/psycopg/errors.py b/psycopg/psycopg/errors.py
index a81a2ae51..e0a82dfda 100644
--- a/psycopg/psycopg/errors.py
+++ b/psycopg/psycopg/errors.py
@@ -21,7 +21,7 @@ DBAPI-defined Exceptions are defined in the following hierarchy::
 from __future__ import annotations

 from dataclasses import dataclass, field, fields
-from typing import Any, Callable, NoReturn, Sequence, Tuple, Type, TYPE_CHECKING
+from typing import Any, Callable, NoReturn, Sequence, Tuple, TYPE_CHECKING
 from asyncio import CancelledError

 from .pq.abc import PGconn, PGresult
@@ -33,7 +33,7 @@ if TYPE_CHECKING:

 ErrorInfo: TypeAlias = None | PGresult | dict[int, bytes | None]

-_sqlcodes: dict[str, "Type[Error]"] = {}
+_sqlcodes: dict[str, type[Error]] = {}


 @dataclass
@@ -532,7 +532,7 @@ def _info_to_dict(info: ErrorInfo) -> ErrorInfo:
         return info


-def lookup(sqlstate: str) -> Type[Error]:
+def lookup(sqlstate: str) -> type[Error]:
     """Lookup an error code or `constant name`__ and return its exception class.

     Raise `!KeyError` if the code is not found.
@@ -561,14 +561,14 @@ def _is_pgresult(info: ErrorInfo) -> TypeGuard[PGresult]:
     return hasattr(info, "error_field")


-def _class_for_state(sqlstate: str) -> Type[Error]:
+def _class_for_state(sqlstate: str) -> type[Error]:
     try:
         return lookup(sqlstate)
     except KeyError:
         return get_base_exception(sqlstate)


-def get_base_exception(sqlstate: str) -> Type[Error]:
+def get_base_exception(sqlstate: str) -> type[Error]:
     return (
         _base_exc_map.get(sqlstate[:2])
         or _base_exc_map.get(sqlstate[:1])
diff --git a/psycopg/psycopg/pq/__init__.py b/psycopg/psycopg/pq/__init__.py
index 650d66463..4d6f29671 100644
--- a/psycopg/psycopg/pq/__init__.py
+++ b/psycopg/psycopg/pq/__init__.py
@@ -13,7 +13,7 @@ from __future__ import annotations

 import os
 import logging
-from typing import Callable, Type
+from typing import Callable

 from . import abc
 from .misc import ConninfoOption, PGnotify, PGresAttDesc
@@ -40,12 +40,12 @@ Certain features might not be available if the built version is too old.
 """

 version: Callable[[], int]
-PGconn: Type[abc.PGconn]
-PGresult: Type[abc.PGresult]
-Conninfo: Type[abc.Conninfo]
-Escaping: Type[abc.Escaping]
-PGcancel: Type[abc.PGcancel]
-PGcancelConn: Type[abc.PGcancelConn]
+PGconn: type[abc.PGconn]
+PGresult: type[abc.PGresult]
+Conninfo: type[abc.Conninfo]
+Escaping: type[abc.Escaping]
+PGcancel: type[abc.PGcancel]
+PGcancelConn: type[abc.PGcancelConn]


 def import_from_libpq() -> None:
diff --git a/psycopg/psycopg/rows.py b/psycopg/psycopg/rows.py
index 53490a593..bf5f5fe98 100644
--- a/psycopg/psycopg/rows.py
+++ b/psycopg/psycopg/rows.py
@@ -8,7 +8,7 @@ from __future__ import annotations

 import functools
 from typing import Any, Callable, NamedTuple, NoReturn
-from typing import TYPE_CHECKING, Protocol, Sequence, Tuple, Type
+from typing import TYPE_CHECKING, Protocol, Sequence, Tuple
 from collections import namedtuple

 from . import pq
@@ -145,12 +145,12 @@ def namedtuple_row(


 @functools.lru_cache(512)
-def _make_nt(enc: str, *names: bytes) -> Type[NamedTuple]:
+def _make_nt(enc: str, *names: bytes) -> type[NamedTuple]:
     snames = tuple(_as_python_identifier(n.decode(enc)) for n in names)
     return namedtuple("Row", snames)  # type: ignore[return-value]


-def class_row(cls: Type[T]) -> BaseRowFactory[T]:
+def class_row(cls: type[T]) -> BaseRowFactory[T]:
     r"""Generate a row factory to represent rows as instances of the class `!cls`.

     The class must support every output column name as a keyword parameter.
diff --git a/psycopg/psycopg/transaction.py b/psycopg/psycopg/transaction.py
index 7aa7b1171..f44fc5ac5 100644
--- a/psycopg/psycopg/transaction.py
+++ b/psycopg/psycopg/transaction.py
@@ -9,7 +9,7 @@ from __future__ import annotations

 import logging
 from types import TracebackType
-from typing import Generic, Iterator, Type, TYPE_CHECKING
+from typing import Generic, Iterator, TYPE_CHECKING

 from . import pq
 from . import sql
@@ -100,7 +100,7 @@ class BaseTransaction(Generic[ConnectionType]):

     def _exit_gen(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> PQGen[bool]:
@@ -247,7 +247,7 @@ class Transaction(BaseTransaction["Connection[Any]"]):

     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> bool:
@@ -276,7 +276,7 @@ class AsyncTransaction(BaseTransaction["AsyncConnection[Any]"]):

     async def __aexit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> bool:
diff --git a/psycopg/psycopg/types/array.py b/psycopg/psycopg/types/array.py
index 2cae3d4ef..feb1254b9 100644
--- a/psycopg/psycopg/types/array.py
+++ b/psycopg/psycopg/types/array.py
@@ -9,7 +9,7 @@ from __future__ import annotations
 import re
 import struct
 from math import prod
-from typing import Any, cast, Callable, Pattern, Set, Tuple, Type
+from typing import Any, cast, Callable, Pattern, Set, Tuple

 from .. import pq
 from .. import errors as e
@@ -337,7 +337,7 @@ def register_array(info: TypeInfo, context: AdaptContext | None = None) -> None:


 @cache
-def _make_loader(name: str, oid: int, delimiter: str) -> Type[Loader]:
+def _make_loader(name: str, oid: int, delimiter: str) -> type[Loader]:
     # Note: caching this function is really needed because, if the C extension
     # is available, the resulting type cannot be GC'd, so calling
     # register_array() in a loop results in a leak. See #647.
@@ -349,13 +349,13 @@ def _make_loader(name: str, oid: int, delimiter: str) -> Type[Loader]:
 @cache
 def _make_dumper(
     name: str, oid: int, array_oid: int, delimiter: str
-) -> Type[BaseListDumper]:
+) -> type[BaseListDumper]:
     attribs = {"oid": array_oid, "element_oid": oid, "delimiter": delimiter.encode()}
     return type(f"{name.title()}ListDumper", (ListDumper,), attribs)


 @cache
-def _make_binary_dumper(name: str, oid: int, array_oid: int) -> Type[BaseListDumper]:
+def _make_binary_dumper(name: str, oid: int, array_oid: int) -> type[BaseListDumper]:
     attribs = {"oid": array_oid, "element_oid": oid}
     return type(f"{name.title()}ListBinaryDumper", (ListBinaryDumper,), attribs)

diff --git a/psycopg/psycopg/types/composite.py b/psycopg/psycopg/types/composite.py
index 5e09bd920..0f0fca0ef 100644
--- a/psycopg/psycopg/types/composite.py
+++ b/psycopg/psycopg/types/composite.py
@@ -10,7 +10,7 @@ import re
 import struct
 from collections import namedtuple
 from typing import Any, Callable, cast, Iterator
-from typing import NamedTuple, Sequence, Tuple, Type, TYPE_CHECKING
+from typing import NamedTuple, Sequence, Tuple, TYPE_CHECKING

 from .. import pq
 from .. import abc
@@ -307,7 +307,7 @@ def register_composite(
     adapters = context.adapters if context else postgres.adapters

     # generate and register a customized text loader
-    loader: Type[BaseCompositeLoader]
+    loader: type[BaseCompositeLoader]
     loader = _make_loader(info.name, tuple(info.field_types), factory)
     adapters.register_loader(info.oid, loader)

@@ -317,7 +317,7 @@ def register_composite(

     # If the factory is a type, create and register dumpers for it
     if isinstance(factory, type):
-        dumper: Type[Dumper]
+        dumper: type[Dumper]
         dumper = _make_binary_dumper(info.name, info.oid, tuple(info.field_types))
         adapters.register_dumper(factory, dumper)

@@ -335,7 +335,7 @@ def register_default_adapters(context: abc.AdaptContext) -> None:
     adapters.register_loader("record", RecordBinaryLoader)


-def _nt_from_info(info: CompositeInfo) -> Type[NamedTuple]:
+def _nt_from_info(info: CompositeInfo) -> type[NamedTuple]:
     name = _as_python_identifier(info.name)
     fields = tuple(_as_python_identifier(n) for n in info.field_names)
     return _make_nt(name, fields)
@@ -346,14 +346,14 @@ def _nt_from_info(info: CompositeInfo) -> Type[NamedTuple]:


 @cache
-def _make_nt(name: str, fields: Tuple[str, ...]) -> Type[NamedTuple]:
+def _make_nt(name: str, fields: Tuple[str, ...]) -> type[NamedTuple]:
     return namedtuple(name, fields)  # type: ignore[return-value]


 @cache
 def _make_loader(
     name: str, types: Tuple[int, ...], factory: Callable[..., Any]
-) -> Type[BaseCompositeLoader]:
+) -> type[BaseCompositeLoader]:
     return type(
         f"{name.title()}Loader",
         (CompositeLoader,),
@@ -364,21 +364,21 @@ def _make_loader(
 @cache
 def _make_binary_loader(
     name: str, factory: Callable[..., Any]
-) -> Type[BaseCompositeLoader]:
+) -> type[BaseCompositeLoader]:
     return type(
         f"{name.title()}BinaryLoader", (CompositeBinaryLoader,), {"factory": factory}
     )


 @cache
-def _make_dumper(name: str, oid: int) -> Type[TupleDumper]:
+def _make_dumper(name: str, oid: int) -> type[TupleDumper]:
     return type(f"{name.title()}Dumper", (TupleDumper,), {"oid": oid})


 @cache
 def _make_binary_dumper(
     name: str, oid: int, field_types: Tuple[int, ...]
-) -> Type[TupleBinaryDumper]:
+) -> type[TupleBinaryDumper]:
     return type(
         f"{name.title()}BinaryDumper",
         (TupleBinaryDumper,),
diff --git a/psycopg/psycopg/types/enum.py b/psycopg/psycopg/types/enum.py
index b8676dcb2..1c4516e9b 100644
--- a/psycopg/psycopg/types/enum.py
+++ b/psycopg/psycopg/types/enum.py
@@ -5,7 +5,7 @@ Adapters for the enum type.
 from __future__ import annotations

 from enum import Enum
-from typing import Any, Generic, Mapping, Sequence, Tuple, Type, cast, TYPE_CHECKING
+from typing import Any, Generic, Mapping, Sequence, Tuple, cast, TYPE_CHECKING

 from .. import sql
 from .. import postgres
@@ -47,7 +47,7 @@ class EnumInfo(TypeInfo):
         super().__init__(name, oid, array_oid)
         self.labels = labels
         # Will be set by register_enum()
-        self.enum: Type[Enum] | None = None
+        self.enum: type[Enum] | None = None

     @classmethod
     def _get_info_query(cls, conn: "BaseConnection[Any]") -> Query:
@@ -74,7 +74,7 @@ class _BaseEnumLoader(Loader, Generic[E]):
     Loader for a specific Enum class
     """

-    enum: Type[E]
+    enum: type[E]
     _load_map: EnumLoadMap[E]

     def load(self, data: Buffer) -> E:
@@ -96,7 +96,7 @@ class _BaseEnumDumper(Dumper, Generic[E]):
     Dumper for a specific Enum class
     """

-    enum: Type[E]
+    enum: type[E]
     _dump_map: EnumDumpMap[E]

     def dump(self, value: E) -> Buffer | None:
@@ -123,7 +123,7 @@ class EnumBinaryDumper(EnumDumper):
 def register_enum(
     info: EnumInfo,
     context: AdaptContext | None = None,
-    enum: Type[E] | None = None,
+    enum: type[E] | None = None,
     *,
     mapping: EnumMapping[E] = None,
 ) -> None:
@@ -142,7 +142,7 @@ def register_enum(
         raise TypeError("no info passed. Is the requested enum available?")

     if enum is None:
-        enum = cast(Type[E], _make_enum(info.name, tuple(info.labels)))
+        enum = cast(type[E], _make_enum(info.name, tuple(info.labels)))

     info.enum = enum
     adapters = context.adapters if context else postgres.adapters
@@ -176,41 +176,38 @@ def _make_enum(name: str, labels: Tuple[str, ...]) -> Enum:

 @cache
 def _make_loader(
-    name: str, enum: Type[Enum], load_map: _HEnumLoadMap[E]
-) -> Type[_BaseEnumLoader[E]]:
+    name: str, enum: type[Enum], load_map: _HEnumLoadMap[E]
+) -> type[_BaseEnumLoader[E]]:
     attribs = {"enum": enum, "_load_map": dict(load_map)}
     return type(f"{name.title()}Loader", (_BaseEnumLoader,), attribs)


 @cache
 def _make_binary_loader(
-    name: str, enum: Type[Enum], load_map: _HEnumLoadMap[E]
-) -> Type[_BaseEnumLoader[E]]:
+    name: str, enum: type[Enum], load_map: _HEnumLoadMap[E]
+) -> type[_BaseEnumLoader[E]]:
     attribs = {"enum": enum, "_load_map": dict(load_map), "format": BINARY}
     return type(f"{name.title()}BinaryLoader", (_BaseEnumLoader,), attribs)


 @cache
 def _make_dumper(
-    enum: Type[Enum], oid: int, dump_map: _HEnumDumpMap[E]
-) -> Type[_BaseEnumDumper[E]]:
+    enum: type[Enum], oid: int, dump_map: _HEnumDumpMap[E]
+) -> type[_BaseEnumDumper[E]]:
     attribs = {"enum": enum, "oid": oid, "_dump_map": dict(dump_map)}
     return type(f"{enum.__name__}Dumper", (_BaseEnumDumper,), attribs)


 @cache
 def _make_binary_dumper(
-    enum: Type[Enum], oid: int, dump_map: _HEnumDumpMap[E]
-) -> Type[_BaseEnumDumper[E]]:
+    enum: type[Enum], oid: int, dump_map: _HEnumDumpMap[E]
+) -> type[_BaseEnumDumper[E]]:
     attribs = {"enum": enum, "oid": oid, "_dump_map": dict(dump_map), "format": BINARY}
     return type(f"{enum.__name__}BinaryDumper", (_BaseEnumDumper,), attribs)


 def _make_load_map(
-    info: EnumInfo,
-    enum: Type[E],
-    mapping: EnumMapping[E],
-    context: AdaptContext | None,
+    info: EnumInfo, enum: type[E], mapping: EnumMapping[E], context: AdaptContext | None
 ) -> _HEnumLoadMap[E]:
     enc = conn_encoding(context.connection if context else None)
     rv = []
@@ -235,10 +232,7 @@ def _make_load_map(


 def _make_dump_map(
-    info: EnumInfo,
-    enum: Type[E],
-    mapping: EnumMapping[E],
-    context: AdaptContext | None,
+    info: EnumInfo, enum: type[E], mapping: EnumMapping[E], context: AdaptContext | None
 ) -> _HEnumDumpMap[E]:
     enc = conn_encoding(context.connection if context else None)
     rv = []
diff --git a/psycopg/psycopg/types/hstore.py b/psycopg/psycopg/types/hstore.py
index 98c90dd99..ad368d353 100644
--- a/psycopg/psycopg/types/hstore.py
+++ b/psycopg/psycopg/types/hstore.py
@@ -7,7 +7,6 @@ dict to hstore adaptation
 from __future__ import annotations

 import re
-from typing import Type

 from .. import errors as e
 from .. import postgres
@@ -134,7 +133,7 @@ def register_hstore(info: TypeInfo, context: AdaptContext | None = None) -> None


 @cache
-def _make_hstore_dumper(oid_in: int) -> Type[BaseHstoreDumper]:
+def _make_hstore_dumper(oid_in: int) -> type[BaseHstoreDumper]:
     """
     Return an hstore dumper class configured using `oid_in`.

diff --git a/psycopg/psycopg/types/json.py b/psycopg/psycopg/types/json.py
index 161e2333b..18a2fd58f 100644
--- a/psycopg/psycopg/types/json.py
+++ b/psycopg/psycopg/types/json.py
@@ -7,7 +7,7 @@ Adapters for JSON types.
 from __future__ import annotations

 import json
-from typing import Any, Callable, Tuple, Type
+from typing import Any, Callable, Tuple

 from .. import abc
 from .. import _oids
@@ -98,7 +98,7 @@ def set_json_loads(


 @cache
-def _make_dumper(base: Type[abc.Dumper], dumps: JsonDumpsFunction) -> Type[abc.Dumper]:
+def _make_dumper(base: type[abc.Dumper], dumps: JsonDumpsFunction) -> type[abc.Dumper]:
     name = base.__name__
     if not name.startswith("Custom"):
         name = f"Custom{name}"
@@ -106,7 +106,7 @@ def _make_dumper(base: Type[abc.Dumper], dumps: JsonDumpsFunction) -> Type[abc.D


 @cache
-def _make_loader(base: Type[Loader], loads: JsonLoadsFunction) -> Type[Loader]:
+def _make_loader(base: type[Loader], loads: JsonLoadsFunction) -> type[Loader]:
     name = base.__name__
     if not name.startswith("Custom"):
         name = f"Custom{name}"
@@ -223,14 +223,14 @@ class JsonbBinaryLoader(_JsonLoader):

 def _get_current_dumper(
     adapters: AdaptersMap, cls: type, format: PyFormat
-) -> Type[abc.Dumper]:
+) -> type[abc.Dumper]:
     try:
         return adapters.get_dumper(cls, format)
     except e.ProgrammingError:
         return _default_dumpers[cls, format]


-_default_dumpers: dict[Tuple[Type[_JsonWrapper], PyFormat], Type[Dumper]] = {
+_default_dumpers: dict[Tuple[type[_JsonWrapper], PyFormat], type[Dumper]] = {
     (Json, PyFormat.BINARY): JsonBinaryDumper,
     (Json, PyFormat.TEXT): JsonDumper,
     (Jsonb, PyFormat.BINARY): JsonbBinaryDumper,
diff --git a/psycopg/psycopg/types/multirange.py b/psycopg/psycopg/types/multirange.py
index 4cb24a0e0..b5dbdfffb 100644
--- a/psycopg/psycopg/types/multirange.py
+++ b/psycopg/psycopg/types/multirange.py
@@ -7,8 +7,7 @@ Support for multirange types adaptation.
 from __future__ import annotations

 from decimal import Decimal
-from typing import Any, Generic, Iterable, MutableSequence
-from typing import Type, overload, TYPE_CHECKING
+from typing import Any, Generic, Iterable, MutableSequence, overload, TYPE_CHECKING
 from datetime import date, datetime

 from .. import sql
@@ -395,7 +394,7 @@ def register_multirange(
     adapters = context.adapters if context else postgres.adapters

     # generate and register a customized text loader
-    loader: Type[BaseMultirangeLoader[Any]]
+    loader: type[BaseMultirangeLoader[Any]]
     loader = _make_loader(info.name, info.subtype_oid)
     adapters.register_loader(info.oid, loader)

@@ -409,12 +408,12 @@ def register_multirange(


 @cache
-def _make_loader(name: str, oid: int) -> Type[MultirangeLoader[Any]]:
+def _make_loader(name: str, oid: int) -> type[MultirangeLoader[Any]]:
     return type(f"{name.title()}Loader", (MultirangeLoader,), {"subtype_oid": oid})


 @cache
-def _make_binary_loader(name: str, oid: int) -> Type[MultirangeBinaryLoader[Any]]:
+def _make_binary_loader(name: str, oid: int) -> type[MultirangeBinaryLoader[Any]]:
     return type(
         f"{name.title()}BinaryLoader", (MultirangeBinaryLoader,), {"subtype_oid": oid}
     )
diff --git a/psycopg/psycopg/types/net.py b/psycopg/psycopg/types/net.py
index 854ae98e8..4cbe0b3d6 100644
--- a/psycopg/psycopg/types/net.py
+++ b/psycopg/psycopg/types/net.py
@@ -6,7 +6,7 @@ Adapters for network types.

 from __future__ import annotations

-from typing import Callable, Type, TYPE_CHECKING
+from typing import Callable, TYPE_CHECKING

 from .. import _oids
 from ..pq import Format
@@ -25,12 +25,12 @@ Network: TypeAlias = "ipaddress.IPv4Network" | "ipaddress.IPv6Network"
 ip_address: Callable[[str], Address] = None  # type: ignore[assignment]
 ip_interface: Callable[[str], Interface] = None  # type: ignore[assignment]
 ip_network: Callable[[str], Network] = None  # type: ignore[assignment]
-IPv4Address: "Type[ipaddress.IPv4Address]" = None  # type: ignore[assignment]
-IPv6Address: "Type[ipaddress.IPv6Address]" = None  # type: ignore[assignment]
-IPv4Interface: "Type[ipaddress.IPv4Interface]" = None  # type: ignore[assignment]
-IPv6Interface: "Type[ipaddress.IPv6Interface]" = None  # type: ignore[assignment]
-IPv4Network: "Type[ipaddress.IPv4Network]" = None  # type: ignore[assignment]
-IPv6Network: "Type[ipaddress.IPv6Network]" = None  # type: ignore[assignment]
+IPv4Address: "type[ipaddress.IPv4Address]" = None  # type: ignore[assignment]
+IPv6Address: "type[ipaddress.IPv6Address]" = None  # type: ignore[assignment]
+IPv4Interface: "type[ipaddress.IPv4Interface]" = None  # type: ignore[assignment]
+IPv6Interface: "type[ipaddress.IPv6Interface]" = None  # type: ignore[assignment]
+IPv4Network: "type[ipaddress.IPv4Network]" = None  # type: ignore[assignment]
+IPv6Network: "type[ipaddress.IPv6Network]" = None  # type: ignore[assignment]

 PGSQL_AF_INET = 2
 PGSQL_AF_INET6 = 3
diff --git a/psycopg/psycopg/types/range.py b/psycopg/psycopg/types/range.py
index e5551e762..051112fa4 100644
--- a/psycopg/psycopg/types/range.py
+++ b/psycopg/psycopg/types/range.py
@@ -7,7 +7,7 @@ Support for range types adaptation.
 from __future__ import annotations

 import re
-from typing import Any, Generic, Type, Tuple, cast, TYPE_CHECKING
+from typing import Any, Generic, Tuple, cast, TYPE_CHECKING
 from decimal import Decimal
 from datetime import date, datetime

@@ -586,7 +586,7 @@ def register_range(info: RangeInfo, context: AdaptContext | None = None) -> None
     adapters = context.adapters if context else postgres.adapters

     # generate and register a customized text loader
-    loader: Type[BaseRangeLoader[Any]]
+    loader: type[BaseRangeLoader[Any]]
     loader = _make_loader(info.name, info.subtype_oid)
     adapters.register_loader(info.oid, loader)

@@ -600,12 +600,12 @@ def register_range(info: RangeInfo, context: AdaptContext | None = None) -> None


 @cache
-def _make_loader(name: str, oid: int) -> Type[RangeLoader[Any]]:
+def _make_loader(name: str, oid: int) -> type[RangeLoader[Any]]:
     return type(f"{name.title()}Loader", (RangeLoader,), {"subtype_oid": oid})


 @cache
-def _make_binary_loader(name: str, oid: int) -> Type[RangeBinaryLoader[Any]]:
+def _make_binary_loader(name: str, oid: int) -> type[RangeBinaryLoader[Any]]:
     return type(
         f"{name.title()}BinaryLoader", (RangeBinaryLoader,), {"subtype_oid": oid}
     )
diff --git a/psycopg/psycopg/types/shapely.py b/psycopg/psycopg/types/shapely.py
index 107ad520c..7c4da657a 100644
--- a/psycopg/psycopg/types/shapely.py
+++ b/psycopg/psycopg/types/shapely.py
@@ -4,8 +4,6 @@ Adapters for PostGIS geometries

 from __future__ import annotations

-from typing import Type
-
 from .. import postgres
 from ..abc import AdaptContext, Buffer
 from ..adapt import Dumper, Loader
@@ -76,7 +74,7 @@ def register_shapely(info: TypeInfo, context: AdaptContext | None = None) -> Non


 @cache
-def _make_dumper(oid_in: int) -> Type[BaseGeometryDumper]:
+def _make_dumper(oid_in: int) -> type[BaseGeometryDumper]:
     class GeometryDumper(BaseGeometryDumper):
         oid = oid_in

@@ -84,7 +82,7 @@ def _make_dumper(oid_in: int) -> Type[BaseGeometryDumper]:


 @cache
-def _make_binary_dumper(oid_in: int) -> Type[BaseGeometryBinaryDumper]:
+def _make_binary_dumper(oid_in: int) -> type[BaseGeometryBinaryDumper]:
     class GeometryBinaryDumper(BaseGeometryBinaryDumper):
         oid = oid_in

diff --git a/psycopg_pool/psycopg_pool/_compat.py b/psycopg_pool/psycopg_pool/_compat.py
index 3fc645cbe..fc13e29fb 100644
--- a/psycopg_pool/psycopg_pool/_compat.py
+++ b/psycopg_pool/psycopg_pool/_compat.py
@@ -4,8 +4,9 @@ compatibility functions for different Python versions

 # Copyright (C) 2021 The Psycopg Team

+from __future__ import annotations
+
 import sys
-from typing import Type

 import psycopg.errors as e

@@ -40,7 +41,7 @@ __all__ = [
 # Workaround for psycopg < 3.0.8.
 # Timeout on NullPool connection mignt not work correctly.
 try:
-    ConnectionTimeout: Type[e.OperationalError] = e.ConnectionTimeout
+    ConnectionTimeout: type[e.OperationalError] = e.ConnectionTimeout
 except AttributeError:

     class DummyConnectionTimeout(e.OperationalError):
diff --git a/psycopg_pool/psycopg_pool/null_pool.py b/psycopg_pool/psycopg_pool/null_pool.py
index a891db5e3..2e48e470e 100644
--- a/psycopg_pool/psycopg_pool/null_pool.py
+++ b/psycopg_pool/psycopg_pool/null_pool.py
@@ -10,7 +10,7 @@ Psycopg null connection pool module (sync version).
 from __future__ import annotations

 import logging
-from typing import Any, cast, Type
+from typing import Any, cast

 from psycopg import Connection
 from psycopg.pq import TransactionStatus
@@ -31,7 +31,7 @@ class NullConnectionPool(_BaseNullConnectionPool, ConnectionPool[CT]):
         self,
         conninfo: str = "",
         *,
-        connection_class: Type[CT] = cast(Type[CT], Connection),
+        connection_class: type[CT] = cast(type[CT], Connection),
         kwargs: dict[str, Any] | None = None,
         min_size: int = 0,
         max_size: int | None = None,
diff --git a/psycopg_pool/psycopg_pool/null_pool_async.py b/psycopg_pool/psycopg_pool/null_pool_async.py
index da255ecc4..7de17e752 100644
--- a/psycopg_pool/psycopg_pool/null_pool_async.py
+++ b/psycopg_pool/psycopg_pool/null_pool_async.py
@@ -7,7 +7,7 @@ Psycopg null connection pool module (async version).
 from __future__ import annotations

 import logging
-from typing import Any, cast, Type
+from typing import Any, cast

 from psycopg import AsyncConnection
 from psycopg.pq import TransactionStatus
@@ -27,7 +27,7 @@ class AsyncNullConnectionPool(_BaseNullConnectionPool, AsyncConnectionPool[ACT])
         self,
         conninfo: str = "",
         *,
-        connection_class: Type[ACT] = cast(Type[ACT], AsyncConnection),
+        connection_class: type[ACT] = cast(type[ACT], AsyncConnection),
         kwargs: dict[str, Any] | None = None,
         min_size: int = 0,  # Note: min_size default value changed to 0.
         max_size: int | None = None,
diff --git a/psycopg_pool/psycopg_pool/pool.py b/psycopg_pool/psycopg_pool/pool.py
index a179f33fc..881832d39 100644
--- a/psycopg_pool/psycopg_pool/pool.py
+++ b/psycopg_pool/psycopg_pool/pool.py
@@ -15,7 +15,6 @@ from abc import ABC, abstractmethod
 from time import monotonic
 from types import TracebackType
 from typing import Any, Iterator, cast, Generic
-from typing import Type
 from weakref import ref
 from contextlib import contextmanager

@@ -42,7 +41,7 @@ class ConnectionPool(Generic[CT], BasePool):
         self,
         conninfo: str = "",
         *,
-        connection_class: Type[CT] = cast(Type[CT], Connection),
+        connection_class: type[CT] = cast(type[CT], Connection),
         kwargs: dict[str, Any] | None = None,
         min_size: int = 4,
         max_size: int | None = None,
@@ -467,7 +466,7 @@ class ConnectionPool(Generic[CT], BasePool):

     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
diff --git a/psycopg_pool/psycopg_pool/pool_async.py b/psycopg_pool/psycopg_pool/pool_async.py
index 0811f5e40..4914f6685 100644
--- a/psycopg_pool/psycopg_pool/pool_async.py
+++ b/psycopg_pool/psycopg_pool/pool_async.py
@@ -12,7 +12,6 @@ from abc import ABC, abstractmethod
 from time import monotonic
 from types import TracebackType
 from typing import Any, AsyncIterator, cast, Generic
-from typing import Type
 from weakref import ref
 from contextlib import asynccontextmanager

@@ -41,7 +40,7 @@ class AsyncConnectionPool(Generic[ACT], BasePool):
         self,
         conninfo: str = "",
         *,
-        connection_class: Type[ACT] = cast(Type[ACT], AsyncConnection),
+        connection_class: type[ACT] = cast(type[ACT], AsyncConnection),
         kwargs: dict[str, Any] | None = None,
         min_size: int = 4,
         max_size: int | None = None,
@@ -499,7 +498,7 @@ class AsyncConnectionPool(Generic[ACT], BasePool):

     async def __aexit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
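
In short, the patch rewrites every typing.Type[X] annotation as the builtin generic type[X] and drops Type from the typing imports. Modules that had imported typing only for Type (such as psycopg/psycopg/_transformer.py and psycopg_pool/psycopg_pool/_compat.py) gain a "from __future__ import annotations" line instead, presumably so that subscripting the builtin type in annotations stays harmless on interpreters older than Python 3.9 (PEP 585). A minimal sketch of the pattern, borrowing the lookup() / _sqlcodes names from psycopg/psycopg/errors.py (the simplified bodies below are illustrative only, not the module's real implementation):

    from __future__ import annotations  # keeps type[X] annotations unevaluated on Python < 3.9

    class Error(Exception):
        """Stand-in for psycopg.errors.Error."""

    # Before this commit:
    #     from typing import Type
    #     _sqlcodes: dict[str, Type[Error]] = {}
    #     def lookup(sqlstate: str) -> Type[Error]: ...

    # After this commit: the builtin type doubles as the generic.
    _sqlcodes: dict[str, type[Error]] = {}

    def lookup(sqlstate: str) -> type[Error]:
        """Return the exception class registered for sqlstate; raise KeyError if unknown."""
        return _sqlcodes[sqlstate]

Runtime behaviour is unchanged: typing.Type[X] means the same thing as type[X] and is documented as deprecated since Python 3.9, so only the spelling of the annotations moves.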