adaptation rules.
- Adaptation configuration is performed by changing the
- `~psycopg.proto.AdaptContext.adapters` object of objects implementing the
- `~psycopg.proto.AdaptContext` protocols, for instance `~psycopg.Connection`
+ `~psycopg.abc.AdaptContext.adapters` object of objects implementing the
+ `~psycopg.abc.AdaptContext` protocols, for instance `~psycopg.Connection`
or `~psycopg.Cursor`.
- Every context object derived from another context inherits its adapters
- The `!adapters` attributes are `AdaptersMap` instances, and contain the
- mapping from Python types and `~psycopg.proto.Dumper` classes, and from
- PostgreSQL oids to `~psycopg.proto.Loader` classes. Changing this mapping
+ mapping from Python types to `~psycopg.abc.Dumper` classes, and from
+ PostgreSQL oids to `~psycopg.abc.Loader` classes. Changing this mapping
(e.g. writing and registering your own adapters, or using a different
configuration of builtin adapters) affects how types are converted between
Python and PostgreSQL.
- - Dumpers (objects implementing the `~psycopg.proto.Dumper` protocol) are
+ - Dumpers (objects implementing the `~psycopg.abc.Dumper` protocol) are
the objects used to perform the conversion from a Python object to a bytes
sequence in a format understood by PostgreSQL. The string returned
*shouldn't be quoted*: the value will be passed to the database using
functions such as :pq:`PQexecParams()`, so quoting and quote escaping are
not necessary. The dumper usually also suggests to the server what type to
- use, via its `~psycopg.proto.Dumper.oid` attribute.
+ use, via its `~psycopg.abc.Dumper.oid` attribute.
- - Loaders (objects implementing the `~psycopg.proto.Loader` protocol) are
+ - Loaders (objects implementing the `~psycopg.abc.Loader` protocol) are
the objects used to perform the opposite operation: reading a bytes
sequence from PostgreSQL and creating a Python object out of it.
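A minimal sketch of both sides, assuming the `!register_dumper()` and
`!register_loader()` methods of the `!adapters` maps described above; the
`!MyPoint` class, its text representation, and the hard-coded oid (600, the
builtin :sql:`point` type) are only illustrative:

.. code:: python

    import psycopg
    from psycopg.adapt import Dumper, Loader
    from psycopg.pq import Format

    class MyPoint:
        def __init__(self, x, y):
            self.x, self.y = x, y

    class MyPointDumper(Dumper):
        format = Format.TEXT
        oid = 600  # oid suggested to the server (assumed: builtin 'point')

        def dump(self, obj):
            # Unquoted text representation: the value is passed as a parameter.
            return f"({obj.x},{obj.y})".encode()

    class MyPointLoader(Loader):
        format = Format.TEXT

        def load(self, data):
            # Parse the text representation back into a Python object.
            x, y = bytes(data).strip(b"()").split(b",")
            return MyPoint(float(x), float(y))

    conn = psycopg.connect()
    conn.adapters.register_dumper(MyPoint, MyPointDumper)
    conn.adapters.register_loader(600, MyPointLoader)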
type to use (for instance, the PostgreSQL type of a Python list depends on
the objects it contains; whether to use an :sql:`integer` or :sql:`bigint`
depends on the size of the number...). In these cases the mechanism provided by
- `~psycopg.proto.Dumper.get_key()` and `~psycopg.proto.Dumper.upgrade()` is
+ `~psycopg.abc.Dumper.get_key()` and `~psycopg.abc.Dumper.upgrade()` is
used.
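For illustration only (this is not Psycopg's builtin list dumper), a dumper
using this mechanism might look like the following sketch: the key, and
therefore the dumper eventually used, depends on the type of the first element
of the list, and the array oids are hard-coded only for the example.

.. code:: python

    from psycopg.adapt import Dumper
    from psycopg.pq import Format

    class TextArrayDumper(Dumper):
        """Naive dumper writing the text representation of a text[] value."""
        format = Format.TEXT
        oid = 1009  # assumed oid of text[]

        def dump(self, obj):
            return ("{" + ",".join(str(i) for i in obj) + "}").encode()

    class IntArrayDumper(TextArrayDumper):
        """Same representation, but suggesting int4[] to the server."""
        oid = 1007  # assumed oid of int4[]

        def get_key(self, obj, format):
            # Lists with different element types must not share a cached dumper.
            return (list, type(obj[0])) if obj else list

        def upgrade(self, obj, format):
            # Fall back to the text[] dumper if the elements are not integers.
            if obj and not isinstance(obj[0], int):
                return TextArrayDumper(self.cls)
            return self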
- For every OID returned by the query, the `!Transformer` will instantiate a
-`proto` -- Psycopg abstract classes
-===================================
-
-TODO: rename to abc
+`!abc` -- Psycopg abstract classes
+==================================
The module exposes Psycopg definitions which can be used for static type
checking.
-.. module:: psycopg.proto
+.. module:: psycopg.abc
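For instance (a small sketch, assuming the `!register_dumper()` method of the
`!adapters` maps), the protocols allow writing helpers that accept any
adaptation context, whether it is a connection, a cursor, or something else:

.. code:: python

    from typing import Type

    from psycopg.abc import AdaptContext, Dumper

    def register_custom_dumper(
        context: AdaptContext, cls: type, dumper: Type[Dumper]
    ) -> None:
        # `adapters` is the attribute required by the AdaptContext protocol.
        context.adapters.register_dumper(cls, dumper)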
.. autoclass:: Dumper(cls, context=None)
This is an abstract base class: subclasses *must* at least implement the
`dump()` method and specify the `format`.
- The class implements the `~psycopg.proto.Dumper` protocol.
+ The class implements the `~psycopg.abc.Dumper` protocol.
.. automethod:: dump
This is an abstract base class: subclasses *must* at least implement the
`!load()` method and specify a `format`.
- The class implements the `~psycopg.proto.Loader` protocol.
+ The class implements the `~psycopg.abc.Loader` protocol.
.. automethod:: load
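For instance, a minimal subclass satisfying these requirements might look like
the following sketch (the class and the conversion it performs are purely
illustrative):

.. code:: python

    from psycopg.adapt import Loader
    from psycopg.pq import Format

    class UpperTextLoader(Loader):
        """Load a text value as an uppercase Python string."""
        format = Format.TEXT

        def load(self, data):
            # Assumes UTF-8; a real loader should honour the client encoding.
            return bytes(data).decode("utf-8").upper()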
.. autoclass:: Transformer(context=None)
:param context: The context where the transformer should operate.
- :type context: `~psycopg.proto.AdaptContext`
+ :type context: `~psycopg.abc.AdaptContext`
errors
adapt
types
- proto
+ abc
pool
pq
from . import pq
from . import errors as e
+from .abc import Dumper, Loader
from ._enums import PyFormat as PyFormat
-from .proto import Dumper, Loader
from ._cmodule import _psycopg
from ._typeinfo import TypesRegistry
class AdaptersMap:
r"""
Establish how types should be converted between Python and PostgreSQL in
- an `~psycopg.proto.AdaptContext`.
+ an `~psycopg.abc.AdaptContext`.
`!AdaptersMap` maps Python types to `~psycopg.adapt.Dumper` classes to
define how Python types are converted to PostgreSQL, and maps OIDs to
Every `!AdaptContext` object has an underlying `!AdaptersMap` defining how
types are converted in that context, exposed as the
- `~psycopg.proto.AdaptContext.adapters` attribute: changing such map allows
+ `~psycopg.abc.AdaptContext.adapters` attribute: changing such map allows
customising adaptation in a context without affecting separate contexts.
When a context is created from another context (for instance when a
from ._queries import PostgresQuery
if TYPE_CHECKING:
- from .pq.proto import PGresult
+ from .pq.abc import PGresult
class Prepare(IntEnum):
from . import pq
from . import errors as e
from .sql import Composable
-from .proto import Query, Params
+from .abc import Query, Params
from ._enums import PyFormat
if TYPE_CHECKING:
- from .proto import Transformer
+ from .abc import Transformer
class QueryPart(NamedTuple):
import struct
from typing import Callable, cast, Optional, Tuple
-from .proto import Buffer
+from .abc import Buffer
from .compat import Protocol
PackInt = Callable[[int], bytes]
from . import pq
from . import postgres
from . import errors as e
+from .abc import LoadFunc, AdaptContext, PyFormat, DumperKey
from .rows import Row, RowMaker
-from .proto import LoadFunc, AdaptContext, PyFormat, DumperKey
from .postgres import INVALID_OID
if TYPE_CHECKING:
- from .pq.proto import PGresult
+ from .abc import Dumper, Loader
from .adapt import AdaptersMap
- from .proto import Dumper, Loader
+ from .pq.abc import PGresult
from .connection import BaseConnection
DumperCache = Dict[DumperKey, "Dumper"]
from typing import Sequence, Type, TypeVar, Union, TYPE_CHECKING
from . import errors as e
+from .abc import AdaptContext
from .rows import dict_row
-from .proto import AdaptContext
if TYPE_CHECKING:
from .connection import Connection, AsyncConnection
from datetime import timezone, tzinfo
from .compat import ZoneInfo
-from .pq.proto import PGconn
+from .pq.abc import PGconn
logger = logging.getLogger("psycopg")
if TYPE_CHECKING:
from .sql import Composable
from .rows import Row, RowMaker
- from .pq.proto import PGresult
+ from .pq.abc import PGresult
from .waiting import Wait, Ready
from .connection import BaseConnection
from ._adapters_map import AdaptersMap
from abc import ABC, abstractmethod
from typing import Any, Optional, Type, Tuple, Union, TYPE_CHECKING
-from . import pq, proto
+from . import pq, abc
from . import _adapters_map
-from .proto import AdaptContext, Buffer as Buffer
from ._enums import PyFormat as PyFormat
from ._cmodule import _psycopg
from .connection import BaseConnection
AdaptersMap = _adapters_map.AdaptersMap
+Buffer = abc.Buffer
-class Dumper(proto.Dumper, ABC):
+class Dumper(abc.Dumper, ABC):
"""
Convert a Python object of the type *cls* to PostgreSQL representation.
"""
oid: int
"""The oid to pass to the server, if known."""
- def __init__(self, cls: type, context: Optional[AdaptContext] = None):
+ def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None):
self.cls = cls
self.connection: Optional["BaseConnection[Any]"] = (
context.connection if context else None
self, obj: Any, format: PyFormat
) -> Union[type, Tuple[type, ...]]:
"""
- Implementation of the `~psycopg.proto.Dumper.get_key()` member of the
- `~psycopg.proto.Dumper` protocol. Look at its definition for details.
+ Implementation of the `~psycopg.abc.Dumper.get_key()` member of the
+ `~psycopg.abc.Dumper` protocol. Look at its definition for details.
This implementation returns the *cls* passed in the constructor.
Subclasses needing to specialise the PostgreSQL type according to the
def upgrade(self, obj: Any, format: PyFormat) -> "Dumper":
"""
- Implementation of the `~psycopg.proto.Dumper.upgrade()` member of the
- `~psycopg.proto.Dumper` protocol. Look at its definition for details.
+ Implementation of the `~psycopg.abc.Dumper.upgrade()` member of the
+ `~psycopg.abc.Dumper` protocol. Look at its definition for details.
This implementation just returns *self*. If a subclass implements
`get_key()` it should probably override `!upgrade()` too.
format: pq.Format
- def __init__(self, oid: int, context: Optional[AdaptContext] = None):
+ def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None):
self.oid = oid
self.connection: Optional["BaseConnection[Any]"] = (
context.connection if context else None
...
-Transformer: Type["proto.Transformer"]
+Transformer: Type["abc.Transformer"]
# Override it with fast object if available
if _psycopg:
class RecursiveDumper(Dumper):
"""Dumper with a transformer to help dumping recursive types."""
- def __init__(self, cls: type, context: Optional[AdaptContext] = None):
+ def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None):
super().__init__(cls, context)
self._tx = Transformer(context)
class RecursiveLoader(Loader):
"""Loader with a transformer to help loading recursive types."""
- def __init__(self, oid: int, context: Optional[AdaptContext] = None):
+ def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None):
super().__init__(oid, context)
self._tx = Transformer(context)
from . import postgres
from . import encodings
from .pq import ConnStatus, ExecStatus, TransactionStatus, Format
+from .abc import ConnectionType, Params, PQGen, PQGenConn, Query, RV
from .sql import Composable
from .rows import Row, RowFactory, tuple_row, TupleRow
-from .proto import ConnectionType, Params, PQGen, PQGenConn
-from .proto import Query, RV
from .compat import asynccontextmanager
from .cursor import Cursor, AsyncCursor
from ._cmodule import _psycopg
CursorRow = TypeVar("CursorRow")
if TYPE_CHECKING:
- from .pq.proto import PGconn, PGresult
+ from .pq.abc import PGconn, PGresult
from .pool.base import BasePool
if _psycopg:
__module__ = "psycopg"
- def __init__(self, pgconn: pq.proto.PGconn):
+ def __init__(self, pgconn: pq.abc.PGconn):
self.pgconn = pgconn
@property
from . import pq
from . import errors as e
from .pq import ExecStatus
+from .abc import ConnectionType, PQGen, Transformer
from .adapt import PyFormat
-from .proto import ConnectionType, PQGen, Transformer
from .compat import create_task
from ._cmodule import _psycopg
from .generators import copy_from, copy_to, copy_end
if TYPE_CHECKING:
- from .pq.proto import PGresult
+ from .pq.abc import PGresult
from .cursor import BaseCursor, Cursor, AsyncCursor
from .connection import Connection, AsyncConnection # noqa: F401
from . import generators
from .pq import ExecStatus, Format
+from .abc import ConnectionType, Query, Params, PQGen
from .copy import Copy, AsyncCopy
from .rows import Row, RowFactory
-from .proto import ConnectionType, Query, Params, PQGen
from .compat import asynccontextmanager
from ._column import Column
from ._cmodule import _psycopg
from ._preparing import Prepare
if TYPE_CHECKING:
- from .proto import Transformer
- from .pq.proto import PGconn, PGresult
+ from .abc import Transformer
+ from .pq.abc import PGconn, PGresult
from .connection import BaseConnection # noqa: F401
from .connection import Connection, AsyncConnection # noqa: F401
from . import postgres
from .pq import Format, Escaping
+from .abc import AdaptContext
from .adapt import Dumper
-from .proto import AdaptContext
class DBAPITypeObject:
from typing import Any, Callable, Dict, Optional, Sequence, Tuple, Type, Union
from typing import cast
-from psycopg.pq.proto import PGresult
+
+from psycopg.pq.abc import PGresult
from psycopg.pq._enums import DiagnosticField
from . import pq
from . import errors as e
from .pq import ConnStatus, PollingStatus, ExecStatus
-from .proto import PQGen, PQGenConn
+from .abc import PQGen, PQGenConn
from .waiting import Wait, Ready
from .encodings import py_codecs
-from .pq.proto import PGconn, PGresult
+from .pq.abc import PGconn, PGresult
logger = logging.getLogger(__name__)
from typing import TYPE_CHECKING
from collections import Counter, deque
-from ..proto import ConnectionType
+from ..abc import ConnectionType
if TYPE_CHECKING:
from typing import Counter as TCounter
# Copyright (C) 2020-2021 The Psycopg Team
from ._typeinfo import TypeInfo, RangeInfo, TypesRegistry
-from .proto import AdaptContext
+from .abc import AdaptContext
from ._adapters_map import AdaptersMap
# Global objects with PostgreSQL builtins and globally registered user types.
import logging
from typing import Callable, List, Type
+from . import abc
from .misc import ConninfoOption, PGnotify, PGresAttDesc
from .misc import error_message
from ._enums import ConnStatus, DiagnosticField, ExecStatus, Format
from ._enums import Ping, PollingStatus, TransactionStatus
-from . import proto
logger = logging.getLogger(__name__)
"""
version: Callable[[], int]
-PGconn: Type[proto.PGconn]
-PGresult: Type[proto.PGresult]
-Conninfo: Type[proto.Conninfo]
-Escaping: Type[proto.Escaping]
-PGcancel: Type[proto.PGcancel]
+PGconn: Type[abc.PGconn]
+PGresult: Type[abc.PGresult]
+Conninfo: Type[abc.Conninfo]
+Escaping: Type[abc.Escaping]
+PGcancel: Type[abc.PGcancel]
def import_from_libpq() -> None:
from typing import cast, NamedTuple, Optional, Union
+from .abc import PGconn, PGresult
from ._enums import ConnStatus, TransactionStatus
-from .proto import PGconn, PGresult
class PGnotify(NamedTuple):
from ._enums import Format, ExecStatus
if TYPE_CHECKING:
- from . import proto
+ from . import abc
__impl__ = "python"
def __init__(self, pgconn_ptr: impl.PGconn_struct):
self._pgconn_ptr: Optional[impl.PGconn_struct] = pgconn_ptr
- self.notice_handler: Optional[
- Callable[["proto.PGresult"], None]
- ] = None
+ self.notice_handler: Optional[Callable[["abc.PGresult"], None]] = None
self.notify_handler: Optional[Callable[[PGnotify], None]] = None
self._notice_receiver = impl.PQnoticeReceiver( # type: ignore
def __init__(self, conn: Optional[PGconn] = None):
self.conn = conn
- def escape_literal(self, data: "proto.Buffer") -> memoryview:
+ def escape_literal(self, data: "abc.Buffer") -> memoryview:
if not self.conn:
raise e.OperationalError(
"escape_literal failed: no connection provided"
impl.PQfreemem(out)
return memoryview(rv)
- def escape_identifier(self, data: "proto.Buffer") -> memoryview:
+ def escape_identifier(self, data: "abc.Buffer") -> memoryview:
if not self.conn:
raise e.OperationalError(
"escape_identifier failed: no connection provided"
impl.PQfreemem(out)
return memoryview(rv)
- def escape_string(self, data: "proto.Buffer") -> memoryview:
+ def escape_string(self, data: "abc.Buffer") -> memoryview:
if not isinstance(data, bytes):
data = bytes(data)
return memoryview(out.value)
- def escape_bytea(self, data: "proto.Buffer") -> memoryview:
+ def escape_bytea(self, data: "abc.Buffer") -> memoryview:
len_out = c_size_t()
# TODO: might be able to do without a copy but it's a mess.
# the C library does it better anyway, so maybe not worth optimising
from . import pq
from . import sql
from . import errors as e
+from .abc import ConnectionType, Query, Params, PQGen
from .rows import Row, RowFactory
-from .proto import ConnectionType, Query, Params, PQGen
from .cursor import BaseCursor, execute
if TYPE_CHECKING:
from typing import Any, Iterator, List, Optional, Sequence, Union
from .pq import Escaping
+from .abc import AdaptContext
from .adapt import Transformer, PyFormat
-from .proto import AdaptContext
def quote(obj: Any, context: Optional[AdaptContext] = None) -> str:
from . import pq
from . import sql
from .pq import TransactionStatus
-from .proto import ConnectionType, PQGen
-from .pq.proto import PGresult
+from .abc import ConnectionType, PQGen
+from .pq.abc import PGresult
if TYPE_CHECKING:
from typing import Any # noqa: F401
from .. import pq
from .. import errors as e
from .. import postgres
+from ..abc import AdaptContext, Buffer, Dumper, DumperKey
from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
-from ..proto import AdaptContext, Buffer, Dumper, DumperKey
from .._struct import pack_len, unpack_len
from ..postgres import TEXT_OID, INVALID_OID
from .._typeinfo import TypeInfo
from .. import postgres
from ..pq import Format
+from ..abc import AdaptContext
from ..adapt import Buffer, Dumper, Loader
-from ..proto import AdaptContext
class BoolDumper(Dumper):
from .. import pq
from .. import postgres
+from ..abc import AdaptContext, Buffer
from ..adapt import PyFormat, RecursiveDumper, RecursiveLoader
-from ..proto import AdaptContext, Buffer
from .._struct import unpack_len
from ..postgres import TEXT_OID
from .._typeinfo import CompositeInfo as CompositeInfo # exported here
from .. import postgres
from ..pq import Format
from .._tz import get_tzinfo
+from ..abc import AdaptContext, DumperKey
from ..adapt import Buffer, Dumper, Loader, PyFormat
-from ..proto import AdaptContext, DumperKey
from ..errors import InterfaceError, DataError
from .._struct import pack_int4, pack_int8, unpack_int4, unpack_int8
from .. import postgres
from ..pq import Format
+from ..abc import AdaptContext
from ..adapt import Buffer, Dumper, Loader
-from ..proto import AdaptContext
from ..errors import DataError
JsonDumpsFunction = Callable[[Any], str]
from .. import postgres
from ..pq import Format
+from ..abc import AdaptContext
from ..adapt import Buffer, Dumper, Loader
-from ..proto import AdaptContext
if TYPE_CHECKING:
import ipaddress
# Copyright (C) 2020-2021 The Psycopg Team
from ..pq import Format
+from ..abc import AdaptContext
from ..adapt import Dumper
-from ..proto import AdaptContext
class NoneDumper(Dumper):
from .. import postgres
from .. import errors as e
from ..pq import Format
+from ..abc import AdaptContext
from ..adapt import Buffer, Dumper, Loader, PyFormat
-from ..proto import AdaptContext
from .._struct import pack_int2, pack_uint2, unpack_int2
from .._struct import pack_int4, pack_uint4, unpack_int4, unpack_uint4
from .._struct import pack_int8, unpack_int8
from .. import postgres
from ..pq import Format
+from ..abc import AdaptContext, Buffer, Dumper, DumperKey
from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
-from ..proto import AdaptContext, Buffer, Dumper, DumperKey
from .._struct import pack_len, unpack_len
from ..postgres import INVALID_OID
from .._typeinfo import RangeInfo as RangeInfo # exported here
from .. import postgres
from ..pq import Format, Escaping
+from ..abc import AdaptContext
from ..adapt import Buffer, Dumper, Loader
-from ..proto import AdaptContext
from ..errors import DataError
if TYPE_CHECKING:
- from ..pq.proto import Escaping as EscapingProto
+ from ..pq.abc import Escaping as EscapingProto
class _StrDumper(Dumper):
from .. import postgres
from ..pq import Format
+from ..abc import AdaptContext
from ..adapt import Buffer, Dumper, Loader
-from ..proto import AdaptContext
if TYPE_CHECKING:
import uuid
from selectors import DefaultSelector, EVENT_READ, EVENT_WRITE
from . import errors as e
-from .proto import PQGen, PQGenConn, RV
+from .abc import PQGen, PQGenConn, RV
class Wait(IntEnum):
from typing import Any, Iterable, List, Optional, Sequence, Tuple
from psycopg import pq
-from psycopg import proto
+from psycopg import abc
from psycopg.rows import Row, RowMaker
from psycopg.adapt import AdaptersMap, PyFormat
-from psycopg.proto import Dumper, Loader
-from psycopg.pq.proto import PGconn, PGresult
+from psycopg.pq.abc import PGconn, PGresult
from psycopg.connection import BaseConnection
-class Transformer(proto.AdaptContext):
- def __init__(self, context: Optional[proto.AdaptContext] = None): ...
+class Transformer(abc.AdaptContext):
+ def __init__(self, context: Optional[abc.AdaptContext] = None): ...
@property
def connection(self) -> Optional[BaseConnection[Any]]: ...
@property
def dump_sequence(
self, params: Sequence[Any], formats: Sequence[PyFormat]
) -> Tuple[List[Any], Tuple[int, ...], Sequence[pq.Format]]: ...
- def get_dumper(self, obj: Any, format: PyFormat) -> Dumper: ...
+ def get_dumper(self, obj: Any, format: PyFormat) -> abc.Dumper: ...
def load_rows(
self, row0: int, row1: int, make_row: RowMaker[Row]
) -> List[Row]: ...
def load_sequence(
self, record: Sequence[Optional[bytes]]
) -> Tuple[Any, ...]: ...
- def get_loader(self, oid: int, format: pq.Format) -> Loader: ...
+ def get_loader(self, oid: int, format: pq.Format) -> abc.Loader: ...
# Generators
-def connect(conninfo: str) -> proto.PQGenConn[PGconn]: ...
-def execute(pgconn: PGconn) -> proto.PQGen[List[PGresult]]: ...
+def connect(conninfo: str) -> abc.PQGenConn[PGconn]: ...
+def execute(pgconn: PGconn) -> abc.PQGen[List[PGresult]]: ...
# Copy support
def format_row_text(
- row: Sequence[Any], tx: proto.Transformer, out: Optional[bytearray] = None
+ row: Sequence[Any], tx: abc.Transformer, out: Optional[bytearray] = None
) -> bytearray: ...
def format_row_binary(
- row: Sequence[Any], tx: proto.Transformer, out: Optional[bytearray] = None
+ row: Sequence[Any], tx: abc.Transformer, out: Optional[bytearray] = None
) -> bytearray: ...
-def parse_row_text(data: bytes, tx: proto.Transformer) -> Tuple[Any, ...]: ...
-def parse_row_binary(
- data: bytes, tx: proto.Transformer
-) -> Tuple[Any, ...]: ...
+def parse_row_text(data: bytes, tx: abc.Transformer) -> Tuple[Any, ...]: ...
+def parse_row_binary(data: bytes, tx: abc.Transformer) -> Tuple[Any, ...]: ...
# vim: set syntax=python:
from typing import List
from psycopg import errors as e
-from psycopg.pq import proto, error_message
-from psycopg.proto import PQGen
+from psycopg.pq import abc, error_message
+from psycopg.abc import PQGen
from psycopg.waiting import Wait, Ready
cdef object WAIT_W = Wait.W
cdef object WAIT_RW = Wait.RW
cdef int READY_R = Ready.R
-def connect(conninfo: str) -> PQGenConn[proto.PGconn]:
+def connect(conninfo: str) -> PQGenConn[abc.PGconn]:
"""
Generator to create a database connection without blocking.
return conn
-def execute(pq.PGconn pgconn) -> PQGen[List[proto.PGresult]]:
+def execute(pq.PGconn pgconn) -> PQGen[List[abc.PGresult]]:
"""
Generator sending a query and returning results without blocking.
from typing import Optional, Tuple, Union
from psycopg import pq
-from psycopg.proto import Dumper, Loader, AdaptContext, PyFormat, Buffer
+from psycopg.abc import Dumper, Loader, AdaptContext, PyFormat, Buffer
def f() -> None: