.. code:: python
from datetime import date
-
- from psycopg.oids import postgres_types as builtins
from psycopg.types.datetime import DateLoader, DateDumper
class InfDateDumper(DateDumper):
else:
return super().load(data)
- InfDateDumper.register(date, cur)
- InfDateLoader.register(builtins["date"].oid, cur)
+ cur.adapters.register_dumper(date, InfDateDumper)
+ cur.adapters.register_loader("date", InfDateLoader)
cur.execute("SELECT %s::text, %s::text", [date(2020, 12, 31), date.max]).fetchone()
# ('2020-12-31', 'infinity')
from . import pq
from . import types
+from . import postgres
from .copy import Copy, AsyncCopy
-from .adapt import global_adapters
from .cursor import AnyCursor, AsyncCursor, Cursor
from .errors import Warning, Error, InterfaceError, DatabaseError
from .errors import DataError, OperationalError, IntegrityError
if logger.level == logging.NOTSET:
logger.setLevel(logging.WARNING)
-# register default adapters
-types.register_default_globals(global_adapters)
+# register default adapters for PostgreSQL
+postgres.register_default_adapters(postgres.adapters)
# DBAPI compliancy
connect = Connection.connect
apilevel = "2.0"
threadsafety = 2
paramstyle = "pyformat"
-BinaryTextDumper.register(Binary, global_adapters) # dbapi20
-BinaryBinaryDumper.register(Binary, global_adapters) # dbapi20
+postgres.adapters.register_dumper(Binary, BinaryTextDumper) # dbapi20
+postgres.adapters.register_dumper(Binary, BinaryBinaryDumper) # dbapi20
# Note: defining the exported methods helps both Sphynx in documenting that
--- /dev/null
+"""
+Mapping from types/oids to Dumpers/Loaders
+"""
+
+# Copyright (C) 2020-2021 The Psycopg Team
+
+from typing import Any, Dict, List, Optional, Type, TypeVar, Union
+from typing import cast, TYPE_CHECKING
+
+from . import pq
+from . import errors as e
+from ._enums import PyFormat as PyFormat
+from .proto import AdaptContext, Dumper, Loader
+from ._cmodule import _psycopg
+from ._typeinfo import TypesRegistry
+
+if TYPE_CHECKING:
+ from .connection import BaseConnection
+
+RV = TypeVar("RV")
+
+
+class AdaptersMap(AdaptContext):
+    """
+    Map oids to Loaders and types to Dumpers.
+
+    The object can start empty or copy from another object of the same class.
+    Copies are copy-on-write: if the maps are updated make a copy. This way
+    extending e.g. global map by a connection or a connection map from a cursor
+    is cheap: a copy is made only on customisation.
+    """
+
+    __module__ = "psycopg.adapt"
+
+    # Per PyFormat: map a Python type (or its fully qualified name as a
+    # string) to the Dumper class used to convert it.
+    _dumpers: Dict[PyFormat, Dict[Union[type, str], Type[Dumper]]]
+    # One dict per pq.Format (indexed by Format value): map an oid to the
+    # Loader class used to parse it.
+    _loaders: List[Dict[int, Type[Loader]]]
+    types: TypesRegistry
+
+    # Record if a dumper or loader has an optimised version.
+    # Class-level cache, shared by every AdaptersMap instance.
+    _optimised: Dict[type, type] = {}
+
+    def __init__(
+        self,
+        template: Optional["AdaptersMap"] = None,
+        types: Optional[TypesRegistry] = None,
+    ):
+        if template:
+            # Share the template's maps: both sides are flagged as not
+            # owning them, so whichever object is customised first makes
+            # its own copy (copy-on-write, see register_dumper/loader).
+            self._dumpers = template._dumpers.copy()
+            self._own_dumpers = _dumpers_shared.copy()
+            template._own_dumpers = _dumpers_shared.copy()
+            self._loaders = template._loaders[:]
+            self._own_loaders = [False, False]
+            template._own_loaders = [False, False]
+            self.types = TypesRegistry(template.types)
+        else:
+            # Fresh empty maps, fully owned by this instance.
+            self._dumpers = {fmt: {} for fmt in PyFormat}
+            self._own_dumpers = _dumpers_owned.copy()
+            self._loaders = [{}, {}]
+            self._own_loaders = [True, True]
+            self.types = types or TypesRegistry()
+
+    # implement the AdaptContext protocol too
+    @property
+    def adapters(self) -> "AdaptersMap":
+        return self
+
+    @property
+    def connection(self) -> Optional["BaseConnection[Any]"]:
+        # A bare map is not attached to any connection.
+        return None
+
+    def register_dumper(
+        self, cls: Union[type, str], dumper: Type[Dumper]
+    ) -> None:
+        """
+        Configure the context to use *dumper* to convert object of type *cls*.
+        """
+        if not isinstance(cls, (str, type)):
+            raise TypeError(
+                f"dumpers should be registered on classes, got {cls} instead"
+            )
+
+        # If the C extension is available, swap in its faster equivalent.
+        if _psycopg:
+            dumper = self._get_optimised(dumper)
+
+        # Register the dumper both as its format and as auto
+        # so that the last dumper registered is used in auto (%s) format
+        for fmt in (PyFormat.from_pq(dumper.format), PyFormat.AUTO):
+            # Copy-on-write: take ownership of a shared map before mutating.
+            if not self._own_dumpers[fmt]:
+                self._dumpers[fmt] = self._dumpers[fmt].copy()
+                self._own_dumpers[fmt] = True
+
+            self._dumpers[fmt][cls] = dumper
+
+    def register_loader(
+        self, oid: Union[int, str], loader: Type["Loader"]
+    ) -> None:
+        """
+        Configure the context to use *loader* to convert data of oid *oid*.
+        """
+        # A type name is resolved to its oid through the types registry.
+        if isinstance(oid, str):
+            oid = self.types[oid].oid
+        if not isinstance(oid, int):
+            raise TypeError(
+                f"loaders should be registered on oid, got {oid} instead"
+            )
+
+        # If the C extension is available, swap in its faster equivalent.
+        if _psycopg:
+            loader = self._get_optimised(loader)
+
+        fmt = loader.format
+        # Copy-on-write: take ownership of a shared map before mutating.
+        if not self._own_loaders[fmt]:
+            self._loaders[fmt] = self._loaders[fmt].copy()
+            self._own_loaders[fmt] = True
+
+        self._loaders[fmt][oid] = loader
+
+    def get_dumper(self, cls: type, format: PyFormat) -> Type["Dumper"]:
+        """
+        Return the dumper class for the given type and format.
+
+        Raise ProgrammingError if a class is not available.
+        """
+        try:
+            dmap = self._dumpers[format]
+        except KeyError:
+            raise ValueError(f"bad dumper format: {format}")
+
+        # Look for the right class, including looking at superclasses
+        for scls in cls.__mro__:
+            if scls in dmap:
+                return dmap[scls]
+
+            # If the adapter is not found, look for its name as a string
+            fqn = scls.__module__ + "." + scls.__qualname__
+            if fqn in dmap:
+                # Replace the class name with the class itself
+                d = dmap[scls] = dmap.pop(fqn)
+                return d
+
+        raise e.ProgrammingError(
+            f"cannot adapt type {cls.__name__}"
+            f" to format {PyFormat(format).name}"
+        )
+
+    def get_loader(
+        self, oid: int, format: pq.Format
+    ) -> Optional[Type["Loader"]]:
+        """
+        Return the loader class for the given oid and format.
+
+        Return None if not found.
+        """
+        return self._loaders[format].get(oid)
+
+    @classmethod
+    def _get_optimised(self, cls: Type[RV]) -> Type[RV]:
+        """Return the optimised version of a Dumper or Loader class.
+
+        Return the input class itself if there is no optimised version.
+        """
+        # NOTE(review): this is a classmethod, so the first argument
+        # (unusually named "self") is actually the AdaptersMap class;
+        # "cls" is the Dumper/Loader class to optimise.
+        try:
+            return self._optimised[cls]
+        except KeyError:
+            pass
+
+        # Check if the class comes from psycopg.types and there is a class
+        # with the same name in psycopg_c._psycopg.
+        from psycopg import types
+
+        if cls.__module__.startswith(types.__name__):
+            new = cast(Type[RV], getattr(_psycopg, cls.__name__, None))
+            if new:
+                self._optimised[cls] = new
+                return new
+
+        # No optimised implementation: cache the class itself so the next
+        # lookup is a plain dict hit.
+        self._optimised[cls] = cls
+        return cls
+
+
+# Micro-optimization: copying these objects is faster than creating new dicts
+_dumpers_owned = dict.fromkeys(PyFormat, True)
+_dumpers_shared = dict.fromkeys(PyFormat, False)
from collections import defaultdict
from . import pq
+from . import postgres
from . import errors as e
-from .oids import INVALID_OID
from .rows import Row, RowMaker
from .proto import LoadFunc, AdaptContext, PyFormat, DumperKey
+from .postgres import INVALID_OID
if TYPE_CHECKING:
from .pq.proto import PGresult
self._adapters = context.adapters
self._conn = context.connection
else:
- from .adapt import global_adapters
-
- self._adapters = global_adapters
+ self._adapters = postgres.adapters
self._conn = None
# mapping class, fmt -> Dumper instance
f"found {len(recs)} different types named {name}"
)
- def register(
- self,
- context: Optional["AdaptContext"] = None,
- ) -> None:
+ def register(self, context: Optional[AdaptContext] = None) -> None:
"""
Register the type information, globally or in the specified *context*.
"""
if context:
types = context.adapters.types
else:
- from .oids import postgres_types
+ from . import postgres
- types = postgres_types
+ types = postgres.types
types.add(self)
super().__init__(name, oid, array_oid)
self.subtype_oid = subtype_oid
- def register(
- self,
- context: Optional[AdaptContext] = None,
- ) -> None:
+ def register(self, context: Optional[AdaptContext] = None) -> None:
super().register(context)
from .types.range import register_adapters
# Copyright (C) 2020-2021 The Psycopg Team
from abc import ABC, abstractmethod
-from typing import Any, Dict, List, Optional, Type, Tuple, Union
-from typing import cast, TYPE_CHECKING, TypeVar
+from typing import Any, Optional, Type, Tuple, Union, TYPE_CHECKING
from . import pq
-from . import errors as e
-from ._enums import PyFormat as PyFormat
-from .oids import postgres_types
+from . import _adapters_map
from .proto import AdaptContext, Buffer as Buffer
+from ._enums import PyFormat as PyFormat
from ._cmodule import _psycopg
-from ._typeinfo import TypesRegistry
if TYPE_CHECKING:
from . import proto
from .connection import BaseConnection
-RV = TypeVar("RV")
+AdaptersMap = _adapters_map.AdaptersMap
class Dumper(ABC):
"""
Configure *context* to use this dumper to convert object of type *cls*.
"""
- adapters = context.adapters if context else global_adapters
+ from . import postgres
+
+ adapters = context.adapters if context else postgres.adapters
adapters.register_dumper(cls, this_cls)
"""
Configure *context* to use this loader to convert values with OID *oid*.
"""
- adapters = context.adapters if context else global_adapters
- adapters.register_loader(oid, cls)
-
-
-class AdaptersMap(AdaptContext):
- """
- Map oids to Loaders and types to Dumpers.
-
- The object can start empty or copy from another object of the same class.
- Copies are copy-on-write: if the maps are updated make a copy. This way
- extending e.g. global map by a connection or a connection map from a cursor
- is cheap: a copy is made only on customisation.
- """
-
- _dumpers: Dict[PyFormat, Dict[Union[type, str], Type["proto.Dumper"]]]
- _loaders: List[Dict[int, Type["proto.Loader"]]]
- types: TypesRegistry
-
- # Record if a dumper or loader has an optimised version.
- _optimised: Dict[type, type] = {}
-
- def __init__(
- self,
- template: Optional["AdaptersMap"] = None,
- types: Optional[TypesRegistry] = None,
- ):
- if template:
- self._dumpers = template._dumpers.copy()
- self._own_dumpers = _dumpers_shared.copy()
- template._own_dumpers = _dumpers_shared.copy()
- self._loaders = template._loaders[:]
- self._own_loaders = [False, False]
- template._own_loaders = [False, False]
- self.types = TypesRegistry(template.types)
- else:
- self._dumpers = {fmt: {} for fmt in PyFormat}
- self._own_dumpers = _dumpers_owned.copy()
- self._loaders = [{}, {}]
- self._own_loaders = [True, True]
- self.types = types or TypesRegistry()
-
- # implement the AdaptContext protocol too
- @property
- def adapters(self) -> "AdaptersMap":
- return self
+ from . import postgres
- @property
- def connection(self) -> Optional["BaseConnection[Any]"]:
- return None
-
- def register_dumper(
- self, cls: Union[type, str], dumper: Type[Dumper]
- ) -> None:
- """
- Configure the context to use *dumper* to convert object of type *cls*.
- """
- if not isinstance(cls, (str, type)):
- raise TypeError(
- f"dumpers should be registered on classes, got {cls} instead"
- )
-
- if _psycopg:
- dumper = self._get_optimised(dumper)
-
- # Register the dumper both as its format and as auto
- # so that the last dumper registered is used in auto (%s) format
- for fmt in (PyFormat.from_pq(dumper.format), PyFormat.AUTO):
- if not self._own_dumpers[fmt]:
- self._dumpers[fmt] = self._dumpers[fmt].copy()
- self._own_dumpers[fmt] = True
-
- self._dumpers[fmt][cls] = dumper
-
- def register_loader(
- self, oid: Union[int, str], loader: Type["proto.Loader"]
- ) -> None:
- """
- Configure the context to use *loader* to convert data of oid *oid*.
- """
- if isinstance(oid, str):
- oid = self.types[oid].oid
- if not isinstance(oid, int):
- raise TypeError(
- f"loaders should be registered on oid, got {oid} instead"
- )
-
- if _psycopg:
- loader = self._get_optimised(loader)
-
- fmt = loader.format
- if not self._own_loaders[fmt]:
- self._loaders[fmt] = self._loaders[fmt].copy()
- self._own_loaders[fmt] = True
-
- self._loaders[fmt][oid] = loader
-
- def get_dumper(self, cls: type, format: PyFormat) -> Type["proto.Dumper"]:
- """
- Return the dumper class for the given type and format.
-
- Raise ProgrammingError if a class is not available.
- """
- try:
- dmap = self._dumpers[format]
- except KeyError:
- raise ValueError(f"bad dumper format: {format}")
-
- # Look for the right class, including looking at superclasses
- for scls in cls.__mro__:
- if scls in dmap:
- return dmap[scls]
-
- # If the adapter is not found, look for its name as a string
- fqn = scls.__module__ + "." + scls.__qualname__
- if fqn in dmap:
- # Replace the class name with the class itself
- d = dmap[scls] = dmap.pop(fqn)
- return d
-
- raise e.ProgrammingError(
- f"cannot adapt type {cls.__name__}"
- f" to format {PyFormat(format).name}"
- )
-
- def get_loader(
- self, oid: int, format: pq.Format
- ) -> Optional[Type["proto.Loader"]]:
- """
- Return the loader class for the given oid and format.
-
- Return None if not found.
- """
- return self._loaders[format].get(oid)
-
- @classmethod
- def _get_optimised(self, cls: Type[RV]) -> Type[RV]:
- """Return the optimised version of a Dumper or Loader class.
-
- Return the input class itself if there is no optimised version.
- """
- try:
- return self._optimised[cls]
- except KeyError:
- pass
-
- # Check if the class comes from psycopg.types and there is a class
- # with the same name in psycopg_c._psycopg.
- from psycopg import types
-
- if cls.__module__.startswith(types.__name__):
- new = cast(Type[RV], getattr(_psycopg, cls.__name__, None))
- if new:
- self._optimised[cls] = new
- return new
-
- self._optimised[cls] = cls
- return cls
-
-
-_dumpers_owned = dict.fromkeys(PyFormat, True)
-_dumpers_shared = dict.fromkeys(PyFormat, False)
+ adapters = context.adapters if context else postgres.adapters
+ adapters.register_loader(oid, cls)
-global_adapters = AdaptersMap(types=postgres_types)
Transformer: Type["proto.Transformer"]
from . import adapt
from . import errors as e
from . import waiting
+from . import postgres
from . import encodings
from .pq import ConnStatus, ExecStatus, TransactionStatus, Format
from .sql import Composable
self.pgconn = pgconn # TODO: document this
self._row_factory = row_factory
self._autocommit = False
- self._adapters = adapt.AdaptersMap(adapt.global_adapters)
+ self._adapters = adapt.AdaptersMap(postgres.adapters)
self._notice_handlers: List[NoticeHandler] = []
self._notify_handlers: List[NotifyHandler] = []
from math import floor
from typing import Any, Optional, Sequence
+from . import postgres
from .pq import Format, Escaping
-from .oids import postgres_types as builtins
from .adapt import Dumper
from .proto import AdaptContext
class DBAPITypeObject:
def __init__(self, name: str, type_names: Sequence[str]):
self.name = name
- self.values = tuple(builtins[n].oid for n in type_names)
+ self.values = tuple(postgres.types[n].oid for n in type_names)
def __repr__(self) -> str:
return f"psycopg.{self.name}"
class BinaryBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["bytea"].oid
+ _oid = postgres.types["bytea"].oid
def dump(self, obj: Binary) -> bytes:
wrapped = obj.obj
"""
-Maps of builtin types and names
+Types configuration specific to PostgreSQL.
"""
# Copyright (C) 2020-2021 The Psycopg Team
from ._typeinfo import TypeInfo, RangeInfo, TypesRegistry
+from .proto import AdaptContext
+from ._adapters_map import AdaptersMap
# Global objects with PostgreSQL builtins and globally registered user types.
-postgres_types = TypesRegistry()
+types = TypesRegistry()
+# Global adapter maps with PostgreSQL types configuration
+adapters = AdaptersMap(types=types)
# Use tools/update_oids.py to update this data.
for t in [
RangeInfo("tstzrange", 3910, 3911, subtype_oid=1184),
# autogenerated: end
]:
- postgres_types.add(t)
+ types.add(t)
# A few oids used a bit everywhere
INVALID_OID = 0
-TEXT_OID = postgres_types["text"].oid
-TEXT_ARRAY_OID = postgres_types["text"].array_oid
+TEXT_OID = types["text"].oid
+TEXT_ARRAY_OID = types["text"].array_oid
+
+
+def register_default_adapters(context: AdaptContext) -> None:
+
+ from .types import array
+
+ array.register_default_adapters(context)
+
+ from .types import bool
+
+ bool.register_default_adapters(context)
+
+ from .types import composite
+
+ composite.register_default_adapters(context)
+
+ from .types import datetime
+
+ datetime.register_default_adapters(context)
+
+ from .types import json
+
+ json.register_default_adapters(context)
+
+ from .types import net
+
+ net.register_default_adapters(context)
+
+ from .types import none
+
+ none.register_default_adapters(context)
+
+ from .types import numeric
+
+ numeric.register_default_adapters(context)
+
+ from .types import range
+
+ range.register_default_adapters(context)
+
+ from .types import string
+
+ string.register_default_adapters(context)
+
+ from .types import uuid
+
+ uuid.register_default_adapters(context)
+
+ # Must come after all the types are registered
+ array.register_all_arrays(context)
if TYPE_CHECKING:
from .sql import Composable
from .rows import Row, RowMaker
- from .adapt import AdaptersMap
from .pq.proto import PGresult
-
from .waiting import Wait, Ready
from .connection import BaseConnection
+ from ._adapters_map import AdaptersMap
# An object implementing the buffer protocol
Buffer = Union[bytes, bytearray, memoryview]
# Copyright (C) 2020-2021 The Psycopg Team
-from typing import TYPE_CHECKING
-
-from . import net
-from . import bool
-from . import json
-from . import none
-from . import uuid
-from . import array
-from . import range
-from . import string
-from . import numeric
-from . import datetime
-from . import composite
-
from .._typeinfo import TypeInfo as TypeInfo # exported here
-
-if TYPE_CHECKING:
- from ..proto import AdaptContext
-
-
-def register_default_globals(ctx: "AdaptContext") -> None:
- net.register_default_globals(ctx)
- bool.register_default_globals(ctx)
- json.register_default_globals(ctx)
- none.register_default_globals(ctx)
- uuid.register_default_globals(ctx)
- array.register_default_globals(ctx)
- range.register_default_globals(ctx)
- string.register_default_globals(ctx)
- numeric.register_default_globals(ctx)
- datetime.register_default_globals(ctx)
- composite.register_default_globals(ctx)
-
- # Must come after all the types are registered
- array.register_all_arrays(ctx)
from .. import pq
from .. import errors as e
-from ..oids import postgres_types, TEXT_OID, TEXT_ARRAY_OID, INVALID_OID
+from .. import postgres
from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
from ..proto import AdaptContext, Buffer, Dumper, DumperKey
from .._struct import pack_len, unpack_len
+from ..postgres import TEXT_OID, INVALID_OID
from .._typeinfo import TypeInfo
_struct_head = struct.Struct("!III") # ndims, hasnull, elem oid
Callable[[bytes, int], Tuple[int, int]], _struct_dim.unpack_from
)
+TEXT_ARRAY_OID = postgres.types["text"].array_oid
+
class BaseListDumper(RecursiveDumper):
def __init__(self, cls: type, context: Optional[AdaptContext] = None):
super().__init__(cls, context)
self.sub_dumper: Optional[Dumper] = None
- self._types = context.adapters.types if context else postgres_types
+ self._types = context.adapters.types if context else postgres.types
def get_key(self, obj: List[Any], format: PyFormat) -> DumperKey:
item = self._find_list_element(obj)
def register_adapters(
- info: TypeInfo, context: Optional["AdaptContext"]
+ info: TypeInfo, context: Optional[AdaptContext] = None
) -> None:
+ adapters = context.adapters if context else postgres.adapters
for base in (ArrayLoader, ArrayBinaryLoader):
lname = f"{info.name.title()}{base.__name__}"
loader: Type[BaseArrayLoader] = type(
lname, (base,), {"base_oid": info.oid}
)
- loader.register(info.array_oid, context=context)
+ adapters.register_loader(info.array_oid, loader)
-def register_default_globals(ctx: AdaptContext) -> None:
- ListDumper.register(list, ctx)
- ListBinaryDumper.register(list, ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ context.adapters.register_dumper(list, ListDumper)
+ context.adapters.register_dumper(list, ListBinaryDumper)
-def register_all_arrays(ctx: AdaptContext) -> None:
+def register_all_arrays(context: AdaptContext) -> None:
"""
Associate the array oid of all the types in Loader.globals.
This function is designed to be called once at import time, after having
registered all the base loaders.
"""
- for t in ctx.adapters.types:
+ for t in context.adapters.types:
# TODO: handle different delimiters (box)
if t.array_oid and getattr(t, "delimiter", None) == ",":
- t.register(ctx)
+ t.register(context)
# Copyright (C) 2020-2021 The Psycopg Team
+from .. import postgres
from ..pq import Format
-from ..oids import postgres_types as builtins
from ..adapt import Buffer, Dumper, Loader
from ..proto import AdaptContext
class BoolDumper(Dumper):
format = Format.TEXT
- _oid = builtins["bool"].oid
+ _oid = postgres.types["bool"].oid
def dump(self, obj: bool) -> bytes:
return b"t" if obj else b"f"
class BoolBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["bool"].oid
+ _oid = postgres.types["bool"].oid
def dump(self, obj: bool) -> bytes:
return b"\x01" if obj else b"\x00"
return data != b"\x00"
-def register_default_globals(ctx: AdaptContext) -> None:
- BoolDumper.register(bool, ctx)
- BoolBinaryDumper.register(bool, ctx)
- BoolLoader.register("bool", ctx)
- BoolBinaryLoader.register("bool", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+ adapters.register_dumper(bool, BoolDumper)
+ adapters.register_dumper(bool, BoolBinaryDumper)
+ adapters.register_loader("bool", BoolLoader)
+ adapters.register_loader("bool", BoolBinaryLoader)
from typing import Sequence, Tuple, Type
from .. import pq
-from ..oids import TEXT_OID
+from .. import postgres
from ..adapt import PyFormat, RecursiveDumper, RecursiveLoader
from ..proto import AdaptContext, Buffer
from .._struct import unpack_len
+from ..postgres import TEXT_OID
from .._typeinfo import CompositeInfo as CompositeInfo # exported here
_struct_oidlen = struct.Struct("!Ii")
def register_adapters(
info: CompositeInfo,
- context: Optional["AdaptContext"],
+ context: Optional[AdaptContext] = None,
factory: Optional[Callable[..., Any]] = None,
) -> None:
if not factory:
factory = namedtuple(info.name, info.field_names) # type: ignore
+ adapters = context.adapters if context else postgres.adapters
+
# generate and register a customized text loader
loader: Type[BaseCompositeLoader] = type(
f"{info.name.title()}Loader",
"fields_types": info.field_types,
},
)
- loader.register(info.oid, context=context)
+ adapters.register_loader(info.oid, loader)
# generate and register a customized binary loader
loader = type(
(CompositeBinaryLoader,),
{"factory": factory},
)
- loader.register(info.oid, context=context)
+ adapters.register_loader(info.oid, loader)
-def register_default_globals(ctx: AdaptContext) -> None:
- TupleDumper.register(tuple, ctx)
- RecordLoader.register("record", ctx)
- RecordBinaryLoader.register("record", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+ adapters.register_dumper(tuple, TupleDumper)
+ adapters.register_loader("record", RecordLoader)
+ adapters.register_loader("record", RecordBinaryLoader)
from datetime import date, datetime, time, timedelta, timezone
from typing import Any, Callable, cast, Optional, Tuple, TYPE_CHECKING
+from .. import postgres
from ..pq import Format
from .._tz import get_tzinfo
-from ..oids import postgres_types as builtins
from ..adapt import Buffer, Dumper, Loader, PyFormat
from ..proto import AdaptContext, DumperKey
from ..errors import InterfaceError, DataError
class DateDumper(Dumper):
format = Format.TEXT
- _oid = builtins["date"].oid
+ _oid = postgres.types["date"].oid
def dump(self, obj: date) -> bytes:
# NOTE: whatever the PostgreSQL DateStyle input format (DMY, MDY, YMD)
class DateBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["date"].oid
+ _oid = postgres.types["date"].oid
def dump(self, obj: date) -> bytes:
days = obj.toordinal() - _pg_date_epoch_days
class TimeDumper(_BaseTimeTextDumper):
- _oid = builtins["time"].oid
+ _oid = postgres.types["time"].oid
def upgrade(self, obj: time, format: PyFormat) -> Dumper:
if not obj.tzinfo:
class TimeTzDumper(_BaseTimeTextDumper):
- _oid = builtins["timetz"].oid
+ _oid = postgres.types["timetz"].oid
class TimeBinaryDumper(_BaseTimeDumper):
format = Format.BINARY
- _oid = builtins["time"].oid
+ _oid = postgres.types["time"].oid
def dump(self, obj: time) -> bytes:
us = obj.microsecond + 1_000_000 * (
class TimeTzBinaryDumper(_BaseTimeDumper):
format = Format.BINARY
- _oid = builtins["timetz"].oid
+ _oid = postgres.types["timetz"].oid
def dump(self, obj: time) -> bytes:
us = obj.microsecond + 1_000_000 * (
class DatetimeDumper(_BaseDatetimeTextDumper):
- _oid = builtins["timestamptz"].oid
+ _oid = postgres.types["timestamptz"].oid
def upgrade(self, obj: datetime, format: PyFormat) -> Dumper:
if obj.tzinfo:
class DatetimeNoTzDumper(_BaseDatetimeTextDumper):
- _oid = builtins["timestamp"].oid
+ _oid = postgres.types["timestamp"].oid
class DatetimeBinaryDumper(_BaseDatetimeDumper):
format = Format.BINARY
- _oid = builtins["timestamptz"].oid
+ _oid = postgres.types["timestamptz"].oid
def dump(self, obj: datetime) -> bytes:
delta = obj - _pg_datetimetz_epoch
class DatetimeNoTzBinaryDumper(_BaseDatetimeDumper):
format = Format.BINARY
- _oid = builtins["timestamp"].oid
+ _oid = postgres.types["timestamp"].oid
def dump(self, obj: datetime) -> bytes:
delta = obj - _pg_datetime_epoch
class TimedeltaDumper(Dumper):
format = Format.TEXT
- _oid = builtins["interval"].oid
+ _oid = postgres.types["interval"].oid
def __init__(self, cls: type, context: Optional[AdaptContext] = None):
super().__init__(cls, context)
class TimedeltaBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["interval"].oid
+ _oid = postgres.types["interval"].oid
def dump(self, obj: timedelta) -> bytes:
micros = 1_000_000 * obj.seconds + obj.microseconds
_uspad = [0, 100_000, 10_000, 1_000, 100, 10, 1]
-def register_default_globals(ctx: AdaptContext) -> None:
- DateDumper.register("datetime.date", ctx)
- DateBinaryDumper.register("datetime.date", ctx)
- TimeDumper.register("datetime.time", ctx)
- TimeBinaryDumper.register("datetime.time", ctx)
- DatetimeDumper.register("datetime.datetime", ctx)
- DatetimeBinaryDumper.register("datetime.datetime", ctx)
- TimedeltaDumper.register("datetime.timedelta", ctx)
- TimedeltaBinaryDumper.register("datetime.timedelta", ctx)
- DateLoader.register("date", ctx)
- DateBinaryLoader.register("date", ctx)
- TimeLoader.register("time", ctx)
- TimeBinaryLoader.register("time", ctx)
- TimetzLoader.register("timetz", ctx)
- TimetzBinaryLoader.register("timetz", ctx)
- TimestampLoader.register("timestamp", ctx)
- TimestampBinaryLoader.register("timestamp", ctx)
- TimestamptzLoader.register("timestamptz", ctx)
- TimestamptzBinaryLoader.register("timestamptz", ctx)
- IntervalLoader.register("interval", ctx)
- IntervalBinaryLoader.register("interval", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+ adapters.register_dumper("datetime.date", DateDumper)
+ adapters.register_dumper("datetime.date", DateBinaryDumper)
+ adapters.register_dumper("datetime.time", TimeDumper)
+ adapters.register_dumper("datetime.time", TimeBinaryDumper)
+ adapters.register_dumper("datetime.datetime", DatetimeDumper)
+ adapters.register_dumper("datetime.datetime", DatetimeBinaryDumper)
+ adapters.register_dumper("datetime.timedelta", TimedeltaDumper)
+ adapters.register_dumper("datetime.timedelta", TimedeltaBinaryDumper)
+ adapters.register_loader("date", DateLoader)
+ adapters.register_loader("date", DateBinaryLoader)
+ adapters.register_loader("time", TimeLoader)
+ adapters.register_loader("time", TimeBinaryLoader)
+ adapters.register_loader("timetz", TimetzLoader)
+ adapters.register_loader("timetz", TimetzBinaryLoader)
+ adapters.register_loader("timestamp", TimestampLoader)
+ adapters.register_loader("timestamp", TimestampBinaryLoader)
+ adapters.register_loader("timestamptz", TimestamptzLoader)
+ adapters.register_loader("timestamptz", TimestamptzBinaryLoader)
+ adapters.register_loader("interval", IntervalLoader)
+ adapters.register_loader("interval", IntervalBinaryLoader)
import json
from typing import Any, Callable, Optional, Type, Union
+from .. import postgres
from ..pq import Format
-from ..oids import postgres_types as builtins
from ..adapt import Buffer, Dumper, Loader
from ..proto import AdaptContext
from ..errors import DataError
dumper: Type[_JsonDumper]
for wrapper, base in grid:
dumper = type(f"Custom{base.__name__}", (base,), {"_dumps": dumps})
- dumper.register(wrapper, context=context)
+ context.adapters.register_dumper(wrapper, dumper)
def set_json_loads(
loader: Type[_JsonLoader]
for tname, base in grid:
loader = type(f"Custom{base.__name__}", (base,), {"_loads": loads})
- loader.register(tname, context=context)
+ context.adapters.register_loader(tname, loader)
class _JsonWrapper:
class JsonDumper(_JsonDumper):
format = Format.TEXT
- _oid = builtins["json"].oid
+ _oid = postgres.types["json"].oid
class JsonBinaryDumper(_JsonDumper):
format = Format.BINARY
- _oid = builtins["json"].oid
+ _oid = postgres.types["json"].oid
class JsonbDumper(_JsonDumper):
format = Format.TEXT
- _oid = builtins["jsonb"].oid
+ _oid = postgres.types["jsonb"].oid
class JsonbBinaryDumper(_JsonDumper):
format = Format.BINARY
- _oid = builtins["jsonb"].oid
+ _oid = postgres.types["jsonb"].oid
def dump(self, obj: _JsonWrapper) -> bytes:
dumps = obj.dumps or self.dumps
return self.loads(data)
-def register_default_globals(ctx: AdaptContext) -> None:
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+
# Currently json binary format is nothing different than text, maybe with
# an extra memcopy we can avoid.
- JsonBinaryDumper.register(Json, ctx)
- JsonDumper.register(Json, ctx)
- JsonbBinaryDumper.register(Jsonb, ctx)
- JsonbDumper.register(Jsonb, ctx)
- JsonLoader.register("json", ctx)
- JsonbLoader.register("jsonb", ctx)
- JsonBinaryLoader.register("json", ctx)
- JsonbBinaryLoader.register("jsonb", ctx)
+ adapters.register_dumper(Json, JsonBinaryDumper)
+ adapters.register_dumper(Json, JsonDumper)
+ adapters.register_dumper(Jsonb, JsonbBinaryDumper)
+ adapters.register_dumper(Jsonb, JsonbDumper)
+ adapters.register_loader("json", JsonLoader)
+ adapters.register_loader("jsonb", JsonbLoader)
+ adapters.register_loader("json", JsonBinaryLoader)
+ adapters.register_loader("jsonb", JsonbBinaryLoader)
from typing import Callable, Optional, Type, Union, TYPE_CHECKING
+from .. import postgres
from ..pq import Format
-from ..oids import postgres_types as builtins
from ..adapt import Buffer, Dumper, Loader
from ..proto import AdaptContext
class InterfaceDumper(Dumper):
format = Format.TEXT
- _oid = builtins["inet"].oid
+ _oid = postgres.types["inet"].oid
def dump(self, obj: Interface) -> bytes:
return str(obj).encode("utf8")
class NetworkDumper(Dumper):
format = Format.TEXT
- _oid = builtins["cidr"].oid
+ _oid = postgres.types["cidr"].oid
def dump(self, obj: Network) -> bytes:
return str(obj).encode("utf8")
class _AddressBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["inet"].oid
+ _oid = postgres.types["inet"].oid
_family: int
_prefixlen: int
class _InterfaceBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["inet"].oid
+ _oid = postgres.types["inet"].oid
_family: int
class _NetworkBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["cidr"].oid
+ _oid = postgres.types["cidr"].oid
_family: int
return ip_network(data.decode("utf8"))
-def register_default_globals(ctx: AdaptContext) -> None:
- InterfaceDumper.register("ipaddress.IPv4Address", ctx)
- InterfaceDumper.register("ipaddress.IPv6Address", ctx)
- InterfaceDumper.register("ipaddress.IPv4Interface", ctx)
- InterfaceDumper.register("ipaddress.IPv6Interface", ctx)
- NetworkDumper.register("ipaddress.IPv4Network", ctx)
- NetworkDumper.register("ipaddress.IPv6Network", ctx)
- IPv4AddressBinaryDumper.register("ipaddress.IPv4Address", ctx)
- IPv6AddressBinaryDumper.register("ipaddress.IPv6Address", ctx)
- IPv4InterfaceBinaryDumper.register("ipaddress.IPv4Interface", ctx)
- IPv6InterfaceBinaryDumper.register("ipaddress.IPv6Interface", ctx)
- IPv4NetworkBinaryDumper.register("ipaddress.IPv4Network", ctx)
- IPv6NetworkBinaryDumper.register("ipaddress.IPv6Network", ctx)
- InetLoader.register("inet", ctx)
- InetBinaryLoader.register("inet", ctx)
- CidrLoader.register("cidr", ctx)
- CidrBinaryLoader.register("cidr", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+ adapters.register_dumper("ipaddress.IPv4Address", InterfaceDumper)
+ adapters.register_dumper("ipaddress.IPv6Address", InterfaceDumper)
+ adapters.register_dumper("ipaddress.IPv4Interface", InterfaceDumper)
+ adapters.register_dumper("ipaddress.IPv6Interface", InterfaceDumper)
+ adapters.register_dumper("ipaddress.IPv4Network", NetworkDumper)
+ adapters.register_dumper("ipaddress.IPv6Network", NetworkDumper)
+ adapters.register_dumper("ipaddress.IPv4Address", IPv4AddressBinaryDumper)
+ adapters.register_dumper("ipaddress.IPv6Address", IPv6AddressBinaryDumper)
+ adapters.register_dumper(
+ "ipaddress.IPv4Interface", IPv4InterfaceBinaryDumper
+ )
+ adapters.register_dumper(
+ "ipaddress.IPv6Interface", IPv6InterfaceBinaryDumper
+ )
+ adapters.register_dumper("ipaddress.IPv4Network", IPv4NetworkBinaryDumper)
+ adapters.register_dumper("ipaddress.IPv6Network", IPv6NetworkBinaryDumper)
+ adapters.register_loader("inet", InetLoader)
+ adapters.register_loader("inet", InetBinaryLoader)
+ adapters.register_loader("cidr", CidrLoader)
+ adapters.register_loader("cidr", CidrBinaryLoader)
return b"NULL"
-def register_default_globals(ctx: AdaptContext) -> None:
- NoneDumper.register(type(None), ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ context.adapters.register_dumper(type(None), NoneDumper)
from typing import Any, Callable, DefaultDict, Dict, Tuple, Union, cast
from decimal import Decimal, DefaultContext, Context
+from .. import postgres
from .. import errors as e
from ..pq import Format
-from ..oids import postgres_types as builtins
from ..adapt import Buffer, Dumper, Loader, PyFormat
from ..proto import AdaptContext
from .._struct import pack_int2, pack_uint2, unpack_int2
class FloatDumper(_SpecialValuesDumper):
format = Format.TEXT
- _oid = builtins["float8"].oid
+ _oid = postgres.types["float8"].oid
_special = {
b"inf": b"'Infinity'::float8",
class FloatBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["float8"].oid
+ _oid = postgres.types["float8"].oid
def dump(self, obj: float) -> bytes:
return pack_float8(obj)
class DecimalDumper(_SpecialValuesDumper):
- _oid = builtins["numeric"].oid
+ _oid = postgres.types["numeric"].oid
def dump(self, obj: Decimal) -> bytes:
if obj.is_nan():
class Int2Dumper(_NumberDumper):
- _oid = builtins["int2"].oid
+ _oid = postgres.types["int2"].oid
class Int4Dumper(_NumberDumper):
- _oid = builtins["int4"].oid
+ _oid = postgres.types["int4"].oid
class Int8Dumper(_NumberDumper):
- _oid = builtins["int8"].oid
+ _oid = postgres.types["int8"].oid
class IntNumericDumper(_NumberDumper):
- _oid = builtins["numeric"].oid
+ _oid = postgres.types["numeric"].oid
class OidDumper(_NumberDumper):
- _oid = builtins["oid"].oid
+ _oid = postgres.types["oid"].oid
class IntDumper(Dumper):
class DecimalBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["numeric"].oid
+ _oid = postgres.types["numeric"].oid
def dump(self, obj: Decimal) -> Union[bytearray, bytes]:
sign, digits, exp = obj.as_tuple()
return out
-def register_default_globals(ctx: AdaptContext) -> None:
- IntDumper.register(int, ctx)
- IntBinaryDumper.register(int, ctx)
- FloatDumper.register(float, ctx)
- FloatBinaryDumper.register(float, ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+ adapters.register_dumper(int, IntDumper)
+ adapters.register_dumper(int, IntBinaryDumper)
+ adapters.register_dumper(float, FloatDumper)
+ adapters.register_dumper(float, FloatBinaryDumper)
# The binary dumper is currently some 30% slower, so default to text
# (see tests/scripts/testdec.py for a rough benchmark)
- DecimalBinaryDumper.register("decimal.Decimal", ctx)
- DecimalDumper.register("decimal.Decimal", ctx)
- Int2Dumper.register(Int2, ctx)
- Int4Dumper.register(Int4, ctx)
- Int8Dumper.register(Int8, ctx)
- IntNumericDumper.register(IntNumeric, ctx)
- OidDumper.register(Oid, ctx)
- Int2BinaryDumper.register(Int2, ctx)
- Int4BinaryDumper.register(Int4, ctx)
- Int8BinaryDumper.register(Int8, ctx)
- OidBinaryDumper.register(Oid, ctx)
- IntLoader.register("int2", ctx)
- IntLoader.register("int4", ctx)
- IntLoader.register("int8", ctx)
- IntLoader.register("oid", ctx)
- Int2BinaryLoader.register("int2", ctx)
- Int4BinaryLoader.register("int4", ctx)
- Int8BinaryLoader.register("int8", ctx)
- OidBinaryLoader.register("oid", ctx)
- FloatLoader.register("float4", ctx)
- FloatLoader.register("float8", ctx)
- Float4BinaryLoader.register("float4", ctx)
- Float8BinaryLoader.register("float8", ctx)
- NumericLoader.register("numeric", ctx)
- NumericBinaryLoader.register("numeric", ctx)
+ adapters.register_dumper("decimal.Decimal", DecimalBinaryDumper)
+ adapters.register_dumper("decimal.Decimal", DecimalDumper)
+ adapters.register_dumper(Int2, Int2Dumper)
+ adapters.register_dumper(Int4, Int4Dumper)
+ adapters.register_dumper(Int8, Int8Dumper)
+ adapters.register_dumper(IntNumeric, IntNumericDumper)
+ adapters.register_dumper(Oid, OidDumper)
+ adapters.register_dumper(Int2, Int2BinaryDumper)
+ adapters.register_dumper(Int4, Int4BinaryDumper)
+ adapters.register_dumper(Int8, Int8BinaryDumper)
+ adapters.register_dumper(Oid, OidBinaryDumper)
+ adapters.register_loader("int2", IntLoader)
+ adapters.register_loader("int4", IntLoader)
+ adapters.register_loader("int8", IntLoader)
+ adapters.register_loader("oid", IntLoader)
+ adapters.register_loader("int2", Int2BinaryLoader)
+ adapters.register_loader("int4", Int4BinaryLoader)
+ adapters.register_loader("int8", Int8BinaryLoader)
+ adapters.register_loader("oid", OidBinaryLoader)
+ adapters.register_loader("float4", FloatLoader)
+ adapters.register_loader("float8", FloatLoader)
+ adapters.register_loader("float4", Float4BinaryLoader)
+ adapters.register_loader("float8", Float8BinaryLoader)
+ adapters.register_loader("numeric", NumericLoader)
+ adapters.register_loader("numeric", NumericBinaryLoader)
from decimal import Decimal
from datetime import date, datetime
+from .. import postgres
from ..pq import Format
-from ..oids import postgres_types as builtins, INVALID_OID
from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
from ..proto import AdaptContext, Buffer, Dumper, DumperKey
from .._struct import pack_len, unpack_len
+from ..postgres import INVALID_OID
from .._typeinfo import RangeInfo as RangeInfo # exported here
-
from .composite import SequenceDumper, BaseCompositeLoader
RANGE_EMPTY = 0x01 # range is empty
def __init__(self, cls: type, context: Optional[AdaptContext] = None):
super().__init__(cls, context)
self.sub_dumper: Optional[Dumper] = None
- self._types = context.adapters.types if context else builtins
+ self._types = context.adapters.types if context else postgres.types
self._adapt_format = PyFormat.from_pq(self.format)
def get_key(self, obj: Range[Any], format: PyFormat) -> DumperKey:
def register_adapters(
- info: RangeInfo, context: Optional["AdaptContext"]
+ info: RangeInfo, context: Optional[AdaptContext] = None
) -> None:
+ adapters = context.adapters if context else postgres.adapters
+
# generate and register a customized text loader
loader: Type[RangeLoader[Any]] = type(
f"{info.name.title()}Loader",
(RangeLoader,),
{"subtype_oid": info.subtype_oid},
)
- loader.register(info.oid, context=context)
+ adapters.register_loader(info.oid, loader)
# generate and register a customized binary loader
bloader: Type[RangeBinaryLoader[Any]] = type(
(RangeBinaryLoader,),
{"subtype_oid": info.subtype_oid},
)
- bloader.register(info.oid, context=context)
+ adapters.register_loader(info.oid, bloader)
# Text dumpers for builtin range types wrappers
class Int4RangeDumper(RangeDumper):
- _oid = builtins["int4range"].oid
+ _oid = postgres.types["int4range"].oid
class Int8RangeDumper(RangeDumper):
- _oid = builtins["int8range"].oid
+ _oid = postgres.types["int8range"].oid
class NumericRangeDumper(RangeDumper):
- _oid = builtins["numrange"].oid
+ _oid = postgres.types["numrange"].oid
class DateRangeDumper(RangeDumper):
- _oid = builtins["daterange"].oid
+ _oid = postgres.types["daterange"].oid
class TimestampRangeDumper(RangeDumper):
- _oid = builtins["tsrange"].oid
+ _oid = postgres.types["tsrange"].oid
class TimestamptzRangeDumper(RangeDumper):
- _oid = builtins["tstzrange"].oid
+ _oid = postgres.types["tstzrange"].oid
# Binary dumpers for builtin range types wrappers
class Int4RangeBinaryDumper(RangeBinaryDumper):
- _oid = builtins["int4range"].oid
+ _oid = postgres.types["int4range"].oid
class Int8RangeBinaryDumper(RangeBinaryDumper):
- _oid = builtins["int8range"].oid
+ _oid = postgres.types["int8range"].oid
class NumericRangeBinaryDumper(RangeBinaryDumper):
- _oid = builtins["numrange"].oid
+ _oid = postgres.types["numrange"].oid
class DateRangeBinaryDumper(RangeBinaryDumper):
- _oid = builtins["daterange"].oid
+ _oid = postgres.types["daterange"].oid
class TimestampRangeBinaryDumper(RangeBinaryDumper):
- _oid = builtins["tsrange"].oid
+ _oid = postgres.types["tsrange"].oid
class TimestamptzRangeBinaryDumper(RangeBinaryDumper):
- _oid = builtins["tstzrange"].oid
+ _oid = postgres.types["tstzrange"].oid
# Text loaders for builtin range types
class Int4RangeLoader(RangeLoader[int]):
- subtype_oid = builtins["int4"].oid
+ subtype_oid = postgres.types["int4"].oid
class Int8RangeLoader(RangeLoader[int]):
- subtype_oid = builtins["int8"].oid
+ subtype_oid = postgres.types["int8"].oid
class NumericRangeLoader(RangeLoader[Decimal]):
- subtype_oid = builtins["numeric"].oid
+ subtype_oid = postgres.types["numeric"].oid
class DateRangeLoader(RangeLoader[date]):
- subtype_oid = builtins["date"].oid
+ subtype_oid = postgres.types["date"].oid
class TimestampRangeLoader(RangeLoader[datetime]):
- subtype_oid = builtins["timestamp"].oid
+ subtype_oid = postgres.types["timestamp"].oid
class TimestampTZRangeLoader(RangeLoader[datetime]):
- subtype_oid = builtins["timestamptz"].oid
+ subtype_oid = postgres.types["timestamptz"].oid
# Binary loaders for builtin range types
class Int4RangeBinaryLoader(RangeBinaryLoader[int]):
- subtype_oid = builtins["int4"].oid
+ subtype_oid = postgres.types["int4"].oid
class Int8RangeBinaryLoader(RangeBinaryLoader[int]):
- subtype_oid = builtins["int8"].oid
+ subtype_oid = postgres.types["int8"].oid
class NumericRangeBinaryLoader(RangeBinaryLoader[Decimal]):
- subtype_oid = builtins["numeric"].oid
+ subtype_oid = postgres.types["numeric"].oid
class DateRangeBinaryLoader(RangeBinaryLoader[date]):
- subtype_oid = builtins["date"].oid
+ subtype_oid = postgres.types["date"].oid
class TimestampRangeBinaryLoader(RangeBinaryLoader[datetime]):
- subtype_oid = builtins["timestamp"].oid
+ subtype_oid = postgres.types["timestamp"].oid
class TimestampTZRangeBinaryLoader(RangeBinaryLoader[datetime]):
- subtype_oid = builtins["timestamptz"].oid
-
-
-def register_default_globals(ctx: AdaptContext) -> None:
- RangeBinaryDumper.register(Range, ctx)
- RangeDumper.register(Range, ctx)
- Int4RangeDumper.register(Int4Range, ctx)
- Int8RangeDumper.register(Int8Range, ctx)
- NumericRangeDumper.register(NumericRange, ctx)
- DateRangeDumper.register(DateRange, ctx)
- TimestampRangeDumper.register(TimestampRange, ctx)
- TimestamptzRangeDumper.register(TimestamptzRange, ctx)
- Int4RangeBinaryDumper.register(Int4Range, ctx)
- Int8RangeBinaryDumper.register(Int8Range, ctx)
- NumericRangeBinaryDumper.register(NumericRange, ctx)
- DateRangeBinaryDumper.register(DateRange, ctx)
- TimestampRangeBinaryDumper.register(TimestampRange, ctx)
- TimestamptzRangeBinaryDumper.register(TimestamptzRange, ctx)
- Int4RangeLoader.register("int4range", ctx)
- Int8RangeLoader.register("int8range", ctx)
- NumericRangeLoader.register("numrange", ctx)
- DateRangeLoader.register("daterange", ctx)
- TimestampRangeLoader.register("tsrange", ctx)
- TimestampTZRangeLoader.register("tstzrange", ctx)
- Int4RangeBinaryLoader.register("int4range", ctx)
- Int8RangeBinaryLoader.register("int8range", ctx)
- NumericRangeBinaryLoader.register("numrange", ctx)
- DateRangeBinaryLoader.register("daterange", ctx)
- TimestampRangeBinaryLoader.register("tsrange", ctx)
- TimestampTZRangeBinaryLoader.register("tstzrange", ctx)
+ subtype_oid = postgres.types["timestamptz"].oid
+
+
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+ adapters.register_dumper(Range, RangeBinaryDumper)
+ adapters.register_dumper(Range, RangeDumper)
+ adapters.register_dumper(Int4Range, Int4RangeDumper)
+ adapters.register_dumper(Int8Range, Int8RangeDumper)
+ adapters.register_dumper(NumericRange, NumericRangeDumper)
+ adapters.register_dumper(DateRange, DateRangeDumper)
+ adapters.register_dumper(TimestampRange, TimestampRangeDumper)
+ adapters.register_dumper(TimestamptzRange, TimestamptzRangeDumper)
+ adapters.register_dumper(Int4Range, Int4RangeBinaryDumper)
+ adapters.register_dumper(Int8Range, Int8RangeBinaryDumper)
+ adapters.register_dumper(NumericRange, NumericRangeBinaryDumper)
+ adapters.register_dumper(DateRange, DateRangeBinaryDumper)
+ adapters.register_dumper(TimestampRange, TimestampRangeBinaryDumper)
+ adapters.register_dumper(TimestamptzRange, TimestamptzRangeBinaryDumper)
+ adapters.register_loader("int4range", Int4RangeLoader)
+ adapters.register_loader("int8range", Int8RangeLoader)
+ adapters.register_loader("numrange", NumericRangeLoader)
+ adapters.register_loader("daterange", DateRangeLoader)
+ adapters.register_loader("tsrange", TimestampRangeLoader)
+ adapters.register_loader("tstzrange", TimestampTZRangeLoader)
+ adapters.register_loader("int4range", Int4RangeBinaryLoader)
+ adapters.register_loader("int8range", Int8RangeBinaryLoader)
+ adapters.register_loader("numrange", NumericRangeBinaryLoader)
+ adapters.register_loader("daterange", DateRangeBinaryLoader)
+ adapters.register_loader("tsrange", TimestampRangeBinaryLoader)
+ adapters.register_loader("tstzrange", TimestampTZRangeBinaryLoader)
from typing import Optional, Union, TYPE_CHECKING
+from .. import postgres
from ..pq import Format, Escaping
-from ..oids import postgres_types as builtins
from ..adapt import Buffer, Dumper, Loader
from ..proto import AdaptContext
from ..errors import DataError
class StrBinaryDumper(_StrDumper):
format = Format.BINARY
- _oid = builtins["text"].oid
+ _oid = postgres.types["text"].oid
def dump(self, obj: str) -> bytes:
# the server will raise DataError subclass if the string contains 0x00
class BytesDumper(Dumper):
format = Format.TEXT
- _oid = builtins["bytea"].oid
+ _oid = postgres.types["bytea"].oid
def __init__(self, cls: type, context: Optional[AdaptContext] = None):
super().__init__(cls, context)
class BytesBinaryDumper(Dumper):
format = Format.BINARY
- _oid = builtins["bytea"].oid
+ _oid = postgres.types["bytea"].oid
def dump(
self, obj: Union[bytes, bytearray, memoryview]
return data
-def register_default_globals(ctx: "AdaptContext") -> None:
- from ..oids import INVALID_OID
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
# NOTE: the order the dumpers are registered is relevant.
# The last one registered becomes the default for each type.
# Normally, binary is the default dumper, except for text (which plays
# the role of unknown, so it can be cast automatically to other types).
- StrBinaryDumper.register(str, ctx)
- StrDumper.register(str, ctx)
- TextLoader.register(INVALID_OID, ctx)
- TextLoader.register("bpchar", ctx)
- TextLoader.register("name", ctx)
- TextLoader.register("text", ctx)
- TextLoader.register("varchar", ctx)
- TextBinaryLoader.register("bpchar", ctx)
- TextBinaryLoader.register("name", ctx)
- TextBinaryLoader.register("text", ctx)
- TextBinaryLoader.register("varchar", ctx)
-
- BytesDumper.register(bytes, ctx)
- BytesDumper.register(bytearray, ctx)
- BytesDumper.register(memoryview, ctx)
- BytesBinaryDumper.register(bytes, ctx)
- BytesBinaryDumper.register(bytearray, ctx)
- BytesBinaryDumper.register(memoryview, ctx)
- ByteaLoader.register("bytea", ctx)
- ByteaBinaryLoader.register(INVALID_OID, ctx)
- ByteaBinaryLoader.register("bytea", ctx)
+ adapters.register_dumper(str, StrBinaryDumper)
+ adapters.register_dumper(str, StrDumper)
+ adapters.register_loader(postgres.INVALID_OID, TextLoader)
+ adapters.register_loader("bpchar", TextLoader)
+ adapters.register_loader("name", TextLoader)
+ adapters.register_loader("text", TextLoader)
+ adapters.register_loader("varchar", TextLoader)
+ adapters.register_loader("bpchar", TextBinaryLoader)
+ adapters.register_loader("name", TextBinaryLoader)
+ adapters.register_loader("text", TextBinaryLoader)
+ adapters.register_loader("varchar", TextBinaryLoader)
+
+ adapters.register_dumper(bytes, BytesDumper)
+ adapters.register_dumper(bytearray, BytesDumper)
+ adapters.register_dumper(memoryview, BytesDumper)
+ adapters.register_dumper(bytes, BytesBinaryDumper)
+ adapters.register_dumper(bytearray, BytesBinaryDumper)
+ adapters.register_dumper(memoryview, BytesBinaryDumper)
+ adapters.register_loader("bytea", ByteaLoader)
+ adapters.register_loader(postgres.INVALID_OID, ByteaBinaryLoader)
+ adapters.register_loader("bytea", ByteaBinaryLoader)
from typing import Callable, Optional, TYPE_CHECKING
+from .. import postgres
from ..pq import Format
-from ..oids import postgres_types as builtins
from ..adapt import Buffer, Dumper, Loader
from ..proto import AdaptContext
class UUIDDumper(Dumper):
format = Format.TEXT
- _oid = builtins["uuid"].oid
+ _oid = postgres.types["uuid"].oid
def dump(self, obj: "uuid.UUID") -> bytes:
return obj.hex.encode("utf8")
return UUID(bytes=data)
-def register_default_globals(ctx: AdaptContext) -> None:
- UUIDDumper.register("uuid.UUID", ctx)
- UUIDBinaryDumper.register("uuid.UUID", ctx)
- UUIDLoader.register("uuid", ctx)
- UUIDBinaryLoader.register("uuid", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+ adapters = context.adapters
+ adapters.register_dumper("uuid.UUID", UUIDDumper)
+ adapters.register_dumper("uuid.UUID", UUIDBinaryDumper)
+ adapters.register_loader("uuid", UUIDLoader)
+ adapters.register_loader("uuid", UUIDBinaryLoader)
from typing import Any
cimport cython
-from cpython.bytes cimport PyBytes_AsStringAndSize
from cpython.bytearray cimport PyByteArray_FromStringAndSize, PyByteArray_Resize
from cpython.bytearray cimport PyByteArray_GET_SIZE, PyByteArray_AS_STRING
cpdef object upgrade(self, object obj, object format):
return self
- @classmethod
- def register(
- this_cls,
- cls: Union[type, str],
- context: Optional[AdaptContext] = None,
- int format = PQ_TEXT,
- ) -> None:
- if context is not None:
- adapters = context.adapters
- else:
- from psycopg.adapt import global_adapters as adapters
-
- adapters.register_dumper(cls, this_cls)
-
@staticmethod
cdef char *ensure_size(bytearray ba, Py_ssize_t offset, Py_ssize_t size) except NULL:
"""
cdef Py_ssize_t length
_buffer_as_string_and_size(data, &ptr, &length)
return self.cload(ptr, length)
-
- @classmethod
- def register(
- cls,
- oid: Union[int, str],
- context: Optional["AdaptContext"] = None,
- int format = PQ_TEXT,
- ) -> None:
- if context is not None:
- adapters = context.adapters
- else:
- from psycopg.adapt import global_adapters as adapters
-
- adapters.register_loader(oid, cls)
self.adapters = context.adapters
self.connection = context.connection
else:
- from psycopg.adapt import global_adapters
- self.adapters = global_adapters
+ from psycopg import postgres
+ self.adapters = postgres.adapters
self.connection = None
@property
import pytest
import psycopg
-from psycopg import pq, sql
+from psycopg import pq, sql, postgres
from psycopg.adapt import Transformer, PyFormat as Format, Dumper, Loader
-from psycopg.oids import postgres_types as builtins, TEXT_OID
from psycopg._cmodule import _psycopg
+from psycopg.postgres import types as builtins, TEXT_OID
@pytest.mark.parametrize(
def test_dump_connection_ctx(conn):
- make_bin_dumper("b").register(MyStr, conn)
- make_dumper("t").register(MyStr, conn)
+ conn.adapters.register_dumper(MyStr, make_bin_dumper("b"))
+ conn.adapters.register_dumper(MyStr, make_dumper("t"))
cur = conn.cursor()
cur.execute("select %s", [MyStr("hello")])
def test_dump_cursor_ctx(conn):
- make_bin_dumper("b").register(str, conn)
- make_dumper("t").register(str, conn)
+ conn.adapters.register_dumper(str, make_bin_dumper("b"))
+ conn.adapters.register_dumper(str, make_dumper("t"))
cur = conn.cursor()
- make_bin_dumper("bc").register(str, cur)
- make_dumper("tc").register(str, cur)
+ cur.adapters.register_dumper(str, make_bin_dumper("bc"))
+ cur.adapters.register_dumper(str, make_dumper("tc"))
cur.execute("select %s", [MyStr("hello")])
assert cur.fetchone() == ("hellotc",)
def dump(self, obj):
return (obj * 2).encode("utf-8")
- MyStrDumper.register(str, conn)
+ conn.adapters.register_dumper(str, MyStrDumper)
assert conn.execute("select %t", ["hello"]).fetchone()[0] == "hellohello"
def load(self, data):
return (bytes(data) * 2).decode("utf-8")
- MyTextLoader.register("text", conn)
+ conn.adapters.register_loader("text", MyTextLoader)
assert conn.execute("select 'hello'::text").fetchone()[0] == "hellohello"
def test_load_connection_ctx(conn):
- make_loader("t").register(TEXT_OID, conn)
- make_bin_loader("b").register(TEXT_OID, conn)
+ conn.adapters.register_loader(TEXT_OID, make_loader("t"))
+ conn.adapters.register_loader(TEXT_OID, make_bin_loader("b"))
r = conn.cursor(binary=False).execute("select 'hello'::text").fetchone()
assert r == ("hellot",)
def test_load_cursor_ctx(conn):
- make_loader("t").register(TEXT_OID, conn)
- make_bin_loader("b").register(TEXT_OID, conn)
+ conn.adapters.register_loader(TEXT_OID, make_loader("t"))
+ conn.adapters.register_loader(TEXT_OID, make_bin_loader("b"))
cur = conn.cursor()
- make_loader("tc").register(TEXT_OID, cur)
- make_bin_loader("bc").register(TEXT_OID, cur)
+ cur.adapters.register_loader(TEXT_OID, make_loader("tc"))
+ cur.adapters.register_loader(TEXT_OID, make_bin_loader("bc"))
assert cur.execute("select 'hello'::text").fetchone() == ("hellotc",)
cur.format = pq.Format.BINARY
def test_cow_dumpers(conn):
- make_dumper("t").register(str, conn)
+ conn.adapters.register_dumper(str, make_dumper("t"))
cur1 = conn.cursor()
cur2 = conn.cursor()
- make_dumper("c2").register(str, cur2)
+ cur2.adapters.register_dumper(str, make_dumper("c2"))
r = cur1.execute("select %s::text -- 1", ["hello"]).fetchone()
assert r == ("hellot",)
r = cur2.execute("select %s::text -- 1", ["hello"]).fetchone()
assert r == ("helloc2",)
- make_dumper("t1").register(str, conn)
+ conn.adapters.register_dumper(str, make_dumper("t1"))
r = cur1.execute("select %s::text -- 2", ["hello"]).fetchone()
assert r == ("hellot",)
r = cur2.execute("select %s::text -- 2", ["hello"]).fetchone()
def test_cow_loaders(conn):
- make_loader("t").register(TEXT_OID, conn)
+ conn.adapters.register_loader(TEXT_OID, make_loader("t"))
cur1 = conn.cursor()
cur2 = conn.cursor()
- make_loader("c2").register(TEXT_OID, cur2)
+ cur2.adapters.register_loader(TEXT_OID, make_loader("c2"))
assert cur1.execute("select 'hello'::text").fetchone() == ("hellot",)
assert cur2.execute("select 'hello'::text").fetchone() == ("helloc2",)
- make_loader("t1").register(TEXT_OID, conn)
+ conn.adapters.register_loader(TEXT_OID, make_loader("t1"))
assert cur1.execute("select 'hello2'::text").fetchone() == ("hello2t",)
assert cur2.execute("select 'hello2'::text").fetchone() == ("hello2c2",)
def test_load_cursor_ctx_nested(conn, sql, obj, fmt_out):
cur = conn.cursor(binary=fmt_out == pq.Format.BINARY)
if fmt_out == pq.Format.TEXT:
- make_loader("c").register(TEXT_OID, cur)
+ cur.adapters.register_loader("text", make_loader("c"))
else:
- make_bin_loader("c").register(TEXT_OID, cur)
+ cur.adapters.register_loader("text", make_bin_loader("c"))
cur.execute(f"select {sql}")
res = cur.fetchone()[0]
cur = conn.cursor()
bd = make_bin_dumper("b")
- bd.register(str, cur)
+ cur.adapters.register_dumper(str, bd)
td = make_dumper("t")
- td.register(str, cur)
+ cur.adapters.register_dumper(str, td)
assert cur.execute("select %s", ["hello"]).fetchone()[0] == "hellot"
assert cur.execute("select %t", ["hello"]).fetchone()[0] == "hellot"
assert cur.execute("select %b", ["hello"]).fetchone()[0] == "hellob"
- bd.register(str, cur)
+ cur.adapters.register_dumper(str, bd)
assert cur.execute("select %s", ["hello"]).fetchone()[0] == "hellob"
# All the registered adapters
reg_adapters = set()
adapters = (
- list(psycopg.global_adapters._dumpers.values())
- + psycopg.global_adapters._loaders
+ list(postgres.adapters._dumpers.values()) + postgres.adapters._loaders
)
assert len(adapters) == 5
for m in adapters:
def dump(self, obj):
return super().dump(obj) * 2
- MyStrDumper.register(str, conn)
+ conn.adapters.register_dumper(str, MyStrDumper)
cur = conn.cursor()
ensure_table(cur, sample_tabledef)
import psycopg
from psycopg import pq, sql, rows
-from psycopg.oids import postgres_types as builtins
from psycopg.adapt import PyFormat as Format
+from psycopg.postgres import types as builtins
from .utils import gc_collect
import psycopg
from psycopg import pq
from psycopg import sql
-from psycopg.oids import postgres_types as builtins
from psycopg.adapt import PyFormat as Format, Transformer
from psycopg.types import TypeInfo
+from psycopg.postgres import types as builtins
tests_str = [
from psycopg import pq
from psycopg import sql
-from psycopg.oids import postgres_types as builtins
from psycopg.adapt import Transformer, PyFormat as Format
+from psycopg.postgres import types as builtins
@pytest.mark.parametrize("fmt_in", [Format.AUTO, Format.TEXT, Format.BINARY])
import pytest
-from psycopg import pq
+from psycopg import pq, postgres
from psycopg.sql import Identifier
-from psycopg.oids import postgres_types as builtins
-from psycopg.adapt import PyFormat as Format, global_adapters
+from psycopg.adapt import PyFormat as Format
+from psycopg.postgres import types as builtins
from psycopg.types.composite import CompositeInfo
info.register()
for fmt in (pq.Format.TEXT, pq.Format.BINARY):
for oid in (info.oid, info.array_oid):
- assert global_adapters._loaders[fmt].pop(oid)
+ assert postgres.adapters._loaders[fmt].pop(oid)
cur = conn.cursor()
info.register(cur)
for fmt in (pq.Format.TEXT, pq.Format.BINARY):
for oid in (info.oid, info.array_oid):
- assert oid not in global_adapters._loaders[fmt]
+ assert oid not in postgres.adapters._loaders[fmt]
assert oid not in conn.adapters._loaders[fmt]
assert oid in cur.adapters._loaders[fmt]
info.register(conn)
for fmt in (pq.Format.TEXT, pq.Format.BINARY):
for oid in (info.oid, info.array_oid):
- assert oid not in global_adapters._loaders[fmt]
+ assert oid not in postgres.adapters._loaders[fmt]
assert oid in conn.adapters._loaders[fmt]
def test_infinity_date_example(self, conn):
# NOTE: this is an example in the docs. Make sure it doesn't regress when
# adding binary datetime adapters
- from psycopg.oids import postgres_types as builtins
+ from datetime import date
from psycopg.types.datetime import DateLoader, DateDumper
class InfDateDumper(DateDumper):
def dump(self, obj):
- if obj == dt.date.max:
+ if obj == date.max:
return b"infinity"
else:
return super().dump(obj)
class InfDateLoader(DateLoader):
def load(self, data):
if data == b"infinity":
- return dt.date.max
+ return date.max
else:
return super().load(data)
cur = conn.cursor()
- InfDateDumper.register(dt.date, cur)
- InfDateLoader.register(builtins["date"].oid, cur)
+ cur.adapters.register_dumper(date, InfDateDumper)
+ cur.adapters.register_loader("date", InfDateLoader)
rec = cur.execute(
- "SELECT %s::text, %s::text", [dt.date(2020, 12, 31), dt.date.max]
+ "SELECT %s::text, %s::text", [date(2020, 12, 31), date.max]
).fetchone()
assert rec == ("2020-12-31", "infinity")
rec = cur.execute(
"select '2020-12-31'::date, 'infinity'::date"
).fetchone()
- assert rec == (dt.date(2020, 12, 31), dt.date(9999, 12, 31))
+ assert rec == (date(2020, 12, 31), date(9999, 12, 31))
def test_load_copy(self, conn):
cur = conn.cursor(binary=False)
)
def test_numeric_as_float(conn, val):
cur = conn.cursor()
- FloatLoader.register(conn.adapters.types["numeric"].oid, cur)
+ cur.adapters.register_loader("numeric", FloatLoader)
val = Decimal(val)
cur.execute("select %s as val", (val,))
def update_python_oids() -> None:
queries = [version_sql, py_types_sql, py_ranges_sql]
- fn = ROOT / "psycopg/psycopg/oids.py"
+ fn = ROOT / "psycopg/psycopg/postgres.py"
update_file(fn, queries)
sp.check_call(["black", "-q", fn])