]> git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
Add postgres module
authorDaniele Varrazzo <daniele.varrazzo@gmail.com>
Sun, 11 Jul 2021 02:14:17 +0000 (04:14 +0200)
committerDaniele Varrazzo <daniele.varrazzo@gmail.com>
Sun, 11 Jul 2021 12:43:58 +0000 (14:43 +0200)
The module exports PostgreSQL-specific configurations: the mapping
from oids to types and the mapping from oids/types to loaders/dumpers.

33 files changed:
docs/advanced/adapt.rst
psycopg/psycopg/__init__.py
psycopg/psycopg/_adapters_map.py [new file with mode: 0644]
psycopg/psycopg/_transform.py
psycopg/psycopg/_typeinfo.py
psycopg/psycopg/adapt.py
psycopg/psycopg/connection.py
psycopg/psycopg/dbapi20.py
psycopg/psycopg/postgres.py [moved from psycopg/psycopg/oids.py with 71% similarity]
psycopg/psycopg/proto.py
psycopg/psycopg/types/__init__.py
psycopg/psycopg/types/array.py
psycopg/psycopg/types/bool.py
psycopg/psycopg/types/composite.py
psycopg/psycopg/types/datetime.py
psycopg/psycopg/types/json.py
psycopg/psycopg/types/net.py
psycopg/psycopg/types/none.py
psycopg/psycopg/types/numeric.py
psycopg/psycopg/types/range.py
psycopg/psycopg/types/string.py
psycopg/psycopg/types/uuid.py
psycopg_c/psycopg_c/_psycopg/adapt.pyx
psycopg_c/psycopg_c/_psycopg/transform.pyx
tests/test_adapt.py
tests/test_copy.py
tests/test_cursor.py
tests/types/test_array.py
tests/types/test_bool.py
tests/types/test_composite.py
tests/types/test_datetime.py
tests/types/test_numeric.py
tools/update_oids.py

index e24dcc615ddae539a17fc43f93c760317ef9372e..96555fbf86cf24dab16ff6ecc1e74f00107c373b 100644 (file)
@@ -79,8 +79,6 @@ cursor):
 .. code:: python
 
     from datetime import date
-
-    from psycopg.oids import postgres_types as builtins
     from psycopg.types.datetime import DateLoader, DateDumper
 
     class InfDateDumper(DateDumper):
@@ -97,8 +95,8 @@ cursor):
             else:
                 return super().load(data)
 
-    InfDateDumper.register(date, cur)
-    InfDateLoader.register(builtins["date"].oid, cur)
+    cur.adapters.register_dumper(date, InfDateDumper)
+    cur.adapters.register_loader("date", InfDateLoader)
 
     cur.execute("SELECT %s::text, %s::text", [date(2020, 12, 31), date.max]).fetchone()
     # ('2020-12-31', 'infinity')
index 0b81125bc30d87467472fabf36343ff7643bef53..dcf1cc3136726a9363c1927b76f55a69f4f77e29 100644 (file)
@@ -8,8 +8,8 @@ import logging
 
 from . import pq
 from . import types
+from . import postgres
 from .copy import Copy, AsyncCopy
-from .adapt import global_adapters
 from .cursor import AnyCursor, AsyncCursor, Cursor
 from .errors import Warning, Error, InterfaceError, DatabaseError
 from .errors import DataError, OperationalError, IntegrityError
@@ -32,16 +32,16 @@ logger = logging.getLogger("psycopg")
 if logger.level == logging.NOTSET:
     logger.setLevel(logging.WARNING)
 
-# register default adapters
-types.register_default_globals(global_adapters)
+# register default adapters for PostgreSQL
+postgres.register_default_adapters(postgres.adapters)
 
 # DBAPI compliancy
 connect = Connection.connect
 apilevel = "2.0"
 threadsafety = 2
 paramstyle = "pyformat"
-BinaryTextDumper.register(Binary, global_adapters)  # dbapi20
-BinaryBinaryDumper.register(Binary, global_adapters)  # dbapi20
+postgres.adapters.register_dumper(Binary, BinaryTextDumper)  # dbapi20
+postgres.adapters.register_dumper(Binary, BinaryBinaryDumper)  # dbapi20
 
 
 # Note: defining the exported methods helps both Sphynx in documenting that
diff --git a/psycopg/psycopg/_adapters_map.py b/psycopg/psycopg/_adapters_map.py
new file mode 100644 (file)
index 0000000..ac6313a
--- /dev/null
@@ -0,0 +1,182 @@
+"""
+Mapping from types/oids to Dumpers/Loaders
+"""
+
+# Copyright (C) 2020-2021 The Psycopg Team
+
+from typing import Any, Dict, List, Optional, Type, TypeVar, Union
+from typing import cast, TYPE_CHECKING
+
+from . import pq
+from . import errors as e
+from ._enums import PyFormat as PyFormat
+from .proto import AdaptContext, Dumper, Loader
+from ._cmodule import _psycopg
+from ._typeinfo import TypesRegistry
+
+if TYPE_CHECKING:
+    from .connection import BaseConnection
+
+RV = TypeVar("RV")
+
+
+class AdaptersMap(AdaptContext):
+    """
+    Map oids to Loaders and types to Dumpers.
+
+    The object can start empty or copy from another object of the same class.
+    Copies are copy-on-write: if the maps are updated make a copy. This way
+    extending e.g. global map by a connection or a connection map from a cursor
+    is cheap: a copy is made only on customisation.
+    """
+
+    __module__ = "psycopg.adapt"
+
+    _dumpers: Dict[PyFormat, Dict[Union[type, str], Type[Dumper]]]
+    _loaders: List[Dict[int, Type[Loader]]]
+    types: TypesRegistry
+
+    # Record if a dumper or loader has an optimised version.
+    _optimised: Dict[type, type] = {}
+
+    def __init__(
+        self,
+        template: Optional["AdaptersMap"] = None,
+        types: Optional[TypesRegistry] = None,
+    ):
+        if template:
+            self._dumpers = template._dumpers.copy()
+            self._own_dumpers = _dumpers_shared.copy()
+            template._own_dumpers = _dumpers_shared.copy()
+            self._loaders = template._loaders[:]
+            self._own_loaders = [False, False]
+            template._own_loaders = [False, False]
+            self.types = TypesRegistry(template.types)
+        else:
+            self._dumpers = {fmt: {} for fmt in PyFormat}
+            self._own_dumpers = _dumpers_owned.copy()
+            self._loaders = [{}, {}]
+            self._own_loaders = [True, True]
+            self.types = types or TypesRegistry()
+
+    # implement the AdaptContext protocol too
+    @property
+    def adapters(self) -> "AdaptersMap":
+        return self
+
+    @property
+    def connection(self) -> Optional["BaseConnection[Any]"]:
+        return None
+
+    def register_dumper(
+        self, cls: Union[type, str], dumper: Type[Dumper]
+    ) -> None:
+        """
+        Configure the context to use *dumper* to convert object of type *cls*.
+        """
+        if not isinstance(cls, (str, type)):
+            raise TypeError(
+                f"dumpers should be registered on classes, got {cls} instead"
+            )
+
+        if _psycopg:
+            dumper = self._get_optimised(dumper)
+
+        # Register the dumper both as its format and as auto
+        # so that the last dumper registered is used in auto (%s) format
+        for fmt in (PyFormat.from_pq(dumper.format), PyFormat.AUTO):
+            if not self._own_dumpers[fmt]:
+                self._dumpers[fmt] = self._dumpers[fmt].copy()
+                self._own_dumpers[fmt] = True
+
+            self._dumpers[fmt][cls] = dumper
+
+    def register_loader(
+        self, oid: Union[int, str], loader: Type["Loader"]
+    ) -> None:
+        """
+        Configure the context to use *loader* to convert data of oid *oid*.
+        """
+        if isinstance(oid, str):
+            oid = self.types[oid].oid
+        if not isinstance(oid, int):
+            raise TypeError(
+                f"loaders should be registered on oid, got {oid} instead"
+            )
+
+        if _psycopg:
+            loader = self._get_optimised(loader)
+
+        fmt = loader.format
+        if not self._own_loaders[fmt]:
+            self._loaders[fmt] = self._loaders[fmt].copy()
+            self._own_loaders[fmt] = True
+
+        self._loaders[fmt][oid] = loader
+
+    def get_dumper(self, cls: type, format: PyFormat) -> Type["Dumper"]:
+        """
+        Return the dumper class for the given type and format.
+
+        Raise ProgrammingError if a class is not available.
+        """
+        try:
+            dmap = self._dumpers[format]
+        except KeyError:
+            raise ValueError(f"bad dumper format: {format}")
+
+        # Look for the right class, including looking at superclasses
+        for scls in cls.__mro__:
+            if scls in dmap:
+                return dmap[scls]
+
+            # If the adapter is not found, look for its name as a string
+            fqn = scls.__module__ + "." + scls.__qualname__
+            if fqn in dmap:
+                # Replace the class name with the class itself
+                d = dmap[scls] = dmap.pop(fqn)
+                return d
+
+        raise e.ProgrammingError(
+            f"cannot adapt type {cls.__name__}"
+            f" to format {PyFormat(format).name}"
+        )
+
+    def get_loader(
+        self, oid: int, format: pq.Format
+    ) -> Optional[Type["Loader"]]:
+        """
+        Return the loader class for the given oid and format.
+
+        Return None if not found.
+        """
+        return self._loaders[format].get(oid)
+
+    @classmethod
+    def _get_optimised(self, cls: Type[RV]) -> Type[RV]:
+        """Return the optimised version of a Dumper or Loader class.
+
+        Return the input class itself if there is no optimised version.
+        """
+        try:
+            return self._optimised[cls]
+        except KeyError:
+            pass
+
+        # Check if the class comes from psycopg.types and there is a class
+        # with the same name in psycopg_c._psycopg.
+        from psycopg import types
+
+        if cls.__module__.startswith(types.__name__):
+            new = cast(Type[RV], getattr(_psycopg, cls.__name__, None))
+            if new:
+                self._optimised[cls] = new
+                return new
+
+        self._optimised[cls] = cls
+        return cls
+
+
+# Micro-optimization: copying these objects is faster than creating new dicts
+_dumpers_owned = dict.fromkeys(PyFormat, True)
+_dumpers_shared = dict.fromkeys(PyFormat, False)
index c821ea859b95813583be5ef411760000745ab360..aa77115e3786b9064591b25a670c946cd1758842 100644 (file)
@@ -9,10 +9,11 @@ from typing import DefaultDict, TYPE_CHECKING
 from collections import defaultdict
 
 from . import pq
+from . import postgres
 from . import errors as e
-from .oids import INVALID_OID
 from .rows import Row, RowMaker
 from .proto import LoadFunc, AdaptContext, PyFormat, DumperKey
+from .postgres import INVALID_OID
 
 if TYPE_CHECKING:
     from .pq.proto import PGresult
@@ -44,9 +45,7 @@ class Transformer(AdaptContext):
             self._adapters = context.adapters
             self._conn = context.connection
         else:
-            from .adapt import global_adapters
-
-            self._adapters = global_adapters
+            self._adapters = postgres.adapters
             self._conn = None
 
         # mapping class, fmt -> Dumper instance
index e1a18cb308a155d12d7f1f606403bcc817fce7ee..386f832adc5539d588e12f01eacca8bf35c6f6c5 100644 (file)
@@ -111,19 +111,16 @@ class TypeInfo:
                 f"found {len(recs)} different types named {name}"
             )
 
-    def register(
-        self,
-        context: Optional["AdaptContext"] = None,
-    ) -> None:
+    def register(self, context: Optional[AdaptContext] = None) -> None:
         """
         Register the type information, globally or in the specified *context*.
         """
         if context:
             types = context.adapters.types
         else:
-            from .oids import postgres_types
+            from . import postgres
 
-            types = postgres_types
+            types = postgres.types
 
         types.add(self)
 
@@ -151,10 +148,7 @@ class RangeInfo(TypeInfo):
         super().__init__(name, oid, array_oid)
         self.subtype_oid = subtype_oid
 
-    def register(
-        self,
-        context: Optional[AdaptContext] = None,
-    ) -> None:
+    def register(self, context: Optional[AdaptContext] = None) -> None:
         super().register(context)
 
         from .types.range import register_adapters
index da8d2cccdc27df96f34b8598a2269ebc92fb24f2..d2d61daabc3585cd6fe6357e8916121240d5b18c 100644 (file)
@@ -5,22 +5,19 @@ Entry point into the adaptation system.
 # Copyright (C) 2020-2021 The Psycopg Team
 
 from abc import ABC, abstractmethod
-from typing import Any, Dict, List, Optional, Type, Tuple, Union
-from typing import cast, TYPE_CHECKING, TypeVar
+from typing import Any, Optional, Type, Tuple, Union, TYPE_CHECKING
 
 from . import pq
-from . import errors as e
-from ._enums import PyFormat as PyFormat
-from .oids import postgres_types
+from . import _adapters_map
 from .proto import AdaptContext, Buffer as Buffer
+from ._enums import PyFormat as PyFormat
 from ._cmodule import _psycopg
-from ._typeinfo import TypesRegistry
 
 if TYPE_CHECKING:
     from . import proto
     from .connection import BaseConnection
 
-RV = TypeVar("RV")
+AdaptersMap = _adapters_map.AdaptersMap
 
 
 class Dumper(ABC):
@@ -97,7 +94,9 @@ class Dumper(ABC):
         """
         Configure *context* to use this dumper to convert object of type *cls*.
         """
-        adapters = context.adapters if context else global_adapters
+        from . import postgres
+
+        adapters = context.adapters if context else postgres.adapters
         adapters.register_dumper(cls, this_cls)
 
 
@@ -126,169 +125,11 @@ class Loader(ABC):
         """
         Configure *context* to use this loader to convert values with OID *oid*.
         """
-        adapters = context.adapters if context else global_adapters
-        adapters.register_loader(oid, cls)
-
-
-class AdaptersMap(AdaptContext):
-    """
-    Map oids to Loaders and types to Dumpers.
-
-    The object can start empty or copy from another object of the same class.
-    Copies are copy-on-write: if the maps are updated make a copy. This way
-    extending e.g. global map by a connection or a connection map from a cursor
-    is cheap: a copy is made only on customisation.
-    """
-
-    _dumpers: Dict[PyFormat, Dict[Union[type, str], Type["proto.Dumper"]]]
-    _loaders: List[Dict[int, Type["proto.Loader"]]]
-    types: TypesRegistry
-
-    # Record if a dumper or loader has an optimised version.
-    _optimised: Dict[type, type] = {}
-
-    def __init__(
-        self,
-        template: Optional["AdaptersMap"] = None,
-        types: Optional[TypesRegistry] = None,
-    ):
-        if template:
-            self._dumpers = template._dumpers.copy()
-            self._own_dumpers = _dumpers_shared.copy()
-            template._own_dumpers = _dumpers_shared.copy()
-            self._loaders = template._loaders[:]
-            self._own_loaders = [False, False]
-            template._own_loaders = [False, False]
-            self.types = TypesRegistry(template.types)
-        else:
-            self._dumpers = {fmt: {} for fmt in PyFormat}
-            self._own_dumpers = _dumpers_owned.copy()
-            self._loaders = [{}, {}]
-            self._own_loaders = [True, True]
-            self.types = types or TypesRegistry()
-
-    # implement the AdaptContext protocol too
-    @property
-    def adapters(self) -> "AdaptersMap":
-        return self
+        from . import postgres
 
-    @property
-    def connection(self) -> Optional["BaseConnection[Any]"]:
-        return None
-
-    def register_dumper(
-        self, cls: Union[type, str], dumper: Type[Dumper]
-    ) -> None:
-        """
-        Configure the context to use *dumper* to convert object of type *cls*.
-        """
-        if not isinstance(cls, (str, type)):
-            raise TypeError(
-                f"dumpers should be registered on classes, got {cls} instead"
-            )
-
-        if _psycopg:
-            dumper = self._get_optimised(dumper)
-
-        # Register the dumper both as its format and as auto
-        # so that the last dumper registered is used in auto (%s) format
-        for fmt in (PyFormat.from_pq(dumper.format), PyFormat.AUTO):
-            if not self._own_dumpers[fmt]:
-                self._dumpers[fmt] = self._dumpers[fmt].copy()
-                self._own_dumpers[fmt] = True
-
-            self._dumpers[fmt][cls] = dumper
-
-    def register_loader(
-        self, oid: Union[int, str], loader: Type["proto.Loader"]
-    ) -> None:
-        """
-        Configure the context to use *loader* to convert data of oid *oid*.
-        """
-        if isinstance(oid, str):
-            oid = self.types[oid].oid
-        if not isinstance(oid, int):
-            raise TypeError(
-                f"loaders should be registered on oid, got {oid} instead"
-            )
-
-        if _psycopg:
-            loader = self._get_optimised(loader)
-
-        fmt = loader.format
-        if not self._own_loaders[fmt]:
-            self._loaders[fmt] = self._loaders[fmt].copy()
-            self._own_loaders[fmt] = True
-
-        self._loaders[fmt][oid] = loader
-
-    def get_dumper(self, cls: type, format: PyFormat) -> Type["proto.Dumper"]:
-        """
-        Return the dumper class for the given type and format.
-
-        Raise ProgrammingError if a class is not available.
-        """
-        try:
-            dmap = self._dumpers[format]
-        except KeyError:
-            raise ValueError(f"bad dumper format: {format}")
-
-        # Look for the right class, including looking at superclasses
-        for scls in cls.__mro__:
-            if scls in dmap:
-                return dmap[scls]
-
-            # If the adapter is not found, look for its name as a string
-            fqn = scls.__module__ + "." + scls.__qualname__
-            if fqn in dmap:
-                # Replace the class name with the class itself
-                d = dmap[scls] = dmap.pop(fqn)
-                return d
-
-        raise e.ProgrammingError(
-            f"cannot adapt type {cls.__name__}"
-            f" to format {PyFormat(format).name}"
-        )
-
-    def get_loader(
-        self, oid: int, format: pq.Format
-    ) -> Optional[Type["proto.Loader"]]:
-        """
-        Return the loader class for the given oid and format.
-
-        Return None if not found.
-        """
-        return self._loaders[format].get(oid)
-
-    @classmethod
-    def _get_optimised(self, cls: Type[RV]) -> Type[RV]:
-        """Return the optimised version of a Dumper or Loader class.
-
-        Return the input class itself if there is no optimised version.
-        """
-        try:
-            return self._optimised[cls]
-        except KeyError:
-            pass
-
-        # Check if the class comes from psycopg.types and there is a class
-        # with the same name in psycopg_c._psycopg.
-        from psycopg import types
-
-        if cls.__module__.startswith(types.__name__):
-            new = cast(Type[RV], getattr(_psycopg, cls.__name__, None))
-            if new:
-                self._optimised[cls] = new
-                return new
-
-        self._optimised[cls] = cls
-        return cls
-
-
-_dumpers_owned = dict.fromkeys(PyFormat, True)
-_dumpers_shared = dict.fromkeys(PyFormat, False)
+        adapters = context.adapters if context else postgres.adapters
+        adapters.register_loader(oid, cls)
 
-global_adapters = AdaptersMap(types=postgres_types)
 
 Transformer: Type["proto.Transformer"]
 
index 5faedf2616ab8460d48a4db6a27b5c9a28392d04..48f8bc40aa3eb5da4a8021e1867e3c931ead28ad 100644 (file)
@@ -20,6 +20,7 @@ from . import pq
 from . import adapt
 from . import errors as e
 from . import waiting
+from . import postgres
 from . import encodings
 from .pq import ConnStatus, ExecStatus, TransactionStatus, Format
 from .sql import Composable
@@ -106,7 +107,7 @@ class BaseConnection(AdaptContext, Generic[Row]):
         self.pgconn = pgconn  # TODO: document this
         self._row_factory = row_factory
         self._autocommit = False
-        self._adapters = adapt.AdaptersMap(adapt.global_adapters)
+        self._adapters = adapt.AdaptersMap(postgres.adapters)
         self._notice_handlers: List[NoticeHandler] = []
         self._notify_handlers: List[NotifyHandler] = []
 
index 44e57af1fe0342b2d0f4ec47981a35d22a38d6ab..a6a75db226ca1cd5e6071fd424eeef78cb3c8bd1 100644 (file)
@@ -9,8 +9,8 @@ import datetime as dt
 from math import floor
 from typing import Any, Optional, Sequence
 
+from . import postgres
 from .pq import Format, Escaping
-from .oids import postgres_types as builtins
 from .adapt import Dumper
 from .proto import AdaptContext
 
@@ -18,7 +18,7 @@ from .proto import AdaptContext
 class DBAPITypeObject:
     def __init__(self, name: str, type_names: Sequence[str]):
         self.name = name
-        self.values = tuple(builtins[n].oid for n in type_names)
+        self.values = tuple(postgres.types[n].oid for n in type_names)
 
     def __repr__(self) -> str:
         return f"psycopg.{self.name}"
@@ -61,7 +61,7 @@ class Binary:
 class BinaryBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["bytea"].oid
+    _oid = postgres.types["bytea"].oid
 
     def dump(self, obj: Binary) -> bytes:
         wrapped = obj.obj
similarity index 71%
rename from psycopg/psycopg/oids.py
rename to psycopg/psycopg/postgres.py
index 057765a988e7269efb16cc3dc7123a6f570fe302..c48e1399622df7ce4a91ae3475dd4b0e8f2da19a 100644 (file)
@@ -1,14 +1,18 @@
 """
-Maps of builtin types and names
+Types configuration specific to PostgreSQL.
 """
 
 # Copyright (C) 2020-2021 The Psycopg Team
 
 from ._typeinfo import TypeInfo, RangeInfo, TypesRegistry
+from .proto import AdaptContext
+from ._adapters_map import AdaptersMap
 
 # Global objects with PostgreSQL builtins and globally registered user types.
-postgres_types = TypesRegistry()
+types = TypesRegistry()
 
+# Global adapter maps with PostgreSQL types configuration
+adapters = AdaptersMap(types=types)
 
 # Use tools/update_oids.py to update this data.
 for t in [
@@ -85,10 +89,60 @@ for t in [
     RangeInfo("tstzrange", 3910, 3911, subtype_oid=1184),
     # autogenerated: end
 ]:
-    postgres_types.add(t)
+    types.add(t)
 
 
 # A few oids used a bit everywhere
 INVALID_OID = 0
-TEXT_OID = postgres_types["text"].oid
-TEXT_ARRAY_OID = postgres_types["text"].array_oid
+TEXT_OID = types["text"].oid
+TEXT_ARRAY_OID = types["text"].array_oid
+
+
+def register_default_adapters(context: AdaptContext) -> None:
+
+    from .types import array
+
+    array.register_default_adapters(context)
+
+    from .types import bool
+
+    bool.register_default_adapters(context)
+
+    from .types import composite
+
+    composite.register_default_adapters(context)
+
+    from .types import datetime
+
+    datetime.register_default_adapters(context)
+
+    from .types import json
+
+    json.register_default_adapters(context)
+
+    from .types import net
+
+    net.register_default_adapters(context)
+
+    from .types import none
+
+    none.register_default_adapters(context)
+
+    from .types import numeric
+
+    numeric.register_default_adapters(context)
+
+    from .types import range
+
+    range.register_default_adapters(context)
+
+    from .types import string
+
+    string.register_default_adapters(context)
+
+    from .types import uuid
+
+    uuid.register_default_adapters(context)
+
+    # Must come after all the types are registered
+    array.register_all_arrays(context)
index f318404a9041b5cde8f0a22966bda6b761ed7bf9..16f10025a5c55e41f8d323e4b33b525c3c2be52a 100644 (file)
@@ -15,11 +15,10 @@ from .compat import Protocol
 if TYPE_CHECKING:
     from .sql import Composable
     from .rows import Row, RowMaker
-    from .adapt import AdaptersMap
     from .pq.proto import PGresult
-
     from .waiting import Wait, Ready
     from .connection import BaseConnection
+    from ._adapters_map import AdaptersMap
 
 # An object implementing the buffer protocol
 Buffer = Union[bytes, bytearray, memoryview]
index 1d2dae888a2551a9d7b33b48ccde81fa876661f1..6fa4511bf0593bdf81e6d215a3abc6eb41a3aa42 100644 (file)
@@ -4,38 +4,4 @@ psycopg types package
 
 # Copyright (C) 2020-2021 The Psycopg Team
 
-from typing import TYPE_CHECKING
-
-from . import net
-from . import bool
-from . import json
-from . import none
-from . import uuid
-from . import array
-from . import range
-from . import string
-from . import numeric
-from . import datetime
-from . import composite
-
 from .._typeinfo import TypeInfo as TypeInfo  # exported here
-
-if TYPE_CHECKING:
-    from ..proto import AdaptContext
-
-
-def register_default_globals(ctx: "AdaptContext") -> None:
-    net.register_default_globals(ctx)
-    bool.register_default_globals(ctx)
-    json.register_default_globals(ctx)
-    none.register_default_globals(ctx)
-    uuid.register_default_globals(ctx)
-    array.register_default_globals(ctx)
-    range.register_default_globals(ctx)
-    string.register_default_globals(ctx)
-    numeric.register_default_globals(ctx)
-    datetime.register_default_globals(ctx)
-    composite.register_default_globals(ctx)
-
-    # Must come after all the types are registered
-    array.register_all_arrays(ctx)
index 67ff2692d82c05421c8250ac7fe930ebb524ecc8..1cc09148f023451586877e36cb4a7f6caf264ae1 100644 (file)
@@ -11,10 +11,11 @@ from typing import cast
 
 from .. import pq
 from .. import errors as e
-from ..oids import postgres_types, TEXT_OID, TEXT_ARRAY_OID, INVALID_OID
+from .. import postgres
 from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
 from ..proto import AdaptContext, Buffer, Dumper, DumperKey
 from .._struct import pack_len, unpack_len
+from ..postgres import TEXT_OID, INVALID_OID
 from .._typeinfo import TypeInfo
 
 _struct_head = struct.Struct("!III")  # ndims, hasnull, elem oid
@@ -28,12 +29,14 @@ _unpack_dim = cast(
     Callable[[bytes, int], Tuple[int, int]], _struct_dim.unpack_from
 )
 
+TEXT_ARRAY_OID = postgres.types["text"].array_oid
+
 
 class BaseListDumper(RecursiveDumper):
     def __init__(self, cls: type, context: Optional[AdaptContext] = None):
         super().__init__(cls, context)
         self.sub_dumper: Optional[Dumper] = None
-        self._types = context.adapters.types if context else postgres_types
+        self._types = context.adapters.types if context else postgres.types
 
     def get_key(self, obj: List[Any], format: PyFormat) -> DumperKey:
         item = self._find_list_element(obj)
@@ -317,29 +320,30 @@ class ArrayBinaryLoader(BaseArrayLoader):
 
 
 def register_adapters(
-    info: TypeInfo, context: Optional["AdaptContext"]
+    info: TypeInfo, context: Optional[AdaptContext] = None
 ) -> None:
+    adapters = context.adapters if context else postgres.adapters
     for base in (ArrayLoader, ArrayBinaryLoader):
         lname = f"{info.name.title()}{base.__name__}"
         loader: Type[BaseArrayLoader] = type(
             lname, (base,), {"base_oid": info.oid}
         )
-        loader.register(info.array_oid, context=context)
+        adapters.register_loader(info.array_oid, loader)
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    ListDumper.register(list, ctx)
-    ListBinaryDumper.register(list, ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    context.adapters.register_dumper(list, ListDumper)
+    context.adapters.register_dumper(list, ListBinaryDumper)
 
 
-def register_all_arrays(ctx: AdaptContext) -> None:
+def register_all_arrays(context: AdaptContext) -> None:
     """
     Associate the array oid of all the types in Loader.globals.
 
     This function is designed to be called once at import time, after having
     registered all the base loaders.
     """
-    for t in ctx.adapters.types:
+    for t in context.adapters.types:
         # TODO: handle different delimiters (box)
         if t.array_oid and getattr(t, "delimiter", None) == ",":
-            t.register(ctx)
+            t.register(context)
index 87ff29f272e68bcca2e1327d623b382f04a2d0c8..c9b9de8a798c3a556c65529721470529443e39f2 100644 (file)
@@ -4,8 +4,8 @@ Adapters for booleans.
 
 # Copyright (C) 2020-2021 The Psycopg Team
 
+from .. import postgres
 from ..pq import Format
-from ..oids import postgres_types as builtins
 from ..adapt import Buffer, Dumper, Loader
 from ..proto import AdaptContext
 
@@ -13,7 +13,7 @@ from ..proto import AdaptContext
 class BoolDumper(Dumper):
 
     format = Format.TEXT
-    _oid = builtins["bool"].oid
+    _oid = postgres.types["bool"].oid
 
     def dump(self, obj: bool) -> bytes:
         return b"t" if obj else b"f"
@@ -25,7 +25,7 @@ class BoolDumper(Dumper):
 class BoolBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["bool"].oid
+    _oid = postgres.types["bool"].oid
 
     def dump(self, obj: bool) -> bytes:
         return b"\x01" if obj else b"\x00"
@@ -47,8 +47,9 @@ class BoolBinaryLoader(Loader):
         return data != b"\x00"
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    BoolDumper.register(bool, ctx)
-    BoolBinaryDumper.register(bool, ctx)
-    BoolLoader.register("bool", ctx)
-    BoolBinaryLoader.register("bool", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+    adapters.register_dumper(bool, BoolDumper)
+    adapters.register_dumper(bool, BoolBinaryDumper)
+    adapters.register_loader("bool", BoolLoader)
+    adapters.register_loader("bool", BoolBinaryLoader)
index b15ba06998bfb1632b1757c8156579d55016e404..5ea8613a91e9f2254a67771e55a5f0fc99daa94c 100644 (file)
@@ -11,10 +11,11 @@ from typing import Any, Callable, cast, Iterator, List, Optional
 from typing import Sequence, Tuple, Type
 
 from .. import pq
-from ..oids import TEXT_OID
+from .. import postgres
 from ..adapt import PyFormat, RecursiveDumper, RecursiveLoader
 from ..proto import AdaptContext, Buffer
 from .._struct import unpack_len
+from ..postgres import TEXT_OID
 from .._typeinfo import CompositeInfo as CompositeInfo  # exported here
 
 _struct_oidlen = struct.Struct("!Ii")
@@ -184,12 +185,14 @@ class CompositeBinaryLoader(RecordBinaryLoader):
 
 def register_adapters(
     info: CompositeInfo,
-    context: Optional["AdaptContext"],
+    context: Optional[AdaptContext] = None,
     factory: Optional[Callable[..., Any]] = None,
 ) -> None:
     if not factory:
         factory = namedtuple(info.name, info.field_names)  # type: ignore
 
+    adapters = context.adapters if context else postgres.adapters
+
     # generate and register a customized text loader
     loader: Type[BaseCompositeLoader] = type(
         f"{info.name.title()}Loader",
@@ -199,7 +202,7 @@ def register_adapters(
             "fields_types": info.field_types,
         },
     )
-    loader.register(info.oid, context=context)
+    adapters.register_loader(info.oid, loader)
 
     # generate and register a customized binary loader
     loader = type(
@@ -207,10 +210,11 @@ def register_adapters(
         (CompositeBinaryLoader,),
         {"factory": factory},
     )
-    loader.register(info.oid, context=context)
+    adapters.register_loader(info.oid, loader)
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    TupleDumper.register(tuple, ctx)
-    RecordLoader.register("record", ctx)
-    RecordBinaryLoader.register("record", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+    adapters.register_dumper(tuple, TupleDumper)
+    adapters.register_loader("record", RecordLoader)
+    adapters.register_loader("record", RecordBinaryLoader)
index 8717bce2e41bea2d001326ef4f2426347947996c..e9317f6ef8c52eb896fc102f35fe65c1c792ca1a 100644 (file)
@@ -10,9 +10,9 @@ import struct
 from datetime import date, datetime, time, timedelta, timezone
 from typing import Any, Callable, cast, Optional, Tuple, TYPE_CHECKING
 
+from .. import postgres
 from ..pq import Format
 from .._tz import get_tzinfo
-from ..oids import postgres_types as builtins
 from ..adapt import Buffer, Dumper, Loader, PyFormat
 from ..proto import AdaptContext, DumperKey
 from ..errors import InterfaceError, DataError
@@ -43,7 +43,7 @@ _py_date_min_days = date.min.toordinal()
 class DateDumper(Dumper):
 
     format = Format.TEXT
-    _oid = builtins["date"].oid
+    _oid = postgres.types["date"].oid
 
     def dump(self, obj: date) -> bytes:
         # NOTE: whatever the PostgreSQL DateStyle input format (DMY, MDY, YMD)
@@ -54,7 +54,7 @@ class DateDumper(Dumper):
 class DateBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["date"].oid
+    _oid = postgres.types["date"].oid
 
     def dump(self, obj: date) -> bytes:
         days = obj.toordinal() - _pg_date_epoch_days
@@ -84,7 +84,7 @@ class _BaseTimeTextDumper(_BaseTimeDumper):
 
 class TimeDumper(_BaseTimeTextDumper):
 
-    _oid = builtins["time"].oid
+    _oid = postgres.types["time"].oid
 
     def upgrade(self, obj: time, format: PyFormat) -> Dumper:
         if not obj.tzinfo:
@@ -95,13 +95,13 @@ class TimeDumper(_BaseTimeTextDumper):
 
 class TimeTzDumper(_BaseTimeTextDumper):
 
-    _oid = builtins["timetz"].oid
+    _oid = postgres.types["timetz"].oid
 
 
 class TimeBinaryDumper(_BaseTimeDumper):
 
     format = Format.BINARY
-    _oid = builtins["time"].oid
+    _oid = postgres.types["time"].oid
 
     def dump(self, obj: time) -> bytes:
         us = obj.microsecond + 1_000_000 * (
@@ -119,7 +119,7 @@ class TimeBinaryDumper(_BaseTimeDumper):
 class TimeTzBinaryDumper(_BaseTimeDumper):
 
     format = Format.BINARY
-    _oid = builtins["timetz"].oid
+    _oid = postgres.types["timetz"].oid
 
     def dump(self, obj: time) -> bytes:
         us = obj.microsecond + 1_000_000 * (
@@ -155,7 +155,7 @@ class _BaseDatetimeTextDumper(_BaseDatetimeDumper):
 
 class DatetimeDumper(_BaseDatetimeTextDumper):
 
-    _oid = builtins["timestamptz"].oid
+    _oid = postgres.types["timestamptz"].oid
 
     def upgrade(self, obj: datetime, format: PyFormat) -> Dumper:
         if obj.tzinfo:
@@ -166,13 +166,13 @@ class DatetimeDumper(_BaseDatetimeTextDumper):
 
 class DatetimeNoTzDumper(_BaseDatetimeTextDumper):
 
-    _oid = builtins["timestamp"].oid
+    _oid = postgres.types["timestamp"].oid
 
 
 class DatetimeBinaryDumper(_BaseDatetimeDumper):
 
     format = Format.BINARY
-    _oid = builtins["timestamptz"].oid
+    _oid = postgres.types["timestamptz"].oid
 
     def dump(self, obj: datetime) -> bytes:
         delta = obj - _pg_datetimetz_epoch
@@ -191,7 +191,7 @@ class DatetimeBinaryDumper(_BaseDatetimeDumper):
 class DatetimeNoTzBinaryDumper(_BaseDatetimeDumper):
 
     format = Format.BINARY
-    _oid = builtins["timestamp"].oid
+    _oid = postgres.types["timestamp"].oid
 
     def dump(self, obj: datetime) -> bytes:
         delta = obj - _pg_datetime_epoch
@@ -204,7 +204,7 @@ class DatetimeNoTzBinaryDumper(_BaseDatetimeDumper):
 class TimedeltaDumper(Dumper):
 
     format = Format.TEXT
-    _oid = builtins["interval"].oid
+    _oid = postgres.types["interval"].oid
 
     def __init__(self, cls: type, context: Optional[AdaptContext] = None):
         super().__init__(cls, context)
@@ -231,7 +231,7 @@ class TimedeltaDumper(Dumper):
 class TimedeltaBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["interval"].oid
+    _oid = postgres.types["interval"].oid
 
     def dump(self, obj: timedelta) -> bytes:
         micros = 1_000_000 * obj.seconds + obj.microseconds
@@ -735,24 +735,25 @@ _month_abbr = {
 _uspad = [0, 100_000, 10_000, 1_000, 100, 10, 1]
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    DateDumper.register("datetime.date", ctx)
-    DateBinaryDumper.register("datetime.date", ctx)
-    TimeDumper.register("datetime.time", ctx)
-    TimeBinaryDumper.register("datetime.time", ctx)
-    DatetimeDumper.register("datetime.datetime", ctx)
-    DatetimeBinaryDumper.register("datetime.datetime", ctx)
-    TimedeltaDumper.register("datetime.timedelta", ctx)
-    TimedeltaBinaryDumper.register("datetime.timedelta", ctx)
-    DateLoader.register("date", ctx)
-    DateBinaryLoader.register("date", ctx)
-    TimeLoader.register("time", ctx)
-    TimeBinaryLoader.register("time", ctx)
-    TimetzLoader.register("timetz", ctx)
-    TimetzBinaryLoader.register("timetz", ctx)
-    TimestampLoader.register("timestamp", ctx)
-    TimestampBinaryLoader.register("timestamp", ctx)
-    TimestamptzLoader.register("timestamptz", ctx)
-    TimestamptzBinaryLoader.register("timestamptz", ctx)
-    IntervalLoader.register("interval", ctx)
-    IntervalBinaryLoader.register("interval", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+    adapters.register_dumper("datetime.date", DateDumper)
+    adapters.register_dumper("datetime.date", DateBinaryDumper)
+    adapters.register_dumper("datetime.time", TimeDumper)
+    adapters.register_dumper("datetime.time", TimeBinaryDumper)
+    adapters.register_dumper("datetime.datetime", DatetimeDumper)
+    adapters.register_dumper("datetime.datetime", DatetimeBinaryDumper)
+    adapters.register_dumper("datetime.timedelta", TimedeltaDumper)
+    adapters.register_dumper("datetime.timedelta", TimedeltaBinaryDumper)
+    adapters.register_loader("date", DateLoader)
+    adapters.register_loader("date", DateBinaryLoader)
+    adapters.register_loader("time", TimeLoader)
+    adapters.register_loader("time", TimeBinaryLoader)
+    adapters.register_loader("timetz", TimetzLoader)
+    adapters.register_loader("timetz", TimetzBinaryLoader)
+    adapters.register_loader("timestamp", TimestampLoader)
+    adapters.register_loader("timestamp", TimestampBinaryLoader)
+    adapters.register_loader("timestamptz", TimestamptzLoader)
+    adapters.register_loader("timestamptz", TimestamptzBinaryLoader)
+    adapters.register_loader("interval", IntervalLoader)
+    adapters.register_loader("interval", IntervalBinaryLoader)
index 7f194635fa188b52aa2d4de2bd7c1828fbf6f4ad..88e6922655ba56819dfaa380ec89f9c9dc003d44 100644 (file)
@@ -7,8 +7,8 @@ Adapters for JSON types.
 import json
 from typing import Any, Callable, Optional, Type, Union
 
+from .. import postgres
 from ..pq import Format
-from ..oids import postgres_types as builtins
 from ..adapt import Buffer, Dumper, Loader
 from ..proto import AdaptContext
 from ..errors import DataError
@@ -51,7 +51,7 @@ def set_json_dumps(
         dumper: Type[_JsonDumper]
         for wrapper, base in grid:
             dumper = type(f"Custom{base.__name__}", (base,), {"_dumps": dumps})
-            dumper.register(wrapper, context=context)
+            context.adapters.register_dumper(wrapper, dumper)
 
 
 def set_json_loads(
@@ -85,7 +85,7 @@ def set_json_loads(
         loader: Type[_JsonLoader]
         for tname, base in grid:
             loader = type(f"Custom{base.__name__}", (base,), {"_loads": loads})
-            loader.register(tname, context=context)
+            context.adapters.register_loader(tname, loader)
 
 
 class _JsonWrapper:
@@ -130,25 +130,25 @@ class _JsonDumper(Dumper):
 class JsonDumper(_JsonDumper):
 
     format = Format.TEXT
-    _oid = builtins["json"].oid
+    _oid = postgres.types["json"].oid
 
 
 class JsonBinaryDumper(_JsonDumper):
 
     format = Format.BINARY
-    _oid = builtins["json"].oid
+    _oid = postgres.types["json"].oid
 
 
 class JsonbDumper(_JsonDumper):
 
     format = Format.TEXT
-    _oid = builtins["jsonb"].oid
+    _oid = postgres.types["jsonb"].oid
 
 
 class JsonbBinaryDumper(_JsonDumper):
 
     format = Format.BINARY
-    _oid = builtins["jsonb"].oid
+    _oid = postgres.types["jsonb"].oid
 
     def dump(self, obj: _JsonWrapper) -> bytes:
         dumps = obj.dumps or self.dumps
@@ -197,14 +197,16 @@ class JsonbBinaryLoader(_JsonLoader):
         return self.loads(data)
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+
     # Currently json binary format is nothing different than text, maybe with
     # an extra memcopy we can avoid.
-    JsonBinaryDumper.register(Json, ctx)
-    JsonDumper.register(Json, ctx)
-    JsonbBinaryDumper.register(Jsonb, ctx)
-    JsonbDumper.register(Jsonb, ctx)
-    JsonLoader.register("json", ctx)
-    JsonbLoader.register("jsonb", ctx)
-    JsonBinaryLoader.register("json", ctx)
-    JsonbBinaryLoader.register("jsonb", ctx)
+    adapters.register_dumper(Json, JsonBinaryDumper)
+    adapters.register_dumper(Json, JsonDumper)
+    adapters.register_dumper(Jsonb, JsonbBinaryDumper)
+    adapters.register_dumper(Jsonb, JsonbDumper)
+    adapters.register_loader("json", JsonLoader)
+    adapters.register_loader("jsonb", JsonbLoader)
+    adapters.register_loader("json", JsonBinaryLoader)
+    adapters.register_loader("jsonb", JsonbBinaryLoader)
index bd9942fb4918198b5c5b3c5f1521930adaace2da..e3e844ea3d90a416ca01637f8d2755a9fc880cc3 100644 (file)
@@ -6,8 +6,8 @@ Adapters for network types.
 
 from typing import Callable, Optional, Type, Union, TYPE_CHECKING
 
+from .. import postgres
 from ..pq import Format
-from ..oids import postgres_types as builtins
 from ..adapt import Buffer, Dumper, Loader
 from ..proto import AdaptContext
 
@@ -39,7 +39,7 @@ IPV6_PREFIXLEN = 128
 class InterfaceDumper(Dumper):
 
     format = Format.TEXT
-    _oid = builtins["inet"].oid
+    _oid = postgres.types["inet"].oid
 
     def dump(self, obj: Interface) -> bytes:
         return str(obj).encode("utf8")
@@ -48,7 +48,7 @@ class InterfaceDumper(Dumper):
 class NetworkDumper(Dumper):
 
     format = Format.TEXT
-    _oid = builtins["cidr"].oid
+    _oid = postgres.types["cidr"].oid
 
     def dump(self, obj: Network) -> bytes:
         return str(obj).encode("utf8")
@@ -67,7 +67,7 @@ class _IPv6Mixin:
 class _AddressBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["inet"].oid
+    _oid = postgres.types["inet"].oid
 
     _family: int
     _prefixlen: int
@@ -89,7 +89,7 @@ class IPv6AddressBinaryDumper(_IPv6Mixin, _AddressBinaryDumper):
 class _InterfaceBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["inet"].oid
+    _oid = postgres.types["inet"].oid
 
     _family: int
 
@@ -110,7 +110,7 @@ class IPv6InterfaceBinaryDumper(_IPv6Mixin, _InterfaceBinaryDumper):
 class _NetworkBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["cidr"].oid
+    _oid = postgres.types["cidr"].oid
 
     _family: int
 
@@ -209,20 +209,25 @@ class CidrBinaryLoader(_LazyIpaddress):
         return ip_network(data.decode("utf8"))
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    InterfaceDumper.register("ipaddress.IPv4Address", ctx)
-    InterfaceDumper.register("ipaddress.IPv6Address", ctx)
-    InterfaceDumper.register("ipaddress.IPv4Interface", ctx)
-    InterfaceDumper.register("ipaddress.IPv6Interface", ctx)
-    NetworkDumper.register("ipaddress.IPv4Network", ctx)
-    NetworkDumper.register("ipaddress.IPv6Network", ctx)
-    IPv4AddressBinaryDumper.register("ipaddress.IPv4Address", ctx)
-    IPv6AddressBinaryDumper.register("ipaddress.IPv6Address", ctx)
-    IPv4InterfaceBinaryDumper.register("ipaddress.IPv4Interface", ctx)
-    IPv6InterfaceBinaryDumper.register("ipaddress.IPv6Interface", ctx)
-    IPv4NetworkBinaryDumper.register("ipaddress.IPv4Network", ctx)
-    IPv6NetworkBinaryDumper.register("ipaddress.IPv6Network", ctx)
-    InetLoader.register("inet", ctx)
-    InetBinaryLoader.register("inet", ctx)
-    CidrLoader.register("cidr", ctx)
-    CidrBinaryLoader.register("cidr", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+    adapters.register_dumper("ipaddress.IPv4Address", InterfaceDumper)
+    adapters.register_dumper("ipaddress.IPv6Address", InterfaceDumper)
+    adapters.register_dumper("ipaddress.IPv4Interface", InterfaceDumper)
+    adapters.register_dumper("ipaddress.IPv6Interface", InterfaceDumper)
+    adapters.register_dumper("ipaddress.IPv4Network", NetworkDumper)
+    adapters.register_dumper("ipaddress.IPv6Network", NetworkDumper)
+    adapters.register_dumper("ipaddress.IPv4Address", IPv4AddressBinaryDumper)
+    adapters.register_dumper("ipaddress.IPv6Address", IPv6AddressBinaryDumper)
+    adapters.register_dumper(
+        "ipaddress.IPv4Interface", IPv4InterfaceBinaryDumper
+    )
+    adapters.register_dumper(
+        "ipaddress.IPv6Interface", IPv6InterfaceBinaryDumper
+    )
+    adapters.register_dumper("ipaddress.IPv4Network", IPv4NetworkBinaryDumper)
+    adapters.register_dumper("ipaddress.IPv6Network", IPv6NetworkBinaryDumper)
+    adapters.register_loader("inet", InetLoader)
+    adapters.register_loader("inet", InetBinaryLoader)
+    adapters.register_loader("cidr", CidrLoader)
+    adapters.register_loader("cidr", CidrBinaryLoader)
index f200dd235d4d6110c263e3351c3a1364647dcf35..6f9fdc2b0d0fc9f5f84539373d4656b40a56ca3d 100644 (file)
@@ -24,5 +24,5 @@ class NoneDumper(Dumper):
         return b"NULL"
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    NoneDumper.register(type(None), ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    context.adapters.register_dumper(type(None), NoneDumper)
index 7a38e7dc1e478069ce6e31d247b57259fa9812b2..72e218f3192dae75ef8cbb9683061ac57f3940e8 100644 (file)
@@ -9,9 +9,9 @@ from math import log
 from typing import Any, Callable, DefaultDict, Dict, Tuple, Union, cast
 from decimal import Decimal, DefaultContext, Context
 
+from .. import postgres
 from .. import errors as e
 from ..pq import Format
-from ..oids import postgres_types as builtins
 from ..adapt import Buffer, Dumper, Loader, PyFormat
 from ..proto import AdaptContext
 from .._struct import pack_int2, pack_uint2, unpack_int2
@@ -57,7 +57,7 @@ class _SpecialValuesDumper(_NumberDumper):
 class FloatDumper(_SpecialValuesDumper):
 
     format = Format.TEXT
-    _oid = builtins["float8"].oid
+    _oid = postgres.types["float8"].oid
 
     _special = {
         b"inf": b"'Infinity'::float8",
@@ -69,7 +69,7 @@ class FloatDumper(_SpecialValuesDumper):
 class FloatBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["float8"].oid
+    _oid = postgres.types["float8"].oid
 
     def dump(self, obj: float) -> bytes:
         return pack_float8(obj)
@@ -77,7 +77,7 @@ class FloatBinaryDumper(Dumper):
 
 class DecimalDumper(_SpecialValuesDumper):
 
-    _oid = builtins["numeric"].oid
+    _oid = postgres.types["numeric"].oid
 
     def dump(self, obj: Decimal) -> bytes:
         if obj.is_nan():
@@ -94,23 +94,23 @@ class DecimalDumper(_SpecialValuesDumper):
 
 
 class Int2Dumper(_NumberDumper):
-    _oid = builtins["int2"].oid
+    _oid = postgres.types["int2"].oid
 
 
 class Int4Dumper(_NumberDumper):
-    _oid = builtins["int4"].oid
+    _oid = postgres.types["int4"].oid
 
 
 class Int8Dumper(_NumberDumper):
-    _oid = builtins["int8"].oid
+    _oid = postgres.types["int8"].oid
 
 
 class IntNumericDumper(_NumberDumper):
-    _oid = builtins["numeric"].oid
+    _oid = postgres.types["numeric"].oid
 
 
 class OidDumper(_NumberDumper):
-    _oid = builtins["oid"].oid
+    _oid = postgres.types["oid"].oid
 
 
 class IntDumper(Dumper):
@@ -372,7 +372,7 @@ NUMERIC_NINF_BIN = _pack_numeric_head(0, 0, NUMERIC_NINF, 0)
 class DecimalBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["numeric"].oid
+    _oid = postgres.types["numeric"].oid
 
     def dump(self, obj: Decimal) -> Union[bytearray, bytes]:
         sign, digits, exp = obj.as_tuple()
@@ -434,35 +434,36 @@ class DecimalBinaryDumper(Dumper):
         return out
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    IntDumper.register(int, ctx)
-    IntBinaryDumper.register(int, ctx)
-    FloatDumper.register(float, ctx)
-    FloatBinaryDumper.register(float, ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+    adapters.register_dumper(int, IntDumper)
+    adapters.register_dumper(int, IntBinaryDumper)
+    adapters.register_dumper(float, FloatDumper)
+    adapters.register_dumper(float, FloatBinaryDumper)
     # The binary dumper is currently some 30% slower, so default to text
     # (see tests/scripts/testdec.py for a rough benchmark)
-    DecimalBinaryDumper.register("decimal.Decimal", ctx)
-    DecimalDumper.register("decimal.Decimal", ctx)
-    Int2Dumper.register(Int2, ctx)
-    Int4Dumper.register(Int4, ctx)
-    Int8Dumper.register(Int8, ctx)
-    IntNumericDumper.register(IntNumeric, ctx)
-    OidDumper.register(Oid, ctx)
-    Int2BinaryDumper.register(Int2, ctx)
-    Int4BinaryDumper.register(Int4, ctx)
-    Int8BinaryDumper.register(Int8, ctx)
-    OidBinaryDumper.register(Oid, ctx)
-    IntLoader.register("int2", ctx)
-    IntLoader.register("int4", ctx)
-    IntLoader.register("int8", ctx)
-    IntLoader.register("oid", ctx)
-    Int2BinaryLoader.register("int2", ctx)
-    Int4BinaryLoader.register("int4", ctx)
-    Int8BinaryLoader.register("int8", ctx)
-    OidBinaryLoader.register("oid", ctx)
-    FloatLoader.register("float4", ctx)
-    FloatLoader.register("float8", ctx)
-    Float4BinaryLoader.register("float4", ctx)
-    Float8BinaryLoader.register("float8", ctx)
-    NumericLoader.register("numeric", ctx)
-    NumericBinaryLoader.register("numeric", ctx)
+    adapters.register_dumper("decimal.Decimal", DecimalBinaryDumper)
+    adapters.register_dumper("decimal.Decimal", DecimalDumper)
+    adapters.register_dumper(Int2, Int2Dumper)
+    adapters.register_dumper(Int4, Int4Dumper)
+    adapters.register_dumper(Int8, Int8Dumper)
+    adapters.register_dumper(IntNumeric, IntNumericDumper)
+    adapters.register_dumper(Oid, OidDumper)
+    adapters.register_dumper(Int2, Int2BinaryDumper)
+    adapters.register_dumper(Int4, Int4BinaryDumper)
+    adapters.register_dumper(Int8, Int8BinaryDumper)
+    adapters.register_dumper(Oid, OidBinaryDumper)
+    adapters.register_loader("int2", IntLoader)
+    adapters.register_loader("int4", IntLoader)
+    adapters.register_loader("int8", IntLoader)
+    adapters.register_loader("oid", IntLoader)
+    adapters.register_loader("int2", Int2BinaryLoader)
+    adapters.register_loader("int4", Int4BinaryLoader)
+    adapters.register_loader("int8", Int8BinaryLoader)
+    adapters.register_loader("oid", OidBinaryLoader)
+    adapters.register_loader("float4", FloatLoader)
+    adapters.register_loader("float8", FloatLoader)
+    adapters.register_loader("float4", Float4BinaryLoader)
+    adapters.register_loader("float8", Float8BinaryLoader)
+    adapters.register_loader("numeric", NumericLoader)
+    adapters.register_loader("numeric", NumericBinaryLoader)
index e929ba00519e3194261c7242c0729c76ba5bfe12..2be4806ba1681e69e934330d3b9514abb6c8c2e1 100644 (file)
@@ -10,13 +10,13 @@ from typing import cast
 from decimal import Decimal
 from datetime import date, datetime
 
+from .. import postgres
 from ..pq import Format
-from ..oids import postgres_types as builtins, INVALID_OID
 from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
 from ..proto import AdaptContext, Buffer, Dumper, DumperKey
 from .._struct import pack_len, unpack_len
+from ..postgres import INVALID_OID
 from .._typeinfo import RangeInfo as RangeInfo  # exported here
-
 from .composite import SequenceDumper, BaseCompositeLoader
 
 RANGE_EMPTY = 0x01  # range is empty
@@ -256,7 +256,7 @@ class BaseRangeDumper(RecursiveDumper):
     def __init__(self, cls: type, context: Optional[AdaptContext] = None):
         super().__init__(cls, context)
         self.sub_dumper: Optional[Dumper] = None
-        self._types = context.adapters.types if context else builtins
+        self._types = context.adapters.types if context else postgres.types
         self._adapt_format = PyFormat.from_pq(self.format)
 
     def get_key(self, obj: Range[Any], format: PyFormat) -> DumperKey:
@@ -437,15 +437,17 @@ _int2parens = {ord(c): c for c in "[]()"}
 
 
 def register_adapters(
-    info: RangeInfo, context: Optional["AdaptContext"]
+    info: RangeInfo, context: Optional[AdaptContext] = None
 ) -> None:
+    adapters = context.adapters if context else postgres.adapters
+
     # generate and register a customized text loader
     loader: Type[RangeLoader[Any]] = type(
         f"{info.name.title()}Loader",
         (RangeLoader,),
         {"subtype_oid": info.subtype_oid},
     )
-    loader.register(info.oid, context=context)
+    adapters.register_loader(info.oid, loader)
 
     # generate and register a customized binary loader
     bloader: Type[RangeBinaryLoader[Any]] = type(
@@ -453,7 +455,7 @@ def register_adapters(
         (RangeBinaryLoader,),
         {"subtype_oid": info.subtype_oid},
     )
-    bloader.register(info.oid, context=context)
+    adapters.register_loader(info.oid, bloader)
 
 
 # Text dumpers for builtin range types wrappers
@@ -462,27 +464,27 @@ def register_adapters(
 
 
 class Int4RangeDumper(RangeDumper):
-    _oid = builtins["int4range"].oid
+    _oid = postgres.types["int4range"].oid
 
 
 class Int8RangeDumper(RangeDumper):
-    _oid = builtins["int8range"].oid
+    _oid = postgres.types["int8range"].oid
 
 
 class NumericRangeDumper(RangeDumper):
-    _oid = builtins["numrange"].oid
+    _oid = postgres.types["numrange"].oid
 
 
 class DateRangeDumper(RangeDumper):
-    _oid = builtins["daterange"].oid
+    _oid = postgres.types["daterange"].oid
 
 
 class TimestampRangeDumper(RangeDumper):
-    _oid = builtins["tsrange"].oid
+    _oid = postgres.types["tsrange"].oid
 
 
 class TimestamptzRangeDumper(RangeDumper):
-    _oid = builtins["tstzrange"].oid
+    _oid = postgres.types["tstzrange"].oid
 
 
 # Binary dumpers for builtin range types wrappers
@@ -491,107 +493,108 @@ class TimestamptzRangeDumper(RangeDumper):
 
 
 class Int4RangeBinaryDumper(RangeBinaryDumper):
-    _oid = builtins["int4range"].oid
+    _oid = postgres.types["int4range"].oid
 
 
 class Int8RangeBinaryDumper(RangeBinaryDumper):
-    _oid = builtins["int8range"].oid
+    _oid = postgres.types["int8range"].oid
 
 
 class NumericRangeBinaryDumper(RangeBinaryDumper):
-    _oid = builtins["numrange"].oid
+    _oid = postgres.types["numrange"].oid
 
 
 class DateRangeBinaryDumper(RangeBinaryDumper):
-    _oid = builtins["daterange"].oid
+    _oid = postgres.types["daterange"].oid
 
 
 class TimestampRangeBinaryDumper(RangeBinaryDumper):
-    _oid = builtins["tsrange"].oid
+    _oid = postgres.types["tsrange"].oid
 
 
 class TimestamptzRangeBinaryDumper(RangeBinaryDumper):
-    _oid = builtins["tstzrange"].oid
+    _oid = postgres.types["tstzrange"].oid
 
 
 # Text loaders for builtin range types
 
 
 class Int4RangeLoader(RangeLoader[int]):
-    subtype_oid = builtins["int4"].oid
+    subtype_oid = postgres.types["int4"].oid
 
 
 class Int8RangeLoader(RangeLoader[int]):
-    subtype_oid = builtins["int8"].oid
+    subtype_oid = postgres.types["int8"].oid
 
 
 class NumericRangeLoader(RangeLoader[Decimal]):
-    subtype_oid = builtins["numeric"].oid
+    subtype_oid = postgres.types["numeric"].oid
 
 
 class DateRangeLoader(RangeLoader[date]):
-    subtype_oid = builtins["date"].oid
+    subtype_oid = postgres.types["date"].oid
 
 
 class TimestampRangeLoader(RangeLoader[datetime]):
-    subtype_oid = builtins["timestamp"].oid
+    subtype_oid = postgres.types["timestamp"].oid
 
 
 class TimestampTZRangeLoader(RangeLoader[datetime]):
-    subtype_oid = builtins["timestamptz"].oid
+    subtype_oid = postgres.types["timestamptz"].oid
 
 
 # Binary loaders for builtin range types
 
 
 class Int4RangeBinaryLoader(RangeBinaryLoader[int]):
-    subtype_oid = builtins["int4"].oid
+    subtype_oid = postgres.types["int4"].oid
 
 
 class Int8RangeBinaryLoader(RangeBinaryLoader[int]):
-    subtype_oid = builtins["int8"].oid
+    subtype_oid = postgres.types["int8"].oid
 
 
 class NumericRangeBinaryLoader(RangeBinaryLoader[Decimal]):
-    subtype_oid = builtins["numeric"].oid
+    subtype_oid = postgres.types["numeric"].oid
 
 
 class DateRangeBinaryLoader(RangeBinaryLoader[date]):
-    subtype_oid = builtins["date"].oid
+    subtype_oid = postgres.types["date"].oid
 
 
 class TimestampRangeBinaryLoader(RangeBinaryLoader[datetime]):
-    subtype_oid = builtins["timestamp"].oid
+    subtype_oid = postgres.types["timestamp"].oid
 
 
 class TimestampTZRangeBinaryLoader(RangeBinaryLoader[datetime]):
-    subtype_oid = builtins["timestamptz"].oid
-
-
-def register_default_globals(ctx: AdaptContext) -> None:
-    RangeBinaryDumper.register(Range, ctx)
-    RangeDumper.register(Range, ctx)
-    Int4RangeDumper.register(Int4Range, ctx)
-    Int8RangeDumper.register(Int8Range, ctx)
-    NumericRangeDumper.register(NumericRange, ctx)
-    DateRangeDumper.register(DateRange, ctx)
-    TimestampRangeDumper.register(TimestampRange, ctx)
-    TimestamptzRangeDumper.register(TimestamptzRange, ctx)
-    Int4RangeBinaryDumper.register(Int4Range, ctx)
-    Int8RangeBinaryDumper.register(Int8Range, ctx)
-    NumericRangeBinaryDumper.register(NumericRange, ctx)
-    DateRangeBinaryDumper.register(DateRange, ctx)
-    TimestampRangeBinaryDumper.register(TimestampRange, ctx)
-    TimestamptzRangeBinaryDumper.register(TimestamptzRange, ctx)
-    Int4RangeLoader.register("int4range", ctx)
-    Int8RangeLoader.register("int8range", ctx)
-    NumericRangeLoader.register("numrange", ctx)
-    DateRangeLoader.register("daterange", ctx)
-    TimestampRangeLoader.register("tsrange", ctx)
-    TimestampTZRangeLoader.register("tstzrange", ctx)
-    Int4RangeBinaryLoader.register("int4range", ctx)
-    Int8RangeBinaryLoader.register("int8range", ctx)
-    NumericRangeBinaryLoader.register("numrange", ctx)
-    DateRangeBinaryLoader.register("daterange", ctx)
-    TimestampRangeBinaryLoader.register("tsrange", ctx)
-    TimestampTZRangeBinaryLoader.register("tstzrange", ctx)
+    subtype_oid = postgres.types["timestamptz"].oid
+
+
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+    adapters.register_dumper(Range, RangeBinaryDumper)
+    adapters.register_dumper(Range, RangeDumper)
+    adapters.register_dumper(Int4Range, Int4RangeDumper)
+    adapters.register_dumper(Int8Range, Int8RangeDumper)
+    adapters.register_dumper(NumericRange, NumericRangeDumper)
+    adapters.register_dumper(DateRange, DateRangeDumper)
+    adapters.register_dumper(TimestampRange, TimestampRangeDumper)
+    adapters.register_dumper(TimestamptzRange, TimestamptzRangeDumper)
+    adapters.register_dumper(Int4Range, Int4RangeBinaryDumper)
+    adapters.register_dumper(Int8Range, Int8RangeBinaryDumper)
+    adapters.register_dumper(NumericRange, NumericRangeBinaryDumper)
+    adapters.register_dumper(DateRange, DateRangeBinaryDumper)
+    adapters.register_dumper(TimestampRange, TimestampRangeBinaryDumper)
+    adapters.register_dumper(TimestamptzRange, TimestamptzRangeBinaryDumper)
+    adapters.register_loader("int4range", Int4RangeLoader)
+    adapters.register_loader("int8range", Int8RangeLoader)
+    adapters.register_loader("numrange", NumericRangeLoader)
+    adapters.register_loader("daterange", DateRangeLoader)
+    adapters.register_loader("tsrange", TimestampRangeLoader)
+    adapters.register_loader("tstzrange", TimestampTZRangeLoader)
+    adapters.register_loader("int4range", Int4RangeBinaryLoader)
+    adapters.register_loader("int8range", Int8RangeBinaryLoader)
+    adapters.register_loader("numrange", NumericRangeBinaryLoader)
+    adapters.register_loader("daterange", DateRangeBinaryLoader)
+    adapters.register_loader("tsrange", TimestampRangeBinaryLoader)
+    adapters.register_loader("tstzrange", TimestampTZRangeBinaryLoader)
index b40b0ae275e123bfa2b51d0da47615f00bf08724..8c1f6a16c0682319bf8ab9f19063ad4b99544fa6 100644 (file)
@@ -6,8 +6,8 @@ Adapters for textual types.
 
 from typing import Optional, Union, TYPE_CHECKING
 
+from .. import postgres
 from ..pq import Format, Escaping
-from ..oids import postgres_types as builtins
 from ..adapt import Buffer, Dumper, Loader
 from ..proto import AdaptContext
 from ..errors import DataError
@@ -33,7 +33,7 @@ class _StrDumper(Dumper):
 class StrBinaryDumper(_StrDumper):
 
     format = Format.BINARY
-    _oid = builtins["text"].oid
+    _oid = postgres.types["text"].oid
 
     def dump(self, obj: str) -> bytes:
         # the server will raise DataError subclass if the string contains 0x00
@@ -84,7 +84,7 @@ class TextBinaryLoader(TextLoader):
 class BytesDumper(Dumper):
 
     format = Format.TEXT
-    _oid = builtins["bytea"].oid
+    _oid = postgres.types["bytea"].oid
 
     def __init__(self, cls: type, context: Optional[AdaptContext] = None):
         super().__init__(cls, context)
@@ -101,7 +101,7 @@ class BytesDumper(Dumper):
 class BytesBinaryDumper(Dumper):
 
     format = Format.BINARY
-    _oid = builtins["bytea"].oid
+    _oid = postgres.types["bytea"].oid
 
     def dump(
         self, obj: Union[bytes, bytearray, memoryview]
@@ -132,31 +132,31 @@ class ByteaBinaryLoader(Loader):
         return data
 
 
-def register_default_globals(ctx: "AdaptContext") -> None:
-    from ..oids import INVALID_OID
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
 
     # NOTE: the order the dumpers are registered is relevant.
     # The last one registered becomes the default for each type.
     # Normally, binary is the default dumper, except for text (which plays
     # the role of unknown, so it can be cast automatically to other types).
-    StrBinaryDumper.register(str, ctx)
-    StrDumper.register(str, ctx)
-    TextLoader.register(INVALID_OID, ctx)
-    TextLoader.register("bpchar", ctx)
-    TextLoader.register("name", ctx)
-    TextLoader.register("text", ctx)
-    TextLoader.register("varchar", ctx)
-    TextBinaryLoader.register("bpchar", ctx)
-    TextBinaryLoader.register("name", ctx)
-    TextBinaryLoader.register("text", ctx)
-    TextBinaryLoader.register("varchar", ctx)
-
-    BytesDumper.register(bytes, ctx)
-    BytesDumper.register(bytearray, ctx)
-    BytesDumper.register(memoryview, ctx)
-    BytesBinaryDumper.register(bytes, ctx)
-    BytesBinaryDumper.register(bytearray, ctx)
-    BytesBinaryDumper.register(memoryview, ctx)
-    ByteaLoader.register("bytea", ctx)
-    ByteaBinaryLoader.register(INVALID_OID, ctx)
-    ByteaBinaryLoader.register("bytea", ctx)
+    adapters.register_dumper(str, StrBinaryDumper)
+    adapters.register_dumper(str, StrDumper)
+    adapters.register_loader(postgres.INVALID_OID, TextLoader)
+    adapters.register_loader("bpchar", TextLoader)
+    adapters.register_loader("name", TextLoader)
+    adapters.register_loader("text", TextLoader)
+    adapters.register_loader("varchar", TextLoader)
+    adapters.register_loader("bpchar", TextBinaryLoader)
+    adapters.register_loader("name", TextBinaryLoader)
+    adapters.register_loader("text", TextBinaryLoader)
+    adapters.register_loader("varchar", TextBinaryLoader)
+
+    adapters.register_dumper(bytes, BytesDumper)
+    adapters.register_dumper(bytearray, BytesDumper)
+    adapters.register_dumper(memoryview, BytesDumper)
+    adapters.register_dumper(bytes, BytesBinaryDumper)
+    adapters.register_dumper(bytearray, BytesBinaryDumper)
+    adapters.register_dumper(memoryview, BytesBinaryDumper)
+    adapters.register_loader("bytea", ByteaLoader)
+    adapters.register_loader(postgres.INVALID_OID, ByteaBinaryLoader)
+    adapters.register_loader("bytea", ByteaBinaryLoader)
index 0767c488065ad59df6f30c90663da4b25cbd4fa0..917e25366f19cbdd1231f055b34b2c61e865d7c0 100644 (file)
@@ -6,8 +6,8 @@ Adapters for the UUID type.
 
 from typing import Callable, Optional, TYPE_CHECKING
 
+from .. import postgres
 from ..pq import Format
-from ..oids import postgres_types as builtins
 from ..adapt import Buffer, Dumper, Loader
 from ..proto import AdaptContext
 
@@ -22,7 +22,7 @@ UUID: Callable[..., "uuid.UUID"]
 class UUIDDumper(Dumper):
 
     format = Format.TEXT
-    _oid = builtins["uuid"].oid
+    _oid = postgres.types["uuid"].oid
 
     def dump(self, obj: "uuid.UUID") -> bytes:
         return obj.hex.encode("utf8")
@@ -64,8 +64,9 @@ class UUIDBinaryLoader(UUIDLoader):
         return UUID(bytes=data)
 
 
-def register_default_globals(ctx: AdaptContext) -> None:
-    UUIDDumper.register("uuid.UUID", ctx)
-    UUIDBinaryDumper.register("uuid.UUID", ctx)
-    UUIDLoader.register("uuid", ctx)
-    UUIDBinaryLoader.register("uuid", ctx)
+def register_default_adapters(context: AdaptContext) -> None:
+    adapters = context.adapters
+    adapters.register_dumper("uuid.UUID", UUIDDumper)
+    adapters.register_dumper("uuid.UUID", UUIDBinaryDumper)
+    adapters.register_loader("uuid", UUIDLoader)
+    adapters.register_loader("uuid", UUIDBinaryLoader)
index 771745d03b15fe19f59ae912c5021fb23825a810..ce640f8e29c9041a4ca40d7cd2659bbfb4ab553a 100644 (file)
@@ -16,7 +16,6 @@ equivalent C implementations.
 from typing import Any
 
 cimport cython
-from cpython.bytes cimport PyBytes_AsStringAndSize
 from cpython.bytearray cimport PyByteArray_FromStringAndSize, PyByteArray_Resize
 from cpython.bytearray cimport PyByteArray_GET_SIZE, PyByteArray_AS_STRING
 
@@ -104,20 +103,6 @@ cdef class CDumper:
     cpdef object upgrade(self, object obj, object format):
         return self
 
-    @classmethod
-    def register(
-        this_cls,
-        cls: Union[type, str],
-        context: Optional[AdaptContext] = None,
-        int format = PQ_TEXT,
-    ) -> None:
-        if context is not None:
-            adapters = context.adapters
-        else:
-            from psycopg.adapt import global_adapters as adapters
-
-        adapters.register_dumper(cls, this_cls)
-
     @staticmethod
     cdef char *ensure_size(bytearray ba, Py_ssize_t offset, Py_ssize_t size) except NULL:
         """
@@ -152,17 +137,3 @@ cdef class CLoader:
         cdef Py_ssize_t length
         _buffer_as_string_and_size(data, &ptr, &length)
         return self.cload(ptr, length)
-
-    @classmethod
-    def register(
-        cls,
-        oid: Union[int, str],
-        context: Optional["AdaptContext"] = None,
-        int format = PQ_TEXT,
-    ) -> None:
-        if context is not None:
-            adapters = context.adapters
-        else:
-            from psycopg.adapt import global_adapters as adapters
-
-        adapters.register_loader(oid, cls)
index 61976b375c82d34ad6d3375cef45b94ba9969289..414f3a04b13ac8cbdc43f50a49b4ffcf5af1296a 100644 (file)
@@ -88,8 +88,8 @@ cdef class Transformer:
             self.adapters = context.adapters
             self.connection = context.connection
         else:
-            from psycopg.adapt import global_adapters
-            self.adapters = global_adapters
+            from psycopg import postgres
+            self.adapters = postgres.adapters
             self.connection = None
 
     @property
index 3f9d93c5b42640b5fd504a3f5cee2ac98195583b..dcc4e56ef45d858c310c465ab4eb8bbd6f6fe230 100644 (file)
@@ -4,10 +4,10 @@ from types import ModuleType
 import pytest
 
 import psycopg
-from psycopg import pq, sql
+from psycopg import pq, sql, postgres
 from psycopg.adapt import Transformer, PyFormat as Format, Dumper, Loader
-from psycopg.oids import postgres_types as builtins, TEXT_OID
 from psycopg._cmodule import _psycopg
+from psycopg.postgres import types as builtins, TEXT_OID
 
 
 @pytest.mark.parametrize(
@@ -45,8 +45,8 @@ def test_quote(data, result):
 
 
 def test_dump_connection_ctx(conn):
-    make_bin_dumper("b").register(MyStr, conn)
-    make_dumper("t").register(MyStr, conn)
+    conn.adapters.register_dumper(MyStr, make_bin_dumper("b"))
+    conn.adapters.register_dumper(MyStr, make_dumper("t"))
 
     cur = conn.cursor()
     cur.execute("select %s", [MyStr("hello")])
@@ -58,12 +58,12 @@ def test_dump_connection_ctx(conn):
 
 
 def test_dump_cursor_ctx(conn):
-    make_bin_dumper("b").register(str, conn)
-    make_dumper("t").register(str, conn)
+    conn.adapters.register_dumper(str, make_bin_dumper("b"))
+    conn.adapters.register_dumper(str, make_dumper("t"))
 
     cur = conn.cursor()
-    make_bin_dumper("bc").register(str, cur)
-    make_dumper("tc").register(str, cur)
+    cur.adapters.register_dumper(str, make_bin_dumper("bc"))
+    cur.adapters.register_dumper(str, make_dumper("tc"))
 
     cur.execute("select %s", [MyStr("hello")])
     assert cur.fetchone() == ("hellotc",)
@@ -101,7 +101,7 @@ def test_subclass_dumper(conn):
         def dump(self, obj):
             return (obj * 2).encode("utf-8")
 
-    MyStrDumper.register(str, conn)
+    conn.adapters.register_dumper(str, MyStrDumper)
     assert conn.execute("select %t", ["hello"]).fetchone()[0] == "hellohello"
 
 
@@ -138,7 +138,7 @@ def test_subclass_loader(conn):
         def load(self, data):
             return (bytes(data) * 2).decode("utf-8")
 
-    MyTextLoader.register("text", conn)
+    conn.adapters.register_loader("text", MyTextLoader)
     assert conn.execute("select 'hello'::text").fetchone()[0] == "hellohello"
 
 
@@ -157,8 +157,8 @@ def test_cast(data, format, type, result):
 
 
 def test_load_connection_ctx(conn):
-    make_loader("t").register(TEXT_OID, conn)
-    make_bin_loader("b").register(TEXT_OID, conn)
+    conn.adapters.register_loader(TEXT_OID, make_loader("t"))
+    conn.adapters.register_loader(TEXT_OID, make_bin_loader("b"))
 
     r = conn.cursor(binary=False).execute("select 'hello'::text").fetchone()
     assert r == ("hellot",)
@@ -167,12 +167,12 @@ def test_load_connection_ctx(conn):
 
 
 def test_load_cursor_ctx(conn):
-    make_loader("t").register(TEXT_OID, conn)
-    make_bin_loader("b").register(TEXT_OID, conn)
+    conn.adapters.register_loader(TEXT_OID, make_loader("t"))
+    conn.adapters.register_loader(TEXT_OID, make_bin_loader("b"))
 
     cur = conn.cursor()
-    make_loader("tc").register(TEXT_OID, cur)
-    make_bin_loader("bc").register(TEXT_OID, cur)
+    cur.adapters.register_loader(TEXT_OID, make_loader("tc"))
+    cur.adapters.register_loader(TEXT_OID, make_bin_loader("bc"))
 
     assert cur.execute("select 'hello'::text").fetchone() == ("hellotc",)
     cur.format = pq.Format.BINARY
@@ -185,18 +185,18 @@ def test_load_cursor_ctx(conn):
 
 
 def test_cow_dumpers(conn):
-    make_dumper("t").register(str, conn)
+    conn.adapters.register_dumper(str, make_dumper("t"))
 
     cur1 = conn.cursor()
     cur2 = conn.cursor()
-    make_dumper("c2").register(str, cur2)
+    cur2.adapters.register_dumper(str, make_dumper("c2"))
 
     r = cur1.execute("select %s::text -- 1", ["hello"]).fetchone()
     assert r == ("hellot",)
     r = cur2.execute("select %s::text -- 1", ["hello"]).fetchone()
     assert r == ("helloc2",)
 
-    make_dumper("t1").register(str, conn)
+    conn.adapters.register_dumper(str, make_dumper("t1"))
     r = cur1.execute("select %s::text -- 2", ["hello"]).fetchone()
     assert r == ("hellot",)
     r = cur2.execute("select %s::text -- 2", ["hello"]).fetchone()
@@ -204,16 +204,16 @@ def test_cow_dumpers(conn):
 
 
 def test_cow_loaders(conn):
-    make_loader("t").register(TEXT_OID, conn)
+    conn.adapters.register_loader(TEXT_OID, make_loader("t"))
 
     cur1 = conn.cursor()
     cur2 = conn.cursor()
-    make_loader("c2").register(TEXT_OID, cur2)
+    cur2.adapters.register_loader(TEXT_OID, make_loader("c2"))
 
     assert cur1.execute("select 'hello'::text").fetchone() == ("hellot",)
     assert cur2.execute("select 'hello'::text").fetchone() == ("helloc2",)
 
-    make_loader("t1").register(TEXT_OID, conn)
+    conn.adapters.register_loader(TEXT_OID, make_loader("t1"))
     assert cur1.execute("select 'hello2'::text").fetchone() == ("hello2t",)
     assert cur2.execute("select 'hello2'::text").fetchone() == ("hello2c2",)
 
@@ -226,9 +226,9 @@ def test_cow_loaders(conn):
 def test_load_cursor_ctx_nested(conn, sql, obj, fmt_out):
     cur = conn.cursor(binary=fmt_out == pq.Format.BINARY)
     if fmt_out == pq.Format.TEXT:
-        make_loader("c").register(TEXT_OID, cur)
+        cur.adapters.register_loader("text", make_loader("c"))
     else:
-        make_bin_loader("c").register(TEXT_OID, cur)
+        cur.adapters.register_loader("text", make_bin_loader("c"))
 
     cur.execute(f"select {sql}")
     res = cur.fetchone()[0]
@@ -271,15 +271,15 @@ def test_last_dumper_registered_ctx(conn):
     cur = conn.cursor()
 
     bd = make_bin_dumper("b")
-    bd.register(str, cur)
+    cur.adapters.register_dumper(str, bd)
     td = make_dumper("t")
-    td.register(str, cur)
+    cur.adapters.register_dumper(str, td)
 
     assert cur.execute("select %s", ["hello"]).fetchone()[0] == "hellot"
     assert cur.execute("select %t", ["hello"]).fetchone()[0] == "hellot"
     assert cur.execute("select %b", ["hello"]).fetchone()[0] == "hellob"
 
-    bd.register(str, cur)
+    cur.adapters.register_dumper(str, bd)
     assert cur.execute("select %s", ["hello"]).fetchone()[0] == "hellob"
 
 
@@ -336,8 +336,7 @@ def test_optimised_adapters():
     # All the registered adapters
     reg_adapters = set()
     adapters = (
-        list(psycopg.global_adapters._dumpers.values())
-        + psycopg.global_adapters._loaders
+        list(postgres.adapters._dumpers.values()) + postgres.adapters._loaders
     )
     assert len(adapters) == 5
     for m in adapters:
index 8e0fafbafa8bdc66dc8c0e253b82d6e137f3e3ae..725862e732b4586602858cd369975c35f91aa5b3 100644 (file)
@@ -267,7 +267,7 @@ def test_subclass_adapter(conn, format):
         def dump(self, obj):
             return super().dump(obj) * 2
 
-    MyStrDumper.register(str, conn)
+    conn.adapters.register_dumper(str, MyStrDumper)
 
     cur = conn.cursor()
     ensure_table(cur, sample_tabledef)
index 729a6ccfe5437a095c311069e5d928acbf873eb8..5c40f6460867199b11ad600275dbbc93932e02cc 100644 (file)
@@ -7,8 +7,8 @@ import pytest
 
 import psycopg
 from psycopg import pq, sql, rows
-from psycopg.oids import postgres_types as builtins
 from psycopg.adapt import PyFormat as Format
+from psycopg.postgres import types as builtins
 
 from .utils import gc_collect
 
index 45fed9530eaef9d7aee4957324760e1d3794c56b..dbee442f3a451a79de445981aa302eef60a304e5 100644 (file)
@@ -2,9 +2,9 @@ import pytest
 import psycopg
 from psycopg import pq
 from psycopg import sql
-from psycopg.oids import postgres_types as builtins
 from psycopg.adapt import PyFormat as Format, Transformer
 from psycopg.types import TypeInfo
+from psycopg.postgres import types as builtins
 
 
 tests_str = [
index 04fd618863cf1729f92fe400d50ee32e3a2b491c..27dddaa15596e8fb39fce05861738b7bf5d700f2 100644 (file)
@@ -2,8 +2,8 @@ import pytest
 
 from psycopg import pq
 from psycopg import sql
-from psycopg.oids import postgres_types as builtins
 from psycopg.adapt import Transformer, PyFormat as Format
+from psycopg.postgres import types as builtins
 
 
 @pytest.mark.parametrize("fmt_in", [Format.AUTO, Format.TEXT, Format.BINARY])
index 621309dfc04921c8cf08fe988366eb26d693953c..749c7376b0d2b80dfb1ec7b2f9f777613f6aa19e 100644 (file)
@@ -1,9 +1,9 @@
 import pytest
 
-from psycopg import pq
+from psycopg import pq, postgres
 from psycopg.sql import Identifier
-from psycopg.oids import postgres_types as builtins
-from psycopg.adapt import PyFormat as Format, global_adapters
+from psycopg.adapt import PyFormat as Format
+from psycopg.postgres import types as builtins
 from psycopg.types.composite import CompositeInfo
 
 
@@ -218,18 +218,18 @@ def test_register_scope(conn, testcomp):
     info.register()
     for fmt in (pq.Format.TEXT, pq.Format.BINARY):
         for oid in (info.oid, info.array_oid):
-            assert global_adapters._loaders[fmt].pop(oid)
+            assert postgres.adapters._loaders[fmt].pop(oid)
 
     cur = conn.cursor()
     info.register(cur)
     for fmt in (pq.Format.TEXT, pq.Format.BINARY):
         for oid in (info.oid, info.array_oid):
-            assert oid not in global_adapters._loaders[fmt]
+            assert oid not in postgres.adapters._loaders[fmt]
             assert oid not in conn.adapters._loaders[fmt]
             assert oid in cur.adapters._loaders[fmt]
 
     info.register(conn)
     for fmt in (pq.Format.TEXT, pq.Format.BINARY):
         for oid in (info.oid, info.array_oid):
-            assert oid not in global_adapters._loaders[fmt]
+            assert oid not in postgres.adapters._loaders[fmt]
             assert oid in conn.adapters._loaders[fmt]
index 1b4c62abe1916ab09c87ced6482c3d8888a7915f..0e979094b8ef620ff1758772e6b4fe6e1704ef34 100644 (file)
@@ -585,12 +585,12 @@ class TestInterval:
     def test_infinity_date_example(self, conn):
         # NOTE: this is an example in the docs. Make sure it doesn't regress when
         # adding binary datetime adapters
-        from psycopg.oids import postgres_types as builtins
+        from datetime import date
         from psycopg.types.datetime import DateLoader, DateDumper
 
         class InfDateDumper(DateDumper):
             def dump(self, obj):
-                if obj == dt.date.max:
+                if obj == date.max:
                     return b"infinity"
                 else:
                     return super().dump(obj)
@@ -598,22 +598,22 @@ class TestInterval:
         class InfDateLoader(DateLoader):
             def load(self, data):
                 if data == b"infinity":
-                    return dt.date.max
+                    return date.max
                 else:
                     return super().load(data)
 
         cur = conn.cursor()
-        InfDateDumper.register(dt.date, cur)
-        InfDateLoader.register(builtins["date"].oid, cur)
+        cur.adapters.register_dumper(date, InfDateDumper)
+        cur.adapters.register_loader("date", InfDateLoader)
 
         rec = cur.execute(
-            "SELECT %s::text, %s::text", [dt.date(2020, 12, 31), dt.date.max]
+            "SELECT %s::text, %s::text", [date(2020, 12, 31), date.max]
         ).fetchone()
         assert rec == ("2020-12-31", "infinity")
         rec = cur.execute(
             "select '2020-12-31'::date, 'infinity'::date"
         ).fetchone()
-        assert rec == (dt.date(2020, 12, 31), dt.date(9999, 12, 31))
+        assert rec == (date(2020, 12, 31), date(9999, 12, 31))
 
     def test_load_copy(self, conn):
         cur = conn.cursor(binary=False)
index 16e98cbe8fe018a924ece09a0c08f0b953af067c..d8461f0350e4add1f707df5527b63d8cba7aea74 100644 (file)
@@ -523,7 +523,7 @@ def test_load_numeric_binary_inf(conn, val, expr):
 )
 def test_numeric_as_float(conn, val):
     cur = conn.cursor()
-    FloatLoader.register(conn.adapters.types["numeric"].oid, cur)
+    cur.adapters.register_loader("numeric", FloatLoader)
 
     val = Decimal(val)
     cur.execute("select %s as val", (val,))
index ec591bc1a09209699155983ce8ae4a384b4fa2db..2b9b3643fbd957b85c7a08629c4c72eb02710a3c 100755 (executable)
@@ -73,7 +73,7 @@ order by typname
 
 def update_python_oids() -> None:
     queries = [version_sql, py_types_sql, py_ranges_sql]
-    fn = ROOT / "psycopg/psycopg/oids.py"
+    fn = ROOT / "psycopg/psycopg/postgres.py"
     update_file(fn, queries)
     sp.check_call(["black", "-q", fn])