import struct
from collections import namedtuple
from typing import Any, Callable, cast, Dict, Iterator, List, Optional
-from typing import Sequence, Tuple, Type
+from typing import NamedTuple, Sequence, Tuple, Type
from .. import pq
from .. import abc
from .. import postgres
from ..adapt import Transformer, PyFormat, RecursiveDumper, Loader, Dumper
+from .._compat import cache
from .._struct import pack_len, unpack_len
from ..postgres import TEXT_OID
from .._typeinfo import CompositeInfo as CompositeInfo # exported here
class TupleBinaryDumper(Dumper):
format = pq.Format.BINARY
- # Subclasses must set an info
- info: CompositeInfo
+    # Subclasses must set the field types of the composite
+ _field_types: Tuple[int, ...]
def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None):
super().__init__(cls, context)
# in case the composite contains another composite. Make sure to use
# a separate Transformer instance instead.
self._tx = Transformer(context)
- self._tx.set_dumper_types(self.info.field_types, self.format)
+ self._tx.set_dumper_types(self._field_types, self.format)
- nfields = len(self.info.field_types)
+ nfields = len(self._field_types)
self._formats = (PyFormat.from_pq(self.format),) * nfields
def dump(self, obj: Tuple[Any, ...]) -> bytearray:
adapted = self._tx.dump_sequence(obj, self._formats)
for i in range(len(obj)):
b = adapted[i]
- oid = self.info.field_types[i]
+ oid = self._field_types[i]
if b is not None:
out += _pack_oidlen(oid, len(b))
out += b
info.register(context)
if not factory:
- factory = namedtuple( # type: ignore
- _as_python_identifier(info.name),
- [_as_python_identifier(n) for n in info.field_names],
- )
+ factory = _nt_from_info(info)
adapters = context.adapters if context else postgres.adapters
# generate and register a customized text loader
- loader: Type[BaseCompositeLoader] = type(
- f"{info.name.title()}Loader",
- (CompositeLoader,),
- {
- "factory": factory,
- "fields_types": info.field_types,
- },
- )
+ loader: Type[BaseCompositeLoader]
+ loader = _make_loader(info.name, tuple(info.field_types), factory)
adapters.register_loader(info.oid, loader)
# generate and register a customized binary loader
- loader = type(
- f"{info.name.title()}BinaryLoader",
- (CompositeBinaryLoader,),
- {"factory": factory},
- )
+ loader = _make_binary_loader(info.name, factory)
adapters.register_loader(info.oid, loader)
# If the factory is a type, create and register dumpers for it
if isinstance(factory, type):
- dumper = type(
- f"{info.name.title()}BinaryDumper",
- (TupleBinaryDumper,),
- {"oid": info.oid, "info": info},
- )
+ dumper: Type[Dumper]
+ dumper = _make_binary_dumper(info.name, info.oid, tuple(info.field_types))
adapters.register_dumper(factory, dumper)
# Default to the text dumper because it is more flexible
- dumper = type(f"{info.name.title()}Dumper", (TupleDumper,), {"oid": info.oid})
+ dumper = _make_dumper(info.name, info.oid)
adapters.register_dumper(factory, dumper)
info.python_type = factory
adapters.register_dumper(tuple, TupleDumper)
adapters.register_loader("record", RecordLoader)
adapters.register_loader("record", RecordBinaryLoader)
+
+
+def _nt_from_info(info: CompositeInfo) -> Type[NamedTuple]:
+ name = _as_python_identifier(info.name)
+ fields = tuple(_as_python_identifier(n) for n in info.field_names)
+ return _make_nt(name, fields)
+
+
+# Cache all dynamically-generated types to avoid leaks in case the types
+# cannot be GC'd.
+
+
+@cache
+def _make_nt(name: str, fields: Tuple[str, ...]) -> Type[NamedTuple]:
+ return namedtuple(name, fields) # type: ignore[return-value]
+
+
+@cache
+def _make_loader(
+ name: str, types: Tuple[int, ...], factory: Callable[..., Any]
+) -> Type[BaseCompositeLoader]:
+ return type(
+ f"{name.title()}Loader",
+ (CompositeLoader,),
+ {"factory": factory, "fields_types": list(types)},
+ )
+
+
+@cache
+def _make_binary_loader(
+ name: str, factory: Callable[..., Any]
+) -> Type[BaseCompositeLoader]:
+ return type(
+ f"{name.title()}BinaryLoader", (CompositeBinaryLoader,), {"factory": factory}
+ )
+
+
+@cache
+def _make_dumper(name: str, oid: int) -> Type[TupleDumper]:
+ return type(f"{name.title()}Dumper", (TupleDumper,), {"oid": oid})
+
+
+@cache
+def _make_binary_dumper(
+ name: str, oid: int, field_types: Tuple[int, ...]
+) -> Type[TupleBinaryDumper]:
+ return type(
+ f"{name.title()}BinaryDumper",
+ (TupleBinaryDumper,),
+ {"oid": oid, "_field_types": field_types},
+ )
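
The composite changes above all follow the same rule: every argument reaching a cached factory must be hashable, which is why `info.field_types` (a list) is passed as `tuple(info.field_types)` and why the dumper now carries a `_field_types` tuple instead of the whole `CompositeInfo`. A minimal sketch of the effect, assuming the `cache` imported from `.._compat` behaves like `functools.cache`; the names below are illustrative, not part of the patch:

from functools import cache
from typing import Tuple


@cache
def make_loader(name: str, field_types: Tuple[int, ...]) -> type:
    # Build the class only once per distinct (name, field_types) pair.
    return type(f"{name.title()}Loader", (), {"fields_types": list(field_types)})


a = make_loader("card", (25, 23))
b = make_loader("card", (25, 23))
assert a is b                                    # re-registration reuses the class: no leak
assert make_loader("card", (25, 21)) is not a    # different oids -> a distinct class
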
Adapters for the enum type.
"""
from enum import Enum
-from typing import Any, Dict, Generic, Optional, Mapping, Sequence
+from typing import Dict, Generic, Optional, Mapping, Sequence
from typing import Tuple, Type, TypeVar, Union, cast
from typing_extensions import TypeAlias
from ..pq import Format
from ..abc import AdaptContext
from ..adapt import Buffer, Dumper, Loader
+from .._compat import cache
from .._encodings import conn_encoding
from .._typeinfo import EnumInfo as EnumInfo # exported here
EnumLoadMap: TypeAlias = Dict[bytes, E]
EnumMapping: TypeAlias = Union[Mapping[E, str], Sequence[Tuple[E, str]], None]
+# Hashable versions
+_HEnumDumpMap: TypeAlias = Tuple[Tuple[E, bytes], ...]
+_HEnumLoadMap: TypeAlias = Tuple[Tuple[bytes, E], ...]
+
+TEXT = Format.TEXT
+BINARY = Format.BINARY
+
class _BaseEnumLoader(Loader, Generic[E]):
"""
class EnumBinaryDumper(EnumDumper):
- format = Format.BINARY
+ format = BINARY
def register_enum(
raise TypeError("no info passed. Is the requested enum available?")
if enum is None:
- enum = cast(Type[E], Enum(info.name.title(), info.labels, module=__name__))
+ enum = cast(Type[E], _make_enum(info.name, tuple(info.labels)))
info.enum = enum
adapters = context.adapters if context else postgres.adapters
info.register(context)
load_map = _make_load_map(info, enum, mapping, context)
- attribs: Dict[str, Any] = {"enum": info.enum, "_load_map": load_map}
- name = f"{info.name.title()}Loader"
- loader = type(name, (_BaseEnumLoader,), attribs)
+ loader = _make_loader(info.name, info.enum, load_map)
adapters.register_loader(info.oid, loader)
- name = f"{info.name.title()}BinaryLoader"
- loader = type(name, (_BaseEnumLoader,), {**attribs, "format": Format.BINARY})
+ loader = _make_binary_loader(info.name, info.enum, load_map)
adapters.register_loader(info.oid, loader)
dump_map = _make_dump_map(info, enum, mapping, context)
- attribs = {"oid": info.oid, "enum": info.enum, "_dump_map": dump_map}
- name = f"{enum.__name__}Dumper"
- dumper = type(name, (_BaseEnumDumper,), attribs)
+ dumper = _make_dumper(info.enum, info.oid, dump_map)
adapters.register_dumper(info.enum, dumper)
- name = f"{enum.__name__}BinaryDumper"
- dumper = type(name, (_BaseEnumDumper,), {**attribs, "format": Format.BINARY})
+ dumper = _make_binary_dumper(info.enum, info.oid, dump_map)
adapters.register_dumper(info.enum, dumper)
+# Cache all dynamically-generated types to avoid leaks in case the types
+# cannot be GC'd.
+
+
+@cache
+def _make_enum(name: str, labels: Tuple[str, ...]) -> Enum:
+ return Enum(name.title(), labels, module=__name__)
+
+
+@cache
+def _make_loader(
+ name: str, enum: Type[Enum], load_map: _HEnumLoadMap[E]
+) -> Type[_BaseEnumLoader[E]]:
+ attribs = {"enum": enum, "_load_map": dict(load_map)}
+ return type(f"{name.title()}Loader", (_BaseEnumLoader,), attribs)
+
+
+@cache
+def _make_binary_loader(
+ name: str, enum: Type[Enum], load_map: _HEnumLoadMap[E]
+) -> Type[_BaseEnumLoader[E]]:
+ attribs = {"enum": enum, "_load_map": dict(load_map), "format": BINARY}
+ return type(f"{name.title()}BinaryLoader", (_BaseEnumLoader,), attribs)
+
+
+@cache
+def _make_dumper(
+ enum: Type[Enum], oid: int, dump_map: _HEnumDumpMap[E]
+) -> Type[_BaseEnumDumper[E]]:
+ attribs = {"enum": enum, "oid": oid, "_dump_map": dict(dump_map)}
+ return type(f"{enum.__name__}Dumper", (_BaseEnumDumper,), attribs)
+
+
+@cache
+def _make_binary_dumper(
+ enum: Type[Enum], oid: int, dump_map: _HEnumDumpMap[E]
+) -> Type[_BaseEnumDumper[E]]:
+ attribs = {"enum": enum, "oid": oid, "_dump_map": dict(dump_map), "format": BINARY}
+ return type(f"{enum.__name__}BinaryDumper", (_BaseEnumDumper,), attribs)
+
+
def _make_load_map(
info: EnumInfo,
enum: Type[E],
mapping: EnumMapping[E],
context: Optional[AdaptContext],
-) -> EnumLoadMap[E]:
+) -> _HEnumLoadMap[E]:
enc = conn_encoding(context.connection if context else None)
- rv: EnumLoadMap[E] = {}
+ rv = []
for label in info.labels:
try:
member = enum[label]
# will get a DataError on fetch.
pass
else:
- rv[label.encode(enc)] = member
+ rv.append((label.encode(enc), member))
if mapping:
if isinstance(mapping, Mapping):
mapping = list(mapping.items())
for member, label in mapping:
- rv[label.encode(enc)] = member
+ rv.append((label.encode(enc), member))
- return rv
+ return tuple(rv)
def _make_dump_map(
enum: Type[E],
mapping: EnumMapping[E],
context: Optional[AdaptContext],
-) -> EnumDumpMap[E]:
+) -> _HEnumDumpMap[E]:
enc = conn_encoding(context.connection if context else None)
- rv: EnumDumpMap[E] = {}
+ rv = []
for member in enum:
- rv[member] = member.name.encode(enc)
+ rv.append((member, member.name.encode(enc)))
if mapping:
if isinstance(mapping, Mapping):
mapping = list(mapping.items())
for member, label in mapping:
- rv[member] = label.encode(enc)
+ rv.append((member, label.encode(enc)))
- return rv
+ return tuple(rv)
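
The load/dump maps are returned as tuples of pairs rather than dicts because the cached `_make_*` factories receive them as arguments, and a dict argument would raise `TypeError: unhashable type: 'dict'` under `@cache`; the factories rebuild the dict with `dict(load_map)` / `dict(dump_map)` when assembling the class attributes. A small sketch of the round trip, with stand-in names:

from functools import cache

pairs = ((b"ok", "OK"), (b"ko", "KO"))      # hashable stand-in for a load map


@cache
def make(load_map: tuple) -> dict:
    return dict(load_map)                   # rebuilt inside the cached factory


assert make(pairs) == {b"ok": "OK", b"ko": "KO"}
assert make(pairs) is make(pairs)           # cached: same object on the second call
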
def register_default_adapters(context: AdaptContext) -> None:
# Copyright (C) 2021 The Psycopg Team
import re
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Type
from typing_extensions import TypeAlias
from .. import errors as e
from .. import postgres
from ..abc import Buffer, AdaptContext
from ..adapt import PyFormat, RecursiveDumper, RecursiveLoader
+from .._compat import cache
from ..postgres import TEXT_OID
from .._typeinfo import TypeInfo
adapters = context.adapters if context else postgres.adapters
# Generate and register a customized text dumper
- class HstoreDumper(BaseHstoreDumper):
- oid = info.oid
-
- adapters.register_dumper(dict, HstoreDumper)
+ adapters.register_dumper(dict, _make_hstore_dumper(info.oid))
# register the text loader on the oid
adapters.register_loader(info.oid, HstoreLoader)
+
+
+# Cache all dynamically-generated types to avoid leaks in case the types
+# cannot be GC'd.
+
+
+@cache
+def _make_hstore_dumper(oid_in: int) -> Type[BaseHstoreDumper]:
+ """
+ Return an hstore dumper class configured using `oid_in`.
+
+    Avoid creating new classes if the configured oid is the same.
+ """
+
+ class HstoreDumper(BaseHstoreDumper):
+ oid = oid_in
+
+ return HstoreDumper
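
`_make_hstore_dumper` (like the shapely factories further down) uses a nested `class` statement rather than a `type(...)` call; the effect is the same, a new subclass with the oid baked in, and `@cache` guarantees one subclass per oid. A sketch of the pattern, again with made-up names:

from functools import cache


class BaseDumper:
    oid = 0


@cache
def make_dumper(oid_in: int) -> type:
    class Dumper(BaseDumper):
        oid = oid_in                 # captured from the factory argument

    return Dumper


assert make_dumper(42) is make_dumper(42)
assert make_dumper(42).oid == 42
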
from ..pq import Format
from ..adapt import Buffer, Dumper, Loader, PyFormat, AdaptersMap
from ..errors import DataError
+from .._compat import cache
JsonDumpsFunction = Callable[[Any], Union[str, bytes]]
JsonLoadsFunction = Callable[[Union[str, bytes]], Any]
(Jsonb, PyFormat.BINARY),
(Jsonb, PyFormat.TEXT),
]
- dumper: Type[_JsonDumper]
for wrapper, format in grid:
base = _get_current_dumper(adapters, wrapper, format)
- name = base.__name__
- if not base.__name__.startswith("Custom"):
- name = f"Custom{name}"
- dumper = type(name, (base,), {"_dumps": dumps})
+ dumper = _make_dumper(base, dumps)
adapters.register_dumper(wrapper, dumper)
("jsonb", JsonbLoader),
("jsonb", JsonbBinaryLoader),
]
- loader: Type[_JsonLoader]
for tname, base in grid:
- loader = type(f"Custom{base.__name__}", (base,), {"_loads": loads})
+ loader = _make_loader(base, loads)
context.adapters.register_loader(tname, loader)
+# Cache all dynamically-generated types to avoid leaks in case the types
+# cannot be GC'd.
+
+
+@cache
+def _make_dumper(base: Type[abc.Dumper], dumps: JsonDumpsFunction) -> Type[abc.Dumper]:
+ name = base.__name__
+ if not name.startswith("Custom"):
+ name = f"Custom{name}"
+ return type(name, (base,), {"_dumps": dumps})
+
+
+@cache
+def _make_loader(base: Type[Loader], loads: JsonLoadsFunction) -> Type[Loader]:
+ name = base.__name__
+ if not name.startswith("Custom"):
+ name = f"Custom{name}"
+ return type(name, (base,), {"_loads": loads})
+
+
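One consequence of caching on `(base, dumps)` and `(base, loads)` is that the customization functions become part of the cache key: passing the same function object (e.g. a module-level serializer) reuses one Custom subclass, while passing a fresh `lambda` or `functools.partial` on every call still creates a new class each time, exactly as before the patch. A hedged illustration with demo-only names:

import json
from functools import cache


@cache
def make_custom(base: type, dumps) -> type:
    # staticmethod avoids implicit binding if the attribute is read via an instance.
    return type(f"Custom{base.__name__}", (base,), {"_dumps": staticmethod(dumps)})


class DemoDumper:
    pass


assert make_custom(DemoDumper, json.dumps) is make_custom(DemoDumper, json.dumps)
# Distinct lambdas hash differently, so each call builds a new class:
assert make_custom(DemoDumper, lambda o: json.dumps(o)) is not make_custom(
    DemoDumper, lambda o: json.dumps(o)
)
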
class _JsonWrapper:
__slots__ = ("obj", "dumps")
from ..pq import Format
from ..abc import AdaptContext, Buffer, Dumper, DumperKey
from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
+from .._compat import cache
from .._struct import pack_len, unpack_len
from ..postgres import INVALID_OID, TEXT_OID
from .._typeinfo import MultirangeInfo as MultirangeInfo # exported here
adapters = context.adapters if context else postgres.adapters
# generate and register a customized text loader
- loader: Type[MultirangeLoader[Any]] = type(
- f"{info.name.title()}Loader",
- (MultirangeLoader,),
- {"subtype_oid": info.subtype_oid},
- )
+ loader: Type[BaseMultirangeLoader[Any]]
+ loader = _make_loader(info.name, info.subtype_oid)
adapters.register_loader(info.oid, loader)
# generate and register a customized binary loader
- bloader: Type[MultirangeBinaryLoader[Any]] = type(
- f"{info.name.title()}BinaryLoader",
- (MultirangeBinaryLoader,),
- {"subtype_oid": info.subtype_oid},
+ loader = _make_binary_loader(info.name, info.subtype_oid)
+ adapters.register_loader(info.oid, loader)
+
+
+# Cache all dynamically-generated types to avoid leaks in case the types
+# cannot be GC'd.
+
+
+@cache
+def _make_loader(name: str, oid: int) -> Type[MultirangeLoader[Any]]:
+ return type(f"{name.title()}Loader", (MultirangeLoader,), {"subtype_oid": oid})
+
+
+@cache
+def _make_binary_loader(name: str, oid: int) -> Type[MultirangeBinaryLoader[Any]]:
+ return type(
+ f"{name.title()}BinaryLoader", (MultirangeBinaryLoader,), {"subtype_oid": oid}
)
- adapters.register_loader(info.oid, bloader)
# Text dumpers for builtin multirange types wrappers
from ..pq import Format
from ..abc import AdaptContext, Buffer, Dumper, DumperKey
from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat
+from .._compat import cache
from .._struct import pack_len, unpack_len
from ..postgres import INVALID_OID, TEXT_OID
from .._typeinfo import RangeInfo as RangeInfo # exported here
adapters = context.adapters if context else postgres.adapters
# generate and register a customized text loader
- loader: Type[RangeLoader[Any]] = type(
- f"{info.name.title()}Loader",
- (RangeLoader,),
- {"subtype_oid": info.subtype_oid},
- )
+ loader: Type[BaseRangeLoader[Any]]
+ loader = _make_loader(info.name, info.subtype_oid)
adapters.register_loader(info.oid, loader)
# generate and register a customized binary loader
- bloader: Type[RangeBinaryLoader[Any]] = type(
- f"{info.name.title()}BinaryLoader",
- (RangeBinaryLoader,),
- {"subtype_oid": info.subtype_oid},
+ loader = _make_binary_loader(info.name, info.subtype_oid)
+ adapters.register_loader(info.oid, loader)
+
+
+# Cache all dynamically-generated types to avoid leaks in case the types
+# cannot be GC'd.
+
+
+@cache
+def _make_loader(name: str, oid: int) -> Type[RangeLoader[Any]]:
+ return type(f"{name.title()}Loader", (RangeLoader,), {"subtype_oid": oid})
+
+
+@cache
+def _make_binary_loader(name: str, oid: int) -> Type[RangeBinaryLoader[Any]]:
+ return type(
+ f"{name.title()}BinaryLoader", (RangeBinaryLoader,), {"subtype_oid": oid}
)
- adapters.register_loader(info.oid, bloader)
# Text dumpers for builtin range types wrappers
Adapters for PostGIS geometries
"""
-from typing import Optional
+from typing import Optional, Type
from .. import postgres
from ..abc import AdaptContext, Buffer
from ..adapt import Dumper, Loader
from ..pq import Format
+from .._compat import cache
from .._typeinfo import TypeInfo
info.register(context)
adapters = context.adapters if context else postgres.adapters
+ adapters.register_loader(info.oid, GeometryBinaryLoader)
+ adapters.register_loader(info.oid, GeometryLoader)
+ # Default binary dump
+ adapters.register_dumper(BaseGeometry, _make_dumper(info.oid))
+ adapters.register_dumper(BaseGeometry, _make_binary_dumper(info.oid))
+
+
+# Cache all dynamically-generated types to avoid leaks in case the types
+# cannot be GC'd.
+
+
+@cache
+def _make_dumper(oid_in: int) -> Type[BaseGeometryDumper]:
class GeometryDumper(BaseGeometryDumper):
- oid = info.oid
+ oid = oid_in
+
+ return GeometryDumper
+
+
+@cache
+def _make_binary_dumper(oid_in: int) -> Type[BaseGeometryBinaryDumper]:
class GeometryBinaryDumper(BaseGeometryBinaryDumper):
- oid = info.oid
+ oid = oid_in
- adapters.register_loader(info.oid, GeometryBinaryLoader)
- adapters.register_loader(info.oid, GeometryLoader)
- # Default binary dump
- adapters.register_dumper(BaseGeometry, GeometryDumper)
- adapters.register_dumper(BaseGeometry, GeometryBinaryDumper)
+ return GeometryBinaryDumper
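
A trade-off worth noting: `@cache` keeps a strong reference to every generated class for the life of the process, which is the point (once registered in a global adapters map the classes could not be collected anyway), but test suites that register many throwaway types may want to reset the caches. Assuming `_compat.cache` wraps `functools.lru_cache`, the standard introspection hooks are available:

from functools import cache


@cache
def make_type(name: str) -> type:
    return type(name, (), {})


make_type("A")
make_type("A")                   # served from the cache
make_type("B")
print(make_type.cache_info())    # CacheInfo(hits=1, misses=2, maxsize=None, currsize=2)
make_type.cache_clear()          # drop the generated classes, e.g. between test runs
assert make_type.cache_info().currsize == 0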