feat: add Transformer.from_context() method
author    Daniele Varrazzo <daniele.varrazzo@gmail.com>
Sat, 8 Jan 2022 23:01:56 +0000 (00:01 +0100)
committer Daniele Varrazzo <daniele.varrazzo@gmail.com>
Tue, 10 May 2022 17:13:26 +0000 (19:13 +0200)
Allow reusing the same transformer in nested contexts, instead of
creating new ones. Even though transformer copies are shallow, we may
end up creating several in certain places.
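
For illustration, a minimal sketch of the new behaviour (assuming the
psycopg.adapt.Transformer export already used elsewhere in this diff;
the asserts are illustrative only, not part of the change):

    from psycopg.adapt import Transformer

    tx = Transformer()

    # A context that is already a Transformer is returned as-is, so
    # nested contexts reuse the same instance instead of copying it.
    assert Transformer.from_context(tx) is tx

    # Any other context (None, a connection, ...) still builds a new one.
    assert Transformer.from_context(None) is not tx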

psycopg/psycopg/_transform.py
psycopg/psycopg/abc.py
psycopg/psycopg/adapt.py
psycopg/psycopg/sql.py
psycopg/psycopg/types/composite.py
psycopg_c/psycopg_c/_psycopg.pyi
psycopg_c/psycopg_c/_psycopg/transform.pyx

psycopg/psycopg/_transform.py
index 11194558be4de71c894dacd78c8a2f4902e66735..4f93f3639e3c6bb70cce1f8d69d51caf68a1ca4d 100644
@@ -75,6 +75,18 @@ class Transformer(AdaptContext):
         # the length of the result columns
         self._row_loaders: List[LoadFunc] = []
 
+    @classmethod
+    def from_context(cls, context: Optional[AdaptContext]) -> "Transformer":
+        """
+        Return a Transformer from an AdaptContext.
+
+        If the context is a Transformer instance, just return it.
+        """
+        if isinstance(context, Transformer):
+            return context
+        else:
+            return cls(context)
+
     @property
     def connection(self) -> Optional["BaseConnection[Any]"]:
         return self._conn
psycopg/psycopg/abc.py
index d23873fe56e25edfe5102f5d0d93af20c4b5ddee..0bbcebe29d51e5bfc4b62d66c01182465163b8b1 100644
@@ -196,6 +196,10 @@ class Transformer(Protocol):
     def __init__(self, context: Optional[AdaptContext] = None):
         ...
 
+    @classmethod
+    def from_context(cls, context: Optional[AdaptContext]) -> "Transformer":
+        ...
+
     @property
     def connection(self) -> Optional["BaseConnection[Any]"]:
         ...
psycopg/psycopg/adapt.py
index 8341a5a29981c14a9a6327ecba453c245dad440d..ccf89ce2fbb6a6b9800c58ace717073b7f00c9db 100644
@@ -151,7 +151,7 @@ class RecursiveDumper(Dumper):
 
     def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None):
         super().__init__(cls, context)
-        self._tx = Transformer(context)
+        self._tx = Transformer.from_context(context)
 
 
 class RecursiveLoader(Loader):
@@ -159,4 +159,4 @@ class RecursiveLoader(Loader):
 
     def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None):
         super().__init__(oid, context)
-        self._tx = Transformer(context)
+        self._tx = Transformer.from_context(context)
psycopg/psycopg/sql.py
index ade3e4de4bbb4b5101c79d578affa898848edd59..39ecfc9e821fd92aff2127bf640c899d55ecb36b 100644
@@ -390,7 +390,7 @@ class Literal(Composable):
     """
 
     def as_bytes(self, context: Optional[AdaptContext]) -> bytes:
-        tx = Transformer(context)
+        tx = Transformer.from_context(context)
         dumper = tx.get_dumper(self._obj, PyFormat.TEXT)
         return dumper.quote(self._obj)
 
psycopg/psycopg/types/composite.py
index a5340993e048091b7621678014a4459ee9232e2a..4174854b1e8d6a83651f126eefbaed011efddb40 100644
@@ -13,7 +13,7 @@ from typing import Sequence, Tuple, Type
 from .. import pq
 from .. import postgres
 from ..abc import AdaptContext, Buffer
-from ..adapt import PyFormat, RecursiveDumper, RecursiveLoader
+from ..adapt import Transformer, PyFormat, RecursiveDumper, Loader
 from .._struct import pack_len, unpack_len
 from ..postgres import TEXT_OID
 from .._typeinfo import CompositeInfo as CompositeInfo  # exported here
@@ -94,7 +94,11 @@ class TupleBinaryDumper(RecursiveDumper):
         return out
 
 
-class BaseCompositeLoader(RecursiveLoader):
+class BaseCompositeLoader(Loader):
+    def __init__(self, oid: int, context: Optional[AdaptContext] = None):
+        super().__init__(oid, context)
+        self._tx = Transformer(context)
+
     def _parse_record(self, data: bytes) -> Iterator[Optional[bytes]]:
         """
         Split a non-empty representation of a composite type into components.
@@ -138,11 +142,14 @@ class RecordLoader(BaseCompositeLoader):
         )
 
 
-class RecordBinaryLoader(RecursiveLoader):
-
+class RecordBinaryLoader(Loader):
     format = pq.Format.BINARY
     _types_set = False
 
+    def __init__(self, oid: int, context: Optional[AdaptContext] = None):
+        super().__init__(oid, context)
+        self._tx = Transformer(context)
+
     def load(self, data: Buffer) -> Tuple[Any, ...]:
         if not self._types_set:
             self._config_types(data)
psycopg_c/psycopg_c/_psycopg.pyi
index 6668e337f4b8396e9217cc873e4e7688b7a6ce0f..3bcd4961d76b5d5346d3565edb8f23c82d5f805c 100644
@@ -22,6 +22,8 @@ class Transformer(abc.AdaptContext):
     types: Optional[Tuple[int, ...]]
     formats: Optional[List[pq.Format]]
     def __init__(self, context: Optional[abc.AdaptContext] = None): ...
+    @classmethod
+    def from_context(cls, context: Optional[abc.AdaptContext]) -> "Transformer": ...
     @property
     def connection(self) -> Optional[BaseConnection[Any]]: ...
     @property
psycopg_c/psycopg_c/_psycopg/transform.pyx
index ffd8bf99ed2e664b9af18c8b2ad3fe8ce9c3e26d..9544de84f0fae65c05f8133e4efe4e35024fc30a 100644
@@ -104,6 +104,18 @@ cdef class Transformer:
 
         self.types = self.formats = None
 
+    @classmethod
+    def from_context(cls, context: Optional["AdaptContext"]):
+        """
+        Return a Transformer from an AdaptContext.
+
+        If the context is a Transformer instance, just return it.
+        """
+        if isinstance(context, Transformer):
+            return context
+        else:
+            return cls(context)
+
     @property
     def pgresult(self) -> Optional[PGresult]:
         return self._pgresult