git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
Merge branch 'master' into row-factory
author    Daniele Varrazzo <daniele.varrazzo@gmail.com>
          Fri, 12 Feb 2021 01:36:01 +0000 (02:36 +0100)
committer Daniele Varrazzo <daniele.varrazzo@gmail.com>
          Fri, 12 Feb 2021 01:36:01 +0000 (02:36 +0100)
psycopg3/psycopg3/_transform.py
psycopg3/psycopg3/cursor.py
psycopg3/psycopg3/proto.py
psycopg3/psycopg3/server_cursor.py
psycopg3_c/psycopg3_c/_psycopg3.pyi
psycopg3_c/psycopg3_c/_psycopg3/transform.pyx

Simple merge
index c24a3f985ceadd4e8ca84e87ce5ca00e03aa4f3e,807ee1f422401e642eac16c34612bf0351100fcb..db87c83d247be08a1517849651281d4ceef08282
@@@ -277,11 -270,8 +276,10 @@@ class BaseCursor(Generic[ConnectionType
              return None
  
          elif res.status == ExecStatus.SINGLE_TUPLE:
-             if self._row_factory:
+             self.pgresult = res
+             self._tx.set_pgresult(res, set_loaders=first)
++            if first and self._row_factory:
 +                self._tx.make_row = self._row_factory(self)
-             self.pgresult = res  # will set it on the transformer too
-             # TODO: the transformer may do excessive work here: create a
-             # path that doesn't clear the loaders every time.
              return res
  
          elif res.status in (ExecStatus.TUPLES_OK, ExecStatus.COMMAND_OK):
  
          self._results = list(results)
          self.pgresult = results[0]
+         self._tx.set_pgresult(results[0])
 +        if self._row_factory:
 +            self._tx.make_row = self._row_factory(self)
          nrows = self.pgresult.command_tuples
          if nrows is not None:
              if self._rowcount < 0:
@@@ -527,8 -518,9 +528,9 @@@ class Cursor(BaseCursor["Connection"])
                  rec = self._tx.load_row(0)
                  assert rec is not None
                  yield rec
+                 first = False
  
 -    def fetchone(self) -> Optional[Sequence[Any]]:
 +    def fetchone(self) -> Optional[Row]:
          """
          Return the next record from the current recordset.
  
@@@ -643,15 -635,17 +645,17 @@@ class AsyncCursor(BaseCursor["AsyncConn
  
      async def stream(
          self, query: Query, params: Optional[Params] = None
 -    ) -> AsyncIterator[Sequence[Any]]:
 +    ) -> AsyncIterator[Row]:
          async with self._conn.lock:
              await self._conn.wait(self._stream_send_gen(query, params))
-             while await self._conn.wait(self._stream_fetchone_gen()):
+             first = True
+             while await self._conn.wait(self._stream_fetchone_gen(first)):
                  rec = self._tx.load_row(0)
                  assert rec is not None
                  yield rec
+                 first = False
  
 -    async def fetchone(self) -> Optional[Sequence[Any]]:
 +    async def fetchone(self) -> Optional[Row]:
          self._check_result()
          rv = self._tx.load_row(self._pos)
          if rv is not None:
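
The cursor.py changes above wire a user-supplied row factory into the transformer: self._row_factory(self) is called once per result set and the returned callable is stored as self._tx.make_row, which load_row() then uses to build each record. A minimal, self-contained sketch of that contract, assuming a factory takes the cursor and returns a callable mapping a loaded value sequence to a row object (Column and FakeCursor below are illustrative stand-ins, not psycopg3 API):

    from typing import Any, Callable, NamedTuple, Sequence

    RowMaker = Callable[[Sequence[Any]], Any]

    class Column(NamedTuple):
        name: str

    class FakeCursor:
        # stand-in for the real cursor; only .description is needed here
        description = [Column("id"), Column("title")]

    def dict_row(cursor: FakeCursor) -> RowMaker:
        # Read the column names once, when a new result set becomes
        # available, mirroring the "if first and self._row_factory" branch.
        names = [c.name for c in cursor.description]

        def make_row(values: Sequence[Any]) -> dict:
            return dict(zip(names, values))

        return make_row

    make_row = dict_row(FakeCursor())
    print(make_row([1, "hello"]))  # {'id': 1, 'title': 'hello'}
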
Simple merge
Simple merge
index fc603dd3e390de68249706e13d5e276b3c2d6d49,5b9adf88a5604671e1305c88ef24d78c34c5a2d7..756c34e63ff0085447064c5b94884d43b7685f74
@@@ -22,13 -22,10 +22,14 @@@ class Transformer(proto.AdaptContext)
      @property
      def adapters(self) -> AdaptersMap: ...
      @property
 +    def make_row(self) -> Optional[proto.RowMaker]: ...
 +    @make_row.setter
 +    def make_row(self, row_maker: proto.RowMaker) -> None: ...
 +    @property
      def pgresult(self) -> Optional[PGresult]: ...
-     @pgresult.setter
-     def pgresult(self, result: Optional[PGresult]) -> None: ...
+     def set_pgresult(
+         self, result: Optional["PGresult"], set_loaders: bool = True
+     ) -> None: ...
      def set_row_types(
          self, types: Sequence[int], formats: Sequence[pq.Format]
      ) -> None: ...
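
The stub above replaces the pgresult setter with set_pgresult(result, set_loaders=...), which is what allows stream() to pass set_loaders=first and reuse the per-column loaders across consecutive SINGLE_TUPLE results instead of rebuilding them for every row. A rough sketch of why that flag helps, using toy classes rather than the real Transformer:

    from typing import Any, Callable, List, Optional, Sequence

    class FakeResult:
        # stand-in for a PGresult holding a single text row
        def __init__(self, row: Sequence[str]) -> None:
            self.row = row

    class MiniTransformer:
        # caches per-column loaders so a streaming loop can skip rebuilding them
        def __init__(self) -> None:
            self.loaders: Optional[List[Callable[[str], Any]]] = None
            self.make_row: Callable[[Sequence[Any]], Any] = tuple
            self.pgresult: Optional[FakeResult] = None

        def set_pgresult(self, result: FakeResult, set_loaders: bool = True) -> None:
            self.pgresult = result
            if set_loaders:
                # pretend every column is an int for the demo
                self.loaders = [int for _ in result.row]

        def load_row(self, n: int) -> Any:
            # n is ignored in this toy: FakeResult holds one row only
            assert self.pgresult is not None and self.loaders is not None
            values = [load(v) for load, v in zip(self.loaders, self.pgresult.row)]
            return self.make_row(values)

    tx = MiniTransformer()
    first = True
    for res in (FakeResult(["1", "2"]), FakeResult(["3", "4"])):
        tx.set_pgresult(res, set_loaders=first)  # loaders built only for the first result
        print(tx.load_row(0))
        first = False
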