git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
chore: bump black to 24.1.0
author    Daniele Varrazzo <daniele.varrazzo@gmail.com>
Fri, 26 Jan 2024 23:44:49 +0000 (23:44 +0000)
committer Daniele Varrazzo <daniele.varrazzo@gmail.com>
Fri, 26 Jan 2024 23:53:28 +0000 (23:53 +0000)
39 files changed:
.flake8
psycopg/.flake8
psycopg/psycopg/_copy_base.py
psycopg/psycopg/_struct.py
psycopg/psycopg/_typeinfo.py
psycopg/psycopg/abc.py
psycopg/psycopg/adapt.py
psycopg/psycopg/connection.py
psycopg/psycopg/connection_async.py
psycopg/psycopg/cursor.py
psycopg/psycopg/cursor_async.py
psycopg/psycopg/pq/abc.py
psycopg/psycopg/rows.py
psycopg/psycopg/server_cursor.py
psycopg/psycopg/types/enum.py
psycopg/psycopg/types/multirange.py
psycopg/psycopg/types/numeric.py
psycopg/setup.cfg
psycopg_c/.flake8
psycopg_pool/.flake8
psycopg_pool/psycopg_pool/null_pool.py
psycopg_pool/psycopg_pool/pool.py
psycopg_pool/psycopg_pool/pool_async.py
psycopg_pool/psycopg_pool/sched.py
tests/conftest.py
tests/constraints.txt
tests/pool/test_pool.py
tests/pool/test_pool_async.py
tests/pool/test_pool_common.py
tests/pool/test_pool_null.py
tests/pool/test_pool_null_async.py
tests/scripts/dectest.py
tests/scripts/pipeline-demo.py
tests/test_connection.py
tests/test_connection_async.py
tests/test_copy.py
tests/test_cursor_common.py
tests/test_tpc.py
tests/types/test_numeric.py

diff --git a/.flake8 b/.flake8
index ec4053fb2be8d6fa8f90aeb773b09aab0c3e2579..d2473a1ae417a99c6d018cb6508ddabf37c63317 100644 (file)
--- a/.flake8
+++ b/.flake8
@@ -1,6 +1,6 @@
 [flake8]
 max-line-length = 88
-ignore = W503, E203
+ignore = W503, E203, E704
 extend-exclude = .venv build
 per-file-ignores =
     # Autogenerated section
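
Note on the new E704 code above: black 24.1's stable style collapses function stubs whose body is only an ellipsis onto the def line (visible throughout the rest of this diff), and flake8 reports that layout as E704 ("statement on same line as def"), hence the extra ignore in every .flake8 file touched here. A minimal illustrative sketch, loosely modelled on the Formatter stubs changed below rather than copied from the repository:

from abc import ABC, abstractmethod
from typing import Any, Optional, Tuple


class Formatter(ABC):
    # black < 24 kept the ellipsis body of a stub on its own line:
    #
    #     @abstractmethod
    #     def parse_row(self, data: bytes) -> Optional[Tuple[Any, ...]]:
    #         ...
    #
    # black >= 24.1 collapses it onto the signature, which flake8 flags
    # as E704 unless that code is ignored:
    @abstractmethod
    def parse_row(self, data: bytes) -> Optional[Tuple[Any, ...]]: ...

Presumably for a related reason, the ellipsis-only class bodies in the test suite were switched to pass: a class collapsed onto one line would trip E701 ("multiple statements on one line"), which is not in the ignore list.
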
diff --git a/psycopg/.flake8 b/psycopg/.flake8
index 67fb0245c38715e659df23790d909f6f59dce3d1..33b08d768ceb0002ba839d2c779d8f3c2320287d 100644 (file)
--- a/psycopg/.flake8
+++ b/psycopg/.flake8
@@ -1,6 +1,6 @@
 [flake8]
 max-line-length = 88
-ignore = W503, E203
+ignore = W503, E203, E704
 per-file-ignores =
     # Autogenerated section
     psycopg/errors.py: E125, E128, E302
diff --git a/psycopg/psycopg/_copy_base.py b/psycopg/psycopg/_copy_base.py
index 140744ff1c1e5e375d26c144dcec3f96576f914c..9194b266bca5c66434561a3027ca2b4da5507494 100644 (file)
--- a/psycopg/psycopg/_copy_base.py
+++ b/psycopg/psycopg/_copy_base.py
@@ -210,20 +210,16 @@ class Formatter(ABC):
         self._row_mode = False  # true if the user is using write_row()
 
     @abstractmethod
-    def parse_row(self, data: Buffer) -> Optional[Tuple[Any, ...]]:
-        ...
+    def parse_row(self, data: Buffer) -> Optional[Tuple[Any, ...]]: ...
 
     @abstractmethod
-    def write(self, buffer: Union[Buffer, str]) -> Buffer:
-        ...
+    def write(self, buffer: Union[Buffer, str]) -> Buffer: ...
 
     @abstractmethod
-    def write_row(self, row: Sequence[Any]) -> Buffer:
-        ...
+    def write_row(self, row: Sequence[Any]) -> Buffer: ...
 
     @abstractmethod
-    def end(self) -> Buffer:
-        ...
+    def end(self) -> Buffer: ...
 
 
 class TextFormatter(Formatter):
diff --git a/psycopg/psycopg/_struct.py b/psycopg/psycopg/_struct.py
index 5f5c3f2c27e7756b317e5c3b9a9906817b1ecabc..7232a20bd90b309ccbd17de66344ecce0410e18d 100644 (file)
--- a/psycopg/psycopg/_struct.py
+++ b/psycopg/psycopg/_struct.py
@@ -18,8 +18,7 @@ UnpackFloat: TypeAlias = Callable[[Buffer], Tuple[float]]
 
 
 class UnpackLen(Protocol):
-    def __call__(self, data: Buffer, start: Optional[int]) -> Tuple[int]:
-        ...
+    def __call__(self, data: Buffer, start: Optional[int]) -> Tuple[int]: ...
 
 
 pack_int2 = cast(PackInt, struct.Struct("!h").pack)
diff --git a/psycopg/psycopg/_typeinfo.py b/psycopg/psycopg/_typeinfo.py
index 68eea41e01e7399b346bd612bdf439b9745af44b..fc170492a5683340fe19fc8223f6040bdaa56fda 100644 (file)
--- a/psycopg/psycopg/_typeinfo.py
+++ b/psycopg/psycopg/_typeinfo.py
@@ -58,15 +58,13 @@ class TypeInfo:
     @classmethod
     def fetch(
         cls: Type[T], conn: "Connection[Any]", name: Union[str, sql.Identifier]
-    ) -> Optional[T]:
-        ...
+    ) -> Optional[T]: ...
 
     @overload
     @classmethod
     async def fetch(
         cls: Type[T], conn: "AsyncConnection[Any]", name: Union[str, sql.Identifier]
-    ) -> Optional[T]:
-        ...
+    ) -> Optional[T]: ...
 
     @classmethod
     def fetch(
@@ -238,12 +236,10 @@ class TypesRegistry:
                 yield t
 
     @overload
-    def __getitem__(self, key: Union[str, int]) -> TypeInfo:
-        ...
+    def __getitem__(self, key: Union[str, int]) -> TypeInfo: ...
 
     @overload
-    def __getitem__(self, key: Tuple[Type[T], int]) -> T:
-        ...
+    def __getitem__(self, key: Tuple[Type[T], int]) -> T: ...
 
     def __getitem__(self, key: RegistryKey) -> TypeInfo:
         """
@@ -264,12 +260,10 @@ class TypesRegistry:
             raise KeyError(f"couldn't find the type {key!r} in the types registry")
 
     @overload
-    def get(self, key: Union[str, int]) -> Optional[TypeInfo]:
-        ...
+    def get(self, key: Union[str, int]) -> Optional[TypeInfo]: ...
 
     @overload
-    def get(self, key: Tuple[Type[T], int]) -> Optional[T]:
-        ...
+    def get(self, key: Tuple[Type[T], int]) -> Optional[T]: ...
 
     def get(self, key: RegistryKey) -> Optional[TypeInfo]:
         """
diff --git a/psycopg/psycopg/abc.py b/psycopg/psycopg/abc.py
index 1e0b3e5038fe0bf3945e53482b530d24af8f9944..58111ff23510e9aae8c49e7896b477f6d7e5236b 100644 (file)
--- a/psycopg/psycopg/abc.py
+++ b/psycopg/psycopg/abc.py
@@ -57,8 +57,7 @@ class WaitFunc(Protocol):
 
     def __call__(
         self, gen: PQGen[RV], fileno: int, timeout: Optional[float] = None
-    ) -> RV:
-        ...
+    ) -> RV: ...
 
 
 # Adaptation types
@@ -109,8 +108,7 @@ class Dumper(Protocol):
     oid: int
     """The oid to pass to the server, if known; 0 otherwise (class attribute)."""
 
-    def __init__(self, cls: type, context: Optional[AdaptContext] = None):
-        ...
+    def __init__(self, cls: type, context: Optional[AdaptContext] = None): ...
 
     def dump(self, obj: Any) -> Buffer:
         """Convert the object `!obj` to PostgreSQL representation.
@@ -190,8 +188,7 @@ class Loader(Protocol):
     This is a class attribute.
     """
 
-    def __init__(self, oid: int, context: Optional[AdaptContext] = None):
-        ...
+    def __init__(self, oid: int, context: Optional[AdaptContext] = None): ...
 
     def load(self, data: Buffer) -> Any:
         """
@@ -206,28 +203,22 @@ class Transformer(Protocol):
     types: Optional[Tuple[int, ...]]
     formats: Optional[List[pq.Format]]
 
-    def __init__(self, context: Optional[AdaptContext] = None):
-        ...
+    def __init__(self, context: Optional[AdaptContext] = None): ...
 
     @classmethod
-    def from_context(cls, context: Optional[AdaptContext]) -> "Transformer":
-        ...
+    def from_context(cls, context: Optional[AdaptContext]) -> "Transformer": ...
 
     @property
-    def connection(self) -> Optional["BaseConnection[Any]"]:
-        ...
+    def connection(self) -> Optional["BaseConnection[Any]"]: ...
 
     @property
-    def encoding(self) -> str:
-        ...
+    def encoding(self) -> str: ...
 
     @property
-    def adapters(self) -> "AdaptersMap":
-        ...
+    def adapters(self) -> "AdaptersMap": ...
 
     @property
-    def pgresult(self) -> Optional["PGresult"]:
-        ...
+    def pgresult(self) -> Optional["PGresult"]: ...
 
     def set_pgresult(
         self,
@@ -235,34 +226,26 @@ class Transformer(Protocol):
         *,
         set_loaders: bool = True,
         format: Optional[pq.Format] = None
-    ) -> None:
-        ...
+    ) -> None: ...
 
-    def set_dumper_types(self, types: Sequence[int], format: pq.Format) -> None:
-        ...
+    def set_dumper_types(self, types: Sequence[int], format: pq.Format) -> None: ...
 
-    def set_loader_types(self, types: Sequence[int], format: pq.Format) -> None:
-        ...
+    def set_loader_types(self, types: Sequence[int], format: pq.Format) -> None: ...
 
     def dump_sequence(
         self, params: Sequence[Any], formats: Sequence[PyFormat]
-    ) -> Sequence[Optional[Buffer]]:
-        ...
+    ) -> Sequence[Optional[Buffer]]: ...
 
-    def as_literal(self, obj: Any) -> bytes:
-        ...
+    def as_literal(self, obj: Any) -> bytes: ...
 
-    def get_dumper(self, obj: Any, format: PyFormat) -> Dumper:
-        ...
+    def get_dumper(self, obj: Any, format: PyFormat) -> Dumper: ...
 
-    def load_rows(self, row0: int, row1: int, make_row: "RowMaker[Row]") -> List["Row"]:
-        ...
+    def load_rows(
+        self, row0: int, row1: int, make_row: "RowMaker[Row]"
+    ) -> List["Row"]: ...
 
-    def load_row(self, row: int, make_row: "RowMaker[Row]") -> Optional["Row"]:
-        ...
+    def load_row(self, row: int, make_row: "RowMaker[Row]") -> Optional["Row"]: ...
 
-    def load_sequence(self, record: Sequence[Optional[Buffer]]) -> Tuple[Any, ...]:
-        ...
+    def load_sequence(self, record: Sequence[Optional[Buffer]]) -> Tuple[Any, ...]: ...
 
-    def get_loader(self, oid: int, format: pq.Format) -> Loader:
-        ...
+    def get_loader(self, oid: int, format: pq.Format) -> Loader: ...
diff --git a/psycopg/psycopg/adapt.py b/psycopg/psycopg/adapt.py
index 31a7104296d660bd8f3b834b19afa70de78056f0..7d6a191d8d68df009a9573a393bca0ad733259e1 100644 (file)
--- a/psycopg/psycopg/adapt.py
+++ b/psycopg/psycopg/adapt.py
@@ -46,8 +46,7 @@ class Dumper(abc.Dumper, ABC):
         )
 
     @abstractmethod
-    def dump(self, obj: Any) -> Buffer:
-        ...
+    def dump(self, obj: Any) -> Buffer: ...
 
     def quote(self, obj: Any) -> Buffer:
         """
diff --git a/psycopg/psycopg/connection.py b/psycopg/psycopg/connection.py
index 12873bbb3856ab3976b811cf52972d26b65bb393..c7f1519eeb80279d753cfe23402d709cb355e401 100644 (file)
--- a/psycopg/psycopg/connection.py
+++ b/psycopg/psycopg/connection.py
@@ -165,14 +165,12 @@ class Connection(BaseConnection[Row]):
         self.pgconn.finish()
 
     @overload
-    def cursor(self, *, binary: bool = False) -> Cursor[Row]:
-        ...
+    def cursor(self, *, binary: bool = False) -> Cursor[Row]: ...
 
     @overload
     def cursor(
         self, *, binary: bool = False, row_factory: RowFactory[CursorRow]
-    ) -> Cursor[CursorRow]:
-        ...
+    ) -> Cursor[CursorRow]: ...
 
     @overload
     def cursor(
@@ -182,8 +180,7 @@ class Connection(BaseConnection[Row]):
         binary: bool = False,
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ) -> ServerCursor[Row]:
-        ...
+    ) -> ServerCursor[Row]: ...
 
     @overload
     def cursor(
@@ -194,8 +191,7 @@ class Connection(BaseConnection[Row]):
         row_factory: RowFactory[CursorRow],
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ) -> ServerCursor[CursorRow]:
-        ...
+    ) -> ServerCursor[CursorRow]: ...
 
     def cursor(
         self,
diff --git a/psycopg/psycopg/connection_async.py b/psycopg/psycopg/connection_async.py
index 2f28fc95305077fe94f0cb0dfa715c159c5265f1..585888d9d583daf3172ccf64904996b00a53c6ff 100644 (file)
--- a/psycopg/psycopg/connection_async.py
+++ b/psycopg/psycopg/connection_async.py
@@ -180,14 +180,12 @@ class AsyncConnection(BaseConnection[Row]):
         self.pgconn.finish()
 
     @overload
-    def cursor(self, *, binary: bool = False) -> AsyncCursor[Row]:
-        ...
+    def cursor(self, *, binary: bool = False) -> AsyncCursor[Row]: ...
 
     @overload
     def cursor(
         self, *, binary: bool = False, row_factory: AsyncRowFactory[CursorRow]
-    ) -> AsyncCursor[CursorRow]:
-        ...
+    ) -> AsyncCursor[CursorRow]: ...
 
     @overload
     def cursor(
@@ -197,8 +195,7 @@ class AsyncConnection(BaseConnection[Row]):
         binary: bool = False,
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ) -> AsyncServerCursor[Row]:
-        ...
+    ) -> AsyncServerCursor[Row]: ...
 
     @overload
     def cursor(
@@ -209,8 +206,7 @@ class AsyncConnection(BaseConnection[Row]):
         row_factory: AsyncRowFactory[CursorRow],
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ) -> AsyncServerCursor[CursorRow]:
-        ...
+    ) -> AsyncServerCursor[CursorRow]: ...
 
     def cursor(
         self,
diff --git a/psycopg/psycopg/cursor.py b/psycopg/psycopg/cursor.py
index 10741c95fd2eec7c4dd8f408c8a7c236986b8d98..6b48929bce9348de6c5f557e3179f04eb8eacd44 100644 (file)
--- a/psycopg/psycopg/cursor.py
+++ b/psycopg/psycopg/cursor.py
@@ -34,12 +34,12 @@ class Cursor(BaseCursor["Connection[Any]", Row]):
     __slots__ = ()
 
     @overload
-    def __init__(self, connection: Connection[Row]):
-        ...
+    def __init__(self, connection: Connection[Row]): ...
 
     @overload
-    def __init__(self, connection: Connection[Any], *, row_factory: RowFactory[Row]):
-        ...
+    def __init__(
+        self, connection: Connection[Any], *, row_factory: RowFactory[Row]
+    ): ...
 
     def __init__(
         self,
diff --git a/psycopg/psycopg/cursor_async.py b/psycopg/psycopg/cursor_async.py
index 603560155c2800b6cda9206a38da7df9ab33d044..55dc9a5c2616f29e38aa7ea68e9b5bc09e61cad8 100644 (file)
--- a/psycopg/psycopg/cursor_async.py
+++ b/psycopg/psycopg/cursor_async.py
@@ -31,14 +31,12 @@ class AsyncCursor(BaseCursor["AsyncConnection[Any]", Row]):
     __slots__ = ()
 
     @overload
-    def __init__(self, connection: AsyncConnection[Row]):
-        ...
+    def __init__(self, connection: AsyncConnection[Row]): ...
 
     @overload
     def __init__(
         self, connection: AsyncConnection[Any], *, row_factory: AsyncRowFactory[Row]
-    ):
-        ...
+    ): ...
 
     def __init__(
         self,
diff --git a/psycopg/psycopg/pq/abc.py b/psycopg/psycopg/pq/abc.py
index e16bf11ba4a9f8ce209442cd64ea6febc0575aee..13a077211204a396eb144eb11ad3a00bd05b5f86 100644 (file)
--- a/psycopg/psycopg/pq/abc.py
+++ b/psycopg/psycopg/pq/abc.py
@@ -22,112 +22,83 @@ class PGconn(Protocol):
     notify_handler: Optional[Callable[["PGnotify"], None]]
 
     @classmethod
-    def connect(cls, conninfo: bytes) -> "PGconn":
-        ...
+    def connect(cls, conninfo: bytes) -> "PGconn": ...
 
     @classmethod
-    def connect_start(cls, conninfo: bytes) -> "PGconn":
-        ...
+    def connect_start(cls, conninfo: bytes) -> "PGconn": ...
 
-    def connect_poll(self) -> int:
-        ...
+    def connect_poll(self) -> int: ...
 
-    def finish(self) -> None:
-        ...
+    def finish(self) -> None: ...
 
     @property
-    def info(self) -> List["ConninfoOption"]:
-        ...
+    def info(self) -> List["ConninfoOption"]: ...
 
-    def reset(self) -> None:
-        ...
+    def reset(self) -> None: ...
 
-    def reset_start(self) -> None:
-        ...
+    def reset_start(self) -> None: ...
 
-    def reset_poll(self) -> int:
-        ...
+    def reset_poll(self) -> int: ...
 
     @classmethod
-    def ping(self, conninfo: bytes) -> int:
-        ...
+    def ping(self, conninfo: bytes) -> int: ...
 
     @property
-    def db(self) -> bytes:
-        ...
+    def db(self) -> bytes: ...
 
     @property
-    def user(self) -> bytes:
-        ...
+    def user(self) -> bytes: ...
 
     @property
-    def password(self) -> bytes:
-        ...
+    def password(self) -> bytes: ...
 
     @property
-    def host(self) -> bytes:
-        ...
+    def host(self) -> bytes: ...
 
     @property
-    def hostaddr(self) -> bytes:
-        ...
+    def hostaddr(self) -> bytes: ...
 
     @property
-    def port(self) -> bytes:
-        ...
+    def port(self) -> bytes: ...
 
     @property
-    def tty(self) -> bytes:
-        ...
+    def tty(self) -> bytes: ...
 
     @property
-    def options(self) -> bytes:
-        ...
+    def options(self) -> bytes: ...
 
     @property
-    def status(self) -> int:
-        ...
+    def status(self) -> int: ...
 
     @property
-    def transaction_status(self) -> int:
-        ...
+    def transaction_status(self) -> int: ...
 
-    def parameter_status(self, name: bytes) -> Optional[bytes]:
-        ...
+    def parameter_status(self, name: bytes) -> Optional[bytes]: ...
 
     @property
-    def error_message(self) -> bytes:
-        ...
+    def error_message(self) -> bytes: ...
 
     @property
-    def server_version(self) -> int:
-        ...
+    def server_version(self) -> int: ...
 
     @property
-    def socket(self) -> int:
-        ...
+    def socket(self) -> int: ...
 
     @property
-    def backend_pid(self) -> int:
-        ...
+    def backend_pid(self) -> int: ...
 
     @property
-    def needs_password(self) -> bool:
-        ...
+    def needs_password(self) -> bool: ...
 
     @property
-    def used_password(self) -> bool:
-        ...
+    def used_password(self) -> bool: ...
 
     @property
-    def ssl_in_use(self) -> bool:
-        ...
+    def ssl_in_use(self) -> bool: ...
 
-    def exec_(self, command: bytes) -> "PGresult":
-        ...
+    def exec_(self, command: bytes) -> "PGresult": ...
 
-    def send_query(self, command: bytes) -> None:
-        ...
+    def send_query(self, command: bytes) -> None: ...
 
     def exec_params(
         self,
@@ -136,8 +107,7 @@ class PGconn(Protocol):
         param_types: Optional[Sequence[int]] = None,
         param_formats: Optional[Sequence[int]] = None,
         result_format: int = Format.TEXT,
-    ) -> "PGresult":
-        ...
+    ) -> "PGresult": ...
 
     def send_query_params(
         self,
@@ -146,16 +116,14 @@ class PGconn(Protocol):
         param_types: Optional[Sequence[int]] = None,
         param_formats: Optional[Sequence[int]] = None,
         result_format: int = Format.TEXT,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     def send_prepare(
         self,
         name: bytes,
         command: bytes,
         param_types: Optional[Sequence[int]] = None,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     def send_query_prepared(
         self,
@@ -163,16 +131,14 @@ class PGconn(Protocol):
         param_values: Optional[Sequence[Optional[Buffer]]],
         param_formats: Optional[Sequence[int]] = None,
         result_format: int = Format.TEXT,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     def prepare(
         self,
         name: bytes,
         command: bytes,
         param_types: Optional[Sequence[int]] = None,
-    ) -> "PGresult":
-        ...
+    ) -> "PGresult": ...
 
     def exec_prepared(
         self,
@@ -180,216 +146,153 @@ class PGconn(Protocol):
         param_values: Optional[Sequence[Buffer]],
         param_formats: Optional[Sequence[int]] = None,
         result_format: int = 0,
-    ) -> "PGresult":
-        ...
+    ) -> "PGresult": ...
 
-    def describe_prepared(self, name: bytes) -> "PGresult":
-        ...
+    def describe_prepared(self, name: bytes) -> "PGresult": ...
 
-    def send_describe_prepared(self, name: bytes) -> None:
-        ...
+    def send_describe_prepared(self, name: bytes) -> None: ...
 
-    def describe_portal(self, name: bytes) -> "PGresult":
-        ...
+    def describe_portal(self, name: bytes) -> "PGresult": ...
 
-    def send_describe_portal(self, name: bytes) -> None:
-        ...
+    def send_describe_portal(self, name: bytes) -> None: ...
 
-    def close_prepared(self, name: bytes) -> "PGresult":
-        ...
+    def close_prepared(self, name: bytes) -> "PGresult": ...
 
-    def send_close_prepared(self, name: bytes) -> None:
-        ...
+    def send_close_prepared(self, name: bytes) -> None: ...
 
-    def close_portal(self, name: bytes) -> "PGresult":
-        ...
+    def close_portal(self, name: bytes) -> "PGresult": ...
 
-    def send_close_portal(self, name: bytes) -> None:
-        ...
+    def send_close_portal(self, name: bytes) -> None: ...
 
-    def get_result(self) -> Optional["PGresult"]:
-        ...
+    def get_result(self) -> Optional["PGresult"]: ...
 
-    def consume_input(self) -> None:
-        ...
+    def consume_input(self) -> None: ...
 
-    def is_busy(self) -> int:
-        ...
+    def is_busy(self) -> int: ...
 
     @property
-    def nonblocking(self) -> int:
-        ...
+    def nonblocking(self) -> int: ...
 
     @nonblocking.setter
-    def nonblocking(self, arg: int) -> None:
-        ...
+    def nonblocking(self, arg: int) -> None: ...
 
-    def flush(self) -> int:
-        ...
+    def flush(self) -> int: ...
 
-    def set_single_row_mode(self) -> None:
-        ...
+    def set_single_row_mode(self) -> None: ...
 
-    def get_cancel(self) -> "PGcancel":
-        ...
+    def get_cancel(self) -> "PGcancel": ...
 
-    def notifies(self) -> Optional["PGnotify"]:
-        ...
+    def notifies(self) -> Optional["PGnotify"]: ...
 
-    def put_copy_data(self, buffer: Buffer) -> int:
-        ...
+    def put_copy_data(self, buffer: Buffer) -> int: ...
 
-    def put_copy_end(self, error: Optional[bytes] = None) -> int:
-        ...
+    def put_copy_end(self, error: Optional[bytes] = None) -> int: ...
 
-    def get_copy_data(self, async_: int) -> Tuple[int, memoryview]:
-        ...
+    def get_copy_data(self, async_: int) -> Tuple[int, memoryview]: ...
 
-    def trace(self, fileno: int) -> None:
-        ...
+    def trace(self, fileno: int) -> None: ...
 
-    def set_trace_flags(self, flags: Trace) -> None:
-        ...
+    def set_trace_flags(self, flags: Trace) -> None: ...
 
-    def untrace(self) -> None:
-        ...
+    def untrace(self) -> None: ...
 
     def encrypt_password(
         self, passwd: bytes, user: bytes, algorithm: Optional[bytes] = None
-    ) -> bytes:
-        ...
+    ) -> bytes: ...
 
-    def make_empty_result(self, exec_status: int) -> "PGresult":
-        ...
+    def make_empty_result(self, exec_status: int) -> "PGresult": ...
 
     @property
-    def pipeline_status(self) -> int:
-        ...
+    def pipeline_status(self) -> int: ...
 
-    def enter_pipeline_mode(self) -> None:
-        ...
+    def enter_pipeline_mode(self) -> None: ...
 
-    def exit_pipeline_mode(self) -> None:
-        ...
+    def exit_pipeline_mode(self) -> None: ...
 
-    def pipeline_sync(self) -> None:
-        ...
+    def pipeline_sync(self) -> None: ...
 
-    def send_flush_request(self) -> None:
-        ...
+    def send_flush_request(self) -> None: ...
 
 
 class PGresult(Protocol):
-    def clear(self) -> None:
-        ...
+    def clear(self) -> None: ...
 
     @property
-    def status(self) -> int:
-        ...
+    def status(self) -> int: ...
 
     @property
-    def error_message(self) -> bytes:
-        ...
+    def error_message(self) -> bytes: ...
 
-    def error_field(self, fieldcode: int) -> Optional[bytes]:
-        ...
+    def error_field(self, fieldcode: int) -> Optional[bytes]: ...
 
     @property
-    def ntuples(self) -> int:
-        ...
+    def ntuples(self) -> int: ...
 
     @property
-    def nfields(self) -> int:
-        ...
+    def nfields(self) -> int: ...
 
-    def fname(self, column_number: int) -> Optional[bytes]:
-        ...
+    def fname(self, column_number: int) -> Optional[bytes]: ...
 
-    def ftable(self, column_number: int) -> int:
-        ...
+    def ftable(self, column_number: int) -> int: ...
 
-    def ftablecol(self, column_number: int) -> int:
-        ...
+    def ftablecol(self, column_number: int) -> int: ...
 
-    def fformat(self, column_number: int) -> int:
-        ...
+    def fformat(self, column_number: int) -> int: ...
 
-    def ftype(self, column_number: int) -> int:
-        ...
+    def ftype(self, column_number: int) -> int: ...
 
-    def fmod(self, column_number: int) -> int:
-        ...
+    def fmod(self, column_number: int) -> int: ...
 
-    def fsize(self, column_number: int) -> int:
-        ...
+    def fsize(self, column_number: int) -> int: ...
 
     @property
-    def binary_tuples(self) -> int:
-        ...
+    def binary_tuples(self) -> int: ...
 
-    def get_value(self, row_number: int, column_number: int) -> Optional[bytes]:
-        ...
+    def get_value(self, row_number: int, column_number: int) -> Optional[bytes]: ...
 
     @property
-    def nparams(self) -> int:
-        ...
+    def nparams(self) -> int: ...
 
-    def param_type(self, param_number: int) -> int:
-        ...
+    def param_type(self, param_number: int) -> int: ...
 
     @property
-    def command_status(self) -> Optional[bytes]:
-        ...
+    def command_status(self) -> Optional[bytes]: ...
 
     @property
-    def command_tuples(self) -> Optional[int]:
-        ...
+    def command_tuples(self) -> Optional[int]: ...
 
     @property
-    def oid_value(self) -> int:
-        ...
+    def oid_value(self) -> int: ...
 
-    def set_attributes(self, descriptions: List["PGresAttDesc"]) -> None:
-        ...
+    def set_attributes(self, descriptions: List["PGresAttDesc"]) -> None: ...
 
 
 class PGcancel(Protocol):
-    def free(self) -> None:
-        ...
+    def free(self) -> None: ...
 
-    def cancel(self) -> None:
-        ...
+    def cancel(self) -> None: ...
 
 
 class Conninfo(Protocol):
     @classmethod
-    def get_defaults(cls) -> List["ConninfoOption"]:
-        ...
+    def get_defaults(cls) -> List["ConninfoOption"]: ...
 
     @classmethod
-    def parse(cls, conninfo: bytes) -> List["ConninfoOption"]:
-        ...
+    def parse(cls, conninfo: bytes) -> List["ConninfoOption"]: ...
 
     @classmethod
-    def _options_from_array(cls, opts: Sequence[Any]) -> List["ConninfoOption"]:
-        ...
+    def _options_from_array(cls, opts: Sequence[Any]) -> List["ConninfoOption"]: ...
 
 
 class Escaping(Protocol):
-    def __init__(self, conn: Optional[PGconn] = None):
-        ...
+    def __init__(self, conn: Optional[PGconn] = None): ...
 
-    def escape_literal(self, data: Buffer) -> bytes:
-        ...
+    def escape_literal(self, data: Buffer) -> bytes: ...
 
-    def escape_identifier(self, data: Buffer) -> bytes:
-        ...
+    def escape_identifier(self, data: Buffer) -> bytes: ...
 
-    def escape_string(self, data: Buffer) -> bytes:
-        ...
+    def escape_string(self, data: Buffer) -> bytes: ...
 
-    def escape_bytea(self, data: Buffer) -> bytes:
-        ...
+    def escape_bytea(self, data: Buffer) -> bytes: ...
 
-    def unescape_bytea(self, data: Buffer) -> bytes:
-        ...
+    def unescape_bytea(self, data: Buffer) -> bytes: ...
diff --git a/psycopg/psycopg/rows.py b/psycopg/psycopg/rows.py
index 8e4a5c00eb423224336318750a2ea4aedca21a60..1aa910a6403794dae0eaaabb1fd656ae1ee40861 100644 (file)
--- a/psycopg/psycopg/rows.py
+++ b/psycopg/psycopg/rows.py
@@ -43,8 +43,7 @@ class RowMaker(Protocol[Row]):
     Typically, `!RowMaker` functions are returned by `RowFactory`.
     """
 
-    def __call__(self, __values: Sequence[Any]) -> Row:
-        ...
+    def __call__(self, __values: Sequence[Any]) -> Row: ...
 
 
 class RowFactory(Protocol[Row]):
@@ -61,8 +60,7 @@ class RowFactory(Protocol[Row]):
     use the values to create a dictionary for each record.
     """
 
-    def __call__(self, __cursor: "Cursor[Any]") -> RowMaker[Row]:
-        ...
+    def __call__(self, __cursor: "Cursor[Any]") -> RowMaker[Row]: ...
 
 
 class AsyncRowFactory(Protocol[Row]):
@@ -70,8 +68,7 @@ class AsyncRowFactory(Protocol[Row]):
     Like `RowFactory`, taking an async cursor as argument.
     """
 
-    def __call__(self, __cursor: "AsyncCursor[Any]") -> RowMaker[Row]:
-        ...
+    def __call__(self, __cursor: "AsyncCursor[Any]") -> RowMaker[Row]: ...
 
 
 class BaseRowFactory(Protocol[Row]):
@@ -79,8 +76,7 @@ class BaseRowFactory(Protocol[Row]):
     Like `RowFactory`, taking either type of cursor as argument.
     """
 
-    def __call__(self, __cursor: "BaseCursor[Any, Any]") -> RowMaker[Row]:
-        ...
+    def __call__(self, __cursor: "BaseCursor[Any, Any]") -> RowMaker[Row]: ...
 
 
 TupleRow: TypeAlias = Tuple[Any, ...]
diff --git a/psycopg/psycopg/server_cursor.py b/psycopg/psycopg/server_cursor.py
index 1c6e77aa10f96b33647029e9be2083edbf29c7a4..2f5f44739d96ac1be8d3720a239351885a4076f8 100644 (file)
--- a/psycopg/psycopg/server_cursor.py
+++ b/psycopg/psycopg/server_cursor.py
@@ -222,8 +222,7 @@ class ServerCursor(ServerCursorMixin["Connection[Any]", Row], Cursor[Row]):
         *,
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ):
-        ...
+    ): ...
 
     @overload
     def __init__(
@@ -234,8 +233,7 @@ class ServerCursor(ServerCursorMixin["Connection[Any]", Row], Cursor[Row]):
         row_factory: RowFactory[Row],
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ):
-        ...
+    ): ...
 
     def __init__(
         self,
@@ -363,8 +361,7 @@ class AsyncServerCursor(
         *,
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ):
-        ...
+    ): ...
 
     @overload
     def __init__(
@@ -375,8 +372,7 @@ class AsyncServerCursor(
         row_factory: AsyncRowFactory[Row],
         scrollable: Optional[bool] = None,
         withhold: bool = False,
-    ):
-        ...
+    ): ...
 
     def __init__(
         self,
diff --git a/psycopg/psycopg/types/enum.py b/psycopg/psycopg/types/enum.py
index d8a12b5acd6fc24f8d91b7a31063ea33f67f0c82..6e20dd3cef764aab3fecc893c517ac0fabc7e230 100644 (file)
--- a/psycopg/psycopg/types/enum.py
+++ b/psycopg/psycopg/types/enum.py
@@ -1,6 +1,7 @@
 """
 Adapters for the enum type.
 """
+
 from enum import Enum
 from typing import Any, Dict, Generic, Optional, Mapping, Sequence
 from typing import Tuple, Type, Union, cast, TYPE_CHECKING
diff --git a/psycopg/psycopg/types/multirange.py b/psycopg/psycopg/types/multirange.py
index d672f6be8d4fb7d1697ddc1402c0419b9d951475..51f61d1a79445a9a7f0d33a036238afe63c15ec5 100644 (file)
--- a/psycopg/psycopg/types/multirange.py
+++ b/psycopg/psycopg/types/multirange.py
@@ -91,12 +91,10 @@ class Multirange(MutableSequence[Range[T]]):
         return f"{{{', '.join(map(str, self._ranges))}}}"
 
     @overload
-    def __getitem__(self, index: int) -> Range[T]:
-        ...
+    def __getitem__(self, index: int) -> Range[T]: ...
 
     @overload
-    def __getitem__(self, index: slice) -> "Multirange[T]":
-        ...
+    def __getitem__(self, index: slice) -> "Multirange[T]": ...
 
     def __getitem__(self, index: Union[int, slice]) -> "Union[Range[T],Multirange[T]]":
         if isinstance(index, int):
@@ -108,12 +106,10 @@ class Multirange(MutableSequence[Range[T]]):
         return len(self._ranges)
 
     @overload
-    def __setitem__(self, index: int, value: Range[T]) -> None:
-        ...
+    def __setitem__(self, index: int, value: Range[T]) -> None: ...
 
     @overload
-    def __setitem__(self, index: slice, value: Iterable[Range[T]]) -> None:
-        ...
+    def __setitem__(self, index: slice, value: Iterable[Range[T]]) -> None: ...
 
     def __setitem__(
         self,
diff --git a/psycopg/psycopg/types/numeric.py b/psycopg/psycopg/types/numeric.py
index f394bdac7dc13a19b5ccdee8787b508fde74c6cb..1817740fd6bd504af9b70661c5f7430d899652a5 100644 (file)
--- a/psycopg/psycopg/types/numeric.py
+++ b/psycopg/psycopg/types/numeric.py
@@ -379,8 +379,7 @@ class _MixedNumericDumper(Dumper, ABC):
                 _MixedNumericDumper.int_classes = int
 
     @abstractmethod
-    def dump(self, obj: Union[Decimal, int, "numpy.integer[Any]"]) -> Buffer:
-        ...
+    def dump(self, obj: Union[Decimal, int, "numpy.integer[Any]"]) -> Buffer: ...
 
 
 class NumericDumper(_MixedNumericDumper):
diff --git a/psycopg/setup.cfg b/psycopg/setup.cfg
index f734c40ec99a0f178ac44795df1bf0e318b36882..fbb544677844d2d6b7634ebd9fd033af2116596d 100644 (file)
--- a/psycopg/setup.cfg
+++ b/psycopg/setup.cfg
@@ -74,7 +74,7 @@ test =
     pytest-randomly >= 3.5
 dev =
     ast-comments >= 1.1.2
-    black >= 23.1.0
+    black >= 24.1.0
     codespell >= 2.2
     dnspython >= 2.1
     flake8 >= 4.0
diff --git a/psycopg_c/.flake8 b/psycopg_c/.flake8
index 2ae629c2d4d3d195def647f96d7bce3cdbab83b6..40a061b1e954b4de28869853a004d04fcd5459b7 100644 (file)
--- a/psycopg_c/.flake8
+++ b/psycopg_c/.flake8
@@ -1,3 +1,3 @@
 [flake8]
 max-line-length = 88
-ignore = W503, E203
+ignore = W503, E203, E704
diff --git a/psycopg_pool/.flake8 b/psycopg_pool/.flake8
index 2ae629c2d4d3d195def647f96d7bce3cdbab83b6..40a061b1e954b4de28869853a004d04fcd5459b7 100644 (file)
--- a/psycopg_pool/.flake8
+++ b/psycopg_pool/.flake8
@@ -1,3 +1,3 @@
 [flake8]
 max-line-length = 88
-ignore = W503, E203
+ignore = W503, E203, E704
diff --git a/psycopg_pool/psycopg_pool/null_pool.py b/psycopg_pool/psycopg_pool/null_pool.py
index a5408d08e5cf2a663f493bf1c32b353e786d0ce3..0be5d614820e927a59de563db0d86716fc2cc279 100644 (file)
--- a/psycopg_pool/psycopg_pool/null_pool.py
+++ b/psycopg_pool/psycopg_pool/null_pool.py
@@ -26,6 +26,7 @@ logger = logging.getLogger("psycopg.pool")
 
 
 class NullConnectionPool(_BaseNullConnectionPool, ConnectionPool[CT]):
+
     def __init__(
         self,
         conninfo: str = "",
@@ -47,6 +48,7 @@ class NullConnectionPool(_BaseNullConnectionPool, ConnectionPool[CT]):
         reconnect_failed: Optional[ConnectFailedCB] = None,
         num_workers: int = 3,
     ):  # Note: min_size default value changed to 0.
+
         super().__init__(
             conninfo,
             open=open,
diff --git a/psycopg_pool/psycopg_pool/pool.py b/psycopg_pool/psycopg_pool/pool.py
index 7ab999b1eaa6c1b764c1ee7fa0f0a9179c3fde5e..a021e14a0ba94d089877a3417fc6e34b932461ed 100644 (file)
--- a/psycopg_pool/psycopg_pool/pool.py
+++ b/psycopg_pool/psycopg_pool/pool.py
@@ -913,8 +913,7 @@ class MaintenanceTask(ABC):
         pool.run_task(self)
 
     @abstractmethod
-    def _run(self, pool: ConnectionPool[Any]) -> None:
-        ...
+    def _run(self, pool: ConnectionPool[Any]) -> None: ...
 
 
 class StopWorker(MaintenanceTask):
@@ -925,6 +924,7 @@ class StopWorker(MaintenanceTask):
 
 
 class AddConnection(MaintenanceTask):
+
     def __init__(
         self,
         pool: ConnectionPool[Any],
diff --git a/psycopg_pool/psycopg_pool/pool_async.py b/psycopg_pool/psycopg_pool/pool_async.py
index fd8f63782bb6206a1c2171943a8703623bfa9cc8..d0770dd778681675e697e0ebff41a1b350764453 100644 (file)
--- a/psycopg_pool/psycopg_pool/pool_async.py
+++ b/psycopg_pool/psycopg_pool/pool_async.py
@@ -962,8 +962,7 @@ class MaintenanceTask(ABC):
         pool.run_task(self)
 
     @abstractmethod
-    async def _run(self, pool: AsyncConnectionPool[Any]) -> None:
-        ...
+    async def _run(self, pool: AsyncConnectionPool[Any]) -> None: ...
 
 
 class StopWorker(MaintenanceTask):
diff --git a/psycopg_pool/psycopg_pool/sched.py b/psycopg_pool/psycopg_pool/sched.py
index 58cadc36df1f499fcfadef0ff36dcf38f5750071..40954b99605dc42388525e83f329cd50fa3ca59f 100644 (file)
--- a/psycopg_pool/psycopg_pool/sched.py
+++ b/psycopg_pool/psycopg_pool/sched.py
@@ -27,6 +27,7 @@ logger = logging.getLogger(__name__)
 
 
 class Scheduler:
+
     def __init__(self) -> None:
         self._queue: List[Task] = []
         self._lock = Lock()
diff --git a/tests/conftest.py b/tests/conftest.py
index 98b03426e0c30c36e5c08c2b855d4e33e002f5e6..cc1273f78eb90df9b1df3f6ba2829153e4c359fa 100644 (file)
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -73,9 +73,9 @@ def pytest_sessionstart(session):
 
 asyncio_options: Dict[str, Any] = {}
 if sys.platform == "win32":
-    asyncio_options[
-        "loop_factory"
-    ] = asyncio.WindowsSelectorEventLoopPolicy().new_event_loop
+    asyncio_options["loop_factory"] = (
+        asyncio.WindowsSelectorEventLoopPolicy().new_event_loop
+    )
 
 
 @pytest.fixture(
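
Aside on the tests/conftest.py hunk just above: the rewrite is consistent with black 24's stable-style preference for splitting a long assignment on its right-hand side, parenthesizing the value, rather than breaking up the subscripted target. A self-contained restatement of that hunk, with the pre-24 layout kept in a comment (an illustration, not new project code):

import asyncio
import sys
from typing import Any, Dict

asyncio_options: Dict[str, Any] = {}

if sys.platform == "win32":
    # black >= 24.1 parenthesizes the right-hand side and indents it:
    asyncio_options["loop_factory"] = (
        asyncio.WindowsSelectorEventLoopPolicy().new_event_loop
    )
    # black < 24 split the subscripted target instead:
    #
    #     asyncio_options[
    #         "loop_factory"
    #     ] = asyncio.WindowsSelectorEventLoopPolicy().new_event_loop
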
diff --git a/tests/constraints.txt b/tests/constraints.txt
index a676a993f593fb40fe94032858a1799659834caa..5c106971cdb2212607d92b87c162ac1ec43d7ad4 100644 (file)
--- a/tests/constraints.txt
+++ b/tests/constraints.txt
@@ -17,7 +17,7 @@ pytest-cov == 3.0.0
 pytest-randomly == 3.5.0
 
 # From the 'dev' extra
-black == 23.1.0
+black == 24.1.0
 dnspython == 2.1.0
 flake8 == 4.0.0
 types-setuptools == 57.4.0
diff --git a/tests/pool/test_pool.py b/tests/pool/test_pool.py
index e2aeb7cc39164dd5f239adca2766d902816d07fb..ff8fafd885d79f5350a9eca136c58f354b0b6c62 100644 (file)
--- a/tests/pool/test_pool.py
+++ b/tests/pool/test_pool.py
@@ -42,10 +42,11 @@ def test_bad_size(dsn, min_size, max_size):
 
 
 class MyRow(Dict[str, Any]):
-    ...
+    pass
 
 
 def test_generic_connection_type(dsn):
+
     def configure(conn: psycopg.Connection[Any]) -> None:
         set_autocommit(conn, True)
 
@@ -78,10 +79,12 @@ def test_generic_connection_type(dsn):
 
 
 def test_non_generic_connection_type(dsn):
+
     def configure(conn: psycopg.Connection[Any]) -> None:
         set_autocommit(conn, True)
 
     class MyConnection(psycopg.Connection[MyRow]):
+
         def __init__(self, *args: Any, **kwargs: Any):
             kwargs["row_factory"] = class_row(MyRow)
             super().__init__(*args, **kwargs)
@@ -638,6 +641,7 @@ def test_uniform_use(dsn):
 @pytest.mark.slow
 @pytest.mark.timing
 def test_resize(dsn):
+
     def sampler():
         sleep(0.05)  # ensure sampling happens after shrink check
         while True:
diff --git a/tests/pool/test_pool_async.py b/tests/pool/test_pool_async.py
index 0bc84f8bc6428cc41f13590b121ef8c44ae7e7e2..6a699e40b489f815c31464f12f13921cbb438dc5 100644 (file)
--- a/tests/pool/test_pool_async.py
+++ b/tests/pool/test_pool_async.py
@@ -42,7 +42,7 @@ async def test_bad_size(dsn, min_size, max_size):
 
 
 class MyRow(Dict[str, Any]):
-    ...
+    pass
 
 
 async def test_generic_connection_type(dsn):
diff --git a/tests/pool/test_pool_common.py b/tests/pool/test_pool_common.py
index ddf78a693638723d0a229380e92de76af7fd60d7..a8815da200508d40d23697b46912d750f8df27eb 100644 (file)
--- a/tests/pool/test_pool_common.py
+++ b/tests/pool/test_pool_common.py
@@ -35,6 +35,7 @@ def test_defaults(pool_cls, dsn):
 
 
 def test_connection_class(pool_cls, dsn):
+
     class MyConn(psycopg.Connection[Any]):
         pass
 
@@ -158,6 +159,7 @@ def test_configure_broken(pool_cls, dsn, caplog):
 @pytest.mark.timing
 @pytest.mark.crdb_skip("backend pid")
 def test_queue(pool_cls, dsn):
+
     def worker(n):
         t0 = time()
         with p.connection() as conn:
@@ -182,6 +184,7 @@ def test_queue(pool_cls, dsn):
 
 @pytest.mark.slow
 def test_queue_size(pool_cls, dsn):
+
     def worker(t, ev=None):
         try:
             with p.connection():
@@ -217,6 +220,7 @@ def test_queue_size(pool_cls, dsn):
 @pytest.mark.timing
 @pytest.mark.crdb_skip("backend pid")
 def test_queue_timeout(pool_cls, dsn):
+
     def worker(n):
         t0 = time()
         try:
@@ -246,6 +250,7 @@ def test_queue_timeout(pool_cls, dsn):
 @pytest.mark.slow
 @pytest.mark.timing
 def test_dead_client(pool_cls, dsn):
+
     def worker(i, timeout):
         try:
             with p.connection(timeout=timeout) as conn:
@@ -273,6 +278,7 @@ def test_dead_client(pool_cls, dsn):
 @pytest.mark.timing
 @pytest.mark.crdb_skip("backend pid")
 def test_queue_timeout_override(pool_cls, dsn):
+
     def worker(n):
         t0 = time()
         timeout = 0.25 if n == 3 else None
@@ -382,6 +388,7 @@ def test_close_connection_on_pool_close(pool_cls, dsn):
 
 
 def test_closed_queue(pool_cls, dsn):
+
     def w1():
         with p.connection() as conn:
             e1.set()  # Tell w0 that w1 got a connection
@@ -493,6 +500,7 @@ def test_jitter(pool_cls):
 @pytest.mark.slow
 @pytest.mark.timing
 def test_stats_measures(pool_cls, dsn):
+
     def worker(n):
         with p.connection() as conn:
             conn.execute("select pg_sleep(0.2)")
@@ -532,6 +540,7 @@ def test_stats_measures(pool_cls, dsn):
 @pytest.mark.slow
 @pytest.mark.timing
 def test_stats_usage(pool_cls, dsn):
+
     def worker(n):
         try:
             with p.connection(timeout=0.3) as conn:
@@ -613,6 +622,7 @@ def test_check_init(pool_cls, dsn):
 
 @pytest.mark.slow
 def test_check_timeout(pool_cls, dsn):
+
     def check(conn):
         raise Exception()
 
diff --git a/tests/pool/test_pool_null.py b/tests/pool/test_pool_null.py
index fbe698df68658a6cf77e65e2fe0cce8dae3f12ff..c54014572cda00ba6f33c4ef88fc05e9120855e4 100644 (file)
--- a/tests/pool/test_pool_null.py
+++ b/tests/pool/test_pool_null.py
@@ -40,10 +40,11 @@ def test_bad_size(dsn, min_size, max_size):
 
 
 class MyRow(Dict[str, Any]):
-    ...
+    pass
 
 
 def test_generic_connection_type(dsn):
+
     def configure(conn: psycopg.Connection[Any]) -> None:
         set_autocommit(conn, True)
 
@@ -76,10 +77,12 @@ def test_generic_connection_type(dsn):
 
 
 def test_non_generic_connection_type(dsn):
+
     def configure(conn: psycopg.Connection[Any]) -> None:
         set_autocommit(conn, True)
 
     class MyConnection(psycopg.Connection[MyRow]):
+
         def __init__(self, *args: Any, **kwargs: Any):
             kwargs["row_factory"] = class_row(MyRow)
             super().__init__(*args, **kwargs)
diff --git a/tests/pool/test_pool_null_async.py b/tests/pool/test_pool_null_async.py
index 09c0e21509050d9bf71c3a6b7022d1bc75e8960f..b610045ccb8bfd1357c35b725936a56562f14ceb 100644 (file)
--- a/tests/pool/test_pool_null_async.py
+++ b/tests/pool/test_pool_null_async.py
@@ -40,7 +40,7 @@ async def test_bad_size(dsn, min_size, max_size):
 
 
 class MyRow(Dict[str, Any]):
-    ...
+    pass
 
 
 async def test_generic_connection_type(dsn):
diff --git a/tests/scripts/dectest.py b/tests/scripts/dectest.py
index a49f11685b069c33d9f7958e3513308c312d75e0..0fb65ad90ea633c4b969d2a16478916e1dee29ac 100644 (file)
--- a/tests/scripts/dectest.py
+++ b/tests/scripts/dectest.py
@@ -1,6 +1,7 @@
 """
 A quick and rough performance comparison of text vs. binary Decimal adaptation
 """
+
 from random import randrange
 from decimal import Decimal
 import psycopg
diff --git a/tests/scripts/pipeline-demo.py b/tests/scripts/pipeline-demo.py
index ec952293a18d79209ab7cce649935f9d596201fc..74cc04d1f8635f6b3a1ab6b9eccafe00757edb7f 100644 (file)
--- a/tests/scripts/pipeline-demo.py
+++ b/tests/scripts/pipeline-demo.py
@@ -7,6 +7,7 @@ We do not fetch results explicitly (using cursor.fetch*()), this is
 handled by execute() calls when pgconn socket is read-ready, which
 happens when the output buffer is full.
 """
+
 import argparse
 import asyncio
 import logging
diff --git a/tests/test_connection.py b/tests/test_connection.py
index 8456dba45aa8237af43990f564b212b0734f5b6e..1d9217d2b9893d3a4f3fb04a3753afa5befd41f5 100644 (file)
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -34,6 +34,7 @@ def test_connect_bad(conn_cls):
 
 
 def test_connect_str_subclass(conn_cls, dsn):
+
     class MyString(str):
         pass
 
@@ -467,6 +468,7 @@ def test_connect_args(
     ],
 )
 def test_connect_badargs(conn_cls, monkeypatch, pgconn, args, kwargs, exctype):
+
     def fake_connect(conninfo):
         return pgconn
         yield
@@ -642,6 +644,7 @@ def test_cursor_factory(conn):
 
 
 def test_cursor_factory_connect(conn_cls, dsn):
+
     class MyCursor(psycopg.Cursor[psycopg.rows.Row]):
         pass
 
diff --git a/tests/test_connection_async.py b/tests/test_connection_async.py
index 2e950aff4f2016fd32e138e4aaa1595bc8576346..a98f0f80b2de6cf08c1f2a1211740d977241d2c1 100644 (file)
--- a/tests/test_connection_async.py
+++ b/tests/test_connection_async.py
@@ -637,7 +637,7 @@ async def test_cursor_factory(aconn):
     async with aconn.cursor() as cur:
         assert isinstance(cur, MyCursor)
 
-    async with (await aconn.execute("select 1")) as cur:
+    async with await aconn.execute("select 1") as cur:
         assert isinstance(cur, MyCursor)
 
 
diff --git a/tests/test_copy.py b/tests/test_copy.py
index fda854e60bea1adeee1eea27bb3e7b6f0888f5ef..55f9e3b77499bf44555ce30caae87ae41ec1c3d7 100644 (file)
--- a/tests/test_copy.py
+++ b/tests/test_copy.py
@@ -306,6 +306,7 @@ def test_subclass_adapter(conn, format):
         BaseDumper = StrBinaryDumper  # type: ignore
 
     class MyStrDumper(BaseDumper):
+
         def dump(self, obj):
             return super().dump(obj) * 2
 
@@ -641,6 +642,7 @@ def test_worker_life(conn, format, buffer):
 
 
 def test_worker_error_propagated(conn, monkeypatch):
+
     def copy_to_broken(pgconn, buffer):
         raise ZeroDivisionError
         yield
@@ -803,6 +805,7 @@ def test_copy_table_across(conn_cls, dsn, faker, mode):
 
 
 class DataGenerator:
+
     def __init__(self, conn, nrecs, srec, offset=0, block_size=8192):
         self.conn = conn
         self.nrecs = nrecs
diff --git a/tests/test_cursor_common.py b/tests/test_cursor_common.py
index 159e67cb1e92de995418ffe24bb6622c4af0c42f..d5b0d15136e78229a0e3beca406ceb8a1d15cb23 100644 (file)
--- a/tests/test_cursor_common.py
+++ b/tests/test_cursor_common.py
@@ -576,6 +576,7 @@ def test_row_factory_none(conn):
 
 
 def test_bad_row_factory(conn):
+
     def broken_factory(cur):
         1 / 0
 
@@ -584,6 +585,7 @@ def test_bad_row_factory(conn):
         cur.execute("select 1")
 
     def broken_maker(cur):
+
         def make_row(seq):
             1 / 0
 
diff --git a/tests/test_tpc.py b/tests/test_tpc.py
index 41023ccc87f92af303bba9048172827d917bc5ef..864f8a7868e77b3616cd13460a19138b87d1ec69 100644 (file)
--- a/tests/test_tpc.py
+++ b/tests/test_tpc.py
@@ -22,6 +22,7 @@ def test_tpc_disabled(conn, pipeline):
 
 
 class TestTPC:
+
     def test_tpc_commit(self, conn, tpc):
         xid = conn.xid(1, "gtrid", "bqual")
         assert conn.info.transaction_status == TransactionStatus.IDLE
diff --git a/tests/types/test_numeric.py b/tests/types/test_numeric.py
index fffc32f31ba0ac8ac25d36527f3502294156d486..8e5db7668c78efc28e5389ada3e8fe05201237f2 100644 (file)
--- a/tests/types/test_numeric.py
+++ b/tests/types/test_numeric.py
@@ -402,9 +402,11 @@ def test_dump_numeric_binary(conn, expr):
 @pytest.mark.parametrize(
     "fmt_in",
     [
-        f
-        if f != PyFormat.BINARY
-        else pytest.param(f, marks=pytest.mark.crdb_skip("binary decimal"))
+        (
+            f
+            if f != PyFormat.BINARY
+            else pytest.param(f, marks=pytest.mark.crdb_skip("binary decimal"))
+        )
         for f in PyFormat
     ],
 )