From: Daniele Varrazzo Date: Fri, 26 Jan 2024 23:44:49 +0000 (+0000) Subject: chore: bump black to 24.1.0 X-Git-Tag: 3.2.0~93 X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=95b884c348e881e5f65616ae8356fbfc34caf451;p=thirdparty%2Fpsycopg.git chore: bump black to 24.1.0 --- diff --git a/.flake8 b/.flake8 index ec4053fb2..d2473a1ae 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,6 @@ [flake8] max-line-length = 88 -ignore = W503, E203 +ignore = W503, E203, E704 extend-exclude = .venv build per-file-ignores = # Autogenerated section diff --git a/psycopg/.flake8 b/psycopg/.flake8 index 67fb0245c..33b08d768 100644 --- a/psycopg/.flake8 +++ b/psycopg/.flake8 @@ -1,6 +1,6 @@ [flake8] max-line-length = 88 -ignore = W503, E203 +ignore = W503, E203, E704 per-file-ignores = # Autogenerated section psycopg/errors.py: E125, E128, E302 diff --git a/psycopg/psycopg/_copy_base.py b/psycopg/psycopg/_copy_base.py index 140744ff1..9194b266b 100644 --- a/psycopg/psycopg/_copy_base.py +++ b/psycopg/psycopg/_copy_base.py @@ -210,20 +210,16 @@ class Formatter(ABC): self._row_mode = False # true if the user is using write_row() @abstractmethod - def parse_row(self, data: Buffer) -> Optional[Tuple[Any, ...]]: - ... + def parse_row(self, data: Buffer) -> Optional[Tuple[Any, ...]]: ... @abstractmethod - def write(self, buffer: Union[Buffer, str]) -> Buffer: - ... + def write(self, buffer: Union[Buffer, str]) -> Buffer: ... @abstractmethod - def write_row(self, row: Sequence[Any]) -> Buffer: - ... + def write_row(self, row: Sequence[Any]) -> Buffer: ... @abstractmethod - def end(self) -> Buffer: - ... + def end(self) -> Buffer: ... class TextFormatter(Formatter): diff --git a/psycopg/psycopg/_struct.py b/psycopg/psycopg/_struct.py index 5f5c3f2c2..7232a20bd 100644 --- a/psycopg/psycopg/_struct.py +++ b/psycopg/psycopg/_struct.py @@ -18,8 +18,7 @@ UnpackFloat: TypeAlias = Callable[[Buffer], Tuple[float]] class UnpackLen(Protocol): - def __call__(self, data: Buffer, start: Optional[int]) -> Tuple[int]: - ... + def __call__(self, data: Buffer, start: Optional[int]) -> Tuple[int]: ... pack_int2 = cast(PackInt, struct.Struct("!h").pack) diff --git a/psycopg/psycopg/_typeinfo.py b/psycopg/psycopg/_typeinfo.py index 68eea41e0..fc170492a 100644 --- a/psycopg/psycopg/_typeinfo.py +++ b/psycopg/psycopg/_typeinfo.py @@ -58,15 +58,13 @@ class TypeInfo: @classmethod def fetch( cls: Type[T], conn: "Connection[Any]", name: Union[str, sql.Identifier] - ) -> Optional[T]: - ... + ) -> Optional[T]: ... @overload @classmethod async def fetch( cls: Type[T], conn: "AsyncConnection[Any]", name: Union[str, sql.Identifier] - ) -> Optional[T]: - ... + ) -> Optional[T]: ... @classmethod def fetch( @@ -238,12 +236,10 @@ class TypesRegistry: yield t @overload - def __getitem__(self, key: Union[str, int]) -> TypeInfo: - ... + def __getitem__(self, key: Union[str, int]) -> TypeInfo: ... @overload - def __getitem__(self, key: Tuple[Type[T], int]) -> T: - ... + def __getitem__(self, key: Tuple[Type[T], int]) -> T: ... def __getitem__(self, key: RegistryKey) -> TypeInfo: """ @@ -264,12 +260,10 @@ class TypesRegistry: raise KeyError(f"couldn't find the type {key!r} in the types registry") @overload - def get(self, key: Union[str, int]) -> Optional[TypeInfo]: - ... + def get(self, key: Union[str, int]) -> Optional[TypeInfo]: ... @overload - def get(self, key: Tuple[Type[T], int]) -> Optional[T]: - ... + def get(self, key: Tuple[Type[T], int]) -> Optional[T]: ... 
def get(self, key: RegistryKey) -> Optional[TypeInfo]: """ diff --git a/psycopg/psycopg/abc.py b/psycopg/psycopg/abc.py index 1e0b3e503..58111ff23 100644 --- a/psycopg/psycopg/abc.py +++ b/psycopg/psycopg/abc.py @@ -57,8 +57,7 @@ class WaitFunc(Protocol): def __call__( self, gen: PQGen[RV], fileno: int, timeout: Optional[float] = None - ) -> RV: - ... + ) -> RV: ... # Adaptation types @@ -109,8 +108,7 @@ class Dumper(Protocol): oid: int """The oid to pass to the server, if known; 0 otherwise (class attribute).""" - def __init__(self, cls: type, context: Optional[AdaptContext] = None): - ... + def __init__(self, cls: type, context: Optional[AdaptContext] = None): ... def dump(self, obj: Any) -> Buffer: """Convert the object `!obj` to PostgreSQL representation. @@ -190,8 +188,7 @@ class Loader(Protocol): This is a class attribute. """ - def __init__(self, oid: int, context: Optional[AdaptContext] = None): - ... + def __init__(self, oid: int, context: Optional[AdaptContext] = None): ... def load(self, data: Buffer) -> Any: """ @@ -206,28 +203,22 @@ class Transformer(Protocol): types: Optional[Tuple[int, ...]] formats: Optional[List[pq.Format]] - def __init__(self, context: Optional[AdaptContext] = None): - ... + def __init__(self, context: Optional[AdaptContext] = None): ... @classmethod - def from_context(cls, context: Optional[AdaptContext]) -> "Transformer": - ... + def from_context(cls, context: Optional[AdaptContext]) -> "Transformer": ... @property - def connection(self) -> Optional["BaseConnection[Any]"]: - ... + def connection(self) -> Optional["BaseConnection[Any]"]: ... @property - def encoding(self) -> str: - ... + def encoding(self) -> str: ... @property - def adapters(self) -> "AdaptersMap": - ... + def adapters(self) -> "AdaptersMap": ... @property - def pgresult(self) -> Optional["PGresult"]: - ... + def pgresult(self) -> Optional["PGresult"]: ... def set_pgresult( self, @@ -235,34 +226,26 @@ class Transformer(Protocol): *, set_loaders: bool = True, format: Optional[pq.Format] = None - ) -> None: - ... + ) -> None: ... - def set_dumper_types(self, types: Sequence[int], format: pq.Format) -> None: - ... + def set_dumper_types(self, types: Sequence[int], format: pq.Format) -> None: ... - def set_loader_types(self, types: Sequence[int], format: pq.Format) -> None: - ... + def set_loader_types(self, types: Sequence[int], format: pq.Format) -> None: ... def dump_sequence( self, params: Sequence[Any], formats: Sequence[PyFormat] - ) -> Sequence[Optional[Buffer]]: - ... + ) -> Sequence[Optional[Buffer]]: ... - def as_literal(self, obj: Any) -> bytes: - ... + def as_literal(self, obj: Any) -> bytes: ... - def get_dumper(self, obj: Any, format: PyFormat) -> Dumper: - ... + def get_dumper(self, obj: Any, format: PyFormat) -> Dumper: ... - def load_rows(self, row0: int, row1: int, make_row: "RowMaker[Row]") -> List["Row"]: - ... + def load_rows( + self, row0: int, row1: int, make_row: "RowMaker[Row]" + ) -> List["Row"]: ... - def load_row(self, row: int, make_row: "RowMaker[Row]") -> Optional["Row"]: - ... + def load_row(self, row: int, make_row: "RowMaker[Row]") -> Optional["Row"]: ... - def load_sequence(self, record: Sequence[Optional[Buffer]]) -> Tuple[Any, ...]: - ... + def load_sequence(self, record: Sequence[Optional[Buffer]]) -> Tuple[Any, ...]: ... - def get_loader(self, oid: int, format: pq.Format) -> Loader: - ... + def get_loader(self, oid: int, format: pq.Format) -> Loader: ... 
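Nearly everything above is the same mechanical rewrite: black 24.1.0 promoted its "dummy implementations" preview style to stable, so a function or method whose body is only an ellipsis is now collapsed onto the signature line. That one-line layout is pycodestyle's E704 (statement on same line as def), which is why E704 joins W503 and E203 in the ignore lists of the .flake8 files updated here. A minimal sketch of the effect, borrowing the Loader name from this patch but with simplified type hints, not a hunk copied from it:

    from typing import Protocol


    # Output of black < 24.1.0: the ellipsis body sits on its own line.
    class Loader(Protocol):
        def load(self, data: bytes) -> object:
            ...


    # Output of black >= 24.1.0: the body is pulled up onto the signature,
    # which flake8 would report as E704 unless that code is ignored.
    class Loader(Protocol):
        def load(self, data: bytes) -> object: ...


The same collapsing applies to @overload and @abstractmethod stubs, which is why connection.py, cursor.py and the pq protocol classes below shrink so much without any behavioural change.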
diff --git a/psycopg/psycopg/adapt.py b/psycopg/psycopg/adapt.py index 31a710429..7d6a191d8 100644 --- a/psycopg/psycopg/adapt.py +++ b/psycopg/psycopg/adapt.py @@ -46,8 +46,7 @@ class Dumper(abc.Dumper, ABC): ) @abstractmethod - def dump(self, obj: Any) -> Buffer: - ... + def dump(self, obj: Any) -> Buffer: ... def quote(self, obj: Any) -> Buffer: """ diff --git a/psycopg/psycopg/connection.py b/psycopg/psycopg/connection.py index 12873bbb3..c7f1519ee 100644 --- a/psycopg/psycopg/connection.py +++ b/psycopg/psycopg/connection.py @@ -165,14 +165,12 @@ class Connection(BaseConnection[Row]): self.pgconn.finish() @overload - def cursor(self, *, binary: bool = False) -> Cursor[Row]: - ... + def cursor(self, *, binary: bool = False) -> Cursor[Row]: ... @overload def cursor( self, *, binary: bool = False, row_factory: RowFactory[CursorRow] - ) -> Cursor[CursorRow]: - ... + ) -> Cursor[CursorRow]: ... @overload def cursor( @@ -182,8 +180,7 @@ class Connection(BaseConnection[Row]): binary: bool = False, scrollable: Optional[bool] = None, withhold: bool = False, - ) -> ServerCursor[Row]: - ... + ) -> ServerCursor[Row]: ... @overload def cursor( @@ -194,8 +191,7 @@ class Connection(BaseConnection[Row]): row_factory: RowFactory[CursorRow], scrollable: Optional[bool] = None, withhold: bool = False, - ) -> ServerCursor[CursorRow]: - ... + ) -> ServerCursor[CursorRow]: ... def cursor( self, diff --git a/psycopg/psycopg/connection_async.py b/psycopg/psycopg/connection_async.py index 2f28fc953..585888d9d 100644 --- a/psycopg/psycopg/connection_async.py +++ b/psycopg/psycopg/connection_async.py @@ -180,14 +180,12 @@ class AsyncConnection(BaseConnection[Row]): self.pgconn.finish() @overload - def cursor(self, *, binary: bool = False) -> AsyncCursor[Row]: - ... + def cursor(self, *, binary: bool = False) -> AsyncCursor[Row]: ... @overload def cursor( self, *, binary: bool = False, row_factory: AsyncRowFactory[CursorRow] - ) -> AsyncCursor[CursorRow]: - ... + ) -> AsyncCursor[CursorRow]: ... @overload def cursor( @@ -197,8 +195,7 @@ class AsyncConnection(BaseConnection[Row]): binary: bool = False, scrollable: Optional[bool] = None, withhold: bool = False, - ) -> AsyncServerCursor[Row]: - ... + ) -> AsyncServerCursor[Row]: ... @overload def cursor( @@ -209,8 +206,7 @@ class AsyncConnection(BaseConnection[Row]): row_factory: AsyncRowFactory[CursorRow], scrollable: Optional[bool] = None, withhold: bool = False, - ) -> AsyncServerCursor[CursorRow]: - ... + ) -> AsyncServerCursor[CursorRow]: ... def cursor( self, diff --git a/psycopg/psycopg/cursor.py b/psycopg/psycopg/cursor.py index 10741c95f..6b48929bc 100644 --- a/psycopg/psycopg/cursor.py +++ b/psycopg/psycopg/cursor.py @@ -34,12 +34,12 @@ class Cursor(BaseCursor["Connection[Any]", Row]): __slots__ = () @overload - def __init__(self, connection: Connection[Row]): - ... + def __init__(self, connection: Connection[Row]): ... @overload - def __init__(self, connection: Connection[Any], *, row_factory: RowFactory[Row]): - ... + def __init__( + self, connection: Connection[Any], *, row_factory: RowFactory[Row] + ): ... def __init__( self, diff --git a/psycopg/psycopg/cursor_async.py b/psycopg/psycopg/cursor_async.py index 603560155..55dc9a5c2 100644 --- a/psycopg/psycopg/cursor_async.py +++ b/psycopg/psycopg/cursor_async.py @@ -31,14 +31,12 @@ class AsyncCursor(BaseCursor["AsyncConnection[Any]", Row]): __slots__ = () @overload - def __init__(self, connection: AsyncConnection[Row]): - ... + def __init__(self, connection: AsyncConnection[Row]): ... 
@overload def __init__( self, connection: AsyncConnection[Any], *, row_factory: AsyncRowFactory[Row] - ): - ... + ): ... def __init__( self, diff --git a/psycopg/psycopg/pq/abc.py b/psycopg/psycopg/pq/abc.py index e16bf11ba..13a077211 100644 --- a/psycopg/psycopg/pq/abc.py +++ b/psycopg/psycopg/pq/abc.py @@ -22,112 +22,83 @@ class PGconn(Protocol): notify_handler: Optional[Callable[["PGnotify"], None]] @classmethod - def connect(cls, conninfo: bytes) -> "PGconn": - ... + def connect(cls, conninfo: bytes) -> "PGconn": ... @classmethod - def connect_start(cls, conninfo: bytes) -> "PGconn": - ... + def connect_start(cls, conninfo: bytes) -> "PGconn": ... - def connect_poll(self) -> int: - ... + def connect_poll(self) -> int: ... - def finish(self) -> None: - ... + def finish(self) -> None: ... @property - def info(self) -> List["ConninfoOption"]: - ... + def info(self) -> List["ConninfoOption"]: ... - def reset(self) -> None: - ... + def reset(self) -> None: ... - def reset_start(self) -> None: - ... + def reset_start(self) -> None: ... - def reset_poll(self) -> int: - ... + def reset_poll(self) -> int: ... @classmethod - def ping(self, conninfo: bytes) -> int: - ... + def ping(self, conninfo: bytes) -> int: ... @property - def db(self) -> bytes: - ... + def db(self) -> bytes: ... @property - def user(self) -> bytes: - ... + def user(self) -> bytes: ... @property - def password(self) -> bytes: - ... + def password(self) -> bytes: ... @property - def host(self) -> bytes: - ... + def host(self) -> bytes: ... @property - def hostaddr(self) -> bytes: - ... + def hostaddr(self) -> bytes: ... @property - def port(self) -> bytes: - ... + def port(self) -> bytes: ... @property - def tty(self) -> bytes: - ... + def tty(self) -> bytes: ... @property - def options(self) -> bytes: - ... + def options(self) -> bytes: ... @property - def status(self) -> int: - ... + def status(self) -> int: ... @property - def transaction_status(self) -> int: - ... + def transaction_status(self) -> int: ... - def parameter_status(self, name: bytes) -> Optional[bytes]: - ... + def parameter_status(self, name: bytes) -> Optional[bytes]: ... @property - def error_message(self) -> bytes: - ... + def error_message(self) -> bytes: ... @property - def server_version(self) -> int: - ... + def server_version(self) -> int: ... @property - def socket(self) -> int: - ... + def socket(self) -> int: ... @property - def backend_pid(self) -> int: - ... + def backend_pid(self) -> int: ... @property - def needs_password(self) -> bool: - ... + def needs_password(self) -> bool: ... @property - def used_password(self) -> bool: - ... + def used_password(self) -> bool: ... @property - def ssl_in_use(self) -> bool: - ... + def ssl_in_use(self) -> bool: ... - def exec_(self, command: bytes) -> "PGresult": - ... + def exec_(self, command: bytes) -> "PGresult": ... - def send_query(self, command: bytes) -> None: - ... + def send_query(self, command: bytes) -> None: ... def exec_params( self, @@ -136,8 +107,7 @@ class PGconn(Protocol): param_types: Optional[Sequence[int]] = None, param_formats: Optional[Sequence[int]] = None, result_format: int = Format.TEXT, - ) -> "PGresult": - ... + ) -> "PGresult": ... def send_query_params( self, @@ -146,16 +116,14 @@ class PGconn(Protocol): param_types: Optional[Sequence[int]] = None, param_formats: Optional[Sequence[int]] = None, result_format: int = Format.TEXT, - ) -> None: - ... + ) -> None: ... 
def send_prepare( self, name: bytes, command: bytes, param_types: Optional[Sequence[int]] = None, - ) -> None: - ... + ) -> None: ... def send_query_prepared( self, @@ -163,16 +131,14 @@ class PGconn(Protocol): param_values: Optional[Sequence[Optional[Buffer]]], param_formats: Optional[Sequence[int]] = None, result_format: int = Format.TEXT, - ) -> None: - ... + ) -> None: ... def prepare( self, name: bytes, command: bytes, param_types: Optional[Sequence[int]] = None, - ) -> "PGresult": - ... + ) -> "PGresult": ... def exec_prepared( self, @@ -180,216 +146,153 @@ class PGconn(Protocol): param_values: Optional[Sequence[Buffer]], param_formats: Optional[Sequence[int]] = None, result_format: int = 0, - ) -> "PGresult": - ... + ) -> "PGresult": ... - def describe_prepared(self, name: bytes) -> "PGresult": - ... + def describe_prepared(self, name: bytes) -> "PGresult": ... - def send_describe_prepared(self, name: bytes) -> None: - ... + def send_describe_prepared(self, name: bytes) -> None: ... - def describe_portal(self, name: bytes) -> "PGresult": - ... + def describe_portal(self, name: bytes) -> "PGresult": ... - def send_describe_portal(self, name: bytes) -> None: - ... + def send_describe_portal(self, name: bytes) -> None: ... - def close_prepared(self, name: bytes) -> "PGresult": - ... + def close_prepared(self, name: bytes) -> "PGresult": ... - def send_close_prepared(self, name: bytes) -> None: - ... + def send_close_prepared(self, name: bytes) -> None: ... - def close_portal(self, name: bytes) -> "PGresult": - ... + def close_portal(self, name: bytes) -> "PGresult": ... - def send_close_portal(self, name: bytes) -> None: - ... + def send_close_portal(self, name: bytes) -> None: ... - def get_result(self) -> Optional["PGresult"]: - ... + def get_result(self) -> Optional["PGresult"]: ... - def consume_input(self) -> None: - ... + def consume_input(self) -> None: ... - def is_busy(self) -> int: - ... + def is_busy(self) -> int: ... @property - def nonblocking(self) -> int: - ... + def nonblocking(self) -> int: ... @nonblocking.setter - def nonblocking(self, arg: int) -> None: - ... + def nonblocking(self, arg: int) -> None: ... - def flush(self) -> int: - ... + def flush(self) -> int: ... - def set_single_row_mode(self) -> None: - ... + def set_single_row_mode(self) -> None: ... - def get_cancel(self) -> "PGcancel": - ... + def get_cancel(self) -> "PGcancel": ... - def notifies(self) -> Optional["PGnotify"]: - ... + def notifies(self) -> Optional["PGnotify"]: ... - def put_copy_data(self, buffer: Buffer) -> int: - ... + def put_copy_data(self, buffer: Buffer) -> int: ... - def put_copy_end(self, error: Optional[bytes] = None) -> int: - ... + def put_copy_end(self, error: Optional[bytes] = None) -> int: ... - def get_copy_data(self, async_: int) -> Tuple[int, memoryview]: - ... + def get_copy_data(self, async_: int) -> Tuple[int, memoryview]: ... - def trace(self, fileno: int) -> None: - ... + def trace(self, fileno: int) -> None: ... - def set_trace_flags(self, flags: Trace) -> None: - ... + def set_trace_flags(self, flags: Trace) -> None: ... - def untrace(self) -> None: - ... + def untrace(self) -> None: ... def encrypt_password( self, passwd: bytes, user: bytes, algorithm: Optional[bytes] = None - ) -> bytes: - ... + ) -> bytes: ... - def make_empty_result(self, exec_status: int) -> "PGresult": - ... + def make_empty_result(self, exec_status: int) -> "PGresult": ... @property - def pipeline_status(self) -> int: - ... + def pipeline_status(self) -> int: ... 
- def enter_pipeline_mode(self) -> None: - ... + def enter_pipeline_mode(self) -> None: ... - def exit_pipeline_mode(self) -> None: - ... + def exit_pipeline_mode(self) -> None: ... - def pipeline_sync(self) -> None: - ... + def pipeline_sync(self) -> None: ... - def send_flush_request(self) -> None: - ... + def send_flush_request(self) -> None: ... class PGresult(Protocol): - def clear(self) -> None: - ... + def clear(self) -> None: ... @property - def status(self) -> int: - ... + def status(self) -> int: ... @property - def error_message(self) -> bytes: - ... + def error_message(self) -> bytes: ... - def error_field(self, fieldcode: int) -> Optional[bytes]: - ... + def error_field(self, fieldcode: int) -> Optional[bytes]: ... @property - def ntuples(self) -> int: - ... + def ntuples(self) -> int: ... @property - def nfields(self) -> int: - ... + def nfields(self) -> int: ... - def fname(self, column_number: int) -> Optional[bytes]: - ... + def fname(self, column_number: int) -> Optional[bytes]: ... - def ftable(self, column_number: int) -> int: - ... + def ftable(self, column_number: int) -> int: ... - def ftablecol(self, column_number: int) -> int: - ... + def ftablecol(self, column_number: int) -> int: ... - def fformat(self, column_number: int) -> int: - ... + def fformat(self, column_number: int) -> int: ... - def ftype(self, column_number: int) -> int: - ... + def ftype(self, column_number: int) -> int: ... - def fmod(self, column_number: int) -> int: - ... + def fmod(self, column_number: int) -> int: ... - def fsize(self, column_number: int) -> int: - ... + def fsize(self, column_number: int) -> int: ... @property - def binary_tuples(self) -> int: - ... + def binary_tuples(self) -> int: ... - def get_value(self, row_number: int, column_number: int) -> Optional[bytes]: - ... + def get_value(self, row_number: int, column_number: int) -> Optional[bytes]: ... @property - def nparams(self) -> int: - ... + def nparams(self) -> int: ... - def param_type(self, param_number: int) -> int: - ... + def param_type(self, param_number: int) -> int: ... @property - def command_status(self) -> Optional[bytes]: - ... + def command_status(self) -> Optional[bytes]: ... @property - def command_tuples(self) -> Optional[int]: - ... + def command_tuples(self) -> Optional[int]: ... @property - def oid_value(self) -> int: - ... + def oid_value(self) -> int: ... - def set_attributes(self, descriptions: List["PGresAttDesc"]) -> None: - ... + def set_attributes(self, descriptions: List["PGresAttDesc"]) -> None: ... class PGcancel(Protocol): - def free(self) -> None: - ... + def free(self) -> None: ... - def cancel(self) -> None: - ... + def cancel(self) -> None: ... class Conninfo(Protocol): @classmethod - def get_defaults(cls) -> List["ConninfoOption"]: - ... + def get_defaults(cls) -> List["ConninfoOption"]: ... @classmethod - def parse(cls, conninfo: bytes) -> List["ConninfoOption"]: - ... + def parse(cls, conninfo: bytes) -> List["ConninfoOption"]: ... @classmethod - def _options_from_array(cls, opts: Sequence[Any]) -> List["ConninfoOption"]: - ... + def _options_from_array(cls, opts: Sequence[Any]) -> List["ConninfoOption"]: ... class Escaping(Protocol): - def __init__(self, conn: Optional[PGconn] = None): - ... + def __init__(self, conn: Optional[PGconn] = None): ... - def escape_literal(self, data: Buffer) -> bytes: - ... + def escape_literal(self, data: Buffer) -> bytes: ... - def escape_identifier(self, data: Buffer) -> bytes: - ... + def escape_identifier(self, data: Buffer) -> bytes: ... 
- def escape_string(self, data: Buffer) -> bytes: - ... + def escape_string(self, data: Buffer) -> bytes: ... - def escape_bytea(self, data: Buffer) -> bytes: - ... + def escape_bytea(self, data: Buffer) -> bytes: ... - def unescape_bytea(self, data: Buffer) -> bytes: - ... + def unescape_bytea(self, data: Buffer) -> bytes: ... diff --git a/psycopg/psycopg/rows.py b/psycopg/psycopg/rows.py index 8e4a5c00e..1aa910a64 100644 --- a/psycopg/psycopg/rows.py +++ b/psycopg/psycopg/rows.py @@ -43,8 +43,7 @@ class RowMaker(Protocol[Row]): Typically, `!RowMaker` functions are returned by `RowFactory`. """ - def __call__(self, __values: Sequence[Any]) -> Row: - ... + def __call__(self, __values: Sequence[Any]) -> Row: ... class RowFactory(Protocol[Row]): @@ -61,8 +60,7 @@ class RowFactory(Protocol[Row]): use the values to create a dictionary for each record. """ - def __call__(self, __cursor: "Cursor[Any]") -> RowMaker[Row]: - ... + def __call__(self, __cursor: "Cursor[Any]") -> RowMaker[Row]: ... class AsyncRowFactory(Protocol[Row]): @@ -70,8 +68,7 @@ class AsyncRowFactory(Protocol[Row]): Like `RowFactory`, taking an async cursor as argument. """ - def __call__(self, __cursor: "AsyncCursor[Any]") -> RowMaker[Row]: - ... + def __call__(self, __cursor: "AsyncCursor[Any]") -> RowMaker[Row]: ... class BaseRowFactory(Protocol[Row]): @@ -79,8 +76,7 @@ class BaseRowFactory(Protocol[Row]): Like `RowFactory`, taking either type of cursor as argument. """ - def __call__(self, __cursor: "BaseCursor[Any, Any]") -> RowMaker[Row]: - ... + def __call__(self, __cursor: "BaseCursor[Any, Any]") -> RowMaker[Row]: ... TupleRow: TypeAlias = Tuple[Any, ...] diff --git a/psycopg/psycopg/server_cursor.py b/psycopg/psycopg/server_cursor.py index 1c6e77aa1..2f5f44739 100644 --- a/psycopg/psycopg/server_cursor.py +++ b/psycopg/psycopg/server_cursor.py @@ -222,8 +222,7 @@ class ServerCursor(ServerCursorMixin["Connection[Any]", Row], Cursor[Row]): *, scrollable: Optional[bool] = None, withhold: bool = False, - ): - ... + ): ... @overload def __init__( @@ -234,8 +233,7 @@ class ServerCursor(ServerCursorMixin["Connection[Any]", Row], Cursor[Row]): row_factory: RowFactory[Row], scrollable: Optional[bool] = None, withhold: bool = False, - ): - ... + ): ... def __init__( self, @@ -363,8 +361,7 @@ class AsyncServerCursor( *, scrollable: Optional[bool] = None, withhold: bool = False, - ): - ... + ): ... @overload def __init__( @@ -375,8 +372,7 @@ class AsyncServerCursor( row_factory: AsyncRowFactory[Row], scrollable: Optional[bool] = None, withhold: bool = False, - ): - ... + ): ... def __init__( self, diff --git a/psycopg/psycopg/types/enum.py b/psycopg/psycopg/types/enum.py index d8a12b5ac..6e20dd3ce 100644 --- a/psycopg/psycopg/types/enum.py +++ b/psycopg/psycopg/types/enum.py @@ -1,6 +1,7 @@ """ Adapters for the enum type. """ + from enum import Enum from typing import Any, Dict, Generic, Optional, Mapping, Sequence from typing import Tuple, Type, Union, cast, TYPE_CHECKING diff --git a/psycopg/psycopg/types/multirange.py b/psycopg/psycopg/types/multirange.py index d672f6be8..51f61d1a7 100644 --- a/psycopg/psycopg/types/multirange.py +++ b/psycopg/psycopg/types/multirange.py @@ -91,12 +91,10 @@ class Multirange(MutableSequence[Range[T]]): return f"{{{', '.join(map(str, self._ranges))}}}" @overload - def __getitem__(self, index: int) -> Range[T]: - ... + def __getitem__(self, index: int) -> Range[T]: ... @overload - def __getitem__(self, index: slice) -> "Multirange[T]": - ... 
+ def __getitem__(self, index: slice) -> "Multirange[T]": ... def __getitem__(self, index: Union[int, slice]) -> "Union[Range[T],Multirange[T]]": if isinstance(index, int): @@ -108,12 +106,10 @@ class Multirange(MutableSequence[Range[T]]): return len(self._ranges) @overload - def __setitem__(self, index: int, value: Range[T]) -> None: - ... + def __setitem__(self, index: int, value: Range[T]) -> None: ... @overload - def __setitem__(self, index: slice, value: Iterable[Range[T]]) -> None: - ... + def __setitem__(self, index: slice, value: Iterable[Range[T]]) -> None: ... def __setitem__( self, diff --git a/psycopg/psycopg/types/numeric.py b/psycopg/psycopg/types/numeric.py index f394bdac7..1817740fd 100644 --- a/psycopg/psycopg/types/numeric.py +++ b/psycopg/psycopg/types/numeric.py @@ -379,8 +379,7 @@ class _MixedNumericDumper(Dumper, ABC): _MixedNumericDumper.int_classes = int @abstractmethod - def dump(self, obj: Union[Decimal, int, "numpy.integer[Any]"]) -> Buffer: - ... + def dump(self, obj: Union[Decimal, int, "numpy.integer[Any]"]) -> Buffer: ... class NumericDumper(_MixedNumericDumper): diff --git a/psycopg/setup.cfg b/psycopg/setup.cfg index f734c40ec..fbb544677 100644 --- a/psycopg/setup.cfg +++ b/psycopg/setup.cfg @@ -74,7 +74,7 @@ test = pytest-randomly >= 3.5 dev = ast-comments >= 1.1.2 - black >= 23.1.0 + black >= 24.1.0 codespell >= 2.2 dnspython >= 2.1 flake8 >= 4.0 diff --git a/psycopg_c/.flake8 b/psycopg_c/.flake8 index 2ae629c2d..40a061b1e 100644 --- a/psycopg_c/.flake8 +++ b/psycopg_c/.flake8 @@ -1,3 +1,3 @@ [flake8] max-line-length = 88 -ignore = W503, E203 +ignore = W503, E203, E704 diff --git a/psycopg_pool/.flake8 b/psycopg_pool/.flake8 index 2ae629c2d..40a061b1e 100644 --- a/psycopg_pool/.flake8 +++ b/psycopg_pool/.flake8 @@ -1,3 +1,3 @@ [flake8] max-line-length = 88 -ignore = W503, E203 +ignore = W503, E203, E704 diff --git a/psycopg_pool/psycopg_pool/null_pool.py b/psycopg_pool/psycopg_pool/null_pool.py index a5408d08e..0be5d6148 100644 --- a/psycopg_pool/psycopg_pool/null_pool.py +++ b/psycopg_pool/psycopg_pool/null_pool.py @@ -26,6 +26,7 @@ logger = logging.getLogger("psycopg.pool") class NullConnectionPool(_BaseNullConnectionPool, ConnectionPool[CT]): + def __init__( self, conninfo: str = "", @@ -47,6 +48,7 @@ class NullConnectionPool(_BaseNullConnectionPool, ConnectionPool[CT]): reconnect_failed: Optional[ConnectFailedCB] = None, num_workers: int = 3, ): # Note: min_size default value changed to 0. + super().__init__( conninfo, open=open, diff --git a/psycopg_pool/psycopg_pool/pool.py b/psycopg_pool/psycopg_pool/pool.py index 7ab999b1e..a021e14a0 100644 --- a/psycopg_pool/psycopg_pool/pool.py +++ b/psycopg_pool/psycopg_pool/pool.py @@ -913,8 +913,7 @@ class MaintenanceTask(ABC): pool.run_task(self) @abstractmethod - def _run(self, pool: ConnectionPool[Any]) -> None: - ... + def _run(self, pool: ConnectionPool[Any]) -> None: ... class StopWorker(MaintenanceTask): @@ -925,6 +924,7 @@ class StopWorker(MaintenanceTask): class AddConnection(MaintenanceTask): + def __init__( self, pool: ConnectionPool[Any], diff --git a/psycopg_pool/psycopg_pool/pool_async.py b/psycopg_pool/psycopg_pool/pool_async.py index fd8f63782..d0770dd77 100644 --- a/psycopg_pool/psycopg_pool/pool_async.py +++ b/psycopg_pool/psycopg_pool/pool_async.py @@ -962,8 +962,7 @@ class MaintenanceTask(ABC): pool.run_task(self) @abstractmethod - async def _run(self, pool: AsyncConnectionPool[Any]) -> None: - ... + async def _run(self, pool: AsyncConnectionPool[Any]) -> None: ... 
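Two smaller things are mixed into these hunks. The packaging side is only the version floor: the dev extra in psycopg/setup.cfg moves from black >= 23.1.0 to >= 24.1.0, and the pinned test constraints further down follow with black == 24.1.0. The blank lines added right after the NullConnectionPool, AddConnection and (further down) Scheduler headers look like readability separators that the new style simply tolerates: black 24.1.0's stable style allows an empty first line at the beginning of a block, where earlier versions deleted it. A hedged sketch of that behaviour, reusing the Scheduler name with a trimmed body rather than the real class:

    class Scheduler:

        # black < 24.1.0 would have removed the blank line between the class
        # header and this comment; black >= 24.1.0 leaves it in place.
        def __init__(self) -> None:
            self._queue: list = []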
class StopWorker(MaintenanceTask): diff --git a/psycopg_pool/psycopg_pool/sched.py b/psycopg_pool/psycopg_pool/sched.py index 58cadc36d..40954b996 100644 --- a/psycopg_pool/psycopg_pool/sched.py +++ b/psycopg_pool/psycopg_pool/sched.py @@ -27,6 +27,7 @@ logger = logging.getLogger(__name__) class Scheduler: + def __init__(self) -> None: self._queue: List[Task] = [] self._lock = Lock() diff --git a/tests/conftest.py b/tests/conftest.py index 98b03426e..cc1273f78 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -73,9 +73,9 @@ def pytest_sessionstart(session): asyncio_options: Dict[str, Any] = {} if sys.platform == "win32": - asyncio_options[ - "loop_factory" - ] = asyncio.WindowsSelectorEventLoopPolicy().new_event_loop + asyncio_options["loop_factory"] = ( + asyncio.WindowsSelectorEventLoopPolicy().new_event_loop + ) @pytest.fixture( diff --git a/tests/constraints.txt b/tests/constraints.txt index a676a993f..5c106971c 100644 --- a/tests/constraints.txt +++ b/tests/constraints.txt @@ -17,7 +17,7 @@ pytest-cov == 3.0.0 pytest-randomly == 3.5.0 # From the 'dev' extra -black == 23.1.0 +black == 24.1.0 dnspython == 2.1.0 flake8 == 4.0.0 types-setuptools == 57.4.0 diff --git a/tests/pool/test_pool.py b/tests/pool/test_pool.py index e2aeb7cc3..ff8fafd88 100644 --- a/tests/pool/test_pool.py +++ b/tests/pool/test_pool.py @@ -42,10 +42,11 @@ def test_bad_size(dsn, min_size, max_size): class MyRow(Dict[str, Any]): - ... + pass def test_generic_connection_type(dsn): + def configure(conn: psycopg.Connection[Any]) -> None: set_autocommit(conn, True) @@ -78,10 +79,12 @@ def test_generic_connection_type(dsn): def test_non_generic_connection_type(dsn): + def configure(conn: psycopg.Connection[Any]) -> None: set_autocommit(conn, True) class MyConnection(psycopg.Connection[MyRow]): + def __init__(self, *args: Any, **kwargs: Any): kwargs["row_factory"] = class_row(MyRow) super().__init__(*args, **kwargs) @@ -638,6 +641,7 @@ def test_uniform_use(dsn): @pytest.mark.slow @pytest.mark.timing def test_resize(dsn): + def sampler(): sleep(0.05) # ensure sampling happens after shrink check while True: diff --git a/tests/pool/test_pool_async.py b/tests/pool/test_pool_async.py index 0bc84f8bc..6a699e40b 100644 --- a/tests/pool/test_pool_async.py +++ b/tests/pool/test_pool_async.py @@ -42,7 +42,7 @@ async def test_bad_size(dsn, min_size, max_size): class MyRow(Dict[str, Any]): - ... 
+ pass async def test_generic_connection_type(dsn): diff --git a/tests/pool/test_pool_common.py b/tests/pool/test_pool_common.py index ddf78a693..a8815da20 100644 --- a/tests/pool/test_pool_common.py +++ b/tests/pool/test_pool_common.py @@ -35,6 +35,7 @@ def test_defaults(pool_cls, dsn): def test_connection_class(pool_cls, dsn): + class MyConn(psycopg.Connection[Any]): pass @@ -158,6 +159,7 @@ def test_configure_broken(pool_cls, dsn, caplog): @pytest.mark.timing @pytest.mark.crdb_skip("backend pid") def test_queue(pool_cls, dsn): + def worker(n): t0 = time() with p.connection() as conn: @@ -182,6 +184,7 @@ def test_queue(pool_cls, dsn): @pytest.mark.slow def test_queue_size(pool_cls, dsn): + def worker(t, ev=None): try: with p.connection(): @@ -217,6 +220,7 @@ def test_queue_size(pool_cls, dsn): @pytest.mark.timing @pytest.mark.crdb_skip("backend pid") def test_queue_timeout(pool_cls, dsn): + def worker(n): t0 = time() try: @@ -246,6 +250,7 @@ def test_queue_timeout(pool_cls, dsn): @pytest.mark.slow @pytest.mark.timing def test_dead_client(pool_cls, dsn): + def worker(i, timeout): try: with p.connection(timeout=timeout) as conn: @@ -273,6 +278,7 @@ def test_dead_client(pool_cls, dsn): @pytest.mark.timing @pytest.mark.crdb_skip("backend pid") def test_queue_timeout_override(pool_cls, dsn): + def worker(n): t0 = time() timeout = 0.25 if n == 3 else None @@ -382,6 +388,7 @@ def test_close_connection_on_pool_close(pool_cls, dsn): def test_closed_queue(pool_cls, dsn): + def w1(): with p.connection() as conn: e1.set() # Tell w0 that w1 got a connection @@ -493,6 +500,7 @@ def test_jitter(pool_cls): @pytest.mark.slow @pytest.mark.timing def test_stats_measures(pool_cls, dsn): + def worker(n): with p.connection() as conn: conn.execute("select pg_sleep(0.2)") @@ -532,6 +540,7 @@ def test_stats_measures(pool_cls, dsn): @pytest.mark.slow @pytest.mark.timing def test_stats_usage(pool_cls, dsn): + def worker(n): try: with p.connection(timeout=0.3) as conn: @@ -613,6 +622,7 @@ def test_check_init(pool_cls, dsn): @pytest.mark.slow def test_check_timeout(pool_cls, dsn): + def check(conn): raise Exception() diff --git a/tests/pool/test_pool_null.py b/tests/pool/test_pool_null.py index fbe698df6..c54014572 100644 --- a/tests/pool/test_pool_null.py +++ b/tests/pool/test_pool_null.py @@ -40,10 +40,11 @@ def test_bad_size(dsn, min_size, max_size): class MyRow(Dict[str, Any]): - ... + pass def test_generic_connection_type(dsn): + def configure(conn: psycopg.Connection[Any]) -> None: set_autocommit(conn, True) @@ -76,10 +77,12 @@ def test_generic_connection_type(dsn): def test_non_generic_connection_type(dsn): + def configure(conn: psycopg.Connection[Any]) -> None: set_autocommit(conn, True) class MyConnection(psycopg.Connection[MyRow]): + def __init__(self, *args: Any, **kwargs: Any): kwargs["row_factory"] = class_row(MyRow) super().__init__(*args, **kwargs) diff --git a/tests/pool/test_pool_null_async.py b/tests/pool/test_pool_null_async.py index 09c0e2150..b610045cc 100644 --- a/tests/pool/test_pool_null_async.py +++ b/tests/pool/test_pool_null_async.py @@ -40,7 +40,7 @@ async def test_bad_size(dsn, min_size, max_size): class MyRow(Dict[str, Any]): - ... + pass async def test_generic_connection_type(dsn): diff --git a/tests/scripts/dectest.py b/tests/scripts/dectest.py index a49f11685..0fb65ad90 100644 --- a/tests/scripts/dectest.py +++ b/tests/scripts/dectest.py @@ -1,6 +1,7 @@ """ A quick and rough performance comparison of text vs. 
binary Decimal adaptation """ + from random import randrange from decimal import Decimal import psycopg diff --git a/tests/scripts/pipeline-demo.py b/tests/scripts/pipeline-demo.py index ec952293a..74cc04d1f 100644 --- a/tests/scripts/pipeline-demo.py +++ b/tests/scripts/pipeline-demo.py @@ -7,6 +7,7 @@ We do not fetch results explicitly (using cursor.fetch*()), this is handled by execute() calls when pgconn socket is read-ready, which happens when the output buffer is full. """ + import argparse import asyncio import logging diff --git a/tests/test_connection.py b/tests/test_connection.py index 8456dba45..1d9217d2b 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -34,6 +34,7 @@ def test_connect_bad(conn_cls): def test_connect_str_subclass(conn_cls, dsn): + class MyString(str): pass @@ -467,6 +468,7 @@ def test_connect_args( ], ) def test_connect_badargs(conn_cls, monkeypatch, pgconn, args, kwargs, exctype): + def fake_connect(conninfo): return pgconn yield @@ -642,6 +644,7 @@ def test_cursor_factory(conn): def test_cursor_factory_connect(conn_cls, dsn): + class MyCursor(psycopg.Cursor[psycopg.rows.Row]): pass diff --git a/tests/test_connection_async.py b/tests/test_connection_async.py index 2e950aff4..a98f0f80b 100644 --- a/tests/test_connection_async.py +++ b/tests/test_connection_async.py @@ -637,7 +637,7 @@ async def test_cursor_factory(aconn): async with aconn.cursor() as cur: assert isinstance(cur, MyCursor) - async with (await aconn.execute("select 1")) as cur: + async with await aconn.execute("select 1") as cur: assert isinstance(cur, MyCursor) diff --git a/tests/test_copy.py b/tests/test_copy.py index fda854e60..55f9e3b77 100644 --- a/tests/test_copy.py +++ b/tests/test_copy.py @@ -306,6 +306,7 @@ def test_subclass_adapter(conn, format): BaseDumper = StrBinaryDumper # type: ignore class MyStrDumper(BaseDumper): + def dump(self, obj): return super().dump(obj) * 2 @@ -641,6 +642,7 @@ def test_worker_life(conn, format, buffer): def test_worker_error_propagated(conn, monkeypatch): + def copy_to_broken(pgconn, buffer): raise ZeroDivisionError yield @@ -803,6 +805,7 @@ def test_copy_table_across(conn_cls, dsn, faker, mode): class DataGenerator: + def __init__(self, conn, nrecs, srec, offset=0, block_size=8192): self.conn = conn self.nrecs = nrecs diff --git a/tests/test_cursor_common.py b/tests/test_cursor_common.py index 159e67cb1..d5b0d1513 100644 --- a/tests/test_cursor_common.py +++ b/tests/test_cursor_common.py @@ -576,6 +576,7 @@ def test_row_factory_none(conn): def test_bad_row_factory(conn): + def broken_factory(cur): 1 / 0 @@ -584,6 +585,7 @@ def test_bad_row_factory(conn): cur.execute("select 1") def broken_maker(cur): + def make_row(seq): 1 / 0 diff --git a/tests/test_tpc.py b/tests/test_tpc.py index 41023ccc8..864f8a786 100644 --- a/tests/test_tpc.py +++ b/tests/test_tpc.py @@ -22,6 +22,7 @@ def test_tpc_disabled(conn, pipeline): class TestTPC: + def test_tpc_commit(self, conn, tpc): xid = conn.xid(1, "gtrid", "bqual") assert conn.info.transaction_status == TransactionStatus.IDLE diff --git a/tests/types/test_numeric.py b/tests/types/test_numeric.py index fffc32f31..8e5db7668 100644 --- a/tests/types/test_numeric.py +++ b/tests/types/test_numeric.py @@ -402,9 +402,11 @@ def test_dump_numeric_binary(conn, expr): @pytest.mark.parametrize( "fmt_in", [ - f - if f != PyFormat.BINARY - else pytest.param(f, marks=pytest.mark.crdb_skip("binary decimal")) + ( + f + if f != PyFormat.BINARY + else pytest.param(f, marks=pytest.mark.crdb_skip("binary decimal")) + ) 
for f in PyFormat ], )
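The remaining test hunks collect the other pieces of the 24.1.0 stable style: a blank line is enforced after a module docstring (types/enum.py and the two test scripts), the redundant parentheses around await aconn.execute("select 1") in an async with go away, a long assignment is split by parenthesizing its right-hand side instead of breaking the subscript target (tests/conftest.py), and a conditional expression that has to be split is wrapped in its own parentheses (tests/types/test_numeric.py). The class MyRow(...) bodies in the pool tests change from ... to pass by hand, presumably because the new style would fold an ellipsis-only class onto one line, which flake8 would flag as E701 rather than the ignored E704. A sketch of the two splitting rules, restating the conftest.py hunk plus a made-up conditional, assuming black >= 24.1.0 behaves as documented:

    import asyncio
    import sys
    from typing import Any, Dict

    asyncio_options: Dict[str, Any] = {}

    if sys.platform == "win32":
        # Old layout: black < 24.1.0 broke the subscript target on the left.
        asyncio_options[
            "loop_factory"
        ] = asyncio.WindowsSelectorEventLoopPolicy().new_event_loop

        # New layout: the target stays intact and the right-hand side is
        # parenthesized, matching the hunk in tests/conftest.py.
        asyncio_options["loop_factory"] = (
            asyncio.WindowsSelectorEventLoopPolicy().new_event_loop
        )

    # A conditional expression that does not fit on one line is now split
    # inside its own parentheses rather than bare:
    labels = [
        (
            fmt
            if fmt != "binary"
            else "binary decimal output is skipped on backends that lack it"
        )
        for fmt in ("text", "binary")
    ]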