From: Daniele Varrazzo
Date: Sun, 30 Jan 2022 16:57:45 +0000 (+0000)
Subject: Apply string manipulations from black --preview
X-Git-Tag: pool-3.1~3^2
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=7a327ec6af1e902ea4721a8735681ff1325c4d9b;p=thirdparty%2Fpsycopg.git

Apply string manipulations from black --preview

They are stable under the current black and look like an improvement.
---

diff --git a/psycopg/psycopg/_dns.py b/psycopg/psycopg/_dns.py
index e8860f49e..93448f24a 100644
--- a/psycopg/psycopg/_dns.py
+++ b/psycopg/psycopg/_dns.py
@@ -89,8 +89,7 @@ async def resolve_hostaddr_async(params: Dict[str, Any]) -> Dict[str, Any]:
             # ProgrammingError would have been more appropriate, but this is
             # what the raise if the libpq fails connect in the same case.
             raise e.OperationalError(
-                f"cannot match {len(hosts_in)} hosts with {len(ports_in)}"
-                " port numbers"
+                f"cannot match {len(hosts_in)} hosts with {len(ports_in)} port numbers"
             )

     ports_out = []
@@ -241,8 +240,7 @@ class Rfc2782Resolver:
             # ProgrammingError would have been more appropriate, but this is
             # what the raise if the libpq fails connect in the same case.
             raise e.OperationalError(
-                f"cannot match {len(hosts_in)} hosts with {len(ports_in)}"
-                " port numbers"
+                f"cannot match {len(hosts_in)} hosts with {len(ports_in)} port numbers"
             )

         out = []
diff --git a/psycopg/psycopg/_queries.py b/psycopg/psycopg/_queries.py
index b19c01159..f98912103 100644
--- a/psycopg/psycopg/_queries.py
+++ b/psycopg/psycopg/_queries.py
@@ -170,7 +170,7 @@ def _validate_and_reorder_params(
         sequence = False
     else:
         raise TypeError(
-            f"query parameters should be a sequence or a mapping,"
+            "query parameters should be a sequence or a mapping,"
             f" got {type(vars).__name__}"
         )

@@ -193,7 +193,7 @@ def _validate_and_reorder_params(
         return [vars[item] for item in order or ()]  # type: ignore[call-overload]
     except KeyError:
         raise e.ProgrammingError(
-            f"query parameter missing:"
+            "query parameter missing:"
             f" {', '.join(sorted(i for i in order or () if i not in vars))}"
         )

@@ -250,7 +250,7 @@ def _split_query(query: bytes, encoding: str = "ascii") -> List[QueryPart]:

         if ph == b"%(":
             raise e.ProgrammingError(
-                f"incomplete placeholder:"
+                "incomplete placeholder:"
                 f" '{query[m.span(0)[0]:].split()[0].decode(encoding)}'"
             )
         elif ph == b"% ":
@@ -261,7 +261,7 @@ def _split_query(query: bytes, encoding: str = "ascii") -> List[QueryPart]:
             )
         elif ph[-1:] not in b"sbt":
             raise e.ProgrammingError(
-                f"only '%s', '%b', '%t' are allowed as placeholders, got"
+                "only '%s', '%b', '%t' are allowed as placeholders, got"
                 f" '{m.group(0).decode(encoding)}'"
             )

diff --git a/psycopg/psycopg/connection.py b/psycopg/psycopg/connection.py
index 88498e75e..f4e17b9ed 100644
--- a/psycopg/psycopg/connection.py
+++ b/psycopg/psycopg/connection.py
@@ -148,7 +148,7 @@ class BaseConnection(Generic[Row]):

             warn(
                 f"connection {self} was deleted while still open."
- f" Please use 'with' or '.close()' to close the connection", + " Please use 'with' or '.close()' to close the connection", ResourceWarning, ) @@ -435,7 +435,7 @@ class BaseConnection(Generic[Row]): if self.pgconn.status == ConnStatus.BAD: raise e.OperationalError("the connection is closed") raise e.InterfaceError( - f"cannot execute operations: the connection is" + "cannot execute operations: the connection is" f" in status {self.pgconn.status}" ) @@ -524,7 +524,7 @@ class BaseConnection(Generic[Row]): if self.pgconn.transaction_status != TransactionStatus.IDLE: raise e.ProgrammingError( - f"can't start two-phase transaction: connection in status" + "can't start two-phase transaction: connection in status" f" {TransactionStatus(self.pgconn.transaction_status).name}" ) @@ -543,8 +543,7 @@ class BaseConnection(Generic[Row]): ) if self._tpc[1]: raise e.ProgrammingError( - "'tpc_prepare()' cannot be used during a prepared" - " two-phase transaction" + "'tpc_prepare()' cannot be used during a prepared two-phase transaction" ) xid = self._tpc[0] self._tpc = (xid, True) diff --git a/psycopg/psycopg/copy.py b/psycopg/psycopg/copy.py index e8b45adae..dd4734607 100644 --- a/psycopg/psycopg/copy.py +++ b/psycopg/psycopg/copy.py @@ -618,10 +618,9 @@ _unpack_int2 = struct.Struct("!h").unpack_from _unpack_int4 = struct.Struct("!i").unpack_from _binary_signature = ( - # Signature, flags, extra length - b"PGCOPY\n\xff\r\n\0" - b"\x00\x00\x00\x00" - b"\x00\x00\x00\x00" + b"PGCOPY\n\xff\r\n\0" # Signature + b"\x00\x00\x00\x00" # flags + b"\x00\x00\x00\x00" # extra length ) _binary_trailer = b"\xff\xff" _binary_null = b"\xff\xff\xff\xff" diff --git a/psycopg/psycopg/cursor.py b/psycopg/psycopg/cursor.py index 2b172a0b9..2f710e18e 100644 --- a/psycopg/psycopg/cursor.py +++ b/psycopg/psycopg/cursor.py @@ -422,8 +422,7 @@ class BaseCursor(Generic[ConnectionType, Row]): ) else: raise e.InternalError( - f"unexpected result status from query:" - f" {ExecStatus(result.status).name}" + f"unexpected result status from query: {ExecStatus(result.status).name}" ) def _set_current_result(self, i: int, format: Optional[Format] = None) -> None: diff --git a/psycopg/psycopg/pq/_pq_ctypes.py b/psycopg/psycopg/pq/_pq_ctypes.py index 605c86f0a..43dfc1ad4 100644 --- a/psycopg/psycopg/pq/_pq_ctypes.py +++ b/psycopg/psycopg/pq/_pq_ctypes.py @@ -197,7 +197,7 @@ if libpq_version >= 120000: def PQhostaddr(pgconn: PGconn_struct) -> bytes: if not _PQhostaddr: raise NotSupportedError( - f"PQhostaddr requires libpq from PostgreSQL 12," + "PQhostaddr requires libpq from PostgreSQL 12," f" {libpq_version} available instead" ) @@ -585,7 +585,7 @@ if libpq_version >= 140000: def PQsetTraceFlags(pgconn: PGconn_struct, flags: int) -> None: if not _PQsetTraceFlags: raise NotSupportedError( - f"PQsetTraceFlags requires libpq from PostgreSQL 14," + "PQsetTraceFlags requires libpq from PostgreSQL 14," f" {libpq_version} available instead" ) @@ -618,7 +618,7 @@ def PQencryptPasswordConn( ) -> Optional[bytes]: if not _PQencryptPasswordConn: raise NotSupportedError( - f"PQencryptPasswordConn requires libpq from PostgreSQL 10," + "PQencryptPasswordConn requires libpq from PostgreSQL 10," f" {libpq_version} available instead" ) @@ -675,7 +675,7 @@ if libpq_version >= 140000: def PQpipelineStatus(pgconn: PGconn_struct) -> int: if not _PQpipelineStatus: raise NotSupportedError( - f"PQpipelineStatus requires libpq from PostgreSQL 14," + "PQpipelineStatus requires libpq from PostgreSQL 14," f" {libpq_version} available instead" ) return 
_PQpipelineStatus(pgconn) @@ -684,7 +684,7 @@ def PQpipelineStatus(pgconn: PGconn_struct) -> int: def PQenterPipelineMode(pgconn: PGconn_struct) -> int: if not _PQenterPipelineMode: raise NotSupportedError( - f"PQenterPipelineMode requires libpq from PostgreSQL 14," + "PQenterPipelineMode requires libpq from PostgreSQL 14," f" {libpq_version} available instead" ) return _PQenterPipelineMode(pgconn) @@ -693,7 +693,7 @@ def PQenterPipelineMode(pgconn: PGconn_struct) -> int: def PQexitPipelineMode(pgconn: PGconn_struct) -> int: if not _PQexitPipelineMode: raise NotSupportedError( - f"PQexitPipelineMode requires libpq from PostgreSQL 14," + "PQexitPipelineMode requires libpq from PostgreSQL 14," f" {libpq_version} available instead" ) return _PQexitPipelineMode(pgconn) @@ -702,7 +702,7 @@ def PQexitPipelineMode(pgconn: PGconn_struct) -> int: def PQpipelineSync(pgconn: PGconn_struct) -> int: if not _PQpipelineSync: raise NotSupportedError( - f"PQpipelineSync requires libpq from PostgreSQL 14," + "PQpipelineSync requires libpq from PostgreSQL 14," f" {libpq_version} available instead" ) return _PQpipelineSync(pgconn) @@ -711,7 +711,7 @@ def PQpipelineSync(pgconn: PGconn_struct) -> int: def PQsendFlushRequest(pgconn: PGconn_struct) -> int: if not _PQsendFlushRequest: raise NotSupportedError( - f"PQsendFlushRequest requires libpq from PostgreSQL 14," + "PQsendFlushRequest requires libpq from PostgreSQL 14," f" {libpq_version} available instead" ) return _PQsendFlushRequest(pgconn) diff --git a/psycopg/psycopg/server_cursor.py b/psycopg/psycopg/server_cursor.py index b10f066c5..2ac01c953 100644 --- a/psycopg/psycopg/server_cursor.py +++ b/psycopg/psycopg/server_cursor.py @@ -196,7 +196,7 @@ class ServerCursor(Cursor[Row]): if not self.closed: warn( f"the server-side cursor {self} was deleted while still open." - f" Please use 'with' or '.close()' to close the cursor properly", + " Please use 'with' or '.close()' to close the cursor properly", ResourceWarning, ) @@ -333,7 +333,7 @@ class AsyncServerCursor(AsyncCursor[Row]): if not self.closed: warn( f"the server-side cursor {self} was deleted while still open." 
- f" Please use 'with' or '.close()' to close the cursor properly", + " Please use 'with' or '.close()' to close the cursor properly", ResourceWarning, ) diff --git a/psycopg/psycopg/sql.py b/psycopg/psycopg/sql.py index acfeac0bf..76bd9539c 100644 --- a/psycopg/psycopg/sql.py +++ b/psycopg/psycopg/sql.py @@ -160,7 +160,7 @@ class Composed(Composable): joiner = SQL(joiner) elif not isinstance(joiner, SQL): raise TypeError( - f"Composed.join() argument must be strings or SQL," + "Composed.join() argument must be strings or SQL," f" got {joiner!r} instead" ) diff --git a/psycopg/psycopg/types/multirange.py b/psycopg/psycopg/types/multirange.py index 5f22a993f..5846edcfa 100644 --- a/psycopg/psycopg/types/multirange.py +++ b/psycopg/psycopg/types/multirange.py @@ -264,7 +264,7 @@ class MultirangeLoader(BaseMultirangeLoader[T]): def load(self, data: Buffer) -> Multirange[T]: if not data or data[0] != _START_INT: raise e.DataError( - f"malformed multirange starting with" + "malformed multirange starting with" f" {bytes(data[:1]).decode('utf8', 'replace')}" ) diff --git a/tests/conftest.py b/tests/conftest.py index fc4831474..6085eeaf6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -41,8 +41,9 @@ def pytest_addoption(parser): parser.addoption( "--no-collect-ok", action="store_true", - help="If no test is collected, exit with 0 instead of 5" - " (useful with --lfnf=none).", + help=( + "If no test collected, exit with 0 instead of 5 (useful with --lfnf=none)." + ), ) parser.addoption( diff --git a/tests/fix_db.py b/tests/fix_db.py index b5c932ad9..1702a4ad2 100644 --- a/tests/fix_db.py +++ b/tests/fix_db.py @@ -14,8 +14,10 @@ def pytest_addoption(parser): "--test-dsn", metavar="DSN", default=os.environ.get("PSYCOPG_TEST_DSN"), - help="Connection string to run database tests requiring a connection" - " [you can also use the PSYCOPG_TEST_DSN env var].", + help=( + "Connection string to run database tests requiring a connection" + " [you can also use the PSYCOPG_TEST_DSN env var]." + ), ) parser.addoption( "--pq-tracefile", diff --git a/tests/fix_faker.py b/tests/fix_faker.py index 7befd5ebd..59e184d37 100644 --- a/tests/fix_faker.py +++ b/tests/fix_faker.py @@ -153,8 +153,7 @@ class Faker: if len(r) > 200: r = f"{r[:200]}... ({len(r)} chars)" raise Exception( - f"value {r!r} at record {i} column0 {j}" - f" failed insert: {e}" + f"value {r!r} at record {i} column0 {j} failed insert: {e}" ) from None # just in case, but hopefully we should have triggered the problem @@ -179,8 +178,7 @@ class Faker: if len(r) > 200: r = f"{r[:200]}... 
({len(r)} chars)" raise Exception( - f"value {r!r} at record {i} column0 {j}" - f" failed insert: {e}" + f"value {r!r} at record {i} column0 {j} failed insert: {e}" ) from None # just in case, but hopefully we should have triggered the problem diff --git a/tests/pq/test_escaping.py b/tests/pq/test_escaping.py index 7db6248c6..59259c9b1 100644 --- a/tests/pq/test_escaping.py +++ b/tests/pq/test_escaping.py @@ -145,7 +145,7 @@ def test_escape_string_badenc(pgconn): esc.escape_string(data) -@pytest.mark.parametrize("data", [(b"hello\00world"), (b"\00\00\00\00")]) +@pytest.mark.parametrize("data", [b"hello\00world", b"\00\00\00\00"]) def test_escape_bytea(pgconn, data): exp = rb"\x" + b"".join(b"%02x" % c for c in data) esc = pq.Escaping(pgconn) @@ -174,7 +174,7 @@ def test_escape_1char(pgconn): assert rv == exp -@pytest.mark.parametrize("data", [(b"hello\00world"), (b"\00\00\00\00")]) +@pytest.mark.parametrize("data", [b"hello\00world", b"\00\00\00\00"]) def test_unescape_bytea(pgconn, data): enc = rb"\x" + b"".join(b"%02x" % c for c in data) esc = pq.Escaping(pgconn) diff --git a/tests/test_dns_srv.py b/tests/test_dns_srv.py index d269a6d69..5a7fa2d18 100644 --- a/tests/test_dns_srv.py +++ b/tests/test_dns_srv.py @@ -25,15 +25,13 @@ samples_ok = [ ), ( "host=_pg._tcp.bar.com", - ( - "host=db1.example.com,db4.example.com,db3.example.com,db2.example.com" - " port=5432,5432,5433,5432" - ), + "host=db1.example.com,db4.example.com,db3.example.com,db2.example.com" + " port=5432,5432,5433,5432", None, ), ( "host=service.foo.com port=srv", - ("host=service.example.com port=15432"), + "host=service.example.com port=15432", None, ), # No resolution diff --git a/tests/test_prepared.py b/tests/test_prepared.py index cfb4b0495..8cacddd46 100644 --- a/tests/test_prepared.py +++ b/tests/test_prepared.py @@ -249,8 +249,7 @@ def test_change_type_savepoint(conn): conn.execute("CREATE TYPE prepenum AS ENUM ('foo', 'bar', 'baz')") conn.execute("CREATE TABLE preptable(id integer, bar prepenum[])") conn.cursor().execute( - "INSERT INTO preptable (bar) " - "VALUES (%(enum_col)s::prepenum[])", + "INSERT INTO preptable (bar) VALUES (%(enum_col)s::prepenum[])", {"enum_col": ["foo"]}, ) raise ZeroDivisionError() diff --git a/tests/test_server_cursor.py b/tests/test_server_cursor.py index 759800676..9b77fa852 100644 --- a/tests/test_server_cursor.py +++ b/tests/test_server_cursor.py @@ -374,7 +374,7 @@ def test_itersize(conn, commands): cmds = commands.popall() assert len(cmds) == 2 for cmd in cmds: - assert ("fetch forward 2") in cmd.lower() + assert "fetch forward 2" in cmd.lower() def test_cant_scroll_by_default(conn): diff --git a/tests/test_server_cursor_async.py b/tests/test_server_cursor_async.py index 14e98b5cf..f169cad5a 100644 --- a/tests/test_server_cursor_async.py +++ b/tests/test_server_cursor_async.py @@ -385,7 +385,7 @@ async def test_itersize(aconn, acommands): cmds = acommands.popall() assert len(cmds) == 2 for cmd in cmds: - assert ("fetch forward 2") in cmd.lower() + assert "fetch forward 2" in cmd.lower() async def test_cant_scroll_by_default(aconn):