]> git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
Apply string manipulations from black --preview
authorDaniele Varrazzo <daniele.varrazzo@gmail.com>
Sun, 30 Jan 2022 16:57:45 +0000 (16:57 +0000)
committerDaniele Varrazzo <daniele.varrazzo@gmail.com>
Sun, 30 Jan 2022 16:57:45 +0000 (16:57 +0000)
They are stable under the current black and look like an improvement.

17 files changed:
psycopg/psycopg/_dns.py
psycopg/psycopg/_queries.py
psycopg/psycopg/connection.py
psycopg/psycopg/copy.py
psycopg/psycopg/cursor.py
psycopg/psycopg/pq/_pq_ctypes.py
psycopg/psycopg/server_cursor.py
psycopg/psycopg/sql.py
psycopg/psycopg/types/multirange.py
tests/conftest.py
tests/fix_db.py
tests/fix_faker.py
tests/pq/test_escaping.py
tests/test_dns_srv.py
tests/test_prepared.py
tests/test_server_cursor.py
tests/test_server_cursor_async.py

index e8860f49e5c8d2c62a953ec45844e1d763486a79..93448f24a86a6297ffe7b5a35b5c764dce17863f 100644 (file)
@@ -89,8 +89,7 @@ async def resolve_hostaddr_async(params: Dict[str, Any]) -> Dict[str, Any]:
             # ProgrammingError would have been more appropriate, but this is
             # what the raise if the libpq fails connect in the same case.
             raise e.OperationalError(
-                f"cannot match {len(hosts_in)} hosts with {len(ports_in)}"
-                " port numbers"
+                f"cannot match {len(hosts_in)} hosts with {len(ports_in)} port numbers"
             )
         ports_out = []
 
@@ -241,8 +240,7 @@ class Rfc2782Resolver:
             # ProgrammingError would have been more appropriate, but this is
             # what the raise if the libpq fails connect in the same case.
             raise e.OperationalError(
-                f"cannot match {len(hosts_in)} hosts with {len(ports_in)}"
-                " port numbers"
+                f"cannot match {len(hosts_in)} hosts with {len(ports_in)} port numbers"
             )
 
         out = []
index b19c0115967b0e7a9dcedb495b9280f195fbdd2f..f98912103b9cb3179d10b89d5e4556523fe6d446 100644 (file)
@@ -170,7 +170,7 @@ def _validate_and_reorder_params(
         sequence = False
     else:
         raise TypeError(
-            f"query parameters should be a sequence or a mapping,"
+            "query parameters should be a sequence or a mapping,"
             f" got {type(vars).__name__}"
         )
 
@@ -193,7 +193,7 @@ def _validate_and_reorder_params(
             return [vars[item] for item in order or ()]  # type: ignore[call-overload]
         except KeyError:
             raise e.ProgrammingError(
-                f"query parameter missing:"
+                "query parameter missing:"
                 f" {', '.join(sorted(i for i in order or () if i not in vars))}"
             )
 
@@ -250,7 +250,7 @@ def _split_query(query: bytes, encoding: str = "ascii") -> List[QueryPart]:
 
         if ph == b"%(":
             raise e.ProgrammingError(
-                f"incomplete placeholder:"
+                "incomplete placeholder:"
                 f" '{query[m.span(0)[0]:].split()[0].decode(encoding)}'"
             )
         elif ph == b"% ":
@@ -261,7 +261,7 @@ def _split_query(query: bytes, encoding: str = "ascii") -> List[QueryPart]:
             )
         elif ph[-1:] not in b"sbt":
             raise e.ProgrammingError(
-                f"only '%s', '%b', '%t' are allowed as placeholders, got"
+                "only '%s', '%b', '%t' are allowed as placeholders, got"
                 f" '{m.group(0).decode(encoding)}'"
             )
 
index 88498e75e4c1a20218d9a01610af53edc5a6756e..f4e17b9ede9b8d2490cd9894def433dc28e3d18b 100644 (file)
@@ -148,7 +148,7 @@ class BaseConnection(Generic[Row]):
 
         warn(
             f"connection {self} was deleted while still open."
-            f" Please use 'with' or '.close()' to close the connection",
+            " Please use 'with' or '.close()' to close the connection",
             ResourceWarning,
         )
 
@@ -435,7 +435,7 @@ class BaseConnection(Generic[Row]):
         if self.pgconn.status == ConnStatus.BAD:
             raise e.OperationalError("the connection is closed")
         raise e.InterfaceError(
-            f"cannot execute operations: the connection is"
+            "cannot execute operations: the connection is"
             f" in status {self.pgconn.status}"
         )
 
@@ -524,7 +524,7 @@ class BaseConnection(Generic[Row]):
 
         if self.pgconn.transaction_status != TransactionStatus.IDLE:
             raise e.ProgrammingError(
-                f"can't start two-phase transaction: connection in status"
+                "can't start two-phase transaction: connection in status"
                 f" {TransactionStatus(self.pgconn.transaction_status).name}"
             )
 
@@ -543,8 +543,7 @@ class BaseConnection(Generic[Row]):
             )
         if self._tpc[1]:
             raise e.ProgrammingError(
-                "'tpc_prepare()' cannot be used during a prepared"
-                " two-phase transaction"
+                "'tpc_prepare()' cannot be used during a prepared two-phase transaction"
             )
         xid = self._tpc[0]
         self._tpc = (xid, True)
index e8b45adae70de596c0c1e2ad4488f3d3c7f6cd4c..dd47346079be8bcddf8242ccbef3ac54785f50f6 100644 (file)
@@ -618,10 +618,9 @@ _unpack_int2 = struct.Struct("!h").unpack_from
 _unpack_int4 = struct.Struct("!i").unpack_from
 
 _binary_signature = (
-    # Signature, flags, extra length
-    b"PGCOPY\n\xff\r\n\0"
-    b"\x00\x00\x00\x00"
-    b"\x00\x00\x00\x00"
+    b"PGCOPY\n\xff\r\n\0"  # Signature
+    b"\x00\x00\x00\x00"  # flags
+    b"\x00\x00\x00\x00"  # extra length
 )
 _binary_trailer = b"\xff\xff"
 _binary_null = b"\xff\xff\xff\xff"
index 2b172a0b939cdace66d0c3815dbf485cb1657a1e..2f710e18e106f0eaaef5aea55a48fd6cc8016312 100644 (file)
@@ -422,8 +422,7 @@ class BaseCursor(Generic[ConnectionType, Row]):
             )
         else:
             raise e.InternalError(
-                f"unexpected result status from query:"
-                f" {ExecStatus(result.status).name}"
+                f"unexpected result status from query: {ExecStatus(result.status).name}"
             )
 
     def _set_current_result(self, i: int, format: Optional[Format] = None) -> None:
index 605c86f0a39befb0acd2684de98e82af3ee2a57a..43dfc1ad492ff957375ec427c51a3bc42da72d70 100644 (file)
@@ -197,7 +197,7 @@ if libpq_version >= 120000:
 def PQhostaddr(pgconn: PGconn_struct) -> bytes:
     if not _PQhostaddr:
         raise NotSupportedError(
-            f"PQhostaddr requires libpq from PostgreSQL 12,"
+            "PQhostaddr requires libpq from PostgreSQL 12,"
             f" {libpq_version} available instead"
         )
 
@@ -585,7 +585,7 @@ if libpq_version >= 140000:
 def PQsetTraceFlags(pgconn: PGconn_struct, flags: int) -> None:
     if not _PQsetTraceFlags:
         raise NotSupportedError(
-            f"PQsetTraceFlags requires libpq from PostgreSQL 14,"
+            "PQsetTraceFlags requires libpq from PostgreSQL 14,"
             f" {libpq_version} available instead"
         )
 
@@ -618,7 +618,7 @@ def PQencryptPasswordConn(
 ) -> Optional[bytes]:
     if not _PQencryptPasswordConn:
         raise NotSupportedError(
-            f"PQencryptPasswordConn requires libpq from PostgreSQL 10,"
+            "PQencryptPasswordConn requires libpq from PostgreSQL 10,"
             f" {libpq_version} available instead"
         )
 
@@ -675,7 +675,7 @@ if libpq_version >= 140000:
 def PQpipelineStatus(pgconn: PGconn_struct) -> int:
     if not _PQpipelineStatus:
         raise NotSupportedError(
-            f"PQpipelineStatus requires libpq from PostgreSQL 14,"
+            "PQpipelineStatus requires libpq from PostgreSQL 14,"
             f" {libpq_version} available instead"
         )
     return _PQpipelineStatus(pgconn)
@@ -684,7 +684,7 @@ def PQpipelineStatus(pgconn: PGconn_struct) -> int:
 def PQenterPipelineMode(pgconn: PGconn_struct) -> int:
     if not _PQenterPipelineMode:
         raise NotSupportedError(
-            f"PQenterPipelineMode requires libpq from PostgreSQL 14,"
+            "PQenterPipelineMode requires libpq from PostgreSQL 14,"
             f" {libpq_version} available instead"
         )
     return _PQenterPipelineMode(pgconn)
@@ -693,7 +693,7 @@ def PQenterPipelineMode(pgconn: PGconn_struct) -> int:
 def PQexitPipelineMode(pgconn: PGconn_struct) -> int:
     if not _PQexitPipelineMode:
         raise NotSupportedError(
-            f"PQexitPipelineMode requires libpq from PostgreSQL 14,"
+            "PQexitPipelineMode requires libpq from PostgreSQL 14,"
             f" {libpq_version} available instead"
         )
     return _PQexitPipelineMode(pgconn)
@@ -702,7 +702,7 @@ def PQexitPipelineMode(pgconn: PGconn_struct) -> int:
 def PQpipelineSync(pgconn: PGconn_struct) -> int:
     if not _PQpipelineSync:
         raise NotSupportedError(
-            f"PQpipelineSync requires libpq from PostgreSQL 14,"
+            "PQpipelineSync requires libpq from PostgreSQL 14,"
             f" {libpq_version} available instead"
         )
     return _PQpipelineSync(pgconn)
@@ -711,7 +711,7 @@ def PQpipelineSync(pgconn: PGconn_struct) -> int:
 def PQsendFlushRequest(pgconn: PGconn_struct) -> int:
     if not _PQsendFlushRequest:
         raise NotSupportedError(
-            f"PQsendFlushRequest requires libpq from PostgreSQL 14,"
+            "PQsendFlushRequest requires libpq from PostgreSQL 14,"
             f" {libpq_version} available instead"
         )
     return _PQsendFlushRequest(pgconn)
index b10f066c564ffc6e49d3d7c0cb263fa495b28cad..2ac01c953b76a3a5387275a323c355867ef727e9 100644 (file)
@@ -196,7 +196,7 @@ class ServerCursor(Cursor[Row]):
         if not self.closed:
             warn(
                 f"the server-side cursor {self} was deleted while still open."
-                f" Please use 'with' or '.close()' to close the cursor properly",
+                " Please use 'with' or '.close()' to close the cursor properly",
                 ResourceWarning,
             )
 
@@ -333,7 +333,7 @@ class AsyncServerCursor(AsyncCursor[Row]):
         if not self.closed:
             warn(
                 f"the server-side cursor {self} was deleted while still open."
-                f" Please use 'with' or '.close()' to close the cursor properly",
+                " Please use 'with' or '.close()' to close the cursor properly",
                 ResourceWarning,
             )
 
index acfeac0bf4fe8faf825ce3410c03b87b5efcf2c3..76bd9539c22580fb3ee7e790dff4d870c563263f 100644 (file)
@@ -160,7 +160,7 @@ class Composed(Composable):
             joiner = SQL(joiner)
         elif not isinstance(joiner, SQL):
             raise TypeError(
-                f"Composed.join() argument must be strings or SQL,"
+                "Composed.join() argument must be strings or SQL,"
                 f" got {joiner!r} instead"
             )
 
index 5f22a993f768d22e9dbadab00b94c611e4a7dbf1..5846edcfa8c20f5c5adea04292e19420d943de74 100644 (file)
@@ -264,7 +264,7 @@ class MultirangeLoader(BaseMultirangeLoader[T]):
     def load(self, data: Buffer) -> Multirange[T]:
         if not data or data[0] != _START_INT:
             raise e.DataError(
-                f"malformed multirange starting with"
+                "malformed multirange starting with"
                 f" {bytes(data[:1]).decode('utf8', 'replace')}"
             )
 
index fc48314743e043dfdc101b47763364b6e5f68747..6085eeaf675f0d3e042f42e503ec7d249c176ad3 100644 (file)
@@ -41,8 +41,9 @@ def pytest_addoption(parser):
     parser.addoption(
         "--no-collect-ok",
         action="store_true",
-        help="If no test is collected, exit with 0 instead of 5"
-        " (useful with --lfnf=none).",
+        help=(
+            "If no test collected, exit with 0 instead of 5 (useful with --lfnf=none)."
+        ),
     )
 
     parser.addoption(
index b5c932ad94c4b3e83e9f770d39dbaf29515268bd..1702a4ad248eda3a2421fd2d49dbaccf0af17601 100644 (file)
@@ -14,8 +14,10 @@ def pytest_addoption(parser):
         "--test-dsn",
         metavar="DSN",
         default=os.environ.get("PSYCOPG_TEST_DSN"),
-        help="Connection string to run database tests requiring a connection"
-        " [you can also use the PSYCOPG_TEST_DSN env var].",
+        help=(
+            "Connection string to run database tests requiring a connection"
+            " [you can also use the PSYCOPG_TEST_DSN env var]."
+        ),
     )
     parser.addoption(
         "--pq-tracefile",
index 7befd5ebd8b4e7f0dd483e4f3efd81e868acdc22..59e184d37e305039e3bb52af28476d0bd5d2f879 100644 (file)
@@ -153,8 +153,7 @@ class Faker:
                         if len(r) > 200:
                             r = f"{r[:200]}... ({len(r)} chars)"
                         raise Exception(
-                            f"value {r!r} at record {i} column0 {j}"
-                            f" failed insert: {e}"
+                            f"value {r!r} at record {i} column0 {j} failed insert: {e}"
                         ) from None
 
             # just in case, but hopefully we should have triggered the problem
@@ -179,8 +178,7 @@ class Faker:
                         if len(r) > 200:
                             r = f"{r[:200]}... ({len(r)} chars)"
                         raise Exception(
-                            f"value {r!r} at record {i} column0 {j}"
-                            f" failed insert: {e}"
+                            f"value {r!r} at record {i} column0 {j} failed insert: {e}"
                         ) from None
 
             # just in case, but hopefully we should have triggered the problem
index 7db6248c69da9ecfd8fd86008baa104b722a23f3..59259c9b15925a781e4c91f4c41fcae2533f8c38 100644 (file)
@@ -145,7 +145,7 @@ def test_escape_string_badenc(pgconn):
         esc.escape_string(data)
 
 
-@pytest.mark.parametrize("data", [(b"hello\00world"), (b"\00\00\00\00")])
+@pytest.mark.parametrize("data", [b"hello\00world", b"\00\00\00\00"])
 def test_escape_bytea(pgconn, data):
     exp = rb"\x" + b"".join(b"%02x" % c for c in data)
     esc = pq.Escaping(pgconn)
@@ -174,7 +174,7 @@ def test_escape_1char(pgconn):
         assert rv == exp
 
 
-@pytest.mark.parametrize("data", [(b"hello\00world"), (b"\00\00\00\00")])
+@pytest.mark.parametrize("data", [b"hello\00world", b"\00\00\00\00"])
 def test_unescape_bytea(pgconn, data):
     enc = rb"\x" + b"".join(b"%02x" % c for c in data)
     esc = pq.Escaping(pgconn)
index d269a6d69ca5203de8f74745d9c68fa575347ccf..5a7fa2d18b51909454cecb1de6fc9bc7f62c5c4e 100644 (file)
@@ -25,15 +25,13 @@ samples_ok = [
     ),
     (
         "host=_pg._tcp.bar.com",
-        (
-            "host=db1.example.com,db4.example.com,db3.example.com,db2.example.com"
-            " port=5432,5432,5433,5432"
-        ),
+        "host=db1.example.com,db4.example.com,db3.example.com,db2.example.com"
+        " port=5432,5432,5433,5432",
         None,
     ),
     (
         "host=service.foo.com port=srv",
-        ("host=service.example.com port=15432"),
+        "host=service.example.com port=15432",
         None,
     ),
     # No resolution
index cfb4b0495236d5abc93b7d6aa97bc58119711168..8cacddd46e5561ef527f67ce8aacaaefc1230498 100644 (file)
@@ -249,8 +249,7 @@ def test_change_type_savepoint(conn):
                     conn.execute("CREATE TYPE prepenum AS ENUM ('foo', 'bar', 'baz')")
                     conn.execute("CREATE TABLE preptable(id integer, bar prepenum[])")
                     conn.cursor().execute(
-                        "INSERT INTO preptable (bar) "
-                        "VALUES (%(enum_col)s::prepenum[])",
+                        "INSERT INTO preptable (bar) VALUES (%(enum_col)s::prepenum[])",
                         {"enum_col": ["foo"]},
                     )
                     raise ZeroDivisionError()
index 75980067676aaff82af5cff86b08b30daebc0365..9b77fa8527d46b773031b5119ca63c22b2d4b067 100644 (file)
@@ -374,7 +374,7 @@ def test_itersize(conn, commands):
         cmds = commands.popall()
         assert len(cmds) == 2
         for cmd in cmds:
-            assert ("fetch forward 2") in cmd.lower()
+            assert "fetch forward 2" in cmd.lower()
 
 
 def test_cant_scroll_by_default(conn):
index 14e98b5cf3a6981db12942195167d1f149447ad3..f169cad5ae771b1c783f22ef6e230f0a705146e8 100644 (file)
@@ -385,7 +385,7 @@ async def test_itersize(aconn, acommands):
         cmds = acommands.popall()
         assert len(cmds) == 2
         for cmd in cmds:
-            assert ("fetch forward 2") in cmd.lower()
+            assert "fetch forward 2" in cmd.lower()
 
 
 async def test_cant_scroll_by_default(aconn):