git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
Don't pass "utf8" to str.encode() and bytes.decode()
authorDaniele Varrazzo <daniele.varrazzo@gmail.com>
Sun, 12 Sep 2021 22:25:57 +0000 (00:25 +0200)
committerDaniele Varrazzo <daniele.varrazzo@gmail.com>
Sun, 12 Sep 2021 22:25:57 +0000 (00:25 +0200)
"utf8" is the default: not passing it is marginally faster.

38 files changed:
.gitignore
psycopg/psycopg/_preparing.py
psycopg/psycopg/_tz.py
psycopg/psycopg/connection.py
psycopg/psycopg/conninfo.py
psycopg/psycopg/cursor.py
psycopg/psycopg/encodings.py
psycopg/psycopg/generators.py
psycopg/psycopg/pq/misc.py
psycopg/psycopg/types/array.py
psycopg/psycopg/types/datetime.py
psycopg/psycopg/types/json.py
psycopg/psycopg/types/net.py
psycopg/psycopg/types/numeric.py
psycopg/psycopg/types/uuid.py
psycopg_c/psycopg_c/_psycopg/generators.pyx
psycopg_c/psycopg_c/types/datetime.pyx
psycopg_c/psycopg_c/types/string.pyx
psycopg_c/setup.py
tests/adapters_example.py
tests/fix_db.py
tests/pq/test_escaping.py
tests/pq/test_pgconn.py
tests/test_adapt.py
tests/test_connection.py
tests/test_connection_async.py
tests/test_conninfo.py
tests/test_copy.py
tests/test_copy_async.py
tests/test_cursor.py
tests/test_cursor_async.py
tests/test_encodings.py
tests/test_prepared.py
tests/test_prepared_async.py
tests/test_sql.py
tests/types/test_array.py
tests/types/test_hstore.py
tests/types/test_string.py

index c32de56d15c6be419b2f367742f3f5896784deb6..15807e28ad9b00c140fe91af6e75f80a9dccfa43 100644 (file)
@@ -1,8 +1,8 @@
 *.egg-info/
 .tox
 /.eggs/
-/build
-/dist
+build/
+dist/
 *.pstats
 .mypy_cache
 __pycache__/
index 13c07901ac39428fca2083d2adff438d298c36e8..32e2c1d90255280dbf8de128c416326ff170bfe3 100644 (file)
@@ -60,7 +60,7 @@ class PrepareManager:
 
         if value >= self.prepare_threshold or prepare:
             # The query has been executed enough times and needs to be prepared
-            name = f"_pg3_{self._prepared_idx}".encode("utf-8")
+            name = f"_pg3_{self._prepared_idx}".encode()
             self._prepared_idx += 1
             return Prepare.SHOULD, name
         else:
index a51047c8290c85d774f800dc8e99be6feeecb421..337188eb571b9bf4238167d2835f2cbe46617ece 100644 (file)
@@ -25,7 +25,7 @@ def get_tzinfo(pgconn: Optional[PGconn]) -> tzinfo:
     try:
         return _timezones[tzname]
     except KeyError:
-        sname = tzname.decode("utf8") if tzname else "UTC"
+        sname = tzname.decode() if tzname else "UTC"
         try:
             zi: tzinfo = ZoneInfo(sname)
         except KeyError:
index 5999f6ac06e6c22fccfa52a3a85b6f73bafcaf68..b834bcc4aefc98107e8024da8394dd3466d58428 100644 (file)
@@ -454,7 +454,7 @@ class BaseConnection(Generic[Row]):
             else:
                 raise e.InterfaceError(
                     f"unexpected result {ExecStatus(result.status).name}"
-                    f" from command {command.decode('utf8')!r}"
+                    f" from command {command.decode()!r}"
                 )
         return result
 
@@ -477,7 +477,7 @@ class BaseConnection(Generic[Row]):
         if self.isolation_level is not None:
             val = IsolationLevel(self.isolation_level)
             parts.append(b"ISOLATION LEVEL")
-            parts.append(val.name.replace("_", " ").encode("utf8"))
+            parts.append(val.name.replace("_", " ").encode())
 
         if self.read_only is not None:
             parts.append(b"READ ONLY" if self.read_only else b"READ WRITE")
index 87c61120e067b0ae33c68ff6c35cf8b75cb122da..1007fca542810da961f31a59468e2c6b0f3b5867 100644 (file)
@@ -75,7 +75,7 @@ def conninfo_to_dict(conninfo: str = "", **kwargs: Any) -> Dict[str, Any]:
     """
     opts = _parse_conninfo(conninfo)
     rv = {
-        opt.keyword.decode("utf8"): opt.val.decode("utf8")
+        opt.keyword.decode(): opt.val.decode()
         for opt in opts
         if opt.val is not None
     }
@@ -94,7 +94,7 @@ def _parse_conninfo(conninfo: str) -> List[pq.ConninfoOption]:
     Return the result of pq.Conninfo.parse() on success.
     """
     try:
-        return pq.Conninfo.parse(conninfo.encode("utf8"))
+        return pq.Conninfo.parse(conninfo.encode())
     except e.OperationalError as ex:
         raise e.ProgrammingError(str(ex))
 
@@ -181,7 +181,7 @@ class ConnectionInfo:
         }
         # Not returned by the libq. Bug? Bet we're using SSH.
         defaults.setdefault(b"channel_binding", b"prefer")
-        defaults[b"passfile"] = str(Path.home() / ".pgpass").encode("utf-8")
+        defaults[b"passfile"] = str(Path.home() / ".pgpass").encode()
 
         return {
             i.keyword.decode(pyenc): i.val.decode(pyenc)
index 1dceedd05f1375f6231cd8385dd2ef0eee545db4..477ed84c6c234b4da16a0817329767c31e083218 100644 (file)
@@ -167,10 +167,7 @@ class BaseCursor(Generic[ConnectionType, Row]):
         `!None` if the cursor doesn't have a result available.
         """
         msg = self.pgresult.command_status if self.pgresult else None
-        if msg:
-            return msg.decode("utf-8")
-        else:
-            return None
+        return msg.decode() if msg else None
 
     def _make_row_maker(self) -> RowMaker[Row]:
         raise NotImplementedError
index 126826a0296bab9fb9fbfa99ce04f99cf0e942eb..80a725f19ed75a4a9149f7e95edadb772219e442 100644 (file)
@@ -58,9 +58,9 @@ _py_codecs = {
 
 py_codecs: Dict[Union[bytes, str], str] = {}
 py_codecs.update((k, v) for k, v in _py_codecs.items())
-py_codecs.update((k.encode("utf-8"), v) for k, v in _py_codecs.items())
+py_codecs.update((k.encode(), v) for k, v in _py_codecs.items())
 
-pg_codecs = {v: k.encode("utf-8") for k, v in _py_codecs.items()}
+pg_codecs = {v: k.encode() for k, v in _py_codecs.items()}
 
 
 def py2pg(name: str) -> bytes:
index 86c13c2887471e07047aa149772e66cce5ce4184..770e7f350ea1a0f4e119c9a9d56cf156487fe4b2 100644 (file)
@@ -34,7 +34,7 @@ def connect(conninfo: str) -> PQGenConn[PGconn]:
     Generator to create a database connection without blocking.
 
     """
-    conn = pq.PGconn.connect_start(conninfo.encode("utf8"))
+    conn = pq.PGconn.connect_start(conninfo.encode())
     while 1:
         if conn.status == ConnStatus.BAD:
             raise e.OperationalError(
index b5f2c63de7b7359faa3723662812ae41398e0cad..c6d8a7ccd39349bbc519423c3f977ca8b3c6a948 100644 (file)
@@ -96,12 +96,12 @@ def connection_summary(pgconn: PGconn) -> str:
 
         status = TransactionStatus(pgconn.transaction_status).name
         if not pgconn.host.startswith(b"/"):
-            parts.append(("host", pgconn.host.decode("utf-8")))
+            parts.append(("host", pgconn.host.decode()))
         if pgconn.port != b"5432":
-            parts.append(("port", pgconn.port.decode("utf-8")))
+            parts.append(("port", pgconn.port.decode()))
         if pgconn.user != pgconn.db:
-            parts.append(("user", pgconn.user.decode("utf-8")))
-        parts.append(("database", pgconn.db.decode("utf-8")))
+            parts.append(("user", pgconn.user.decode()))
+        parts.append(("database", pgconn.db.decode()))
     else:
         status = ConnStatus(pgconn.status).name
 
index cbc403b19e9fad972ea249a653de9ed901c2676c..61eafcf0b711608c05739089f21a203904e7e846 100644 (file)
@@ -128,7 +128,7 @@ class ListDumper(BaseListDumper):
         if sd.oid != INVALID_OID:
             info = self._get_base_type_info(sd.oid)
             dumper.oid = info.array_oid or TEXT_ARRAY_OID
-            dumper.delimiter = info.delimiter.encode("utf-8")
+            dumper.delimiter = info.delimiter.encode()
         else:
             dumper.oid = INVALID_OID
 
@@ -431,7 +431,7 @@ def register_array(
     name = f"{info.name.title()}{base.__name__}"
     attribs = {
         "base_oid": info.oid,
-        "delimiter": info.delimiter.encode("utf-8"),
+        "delimiter": info.delimiter.encode(),
     }
     loader = type(name, (base,), attribs)
     adapters.register_loader(info.array_oid, loader)
@@ -447,7 +447,7 @@ def register_array(
     attribs = {
         "oid": info.array_oid,
         "element_oid": info.oid,
-        "delimiter": info.delimiter.encode("utf-8"),
+        "delimiter": info.delimiter.encode(),
     }
     dumper = type(name, (base,), attribs)
     adapters.register_dumper(None, dumper)
index c900cb37986ea2df1d4fc274cebc435993bf647e..bde2a542547e30913cb4716549ded43f2b5b12ae 100644 (file)
@@ -47,7 +47,7 @@ class DateDumper(Dumper):
     def dump(self, obj: date) -> bytes:
         # NOTE: whatever the PostgreSQL DateStyle input format (DMY, MDY, YMD)
         # the YYYY-MM-DD is always understood correctly.
-        return str(obj).encode("utf8")
+        return str(obj).encode()
 
 
 class DateBinaryDumper(Dumper):
@@ -75,7 +75,7 @@ class _BaseTimeDumper(Dumper):
 
 class _BaseTimeTextDumper(_BaseTimeDumper):
     def dump(self, obj: time) -> bytes:
-        return str(obj).encode("utf8")
+        return str(obj).encode()
 
 
 class TimeDumper(_BaseTimeTextDumper):
@@ -143,7 +143,7 @@ class _BaseDatetimeTextDumper(_BaseDatetimeDumper):
     def dump(self, obj: datetime) -> bytes:
         # NOTE: whatever the PostgreSQL DateStyle input format (DMY, MDY, YMD)
         # the YYYY-MM-DD is always understood correctly.
-        return str(obj).encode("utf8")
+        return str(obj).encode()
 
 
 class DatetimeDumper(_BaseDatetimeTextDumper):
@@ -208,7 +208,7 @@ class TimedeltaDumper(Dumper):
                 setattr(self, "dump", self._dump_sql)
 
     def dump(self, obj: timedelta) -> bytes:
-        return str(obj).encode("utf8")
+        return str(obj).encode()
 
     def _dump_sql(self, obj: timedelta) -> bytes:
         # sql_standard format needs explicit signs
index cd99bec4ae83f6f38b0be732dd5a69f8712b9a7d..440c24d2b487dd1efbd0921978082e4073bcd04f 100644 (file)
@@ -122,7 +122,7 @@ class _JsonDumper(Dumper):
 
     def dump(self, obj: _JsonWrapper) -> bytes:
         dumps = obj.dumps or self.dumps
-        return dumps(obj.obj).encode("utf-8")
+        return dumps(obj.obj).encode()
 
 
 class JsonDumper(_JsonDumper):
@@ -148,7 +148,7 @@ class JsonbBinaryDumper(_JsonDumper):
 
     def dump(self, obj: _JsonWrapper) -> bytes:
         dumps = obj.dumps or self.dumps
-        return b"\x01" + dumps(obj.obj).encode("utf-8")
+        return b"\x01" + dumps(obj.obj).encode()
 
 
 class _JsonLoader(Loader):
index 82ad42064539efe916aa81f936a22a3e54be0de0..0cb21c1ea6fb2a6aed9a4957793de770e1039203 100644 (file)
@@ -41,7 +41,7 @@ class InterfaceDumper(Dumper):
     oid = postgres.types["inet"].oid
 
     def dump(self, obj: Interface) -> bytes:
-        return str(obj).encode("utf8")
+        return str(obj).encode()
 
 
 class NetworkDumper(Dumper):
@@ -49,7 +49,7 @@ class NetworkDumper(Dumper):
     oid = postgres.types["cidr"].oid
 
     def dump(self, obj: Network) -> bytes:
-        return str(obj).encode("utf8")
+        return str(obj).encode()
 
 
 class AddressBinaryDumper(Dumper):
@@ -110,9 +110,9 @@ class InetLoader(_LazyIpaddress):
             data = bytes(data)
 
         if b"/" in data:
-            return ip_interface(data.decode("utf8"))
+            return ip_interface(data.decode())
         else:
-            return ip_address(data.decode("utf8"))
+            return ip_address(data.decode())
 
 
 class InetBinaryLoader(_LazyIpaddress):
@@ -142,7 +142,7 @@ class CidrLoader(_LazyIpaddress):
         if isinstance(data, memoryview):
             data = bytes(data)
 
-        return ip_network(data.decode("utf8"))
+        return ip_network(data.decode())
 
 
 class CidrBinaryLoader(_LazyIpaddress):
@@ -160,7 +160,7 @@ class CidrBinaryLoader(_LazyIpaddress):
         else:
             return IPv6Network((packed, prefix))
 
-        return ip_network(data.decode("utf8"))
+        return ip_network(data.decode())
 
 
 def register_default_adapters(context: AdaptContext) -> None:
index 1b0be0f2576a383b122c11c3c026a022a7526177..b0baf9db6a269f07c8d286f275676f4c2791cd99 100644 (file)
@@ -33,7 +33,7 @@ from .._wrappers import (
 
 class _NumberDumper(Dumper):
     def dump(self, obj: Any) -> bytes:
-        return str(obj).encode("utf8")
+        return str(obj).encode()
 
     def quote(self, obj: Any) -> bytes:
         value = self.dump(obj)
@@ -94,7 +94,7 @@ class DecimalDumper(_SpecialValuesDumper):
             # cover NaN and sNaN
             return b"NaN"
         else:
-            return str(obj).encode("utf8")
+            return str(obj).encode()
 
     _special = {
         b"Infinity": b"'Infinity'::numeric",
@@ -286,7 +286,7 @@ class NumericLoader(Loader):
     def load(self, data: Buffer) -> Decimal:
         if isinstance(data, memoryview):
             data = bytes(data)
-        return Decimal(data.decode("utf8"))
+        return Decimal(data.decode())
 
 
 DEC_DIGITS = 4  # decimal digits per Postgres "digit"
index cb88652040743e1f6cd31402665ed2a7c1cf3d41..a3b962d83f8e77c98c82f72a6fa6b1dfc34ff02d 100644 (file)
@@ -24,7 +24,7 @@ class UUIDDumper(Dumper):
     oid = postgres.types["uuid"].oid
 
     def dump(self, obj: "uuid.UUID") -> bytes:
-        return obj.hex.encode("utf8")
+        return obj.hex.encode()
 
 
 class UUIDBinaryDumper(UUIDDumper):
@@ -47,7 +47,7 @@ class UUIDLoader(Loader):
     def load(self, data: Buffer) -> "uuid.UUID":
         if isinstance(data, memoryview):
             data = bytes(data)
-        return UUID(data.decode("utf8"))
+        return UUID(data.decode())
 
 
 class UUIDBinaryLoader(UUIDLoader):
index 78a91fbc2167d5ddf6f725385377533310474e1a..c57ac9b444cc6baeb67586381e51f91b708aaa79 100644 (file)
@@ -24,7 +24,7 @@ def connect(conninfo: str) -> PQGenConn[abc.PGconn]:
     Generator to create a database connection without blocking.
 
     """
-    cdef pq.PGconn conn = pq.PGconn.connect_start(conninfo.encode("utf8"))
+    cdef pq.PGconn conn = pq.PGconn.connect_start(conninfo.encode())
     logger.debug("connection started, status %s", conn.status)
     cdef libpq.PGconn *pgconn_ptr = conn._pgconn_ptr
     cdef int conn_status = libpq.PQstatus(pgconn_ptr)
index 1aaa17803672243bb538b4132a4f725187c3d9d7..99cc7db72e9141356723ac95c21c9978d048961e 100644 (file)
@@ -1115,7 +1115,7 @@ cdef object _timezone_from_connection(pq.PGconn pgconn, __cache={}):
     if ptr != NULL:
         return <object>ptr
 
-    sname = tzname.decode("utf8") if tzname else "UTC"
+    sname = tzname.decode() if tzname else "UTC"
     try:
         zi = ZoneInfo(sname)
     except KeyError:
index 239b804d8d0fcfda308cec66f4f4aaa9c7abf548..c4558504d169489f0381ee223b55e2eed90217ec 100644 (file)
@@ -43,7 +43,7 @@ cdef class _BaseStrDumper(CDumper):
                 self._bytes_encoding = b"utf-8"
                 self.is_utf8 = 1
             else:
-                self._bytes_encoding = pg2py(pgenc).encode("utf-8")
+                self._bytes_encoding = pg2py(pgenc).encode()
                 if self._bytes_encoding == b"ascii":
                     self.is_utf8 = 1
             self.encoding = PyBytes_AsString(self._bytes_encoding)
@@ -124,7 +124,7 @@ cdef class _TextLoader(CLoader):
                 self._bytes_encoding = b"utf-8"
                 self.is_utf8 = 1
             else:
-                self._bytes_encoding = pg2py(pgenc).encode("utf-8")
+                self._bytes_encoding = pg2py(pgenc).encode()
 
             if pgenc == b"SQL_ASCII":
                 self.encoding = NULL
index 71ea0af47c7356200d80c586a11beea19e3a067f..5eb5ee1729cb6633a8cc55f85ff9b510baaf0cf2 100644 (file)
@@ -36,7 +36,7 @@ def get_config(what: str) -> str:
         log.error(f"couldn't run {pg_config!r} --{what}: %s", e)
         raise
     else:
-        return out.stdout.strip().decode("utf8")
+        return out.stdout.strip().decode()
 
 
 class psycopg_build_ext(build_ext):
index 322a09c7e33ffaa3473818f31d560fe995687aa6..8930b1a689c134ddfd54b8a78e570adfa0c98e9a 100644 (file)
@@ -21,7 +21,7 @@ class MyStrDumper:
         self._cls = cls
 
     def dump(self, obj: str) -> bytes:
-        return (obj * 2).encode("utf-8")
+        return (obj * 2).encode()
 
     def quote(self, obj: str) -> bytes:
         value = self.dump(obj)
@@ -42,7 +42,7 @@ class MyTextLoader:
         pass
 
     def load(self, data: Buffer) -> str:
-        return (bytes(data) * 2).decode("utf-8")
+        return (bytes(data) * 2).decode()
 
 
 # This should be the definition of psycopg.adapt.DumperKey, but mypy doesn't
index 3954a3ce5f88127e3004646d74c3cf697e83baab..3cba37b5905c758fa47a7c4fc22d890c4ecd86d1 100644 (file)
@@ -37,7 +37,7 @@ def pgconn(dsn, request):
     """Return a PGconn connection open to `--test-dsn`."""
     from psycopg import pq
 
-    conn = pq.PGconn.connect(dsn.encode("utf8"))
+    conn = pq.PGconn.connect(dsn.encode())
     if conn.status != pq.ConnStatus.OK:
         pytest.fail(
             f"bad connection: {conn.error_message.decode('utf8', 'replace')}"
index fde461872fcc13819b6df261f8d6806ff7ea3ca2..6743dc80fad13f4e1d6f3a759402bc756544ebc6 100644 (file)
@@ -145,7 +145,7 @@ def test_escape_string_badconn(pgconn):
 def test_escape_string_badenc(pgconn):
     res = pgconn.exec_(b"set client_encoding to 'UTF8'")
     assert res.status == pq.ExecStatus.COMMAND_OK
-    data = "\u20ac".encode("utf8")[:-1]
+    data = "\u20ac".encode()[:-1]
     esc = pq.Escaping(pgconn)
     with pytest.raises(psycopg.OperationalError):
         esc.escape_string(data)
index c45ed81298ade33e278cfb619d140755fcf3549d..cb00e10d14a236ae2885ca2f11aa3e4f2798213b 100644 (file)
@@ -14,7 +14,7 @@ from ..utils import gc_collect
 
 
 def test_connectdb(dsn):
-    conn = pq.PGconn.connect(dsn.encode("utf8"))
+    conn = pq.PGconn.connect(dsn.encode())
     assert conn.status == pq.ConnStatus.OK, conn.error_message
 
 
@@ -30,7 +30,7 @@ def test_connectdb_badtype(baddsn):
 
 
 def test_connect_async(dsn):
-    conn = pq.PGconn.connect_start(dsn.encode("utf8"))
+    conn = pq.PGconn.connect_start(dsn.encode())
     conn.nonblocking = 1
     while 1:
         assert conn.status != pq.ConnStatus.BAD
@@ -53,9 +53,7 @@ def test_connect_async(dsn):
 
 def test_connect_async_bad(dsn):
     parsed_dsn = {
-        e.keyword: e.val
-        for e in pq.Conninfo.parse(dsn.encode("utf8"))
-        if e.val
+        e.keyword: e.val for e in pq.Conninfo.parse(dsn.encode()) if e.val
     }
     parsed_dsn[b"dbname"] = b"psycopg_test_not_for_real"
     dsn = b" ".join(b"%s='%s'" % item for item in parsed_dsn.items())
@@ -84,7 +82,7 @@ def test_finish(pgconn):
 
 
 def test_weakref(dsn):
-    conn = pq.PGconn.connect(dsn.encode("utf8"))
+    conn = pq.PGconn.connect(dsn.encode())
     w = weakref.ref(conn)
     conn.finish()
     del conn
@@ -114,7 +112,7 @@ def test_info(dsn, pgconn):
     assert dbname.dispchar == b""
     assert dbname.dispsize == 20
 
-    parsed = pq.Conninfo.parse(dsn.encode("utf8"))
+    parsed = pq.Conninfo.parse(dsn.encode())
     name = [o.val for o in parsed if o.keyword == b"dbname"][0]
     user = [o.val for o in parsed if o.keyword == b"user"][0]
     assert dbname.val == (name or user)
@@ -164,7 +162,7 @@ def test_reset_async(pgconn):
 
 
 def test_ping(dsn):
-    rv = pq.PGconn.ping(dsn.encode("utf8"))
+    rv = pq.PGconn.ping(dsn.encode())
     assert rv == pq.Ping.OK
 
     rv = pq.PGconn.ping(b"port=9999")
@@ -248,7 +246,7 @@ def test_transaction_status(pgconn):
 
 def test_parameter_status(dsn, monkeypatch):
     monkeypatch.setenv("PGAPPNAME", "psycopg tests")
-    pgconn = pq.PGconn.connect(dsn.encode("utf8"))
+    pgconn = pq.PGconn.connect(dsn.encode())
     assert pgconn.parameter_status(b"application_name") == b"psycopg tests"
     assert pgconn.parameter_status(b"wat") is None
     pgconn.finish()
@@ -291,9 +289,7 @@ def test_server_version(pgconn):
 def test_socket(pgconn):
     socket = pgconn.socket
     assert socket > 0
-    pgconn.exec_(
-        f"select pg_terminate_backend({pgconn.backend_pid})".encode("utf8")
-    )
+    pgconn.exec_(f"select pg_terminate_backend({pgconn.backend_pid})".encode())
     # TODO: on my box it raises OperationalError as it should. Not on Travis,
     # so let's see if at least an ok value comes out of it.
     try:
@@ -333,7 +329,7 @@ def test_used_password(pgconn, dsn, monkeypatch):
     # Note that the server may still need a password passed via pgpass
     # so it may be that has_password is false but still a password was
     # requested by the server and passed by libpq.
-    info = pq.Conninfo.parse(dsn.encode("utf8"))
+    info = pq.Conninfo.parse(dsn.encode())
     has_password = (
         "PGPASSWORD" in os.environ
         or [i for i in info if i.keyword == b"password"][0].val is not None
@@ -515,6 +511,6 @@ def test_str(pgconn, dsn):
     pgconn.finish()
     assert "[BAD]" in str(pgconn)
 
-    pgconn2 = pq.PGconn.connect_start(dsn.encode("utf8"))
+    pgconn2 = pq.PGconn.connect_start(dsn.encode())
     assert "[" in str(pgconn2)
     assert "[IDLE]" not in str(pgconn2)
index 9480147b60f6b4b913a9389b7fe080487acfc17e..41a86c75e49166207e5cb61fe236aa5e8f5a3802 100644 (file)
@@ -128,7 +128,7 @@ def test_subclass_dumper(conn):
 
     class MyStrDumper(StrDumper):
         def dump(self, obj):
-            return (obj * 2).encode("utf-8")
+            return (obj * 2).encode()
 
     conn.adapters.register_dumper(str, MyStrDumper)
     assert conn.execute("select %t", ["hello"]).fetchone()[0] == "hellohello"
@@ -165,7 +165,7 @@ def test_subclass_loader(conn):
 
     class MyTextLoader(TextLoader):
         def load(self, data):
-            return (bytes(data) * 2).decode("utf-8")
+            return (bytes(data) * 2).decode()
 
     conn.adapters.register_loader("text", MyTextLoader)
     assert conn.execute("select 'hello'::text").fetchone()[0] == "hellohello"
index 23e8407fdf04825a309466e9f46ffd9ea75b9721..be6917650965cc6f0f01a74e2aa4ed62c11e5472 100644 (file)
@@ -325,9 +325,7 @@ def test_set_encoding(conn):
 )
 def test_normalize_encoding(conn, enc, out, codec):
     conn.client_encoding = enc
-    assert (
-        conn.pgconn.parameter_status(b"client_encoding").decode("utf-8") == out
-    )
+    assert conn.pgconn.parameter_status(b"client_encoding").decode() == out
     assert conn.client_encoding == codec
 
 
@@ -344,9 +342,7 @@ def test_normalize_encoding(conn, enc, out, codec):
 def test_encoding_env_var(dsn, monkeypatch, enc, out, codec):
     monkeypatch.setenv("PGCLIENTENCODING", enc)
     conn = psycopg.connect(dsn)
-    assert (
-        conn.pgconn.parameter_status(b"client_encoding").decode("utf-8") == out
-    )
+    assert conn.pgconn.parameter_status(b"client_encoding").decode() == out
     assert conn.client_encoding == codec
 
 
index 57ecfa6b81ced120dc166fd13e5c3efd6c6ee74c..7c71cb0b26688418383a95bb8312b5b549281170 100644 (file)
@@ -343,10 +343,7 @@ async def test_set_encoding(aconn):
 )
 async def test_normalize_encoding(aconn, enc, out, codec):
     await aconn.set_client_encoding(enc)
-    assert (
-        aconn.pgconn.parameter_status(b"client_encoding").decode("utf-8")
-        == out
-    )
+    assert aconn.pgconn.parameter_status(b"client_encoding").decode() == out
     assert aconn.client_encoding == codec
 
 
@@ -363,10 +360,7 @@ async def test_normalize_encoding(aconn, enc, out, codec):
 async def test_encoding_env_var(dsn, monkeypatch, enc, out, codec):
     monkeypatch.setenv("PGCLIENTENCODING", enc)
     aconn = await psycopg.AsyncConnection.connect(dsn)
-    assert (
-        aconn.pgconn.parameter_status(b"client_encoding").decode("utf-8")
-        == out
-    )
+    assert aconn.pgconn.parameter_status(b"client_encoding").decode() == out
     assert aconn.client_encoding == codec
 
 
index c36f75a7e1dea3b899cd092a5523c610afde33bc..8410aad2f322eb3431a30894d702ef18ee5a47a0 100644 (file)
@@ -105,7 +105,7 @@ class TestConnectionInfo:
             pytest.skip("hostaddr not supported on libpq < 12")
 
         info_val = getattr(conn.info, info_attr)
-        pgconn_val = getattr(conn.pgconn, pgconn_attr).decode("utf-8")
+        pgconn_val = getattr(conn.pgconn, pgconn_attr).decode()
         assert info_val == pgconn_val
 
         conn.close()
@@ -118,7 +118,7 @@ class TestConnectionInfo:
             conn.info.hostaddr
 
     def test_port(self, conn):
-        assert conn.info.port == int(conn.pgconn.port.decode("utf-8"))
+        assert conn.info.port == int(conn.pgconn.port.decode())
         conn.close()
         with pytest.raises(psycopg.OperationalError):
             conn.info.port
@@ -176,7 +176,7 @@ class TestConnectionInfo:
 
     def test_no_password(self, dsn):
         dsn2 = make_conninfo(dsn, password="the-pass-word")
-        pgconn = psycopg.pq.PGconn.connect_start(dsn2.encode("utf8"))
+        pgconn = psycopg.pq.PGconn.connect_start(dsn2.encode())
         info = ConnectionInfo(pgconn)
         assert info.password == "the-pass-word"
         assert "password" not in info.get_parameters()
@@ -184,7 +184,7 @@ class TestConnectionInfo:
 
     def test_dsn_no_password(self, dsn):
         dsn2 = make_conninfo(dsn, password="the-pass-word")
-        pgconn = psycopg.pq.PGconn.connect_start(dsn2.encode("utf8"))
+        pgconn = psycopg.pq.PGconn.connect_start(dsn2.encode())
         info = ConnectionInfo(pgconn)
         assert info.password == "the-pass-word"
         assert "password" not in info.dsn
index 500251d5e7b8eb40780d9abf62e76c1d01956182..c3f7545fe06aba6e0b847e6ecd6af63ba194f3ca 100644 (file)
@@ -249,7 +249,7 @@ def test_copy_in_str(conn):
     cur = conn.cursor()
     ensure_table(cur, sample_tabledef)
     with cur.copy("copy copy_in from stdin (format text)") as copy:
-        copy.write(sample_text.decode("utf8"))
+        copy.write(sample_text.decode())
 
     data = cur.execute("select * from copy_in order by 1").fetchall()
     assert data == sample_records
@@ -260,7 +260,7 @@ def test_copy_in_str_binary(conn):
     ensure_table(cur, sample_tabledef)
     with pytest.raises(e.QueryCanceled):
         with cur.copy("copy copy_in from stdin (format binary)") as copy:
-            copy.write(sample_text.decode("utf8"))
+            copy.write(sample_text.decode())
 
     assert conn.pgconn.transaction_status == conn.TransactionStatus.INERROR
 
@@ -428,7 +428,7 @@ def test_copy_from_to_bytes(conn):
     cur = conn.cursor()
     with cur.copy("copy copy_in from stdin") as copy:
         for block in gen.blocks():
-            copy.write(block.encode("utf8"))
+            copy.write(block.encode())
 
     gen.assert_data()
 
@@ -639,7 +639,7 @@ def py_to_raw(item, fmt):
         if isinstance(item, int):
             return bytes([0, 0, 0, item])
         elif isinstance(item, str):
-            return item.encode("utf8")
+            return item.encode()
     return item
 
 
@@ -696,6 +696,6 @@ class DataGenerator:
             if not block:
                 break
             if isinstance(block, str):
-                block = block.encode("utf8")
+                block = block.encode()
             m.update(block)
         return m.hexdigest()
index 40ac2cc04eeb3049623150998129f621fdd6e61d..34f9dd2d459d82ef88514694a8d37d2f6f6f4499 100644 (file)
@@ -229,7 +229,7 @@ async def test_copy_in_str(aconn):
     cur = aconn.cursor()
     await ensure_table(cur, sample_tabledef)
     async with cur.copy("copy copy_in from stdin (format text)") as copy:
-        await copy.write(sample_text.decode("utf8"))
+        await copy.write(sample_text.decode())
 
     await cur.execute("select * from copy_in order by 1")
     data = await cur.fetchall()
@@ -241,7 +241,7 @@ async def test_copy_in_str_binary(aconn):
     await ensure_table(cur, sample_tabledef)
     with pytest.raises(e.QueryCanceled):
         async with cur.copy("copy copy_in from stdin (format binary)") as copy:
-            await copy.write(sample_text.decode("utf8"))
+            await copy.write(sample_text.decode())
 
     assert aconn.pgconn.transaction_status == aconn.TransactionStatus.INERROR
 
@@ -418,7 +418,7 @@ async def test_copy_from_to_bytes(aconn):
     cur = aconn.cursor()
     async with cur.copy("copy copy_in from stdin") as copy:
         for block in gen.blocks():
-            await copy.write(block.encode("utf8"))
+            await copy.write(block.encode())
 
     await gen.assert_data()
 
@@ -682,6 +682,6 @@ class DataGenerator:
             if not block:
                 break
             if isinstance(block, str):
-                block = block.encode("utf8")
+                block = block.encode()
             m.update(block)
         return m.hexdigest()
index 4445d1898b6427a7b921d8134633f7bdbc49264d..3032dbaea6e7822508d855af6844c909b1291be2 100644 (file)
@@ -502,7 +502,7 @@ def test_stream_binary_cursor_text_override(conn):
     for rec in cur.stream("select generate_series(1, 2)", binary=False):
         recs.append(rec)
         assert cur.pgresult.fformat(0) == 0
-        assert cur.pgresult.get_value(0, 0) == str(rec[0]).encode("utf8")
+        assert cur.pgresult.get_value(0, 0) == str(rec[0]).encode()
 
     assert recs == [(1,), (2,)]
 
index c561e9fab45d3356b677c85e22b9825816e0762a..e069f8bd106c6f54be573a6c9c502a0a0c251bea 100644 (file)
@@ -498,7 +498,7 @@ async def test_stream_binary_cursor_text_override(aconn):
     async for rec in cur.stream("select generate_series(1, 2)", binary=False):
         recs.append(rec)
         assert cur.pgresult.fformat(0) == 0
-        assert cur.pgresult.get_value(0, 0) == str(rec[0]).encode("utf8")
+        assert cur.pgresult.get_value(0, 0) == str(rec[0]).encode()
 
     assert recs == [(1,), (2,)]
 
index 9572813508a5cafb914aadfeed45028360cbc80c..a4811ac40d6394aa6c91dda4a1e140997560edce 100644 (file)
@@ -22,7 +22,7 @@ def test_names_normalised():
     ],
 )
 def test_py2pg(pyenc, pgenc):
-    assert encodings.py2pg(pyenc) == pgenc.encode("utf8")
+    assert encodings.py2pg(pyenc) == pgenc.encode()
 
 
 @pytest.mark.parametrize(
@@ -34,10 +34,10 @@ def test_py2pg(pyenc, pgenc):
     ],
 )
 def test_pg2py(pyenc, pgenc):
-    assert encodings.pg2py(pgenc.encode("utf-8")) == pyenc
+    assert encodings.pg2py(pgenc.encode()) == pyenc
 
 
 @pytest.mark.parametrize("pgenc", ["MULE_INTERNAL", "EUC_TW"])
 def test_pg2py_missing(pgenc):
     with pytest.raises(psycopg.NotSupportedError):
-        encodings.pg2py(pgenc.encode("utf-8"))
+        encodings.pg2py(pgenc.encode())
index bfe56114847bd69427d0faae36353981f5e2bb0f..68bcf7c3247f8d9d788d04fa8d2e0420f6e802d6 100644 (file)
@@ -143,7 +143,7 @@ def test_evict_lru(conn):
     assert len(conn._prepared._prepared) == 5
     assert conn._prepared._prepared[b"select 'a'", ()] == b"_pg3_0"
     for i in [9, 8, 7, 6]:
-        assert conn._prepared._prepared[f"select {i}".encode("utf8"), ()] == 1
+        assert conn._prepared._prepared[f"select {i}".encode(), ()] == 1
 
     cur = conn.execute("select statement from pg_prepared_statements")
     assert cur.fetchall() == [("select 'a'",)]
@@ -158,9 +158,8 @@ def test_evict_lru_deallocate(conn):
 
     assert len(conn._prepared._prepared) == 5
     for i in [9, 8, 7, 6, "'a'"]:
-        assert conn._prepared._prepared[
-            f"select {i}".encode("utf8"), ()
-        ].startswith(b"_pg3_")
+        name = conn._prepared._prepared[f"select {i}".encode(), ()]
+        assert name.startswith(b"_pg3_")
 
     cur = conn.execute(
         "select statement from pg_prepared_statements order by prepare_time",
index c9c4cd48aa2d02d1387a00f2c00dc359ec77b9c9..dc139c1d9bd1df529cc4fb852c53f0e5d21d9670 100644 (file)
@@ -151,7 +151,7 @@ async def test_evict_lru(aconn):
     assert len(aconn._prepared._prepared) == 5
     assert aconn._prepared._prepared[b"select 'a'", ()] == b"_pg3_0"
     for i in [9, 8, 7, 6]:
-        assert aconn._prepared._prepared[f"select {i}".encode("utf8"), ()] == 1
+        assert aconn._prepared._prepared[f"select {i}".encode(), ()] == 1
 
     cur = await aconn.execute("select statement from pg_prepared_statements")
     assert await cur.fetchall() == [("select 'a'",)]
@@ -167,7 +167,7 @@ async def test_evict_lru_deallocate(aconn):
     assert len(aconn._prepared._prepared) == 5
     for i in [9, 8, 7, 6, "'a'"]:
         assert aconn._prepared._prepared[
-            f"select {i}".encode("utf8"), ()
+            f"select {i}".encode(), ()
         ].startswith(b"_pg3_")
 
     cur = await aconn.execute(
index 4382a0de8d49c82cb3edd51c7e285e1b8910d9c2..dcdd058c064747e5265a9e595419d8916ac3813c 100644 (file)
@@ -329,7 +329,7 @@ class TestLiteral:
         )
 
         conn.client_encoding = "utf8"
-        assert sql.Literal(eur).as_bytes(conn) == f"'{eur}'".encode("utf8")
+        assert sql.Literal(eur).as_bytes(conn) == f"'{eur}'".encode()
         conn.client_encoding = "latin9"
         assert sql.Literal(eur).as_bytes(conn) == f"'{eur}'".encode("latin9")
 
@@ -411,7 +411,7 @@ class TestSQL:
         assert sql.SQL("foo").as_bytes(conn) == b"foo"
 
         conn.client_encoding = "utf8"
-        assert sql.SQL(eur).as_bytes(conn) == eur.encode("utf8")
+        assert sql.SQL(eur).as_bytes(conn) == eur.encode()
 
         conn.client_encoding = "latin9"
         assert sql.SQL(eur).as_bytes(conn) == eur.encode("latin9")
@@ -485,7 +485,7 @@ class TestComposed:
         obj = sql.Composed([sql.SQL("foo"), sql.SQL(eur)])
 
         conn.client_encoding = "utf8"
-        assert obj.as_bytes(conn) == ("foo" + eur).encode("utf8")
+        assert obj.as_bytes(conn) == ("foo" + eur).encode()
 
         conn.client_encoding = "latin9"
         assert obj.as_bytes(conn) == ("foo" + eur).encode("latin9")
index d7f38291bacc047055e77ce2e338a3077566c1b0..194b0f21573f1f90cda6b2b716f904ec735d994c 100644 (file)
@@ -233,7 +233,7 @@ def test_dump_list_no_comma_separator(conn):
         oid = psycopg.postgres.types["box"].oid
 
         def dump(self, box):
-            return ("(%s,%s),(%s,%s)" % box.coords).encode("utf8")
+            return ("(%s,%s),(%s,%s)" % box.coords).encode()
 
     conn.adapters.register_dumper(Box, BoxDumper)
 
index 6ec75662cb672e8930000c56c2dd596dc1c4c474..5c951548f3cf5ddacbb278bbb83f696b292cecac 100644 (file)
@@ -24,7 +24,7 @@ from psycopg.types.hstore import HstoreLoader, register_hstore
 )
 def test_parse_ok(s, d):
     loader = HstoreLoader(dict, None)
-    assert loader.load(s.encode("utf8")) == d
+    assert loader.load(s.encode()) == d
 
 
 @pytest.mark.parametrize(
@@ -41,7 +41,7 @@ def test_parse_ok(s, d):
 def test_parse_bad(s):
     with pytest.raises(psycopg.DataError):
         loader = HstoreLoader(dict, None)
-        loader.load(s.encode("utf8"))
+        loader.load(s.encode())
 
 
 def test_register_conn(hstore, conn):
index bdedc6d18bad3cd31f8b2a0fff33e93a08d63b78..de32b9aa5e88586dd5aca88eea0e7f63ced86819 100644 (file)
@@ -192,7 +192,7 @@ def test_load_ascii(conn, typename, fmt_out):
 
     conn.client_encoding = "ascii"
     cur.execute(f"select chr(%s)::{typename}", (ord(eur),))
-    assert cur.fetchone()[0] == eur.encode("utf8")
+    assert cur.fetchone()[0] == eur.encode()
 
     stmt = sql.SQL("copy (select chr({})) to stdout (format {})").format(
         ord(eur), sql.SQL(fmt_out.name)
@@ -201,7 +201,7 @@ def test_load_ascii(conn, typename, fmt_out):
         copy.set_types([typename])
         (res,) = copy.read_row()
 
-    assert res == eur.encode("utf8")
+    assert res == eur.encode()
 
 
 @pytest.mark.parametrize("fmt_in", [Format.AUTO, Format.TEXT, Format.BINARY])
@@ -221,7 +221,7 @@ def test_text_array_ascii(conn, fmt_in, fmt_out):
     conn.client_encoding = "ascii"
     cur = conn.cursor(binary=fmt_out)
     a = list(map(chr, range(1, 256))) + [eur]
-    exp = [s.encode("utf8") for s in a]
+    exp = [s.encode() for s in a]
     (res,) = cur.execute(f"select %{fmt_in}::text[]", (a,)).fetchone()
     assert res == exp