git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
refactor: while 1 -> while True
authorDaniele Varrazzo <daniele.varrazzo@gmail.com>
Sun, 15 May 2022 15:54:14 +0000 (17:54 +0200)
committerDaniele Varrazzo <daniele.varrazzo@gmail.com>
Tue, 17 May 2022 19:28:29 +0000 (21:28 +0200)
A legacy of ancient Python versions, where the latter required a
global lookup (``True`` only became a keyword in Python 3).

20 files changed:
psycopg/psycopg/connection.py
psycopg/psycopg/connection_async.py
psycopg/psycopg/cursor.py
psycopg/psycopg/cursor_async.py
psycopg/psycopg/generators.py
psycopg/psycopg/types/array.py
psycopg/psycopg/waiting.py
psycopg_c/psycopg_c/_psycopg/generators.pyx
psycopg_c/psycopg_c/pq/conninfo.pyx
tests/fix_faker.py
tests/pq/test_async.py
tests/pq/test_pgconn.py
tests/test_client_cursor.py
tests/test_client_cursor_async.py
tests/test_copy.py
tests/test_copy_async.py
tests/test_cursor.py
tests/test_cursor_async.py
tests/test_server_cursor.py
tests/test_server_cursor_async.py

index e8d06de72e732a9d393d5e45c457b1d6e11a9a3a..bf0d71affc58bdf919d4e9f3b2bcb2f2a6cdeeb3 100644 (file)
@@ -898,7 +898,7 @@ class Connection(BaseConnection[Row]):
         """
         Yield `Notify` objects as soon as they are received from the database.
         """
-        while 1:
+        while True:
             with self.lock:
                 ns = self.wait(notifies(self.pgconn))
             enc = pgconn_encoding(self.pgconn)
index 3411dc6de7cbaead37b0d480b173dfbbaa1e83e4..5741597db287797a4efc0dc51c844cfa1e696791 100644 (file)
@@ -296,7 +296,7 @@ class AsyncConnection(BaseConnection[Row]):
                 yield tx
 
     async def notifies(self) -> AsyncGenerator[Notify, None]:
-        while 1:
+        while True:
             async with self.lock:
                 ns = await self.wait(notifies(self.pgconn))
             enc = pgconn_encoding(self.pgconn)
index 557556037bfd29d652091af37ae45066a65b8e65..3b4d01325a99979271588cb7f891d67713be2ed4 100644 (file)
@@ -837,7 +837,7 @@ class Cursor(BaseCursor["Connection[Any]", Row]):
         def load(pos: int) -> Optional[Row]:
             return self._tx.load_row(pos, self._make_row)
 
-        while 1:
+        while True:
             row = load(self._pos)
             if row is None:
                 break
index 142845abcf60de4df26414f72b9bae98fb4026bd..7f7f9443be56166b0ef7038382c2633e2d5d8f59 100644 (file)
@@ -179,7 +179,7 @@ class AsyncCursor(BaseCursor["AsyncConnection[Any]", Row]):
         def load(pos: int) -> Optional[Row]:
             return self._tx.load_row(pos, self._make_row)
 
-        while 1:
+        while True:
             row = load(self._pos)
             if row is None:
                 break
index 86b5a0bba4225fcf4dd7477f2bd39a569b4f8053..6c27e5cb82342ef64f6f37e2c28f3106f90a1f27 100644 (file)
@@ -36,7 +36,7 @@ def connect(conninfo: str) -> PQGenConn[PGconn]:
 
     """
     conn = pq.PGconn.connect_start(conninfo.encode())
-    while 1:
+    while True:
         if conn.status == ConnStatus.BAD:
             encoding = conninfo_encoding(conninfo)
             raise e.OperationalError(
@@ -91,7 +91,7 @@ def send(pgconn: PGconn) -> PQGen[None]:
     After this generator has finished you may want to cycle using `fetch()`
     to retrieve the results available.
     """
-    while 1:
+    while True:
         f = pgconn.flush()
         if f == 0:
             break
@@ -114,7 +114,7 @@ def fetch_many(pgconn: PGconn) -> PQGen[List[PGresult]]:
     or error).
     """
     results: List[PGresult] = []
-    while 1:
+    while True:
         res = yield from fetch(pgconn)
         if not res:
             break
@@ -219,7 +219,7 @@ def notifies(pgconn: PGconn) -> PQGen[List[pq.PGnotify]]:
     pgconn.consume_input()
 
     ns = []
-    while 1:
+    while True:
         n = pgconn.notifies()
         if n:
             ns.append(n)
@@ -230,7 +230,7 @@ def notifies(pgconn: PGconn) -> PQGen[List[pq.PGnotify]]:
 
 
 def copy_from(pgconn: PGconn) -> PQGen[Union[memoryview, PGresult]]:
-    while 1:
+    while True:
         nbytes, data = pgconn.get_copy_data(1)
         if nbytes != 0:
             break
@@ -273,7 +273,7 @@ def copy_end(pgconn: PGconn, error: Optional[bytes]) -> PQGen[PGresult]:
         yield Wait.W
 
     # Repeat until the message is flushed to the server
-    while 1:
+    while True:
         yield Wait.W
         f = pgconn.flush()
         if f == 0:
index d8d7f8756dc869c2108b313a9f58564069edfa9a..ef9429b1575ed0fada12c2251326bb3a8164f10e 100644 (file)
@@ -381,7 +381,7 @@ class ArrayBinaryLoader(BaseArrayLoader):
         dims = [_unpack_dim(data, i)[0] for i in list(range(12, p, 8))]
 
         def consume(p: int) -> Iterator[Any]:
-            while 1:
+            while True:
                 size = unpack_len(data, p)[0]
                 p += 4
                 if size != -1:
index 807de6343ed9d42fb0156ff1904c3ae107c967ba..25e828fc487de9e8cfdcb8c9aa753e49ee1d91bb 100644 (file)
@@ -50,7 +50,7 @@ def wait_selector(gen: PQGen[RV], fileno: int, timeout: Optional[float] = None)
     try:
         s = next(gen)
         with DefaultSelector() as sel:
-            while 1:
+            while True:
                 sel.register(fileno, s)
                 rlist = None
                 while not rlist:
@@ -85,7 +85,7 @@ def wait_conn(gen: PQGenConn[RV], timeout: Optional[float] = None) -> RV:
         if not timeout:
             timeout = None
         with DefaultSelector() as sel:
-            while 1:
+            while True:
                 sel.register(fileno, s)
                 rlist = sel.select(timeout=timeout)
                 sel.unregister(fileno)
@@ -124,7 +124,7 @@ async def wait_async(gen: PQGen[RV], fileno: int) -> RV:
 
     try:
         s = next(gen)
-        while 1:
+        while True:
             reader = s & Wait.R
             writer = s & Wait.W
             if not reader and not writer:
@@ -178,7 +178,7 @@ async def wait_conn_async(gen: PQGenConn[RV], timeout: Optional[float] = None) -
         fileno, s = next(gen)
         if not timeout:
             timeout = None
-        while 1:
+        while True:
             reader = s & Wait.R
             writer = s & Wait.W
             if not reader and not writer:
@@ -226,7 +226,7 @@ def wait_epoll(gen: PQGen[RV], fileno: int, timeout: Optional[float] = None) ->
         with select.epoll() as epoll:
             evmask = poll_evmasks[s]
             epoll.register(fileno, evmask)
-            while 1:
+            while True:
                 fileevs = None
                 while not fileevs:
                     fileevs = epoll.poll(timeout)
index 4afb53508be69554b5c35a0e810b95bf7b3abbd3..85c3a0b00b700142d9407fe9357583dc243559ea 100644 (file)
@@ -31,7 +31,7 @@ def connect(conninfo: str) -> PQGenConn[abc.PGconn]:
     cdef int conn_status = libpq.PQstatus(pgconn_ptr)
     cdef int poll_status
 
-    while 1:
+    while True:
         if conn_status == libpq.CONNECTION_BAD:
             encoding = conninfo_encoding(conninfo)
             raise e.OperationalError(
@@ -92,7 +92,7 @@ def send(pq.PGconn pgconn) -> PQGen[None]:
     cdef int status
     cdef int cires
 
-    while 1:
+    while True:
         if libpq.PQflush(pgconn_ptr) == 0:
             break
 
@@ -122,7 +122,7 @@ def fetch_many(pq.PGconn pgconn) -> PQGen[List[PGresult]]:
     cdef pq.PGresult result
     cdef libpq.PGresult *pgres
 
-    while 1:
+    while True:
         result = yield from fetch(pgconn)
         if result is None:
             break
@@ -161,7 +161,7 @@ def fetch(pq.PGconn pgconn) -> PQGen[Optional[PGresult]]:
 
     if libpq.PQisBusy(pgconn_ptr):
         yield WAIT_R
-        while 1:
+        while True:
             with nogil:
                 cires = libpq.PQconsumeInput(pgconn_ptr)
                 if cires == 1:
@@ -176,7 +176,7 @@ def fetch(pq.PGconn pgconn) -> PQGen[Optional[PGresult]]:
 
     # Consume notifies
     if notify_handler is not None:
-        while 1:
+        while True:
             pynotify = pgconn.notifies()
             if pynotify is None:
                 break
@@ -184,7 +184,7 @@ def fetch(pq.PGconn pgconn) -> PQGen[Optional[PGresult]]:
                 notify_handler, <PyObject *>pynotify, NULL
             )
     else:
-        while 1:
+        while True:
             notify = libpq.PQnotifies(pgconn_ptr)
             if notify is NULL:
                 break
index b0d2cf5ad84ce06428b065baf9966ae478af2e5f..3443de1baef91a0fbfce92d4baa9ec1173fc9fb0 100644 (file)
@@ -41,7 +41,7 @@ cdef _options_from_array(libpq.PQconninfoOption *opts):
     rv = []
     cdef int i = 0
     cdef libpq.PQconninfoOption* opt
-    while 1:
+    while True:
         opt = opts + i
         if opt.keyword is NULL:
             break
index 86c3c0ee809b13ea1f346b922e092c2e3de9b145..d1603219c65061beb31b500c0f05f77ad8323d30 100644 (file)
@@ -488,7 +488,7 @@ class Faker:
         # don't make empty lists because they regularly fail cast
         length = randrange(1, self.list_max_length)
         spec = spec[1]
-        while 1:
+        while True:
             rv = [self.make(spec) for i in range(length)]
 
             # TODO multirange lists fail binary dump if the last element is
index 0d0e4aa7285578161b60b21a83ebd00ff4e4bdfc..fee58f0755c812733c39d7f60eea02fd182cfe2c 100644 (file)
@@ -20,7 +20,7 @@ def test_send_query(pgconn):
 
     # send loop
     waited_on_send = 0
-    while 1:
+    while True:
         f = pgconn.flush()
         if f == 0:
             break
@@ -40,7 +40,7 @@ def test_send_query(pgconn):
 
     # read loop
     results = []
-    while 1:
+    while True:
         pgconn.consume_input()
         if pgconn.is_busy():
             select([pgconn.socket], [], [])
index dfd0a6a807733e97df393370120109effd83e783..4545ed59ad6e1149d0e2e5d9186ca179611758f5 100644 (file)
@@ -32,7 +32,7 @@ def test_connectdb_badtype(baddsn):
 def test_connect_async(dsn):
     conn = pq.PGconn.connect_start(dsn.encode())
     conn.nonblocking = 1
-    while 1:
+    while True:
         assert conn.status != pq.ConnStatus.BAD
         rv = conn.connect_poll()
         if rv == pq.PollingStatus.OK:
@@ -56,7 +56,7 @@ def test_connect_async_bad(dsn):
     parsed_dsn[b"dbname"] = b"psycopg_test_not_for_real"
     dsn = b" ".join(b"%s='%s'" % item for item in parsed_dsn.items())
     conn = pq.PGconn.connect_start(dsn)
-    while 1:
+    while True:
         assert conn.status != pq.ConnStatus.BAD, conn.error_message
         rv = conn.connect_poll()
         if rv == pq.PollingStatus.FAILED:
@@ -149,7 +149,7 @@ def test_reset_async(pgconn):
     pgconn.exec_(b"select pg_terminate_backend(pg_backend_pid())")
     assert pgconn.status == pq.ConnStatus.BAD
     pgconn.reset_start()
-    while 1:
+    while True:
         rv = pgconn.reset_poll()
         if rv == pq.PollingStatus.READING:
             select([pgconn.socket], [], [])
index 6c94b5eefed4ed5ca4a42b7486b482b19c148bfd..dca40221d9929ccf7a5b5412463485aba24c9aa5 100644 (file)
@@ -764,12 +764,12 @@ def test_leak(dsn, faker, fetch, row_factory):
                 cur.execute(faker.select_stmt)
 
                 if fetch == "one":
-                    while 1:
+                    while True:
                         tmp = cur.fetchone()
                         if tmp is None:
                             break
                 elif fetch == "many":
-                    while 1:
+                    while True:
                         tmp = cur.fetchmany(3)
                         if not tmp:
                             break
index 80f14e6154943759daa37484b38c828ca8ff541c..a4730886f7dd4efa68fcc00471c03f89881cc4dc 100644 (file)
@@ -637,12 +637,12 @@ async def test_leak(dsn, faker, fetch, row_factory):
                 await cur.execute(faker.select_stmt)
 
                 if fetch == "one":
-                    while 1:
+                    while True:
                         tmp = await cur.fetchone()
                         if tmp is None:
                             break
                 elif fetch == "many":
-                    while 1:
+                    while True:
                         tmp = await cur.fetchmany(3)
                         if not tmp:
                             break
index 15187f10227b974568fd3d77f5b28e87480502a7..bda6f1109a2cca2b2cee75e5961c03287c729c36 100644 (file)
@@ -152,7 +152,7 @@ def test_copy_out_allchars(conn, format):
     )
     with cur.copy(query) as copy:
         copy.set_types(["text"])
-        while 1:
+        while True:
             row = copy.read_row()
             if not row:
                 break
@@ -167,7 +167,7 @@ def test_read_row_notypes(conn, format):
     cur = conn.cursor()
     with cur.copy(f"copy ({sample_values}) to stdout (format {format.name})") as copy:
         rows = []
-        while 1:
+        while True:
             row = copy.read_row()
             if not row:
                 break
@@ -648,14 +648,14 @@ def test_copy_to_leaks(dsn, faker, fmt, set_types, method):
                         copy.set_types(faker.types_names)
 
                     if method == "read":
-                        while 1:
+                        while True:
                             tmp = copy.read()
                             if not tmp:
                                 break
                     elif method == "iter":
                         list(copy)
                     elif method == "row":
-                        while 1:
+                        while True:
                             tmp = copy.read_row()  # type: ignore[assignment]
                             if tmp is None:
                                 break
@@ -807,7 +807,7 @@ class DataGenerator:
 
     def sha(self, f):
         m = hashlib.sha256()
-        while 1:
+        while True:
             block = f.read()
             if not block:
                 break
index c5df84f03ff09b854a716dffcd72caeb07649d02..b759b055401a14eb1947798e1266acecb9044c50 100644 (file)
@@ -134,7 +134,7 @@ async def test_copy_out_allchars(aconn, format):
     )
     async with cur.copy(query) as copy:
         copy.set_types(["text"])
-        while 1:
+        while True:
             row = await copy.read_row()
             if not row:
                 break
@@ -151,7 +151,7 @@ async def test_read_row_notypes(aconn, format):
         f"copy ({sample_values}) to stdout (format {format.name})"
     ) as copy:
         rows = []
-        while 1:
+        while True:
             row = await copy.read_row()
             if not row:
                 break
@@ -648,14 +648,14 @@ async def test_copy_to_leaks(dsn, faker, fmt, set_types, method):
                         copy.set_types(faker.types_names)
 
                     if method == "read":
-                        while 1:
+                        while True:
                             tmp = await copy.read()
                             if not tmp:
                                 break
                     elif method == "iter":
                         await alist(copy)
                     elif method == "row":
-                        while 1:
+                        while True:
                             tmp = await copy.read_row()  # type: ignore[assignment]
                             if tmp is None:
                                 break
@@ -796,7 +796,7 @@ class DataGenerator:
 
     def sha(self, f):
         m = hashlib.sha256()
-        while 1:
+        while True:
             block = f.read()
             if not block:
                 break
index 3c7e4e4437ea11025d32d67f8d1a9fc564a94b55..534f6066e0e1e441d0e41ae53ca8b0e5ade31716 100644 (file)
@@ -814,12 +814,12 @@ def test_leak(dsn, faker, fmt, fmt_out, fetch, row_factory):
                 cur.execute(faker.select_stmt)
 
                 if fetch == "one":
-                    while 1:
+                    while True:
                         tmp = cur.fetchone()
                         if tmp is None:
                             break
                 elif fetch == "many":
-                    while 1:
+                    while True:
                         tmp = cur.fetchmany(3)
                         if not tmp:
                             break
index 39d8e8327d03cd42d1d2d13a89dfd0ea3827d6f9..3af0e5775ba22804d955d3c051e0471e1725e7a5 100644 (file)
@@ -684,12 +684,12 @@ async def test_leak(dsn, faker, fmt, fmt_out, fetch, row_factory):
                 await cur.execute(faker.select_stmt)
 
                 if fetch == "one":
-                    while 1:
+                    while True:
                         tmp = await cur.fetchone()
                         if tmp is None:
                             break
                 elif fetch == "many":
-                    while 1:
+                    while True:
                         tmp = await cur.fetchmany(3)
                         if not tmp:
                             break
index e1a9da446a89e37761a74dd7e5eee1efebe07b90..fd47b684d35a5bbd188c02aedcdb457067f955a9 100644 (file)
@@ -339,7 +339,7 @@ def test_row_factory(conn):
     cur.execute("select generate_series(1, 3) as x")
     recs = cur.fetchall()
     cur.scroll(0, "absolute")
-    while 1:
+    while True:
         rec = cur.fetchone()
         if not rec:
             break
index 8ccf68651cbd9dd8fca5efee0f6bb24afc4cfba6..396e48149d75d4f8d4571173ddc390e991a19292 100644 (file)
@@ -349,7 +349,7 @@ async def test_row_factory(aconn):
     await cur.execute("select generate_series(1, 3) as x")
     recs = await cur.fetchall()
     await cur.scroll(0, "absolute")
-    while 1:
+    while True:
         rec = await cur.fetchone()
         if not rec:
             break