"""
Yield `Notify` objects as soon as they are received from the database.
"""
- while 1:
+ while True:
with self.lock:
ns = self.wait(notifies(self.pgconn))
enc = pgconn_encoding(self.pgconn)
yield tx
async def notifies(self) -> AsyncGenerator[Notify, None]:
- while 1:
+ while True:
async with self.lock:
ns = await self.wait(notifies(self.pgconn))
enc = pgconn_encoding(self.pgconn)
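A minimal sketch (not part of this diff) of one way the `notifies()` generator touched above can be consumed with psycopg 3; the connection string and channel name are placeholders.

    # Illustrative only: listen on a channel and iterate the generator until
    # a notification asks us to stop.
    import psycopg

    with psycopg.connect("dbname=test", autocommit=True) as conn:
        conn.execute("LISTEN mychan")
        gen = conn.notifies()
        for notify in gen:
            print(notify.channel, notify.payload, notify.pid)
            if notify.payload == "stop":
                gen.close()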
def load(pos: int) -> Optional[Row]:
return self._tx.load_row(pos, self._make_row)
- while 1:
+ while True:
row = load(self._pos)
if row is None:
break
def load(pos: int) -> Optional[Row]:
return self._tx.load_row(pos, self._make_row)
- while 1:
+ while True:
row = load(self._pos)
if row is None:
break
"""
conn = pq.PGconn.connect_start(conninfo.encode())
- while 1:
+ while True:
if conn.status == ConnStatus.BAD:
encoding = conninfo_encoding(conninfo)
raise e.OperationalError(
After this generator has finished, you may want to cycle using `fetch()`
to retrieve the available results.
"""
- while 1:
+ while True:
f = pgconn.flush()
if f == 0:
break
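A minimal sketch of driving the flush loop above by hand, outside the generator machinery, assuming an already-connected nonblocking `pq.PGconn` named `pgconn` (the same pattern used by the test send loop further down).

    # Sketch only: keep calling flush() until libpq reports the whole message
    # has been handed to the socket (flush() returning 0).
    from select import select

    pgconn.send_query(b"select 1")
    while True:
        if pgconn.flush() == 0:
            break
        # flush() returned 1: the output buffer is not empty yet, so wait for
        # the socket to become writable (and readable, to drain server input).
        select([pgconn.socket], [pgconn.socket], [])
        pgconn.consume_input()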
or error).
"""
results: List[PGresult] = []
- while 1:
+ while True:
res = yield from fetch(pgconn)
if not res:
break
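And a matching sketch of the read side described by the docstring fragment above (fetching results until the last one or an error), again assuming the same hypothetical `pgconn`.

    # Sketch only: collect every PGresult of the current query, stopping when
    # get_result() returns None (no more results pending).
    from select import select

    results = []
    while True:
        pgconn.consume_input()
        while pgconn.is_busy():
            # no complete result buffered yet: wait for more server data
            select([pgconn.socket], [], [])
            pgconn.consume_input()
        res = pgconn.get_result()
        if res is None:
            break
        results.append(res)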
pgconn.consume_input()
ns = []
- while 1:
+ while True:
n = pgconn.notifies()
if n:
ns.append(n)
def copy_from(pgconn: PGconn) -> PQGen[Union[memoryview, PGresult]]:
- while 1:
+ while True:
nbytes, data = pgconn.get_copy_data(1)
if nbytes != 0:
break
yield Wait.W
# Repeat until the message is flushed to the server
- while 1:
+ while True:
yield Wait.W
f = pgconn.flush()
if f == 0:
dims = [_unpack_dim(data, i)[0] for i in list(range(12, p, 8))]
def consume(p: int) -> Iterator[Any]:
- while 1:
+ while True:
size = unpack_len(data, p)[0]
p += 4
if size != -1:
try:
s = next(gen)
with DefaultSelector() as sel:
- while 1:
+ while True:
sel.register(fileno, s)
rlist = None
while not rlist:
if not timeout:
timeout = None
with DefaultSelector() as sel:
- while 1:
+ while True:
sel.register(fileno, s)
rlist = sel.select(timeout=timeout)
sel.unregister(fileno)
try:
s = next(gen)
- while 1:
+ while True:
reader = s & Wait.R
writer = s & Wait.W
if not reader and not writer:
fileno, s = next(gen)
if not timeout:
timeout = None
- while 1:
+ while True:
reader = s & Wait.R
writer = s & Wait.W
if not reader and not writer:
with select.epoll() as epoll:
evmask = poll_evmasks[s]
epoll.register(fileno, evmask)
- while 1:
+ while True:
fileevs = None
while not fileevs:
fileevs = epoll.poll(timeout)
cdef int conn_status = libpq.PQstatus(pgconn_ptr)
cdef int poll_status
- while 1:
+ while True:
if conn_status == libpq.CONNECTION_BAD:
encoding = conninfo_encoding(conninfo)
raise e.OperationalError(
cdef int status
cdef int cires
- while 1:
+ while True:
if libpq.PQflush(pgconn_ptr) == 0:
break
cdef pq.PGresult result
cdef libpq.PGresult *pgres
- while 1:
+ while True:
result = yield from fetch(pgconn)
if result is None:
break
if libpq.PQisBusy(pgconn_ptr):
yield WAIT_R
- while 1:
+ while True:
with nogil:
cires = libpq.PQconsumeInput(pgconn_ptr)
if cires == 1:
# Consume notifies
if notify_handler is not None:
- while 1:
+ while True:
pynotify = pgconn.notifies()
if pynotify is None:
break
notify_handler, <PyObject *>pynotify, NULL
)
else:
- while 1:
+ while True:
notify = libpq.PQnotifies(pgconn_ptr)
if notify is NULL:
break
rv = []
cdef int i = 0
cdef libpq.PQconninfoOption* opt
- while 1:
+ while True:
opt = opts + i
if opt.keyword is NULL:
break
# don't make empty lists because they regularly fail cast
length = randrange(1, self.list_max_length)
spec = spec[1]
- while 1:
+ while True:
rv = [self.make(spec) for i in range(length)]
# TODO multirange lists fail binary dump if the last element is
# send loop
waited_on_send = 0
- while 1:
+ while True:
f = pgconn.flush()
if f == 0:
break
# read loop
results = []
- while 1:
+ while True:
pgconn.consume_input()
if pgconn.is_busy():
select([pgconn.socket], [], [])
def test_connect_async(dsn):
conn = pq.PGconn.connect_start(dsn.encode())
conn.nonblocking = 1
- while 1:
+ while True:
assert conn.status != pq.ConnStatus.BAD
rv = conn.connect_poll()
if rv == pq.PollingStatus.OK:
parsed_dsn[b"dbname"] = b"psycopg_test_not_for_real"
dsn = b" ".join(b"%s='%s'" % item for item in parsed_dsn.items())
conn = pq.PGconn.connect_start(dsn)
- while 1:
+ while True:
assert conn.status != pq.ConnStatus.BAD, conn.error_message
rv = conn.connect_poll()
if rv == pq.PollingStatus.FAILED:
pgconn.exec_(b"select pg_terminate_backend(pg_backend_pid())")
assert pgconn.status == pq.ConnStatus.BAD
pgconn.reset_start()
- while 1:
+ while True:
rv = pgconn.reset_poll()
if rv == pq.PollingStatus.READING:
select([pgconn.socket], [], [])
cur.execute(faker.select_stmt)
if fetch == "one":
- while 1:
+ while True:
tmp = cur.fetchone()
if tmp is None:
break
elif fetch == "many":
- while 1:
+ while True:
tmp = cur.fetchmany(3)
if not tmp:
break
await cur.execute(faker.select_stmt)
if fetch == "one":
- while 1:
+ while True:
tmp = await cur.fetchone()
if tmp is None:
break
elif fetch == "many":
- while 1:
+ while True:
tmp = await cur.fetchmany(3)
if not tmp:
break
)
with cur.copy(query) as copy:
copy.set_types(["text"])
- while 1:
+ while True:
row = copy.read_row()
if not row:
break
cur = conn.cursor()
with cur.copy(f"copy ({sample_values}) to stdout (format {format.name})") as copy:
rows = []
- while 1:
+ while True:
row = copy.read_row()
if not row:
break
copy.set_types(faker.types_names)
if method == "read":
- while 1:
+ while True:
tmp = copy.read()
if not tmp:
break
elif method == "iter":
list(copy)
elif method == "row":
- while 1:
+ while True:
tmp = copy.read_row() # type: ignore[assignment]
if tmp is None:
break
def sha(self, f):
m = hashlib.sha256()
- while 1:
+ while True:
block = f.read()
if not block:
break
)
async with cur.copy(query) as copy:
copy.set_types(["text"])
- while 1:
+ while True:
row = await copy.read_row()
if not row:
break
f"copy ({sample_values}) to stdout (format {format.name})"
) as copy:
rows = []
- while 1:
+ while True:
row = await copy.read_row()
if not row:
break
copy.set_types(faker.types_names)
if method == "read":
- while 1:
+ while True:
tmp = await copy.read()
if not tmp:
break
elif method == "iter":
await alist(copy)
elif method == "row":
- while 1:
+ while True:
tmp = await copy.read_row() # type: ignore[assignment]
if tmp is None:
break
def sha(self, f):
m = hashlib.sha256()
- while 1:
+ while True:
block = f.read()
if not block:
break
cur.execute(faker.select_stmt)
if fetch == "one":
- while 1:
+ while True:
tmp = cur.fetchone()
if tmp is None:
break
elif fetch == "many":
- while 1:
+ while True:
tmp = cur.fetchmany(3)
if not tmp:
break
await cur.execute(faker.select_stmt)
if fetch == "one":
- while 1:
+ while True:
tmp = await cur.fetchone()
if tmp is None:
break
elif fetch == "many":
- while 1:
+ while True:
tmp = await cur.fetchmany(3)
if not tmp:
break
cur.execute("select generate_series(1, 3) as x")
recs = cur.fetchall()
cur.scroll(0, "absolute")
- while 1:
+ while True:
rec = cur.fetchone()
if not rec:
break
await cur.execute("select generate_series(1, 3) as x")
recs = await cur.fetchall()
await cur.scroll(0, "absolute")
- while 1:
+ while True:
rec = await cur.fetchone()
if not rec:
break