Retry some flaky tests
author     Daniele Varrazzo <daniele.varrazzo@gmail.com>
           Sat, 17 Jul 2021 01:52:51 +0000 (03:52 +0200)
committer  Daniele Varrazzo <daniele.varrazzo@gmail.com>
           Sun, 18 Jul 2021 01:34:21 +0000 (03:34 +0200)
tests/pool/test_pool.py
tests/pool/test_pool_async.py
tests/test_concurrency.py
tests/test_concurrency_async.py

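Each affected test now takes a "retries" fixture and runs its timing-sensitive block inside "for retry in retries:" / "with retry:", so an occasional AssertionError leads to a re-run instead of an immediate failure. The fixture itself is not part of this diff; its usage matches tenacity's Retrying API, so a minimal sketch of one possible implementation (an assumption, not the project's actual conftest.py) could be:

    import pytest
    from tenacity import Retrying, retry_if_exception_type, stop_after_attempt

    @pytest.fixture
    def retries():
        # Sketch only: re-run the guarded block up to 3 times when an
        # AssertionError escapes it, re-raising the last failure so pytest
        # still reports the test as failed after the final attempt.
        return Retrying(
            stop=stop_after_attempt(3),
            retry=retry_if_exception_type(AssertionError),
            reraise=True,
        )

With tenacity, each "retry" yielded by the loop is a context manager that records whether the block raised, and iteration stops as soon as one attempt succeeds.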
diff --git a/tests/pool/test_pool.py b/tests/pool/test_pool.py
index 10509483810e5076364638975de55926f1fbb12a..e8727a343ac7273de13b06fbaa6cfbe72aa36855 100644
--- a/tests/pool/test_pool.py
+++ b/tests/pool/test_pool.py
@@ -929,7 +929,7 @@ def test_stats_measures(dsn):
 
 @pytest.mark.slow
 @pytest.mark.timing
-def test_stats_usage(dsn):
+def test_stats_usage(dsn, retries):
     def worker(n):
         try:
             with p.connection(timeout=0.3) as conn:
@@ -937,29 +937,31 @@ def test_stats_usage(dsn):
         except pool.PoolTimeout:
             pass
 
-    with pool.ConnectionPool(dsn, min_size=3) as p:
-        p.wait(2.0)
+    for retry in retries:
+        with retry:
+            with pool.ConnectionPool(dsn, min_size=3) as p:
+                p.wait(2.0)
 
-        ts = [Thread(target=worker, args=(i,)) for i in range(7)]
-        [t.start() for t in ts]
-        [t.join() for t in ts]
-        stats = p.get_stats()
-        assert stats["requests_num"] == 7
-        assert stats["requests_queued"] == 4
-        assert 850 <= stats["requests_wait_ms"] <= 950
-        assert stats["requests_errors"] == 1
-        assert 1150 <= stats["usage_ms"] <= 1250
-        assert stats.get("returns_bad", 0) == 0
+                ts = [Thread(target=worker, args=(i,)) for i in range(7)]
+                [t.start() for t in ts]
+                [t.join() for t in ts]
+                stats = p.get_stats()
+                assert stats["requests_num"] == 7
+                assert stats["requests_queued"] == 4
+                assert 850 <= stats["requests_wait_ms"] <= 950
+                assert stats["requests_errors"] == 1
+                assert 1150 <= stats["usage_ms"] <= 1250
+                assert stats.get("returns_bad", 0) == 0
 
-        with p.connection() as conn:
-            conn.close()
-        p.wait()
-        stats = p.pop_stats()
-        assert stats["requests_num"] == 8
-        assert stats["returns_bad"] == 1
-        with p.connection():
-            pass
-        assert p.get_stats()["requests_num"] == 1
+                with p.connection() as conn:
+                    conn.close()
+                p.wait()
+                stats = p.pop_stats()
+                assert stats["requests_num"] == 8
+                assert stats["returns_bad"] == 1
+                with p.connection():
+                    pass
+                assert p.get_stats()["requests_num"] == 1
 
 
 @pytest.mark.slow
diff --git a/tests/pool/test_pool_async.py b/tests/pool/test_pool_async.py
index bbf859208e561c9d0480c65a418539a126630f59..37a6733d7d38cbbabc4529584bf96ec2ed51c3a9 100644
--- a/tests/pool/test_pool_async.py
+++ b/tests/pool/test_pool_async.py
@@ -940,7 +940,7 @@ async def test_stats_measures(dsn):
 
 @pytest.mark.slow
 @pytest.mark.timing
-async def test_stats_usage(dsn):
+async def test_stats_usage(dsn, retries):
     async def worker(n):
         try:
             async with p.connection(timeout=0.3) as conn:
@@ -948,28 +948,30 @@ async def test_stats_usage(dsn):
         except pool.PoolTimeout:
             pass
 
-    async with pool.AsyncConnectionPool(dsn, min_size=3) as p:
-        await p.wait(2.0)
+    async for retry in retries:
+        with retry:
+            async with pool.AsyncConnectionPool(dsn, min_size=3) as p:
+                await p.wait(2.0)
 
-        ts = [create_task(worker(i)) for i in range(7)]
-        await asyncio.gather(*ts)
-        stats = p.get_stats()
-        assert stats["requests_num"] == 7
-        assert stats["requests_queued"] == 4
-        assert 850 <= stats["requests_wait_ms"] <= 950
-        assert stats["requests_errors"] == 1
-        assert 1150 <= stats["usage_ms"] <= 1250
-        assert stats.get("returns_bad", 0) == 0
+                ts = [create_task(worker(i)) for i in range(7)]
+                await asyncio.gather(*ts)
+                stats = p.get_stats()
+                assert stats["requests_num"] == 7
+                assert stats["requests_queued"] == 4
+                assert 850 <= stats["requests_wait_ms"] <= 950
+                assert stats["requests_errors"] == 1
+                assert 1150 <= stats["usage_ms"] <= 1250
+                assert stats.get("returns_bad", 0) == 0
 
-        async with p.connection() as conn:
-            await conn.close()
-        await p.wait()
-        stats = p.pop_stats()
-        assert stats["requests_num"] == 8
-        assert stats["returns_bad"] == 1
-        async with p.connection():
-            pass
-        assert p.get_stats()["requests_num"] == 1
+                async with p.connection() as conn:
+                    await conn.close()
+                await p.wait()
+                stats = p.pop_stats()
+                assert stats["requests_num"] == 8
+                assert stats["returns_bad"] == 1
+                async with p.connection():
+                    pass
+                assert p.get_stats()["requests_num"] == 1
 
 
 @pytest.mark.slow
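
The async tests iterate the fixture with "async for retry in retries" while still entering each attempt with a plain "with retry:" block, which is the shape of tenacity's AsyncRetrying API. A hedged sketch of the async counterpart, again an assumption rather than the code of this commit, could be:

    import pytest
    from tenacity import AsyncRetrying, retry_if_exception_type, stop_after_attempt

    @pytest.fixture
    def retries():
        # Sketch only: AsyncRetrying is driven with "async for", but each
        # yielded attempt is still entered with a regular "with" block,
        # matching the usage in the async tests above.
        return AsyncRetrying(
            stop=stop_after_attempt(3),
            retry=retry_if_exception_type(AssertionError),
            reraise=True,
        )

In practice a single fixture named "retries" has to serve both the sync and the async tests, for instance by checking whether the requesting test function is a coroutine and returning Retrying or AsyncRetrying accordingly; the sketch above leaves that detail out.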
diff --git a/tests/test_concurrency.py b/tests/test_concurrency.py
index b25bf4b48cdde31b270bfaa145b2c3fc3f23c46e..f09cb50068e59f79f900421488598dda3be27ba2 100644
--- a/tests/test_concurrency.py
+++ b/tests/test_concurrency.py
@@ -15,7 +15,7 @@ import psycopg
 
 
 @pytest.mark.slow
-def test_concurrent_execution(dsn):
+def test_concurrent_execution(dsn, retries):
     def worker():
         cnn = psycopg.connect(dsn)
         cur = cnn.cursor()
@@ -23,14 +23,16 @@ def test_concurrent_execution(dsn):
         cur.close()
         cnn.close()
 
-    t1 = threading.Thread(target=worker)
-    t2 = threading.Thread(target=worker)
-    t0 = time.time()
-    t1.start()
-    t2.start()
-    t1.join()
-    t2.join()
-    assert time.time() - t0 < 0.8, "something broken in concurrency"
+    for retry in retries:
+        with retry:
+            t1 = threading.Thread(target=worker)
+            t2 = threading.Thread(target=worker)
+            t0 = time.time()
+            t1.start()
+            t2.start()
+            t1.join()
+            t2.join()
+            assert time.time() - t0 < 0.8, "something broken in concurrency"
 
 
 @pytest.mark.slow
diff --git a/tests/test_concurrency_async.py b/tests/test_concurrency_async.py
index 190b6478e57896764477f9166c2e2c114d504ded..3b509f17d77eaafbe74c5272702923a194227897 100644
--- a/tests/test_concurrency_async.py
+++ b/tests/test_concurrency_async.py
@@ -42,7 +42,7 @@ async def test_commit_concurrency(aconn):
 
 
 @pytest.mark.slow
-async def test_concurrent_execution(dsn):
+async def test_concurrent_execution(dsn, retries):
     async def worker():
         cnn = await psycopg.AsyncConnection.connect(dsn)
         cur = cnn.cursor()
@@ -50,10 +50,12 @@ async def test_concurrent_execution(dsn):
         await cur.close()
         await cnn.close()
 
-    workers = [worker(), worker()]
-    t0 = time.time()
-    await asyncio.gather(*workers)
-    assert time.time() - t0 < 0.8, "something broken in concurrency"
+    async for retry in retries:
+        with retry:
+            workers = [worker(), worker()]
+            t0 = time.time()
+            await asyncio.gather(*workers)
+            assert time.time() - t0 < 0.8, "something broken in concurrency"
 
 
 @pytest.mark.slow