git.ipfire.org Git - thirdparty/psycopg.git/commitdiff
Change pool stat requests_timeout to requests_errors, include queue full
author Daniele Varrazzo <daniele.varrazzo@gmail.com>
Fri, 12 Mar 2021 14:40:49 +0000 (15:40 +0100)
committer Daniele Varrazzo <daniele.varrazzo@gmail.com>
Fri, 12 Mar 2021 14:50:10 +0000 (15:50 +0100)
docs/advanced/pool.rst
psycopg3/psycopg3/pool/async_pool.py
psycopg3/psycopg3/pool/base.py
psycopg3/psycopg3/pool/pool.py
tests/pool/test_pool.py
tests/pool/test_pool_async.py

diff --git a/docs/advanced/pool.rst b/docs/advanced/pool.rst
index f790755626d88d2d07c21343a96b9fc59af8dcd0..9a907726c14cc4d425c30275ccdcaf80f8084e6d 100644
@@ -221,7 +221,8 @@ Metric                  Meaning
  ``requests_queued``    Number of requests queued because a connection wasn't
                         immediately available in the pool
  ``requests_wait_ms``   Total time in the queue for the clients waiting
- ``requests_timeouts``  Number of waiting clients whose request timed out
+ ``requests_errors``    Number of connection requests resulting in an error
+                        (timeouts, queue full...)
  ``returns_bad``        Number of connections returned to the pool in a bad
                         state
  ``connections_num``    Number of connection attempts made by the pool to the
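
The renamed counter is read through the same get_stats() call as the other metrics
in this table. A minimal monitoring sketch, assuming the psycopg3 pool API exercised
by the tests further down; the DSN and the logging setup are placeholders, not part
of this commit:

# Hypothetical monitoring snippet: poll the pool statistics and log the
# renamed counter alongside the queue metrics.
import logging

from psycopg3 import pool

logging.basicConfig(level=logging.INFO)

p = pool.ConnectionPool("dbname=test")     # placeholder conninfo
stats = p.get_stats()                      # the counters documented above
logging.info(
    "pool %r: %s queued, %s errors",
    p.name,
    stats.get("requests_queued", 0),
    stats.get("requests_errors", 0),       # timeouts and queue-full rejections
)
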
diff --git a/psycopg3/psycopg3/pool/async_pool.py b/psycopg3/psycopg3/pool/async_pool.py
index 5cb14c846fa9937a8ddba9a48972b7cc7d0a5f22..0a3e904d348df6aade5baf96dfaec264888effa8 100644
@@ -134,6 +134,7 @@ class AsyncConnectionPool(BasePool[AsyncConnection]):
                     self._nconns_min = len(self._pool)
             else:
                 if self.max_waiting and len(self._waiting) >= self.max_waiting:
+                    self._stats[self._REQUESTS_ERRORS] += 1
                     raise TooManyRequests(
                         f"the pool {self.name!r} has aleady"
                         f" {len(self._waiting)} requests waiting"
@@ -163,7 +164,7 @@ class AsyncConnectionPool(BasePool[AsyncConnection]):
             try:
                 conn = await pos.wait(timeout=timeout)
             except Exception:
-                self._stats[self._REQUESTS_TIMEOUTS] += 1
+                self._stats[self._REQUESTS_ERRORS] += 1
                 raise
             finally:
                 t1 = monotonic()
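
Both branches touched above — the queue-full rejection and the failed wait — now
feed the same requests_errors counter. A hedged sketch of how that looks from the
caller's side, assuming the AsyncConnectionPool API used by the async tests below;
the DSN, pool sizes, and timings are illustrative placeholders:

import asyncio

from psycopg3 import pool

async def main():
    # min_size=1 with max_waiting=1 makes both error paths easy to trigger.
    p = pool.AsyncConnectionPool(
        "dbname=test", min_size=1, max_waiting=1, timeout=0.5
    )

    async def use():
        try:
            async with p.connection() as conn:
                await conn.execute("select pg_sleep(1)")
        except pool.TooManyRequests:
            pass  # queue already full: counted in requests_errors
        except pool.PoolTimeout:
            pass  # waited longer than 0.5s: also counted in requests_errors

    await asyncio.gather(*(use() for _ in range(5)))
    print(p.get_stats().get("requests_errors", 0))
    await p.close()

asyncio.run(main())
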
diff --git a/psycopg3/psycopg3/pool/base.py b/psycopg3/psycopg3/pool/base.py
index 919ffce8b0ab594e0505c85b393959287dc000a2..664c07d1d2188f5ab6ea0e28ed1ec4697c774229 100644
@@ -33,7 +33,7 @@ class BasePool(Generic[ConnectionType]):
     _REQUESTS_NUM = "requests_num"
     _REQUESTS_QUEUED = "requests_queued"
     _REQUESTS_WAIT_MS = "requests_wait_ms"
-    _REQUESTS_TIMEOUTS = "requests_timeouts"
+    _REQUESTS_ERRORS = "requests_errors"
     _USAGE_MS = "usage_ms"
     _RETURNS_BAD = "returns_bad"
     _CONNECTIONS_NUM = "connections_num"
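
Only the key name changes here; the value is still a plain counter in the dict
returned by get_stats(). A hypothetical compatibility helper, not part of the
commit, for external code that still reads the old key:

def requests_errors(stats: dict) -> int:
    # "requests_errors" from this commit on; "requests_timeouts" before it.
    return stats.get("requests_errors", stats.get("requests_timeouts", 0))
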
diff --git a/psycopg3/psycopg3/pool/pool.py b/psycopg3/psycopg3/pool/pool.py
index f24f3b9f94a55fd19de4d36edfe86c190b11b57b..691b777fc1266a46c180f3d104a7861d021a7d5c 100644
@@ -177,6 +177,7 @@ class ConnectionPool(BasePool[Connection]):
                     self._nconns_min = len(self._pool)
             else:
                 if self.max_waiting and len(self._waiting) >= self.max_waiting:
+                    self._stats[self._REQUESTS_ERRORS] += 1
                     raise TooManyRequests(
                         f"the pool {self.name!r} has aleady"
                         f" {len(self._waiting)} requests waiting"
@@ -207,7 +208,7 @@ class ConnectionPool(BasePool[Connection]):
             try:
                 conn = pos.wait(timeout=timeout)
             except Exception:
-                self._stats[self._REQUESTS_TIMEOUTS] += 1
+                self._stats[self._REQUESTS_ERRORS] += 1
                 raise
             finally:
                 t1 = monotonic()
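
The synchronous pool gets the same accounting. A minimal sketch that forces the
timeout path and then reads the counter, loosely mirroring test_stats_usage below;
the DSN is a placeholder, and the pool is sized to a single connection so the
nested request can only wait:

from psycopg3 import pool

p = pool.ConnectionPool("dbname=test", min_size=1, timeout=0.1)
with p.connection():
    try:
        # The only connection is checked out, so this request waits 0.1s
        # and fails; the failure is recorded in requests_errors.
        with p.connection():
            pass
    except pool.PoolTimeout:
        pass
assert p.get_stats()["requests_errors"] == 1
p.close()
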
diff --git a/tests/pool/test_pool.py b/tests/pool/test_pool.py
index 45162d43cc584b286a4be83bbd052a2e6f2dfcc9..93a59ad8296af1cffd26bdff0a415b6bb88ad32a 100644
@@ -323,6 +323,7 @@ def test_queue_size(dsn):
     assert isinstance(errors[0], pool.TooManyRequests)
     assert p.name in str(errors[0])
     assert str(p.max_waiting) in str(errors[0])
+    assert p.get_stats()["requests_errors"] == 1
 
 
 @pytest.mark.slow
@@ -919,7 +920,7 @@ def test_stats_usage(dsn):
         assert stats["requests_num"] == 7
         assert stats["requests_queued"] == 4
         assert 850 <= stats["requests_wait_ms"] <= 950
-        assert stats["requests_timeouts"] == 1
+        assert stats["requests_errors"] == 1
         assert 1150 <= stats["usage_ms"] <= 1250
         assert stats.get("returns_bad", 0) == 0
 
diff --git a/tests/pool/test_pool_async.py b/tests/pool/test_pool_async.py
index 5c9ca80beb00af27081c18b0301fe6004c043f0d..d360f002fd8085b28b415296dd689d95ecde365b 100644
@@ -339,6 +339,7 @@ async def test_queue_size(dsn):
     assert isinstance(errors[0], pool.TooManyRequests)
     assert p.name in str(errors[0])
     assert str(p.max_waiting) in str(errors[0])
+    assert p.get_stats()["requests_errors"] == 1
 
 
 @pytest.mark.slow
@@ -916,7 +917,7 @@ async def test_stats_usage(dsn):
         assert stats["requests_num"] == 7
         assert stats["requests_queued"] == 4
         assert 850 <= stats["requests_wait_ms"] <= 950
-        assert stats["requests_timeouts"] == 1
+        assert stats["requests_errors"] == 1
         assert 1150 <= stats["usage_ms"] <= 1250
         assert stats.get("returns_bad", 0) == 0