CURLOPT_PIPEWAIT: allow waited reuse also for subsequent connections
author     Stefan Eissing <stefan@eissing.org>
           Thu, 9 Feb 2023 15:07:34 +0000 (16:07 +0100)
committer  Daniel Stenberg <daniel@haxx.se>
           Tue, 21 Feb 2023 10:12:48 +0000 (11:12 +0100)
As tested in test_02_07, when firing off 200 urls with --parallel, 199
wait for the first connection to be established. If that connection is
multiuse, urls are added to it up to its capacity.

The first url over capacity opens another connection. But subsequent
urls find the same situation and open connections too. They should
have waited for the second connection to actually connect and make its
capacity known.

This change fixes that by

- setting `connkeep()` early in the HTTP setup handler, as otherwise
  a new connection is marked as closeit by default and not considered
  for multiuse at all
- always checking the "connected" status of a candidate, and continuing
  to PIPEWAIT if no alternative is found (sketched below)
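The second point relies on `ConnectionExists()` reporting a pending
candidate back to its caller; `connkeep()` in the first point just
clears the default close-marker so a fresh connection is eligible for
reuse checks at all. Roughly, the tail of the reuse scan then behaves
like this (paraphrased from lib/url.c, not verbatim):

    /* no reusable connection found, but at least one candidate is
       still connecting and the transfer asked to wait for it */
    if(foundPendingCandidate && data->set.pipewait) {
      infof(data, "Found pending candidate for reuse"
            " and CURLOPT_PIPEWAIT is set");
      *waitpipe = TRUE; /* caller waits instead of opening a new conn */
    }
    return FALSE;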

pytest:
- removed "skip" from test_02_07
- added test_02_07b to check that http/1.1 continues to work as before

Closes #10456

lib/http.c
lib/url.c
tests/tests-httpd/test_02_download.py

index cb585e72fdc3b5cf368de9a9fad9e3c061e5e77c..ee31aa2c6ea30232ddc97bea88e13cafbe048496 100644 (file)
--- a/lib/http.c
+++ b/lib/http.c
@@ -233,6 +233,7 @@ static CURLcode http_setup_conn(struct Curl_easy *data,
 
   Curl_mime_initpart(&http->form);
   data->req.p.http = http;
+  connkeep(conn, "HTTP default");
 
   if((data->state.httpwant == CURL_HTTP_VERSION_3)
      || (data->state.httpwant == CURL_HTTP_VERSION_3ONLY)) {
index 1bb93df913dc4b003cca039adbcfbdfde25c569e..1dc2a4b8fdc0b852eb7a7e0ba1ae92101bcc16b1 100644 (file)
--- a/lib/url.c
+++ b/lib/url.c
@@ -1170,14 +1170,14 @@ ConnectionExists(struct Curl_easy *data,
             continue;
           }
         }
+      }
 
-        if(!Curl_conn_is_connected(check, FIRSTSOCKET)) {
-          foundPendingCandidate = TRUE;
-          /* Don't pick a connection that hasn't connected yet */
-          infof(data, "Connection #%ld isn't open enough, can't reuse",
-                check->connection_id);
-          continue;
-        }
+      if(!Curl_conn_is_connected(check, FIRSTSOCKET)) {
+        foundPendingCandidate = TRUE;
+        /* Don't pick a connection that hasn't connected yet */
+        infof(data, "Connection #%ld isn't open enough, can't reuse",
+              check->connection_id);
+        continue;
       }
 
 #ifdef USE_UNIX_SOCKETS
index 60ceeae3890b9fb8c01140531853d751e24f2a61..22cb260657cfde9de15dfcdcd88b973a3b22da41 100644 (file)
--- a/tests/tests-httpd/test_02_download.py
+++ b/tests/tests-httpd/test_02_download.py
@@ -143,23 +143,39 @@ class TestDownload:
             # http2 parallel transfers will use one connection (common limit is 100)
             assert r.total_connects == 1
 
-    # download 500 files parallel (max of 200), only h2
-    @pytest.mark.skip(reason="TODO: we get 101 connections created. PIPEWAIT needs a fix")
+    # download files parallel, check connection reuse/multiplex
     @pytest.mark.parametrize("proto", ['h2', 'h3'])
-    def test_02_07_download_500_parallel(self, env: Env,
-                                         httpd, nghttpx, repeat, proto):
+    def test_02_07_download_reuse(self, env: Env,
+                                  httpd, nghttpx, repeat, proto):
         if proto == 'h3' and not env.have_h3():
             pytest.skip("h3 not supported")
+        count=200
         curl = CurlClient(env=env)
-        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-499]'
+        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
         r = curl.http_download(urls=[urln], alpn_proto=proto,
-                               with_stats=False, extra_args=[
+                               with_stats=True, extra_args=[
             '--parallel', '--parallel-max', '200'
         ])
         assert r.exit_code == 0, f'{r}'
-        r.check_stats(count=500, exp_status=200)
-        # http2 should now use 2 connections, at most 5
-        assert r.total_connects <= 5, "h2 should use fewer connections here"
+        r.check_stats(count=count, exp_status=200)
+        # should have used 2 connections only (test servers allow 100 req/conn)
+        assert r.total_connects == 2, "h2 should use fewer connections here"
+
+    # download files parallel with http/1.1, check connection not reused
+    @pytest.mark.parametrize("proto", ['http/1.1'])
+    def test_02_07b_download_reuse(self, env: Env,
+                                   httpd, nghttpx, repeat, proto):
+        count=20
+        curl = CurlClient(env=env)
+        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
+        r = curl.http_download(urls=[urln], alpn_proto=proto,
+                               with_stats=True, extra_args=[
+            '--parallel', '--parallel-max', '200'
+        ])
+        assert r.exit_code == 0, f'{r}'
+        r.check_stats(count=count, exp_status=200)
+        # http/1.1 should have used count connections
+        assert r.total_connects == count, "http/1.1 should use this many connections"
 
     @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
     def test_02_08_1MB_serial(self, env: Env,