From: Stefan Eissing
Date: Thu, 9 Feb 2023 15:07:34 +0000 (+0100)
Subject: CURLOPT_PIPEWAIT: allow waited reuse also for subsequent connections
X-Git-Tag: curl-8_0_0~180
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=821f6e2a89de8aec1c7da3c0f381b92b2b801efc;p=thirdparty%2Fcurl.git

CURLOPT_PIPEWAIT: allow waited reuse also for subsequent connections

As tested in test_02_07, when firing off 200 urls with --parallel, 199
wait for the first connection to be established. If that is multiuse,
urls are added up to its capacity. The first url over capacity opens
another connection. But subsequent urls find the same situation and
open connections too. They should instead wait for the second
connection to actually connect and make its capacity known.

This change fixes that by

- setting `connkeep()` early in the HTTP setup handler, as otherwise a
  new connection is marked "closeit" by default and not considered for
  multiuse at all
- always checking the "connected" status of a reuse candidate and
  continuing to PIPEWAIT if no alternative is found

pytest:
- removed "skip" from test_02_07
- added test_02_07b to check that http/1.1 continues to work as before

Closes #10456
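For context, CURLOPT_PIPEWAIT is the libcurl option that makes a
transfer prefer waiting for an existing connection's multiplex
capacity over opening a new connection. A minimal sketch of the
scenario this change improves, using the multi interface (the URL and
transfer count are illustrative; error handling and easy handle
cleanup are omitted for brevity):

#include <curl/curl.h>

int main(void)
{
  CURLM *multi;
  int i, running;

  curl_global_init(CURL_GLOBAL_DEFAULT);
  multi = curl_multi_init();

  /* fire off 200 transfers at once, like test_02_07 does */
  for(i = 0; i < 200; i++) {
    CURL *easy = curl_easy_init();
    curl_easy_setopt(easy, CURLOPT_URL, "https://example.com/data.json");
    /* wait for a multiplexed connection instead of opening a new one */
    curl_easy_setopt(easy, CURLOPT_PIPEWAIT, 1L);
    curl_multi_add_handle(multi, easy);
  }

  do {
    if(curl_multi_perform(multi, &running) != CURLM_OK)
      break;
    if(running && curl_multi_poll(multi, NULL, 0, 1000, NULL) != CURLM_OK)
      break;
  } while(running);

  curl_multi_cleanup(multi);
  curl_global_cleanup();
  return 0;
}

Before this change, each transfer beyond the first connection's
capacity opened its own connection; with it, such transfers keep
waiting until the second connection has connected and announced its
capacity.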
---

diff --git a/lib/http.c b/lib/http.c
index cb585e72fd..ee31aa2c6e 100644
--- a/lib/http.c
+++ b/lib/http.c
@@ -233,6 +233,7 @@ static CURLcode http_setup_conn(struct Curl_easy *data,
 
   Curl_mime_initpart(&http->form);
   data->req.p.http = http;
+  connkeep(conn, "HTTP default");
 
   if((data->state.httpwant == CURL_HTTP_VERSION_3)
      || (data->state.httpwant == CURL_HTTP_VERSION_3ONLY)) {
diff --git a/lib/url.c b/lib/url.c
index 1bb93df913..1dc2a4b8fd 100644
--- a/lib/url.c
+++ b/lib/url.c
@@ -1170,14 +1170,14 @@ ConnectionExists(struct Curl_easy *data,
           continue;
         }
       }
+    }
 
-      if(!Curl_conn_is_connected(check, FIRSTSOCKET)) {
-        foundPendingCandidate = TRUE;
-        /* Don't pick a connection that hasn't connected yet */
-        infof(data, "Connection #%ld isn't open enough, can't reuse",
-              check->connection_id);
-        continue;
-      }
+    if(!Curl_conn_is_connected(check, FIRSTSOCKET)) {
+      foundPendingCandidate = TRUE;
+      /* Don't pick a connection that hasn't connected yet */
+      infof(data, "Connection #%ld isn't open enough, can't reuse",
+            check->connection_id);
+      continue;
     }
 
 #ifdef USE_UNIX_SOCKETS
diff --git a/tests/tests-httpd/test_02_download.py b/tests/tests-httpd/test_02_download.py
index 60ceeae389..22cb260657 100644
--- a/tests/tests-httpd/test_02_download.py
+++ b/tests/tests-httpd/test_02_download.py
@@ -143,23 +143,39 @@ class TestDownload:
         # http2 parallel transfers will use one connection (common limit is 100)
         assert r.total_connects == 1
 
-    # download 500 files parallel (max of 200), only h2
-    @pytest.mark.skip(reason="TODO: we get 101 connections created. PIPEWAIT needs a fix")
+    # download files parallel, check connection reuse/multiplex
     @pytest.mark.parametrize("proto", ['h2', 'h3'])
-    def test_02_07_download_500_parallel(self, env: Env,
-                                         httpd, nghttpx, repeat, proto):
+    def test_02_07_download_reuse(self, env: Env,
+                                  httpd, nghttpx, repeat, proto):
         if proto == 'h3' and not env.have_h3():
             pytest.skip("h3 not supported")
+        count=200
         curl = CurlClient(env=env)
-        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-499]'
+        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
         r = curl.http_download(urls=[urln], alpn_proto=proto,
-                               with_stats=False, extra_args=[
+                               with_stats=True, extra_args=[
                                    '--parallel', '--parallel-max', '200'
                                ])
         assert r.exit_code == 0, f'{r}'
-        r.check_stats(count=500, exp_status=200)
-        # http2 should now use 2 connections, at most 5
-        assert r.total_connects <= 5, "h2 should use fewer connections here"
+        r.check_stats(count=count, exp_status=200)
+        # should have used 2 connections only (test servers allow 100 req/conn)
+        assert r.total_connects == 2, "h2 should use fewer connections here"
+
+    # download files parallel with http/1.1, check connection not reused
+    @pytest.mark.parametrize("proto", ['http/1.1'])
+    def test_02_07b_download_reuse(self, env: Env,
+                                   httpd, nghttpx, repeat, proto):
+        count=20
+        curl = CurlClient(env=env)
+        urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
+        r = curl.http_download(urls=[urln], alpn_proto=proto,
+                               with_stats=True, extra_args=[
+                                   '--parallel', '--parallel-max', '200'
+                               ])
+        assert r.exit_code == 0, f'{r}'
+        r.check_stats(count=count, exp_status=200)
+        # http/1.1 should have used count connections
+        assert r.total_connects == count, "http/1.1 should use this many connections"
 
     @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
     def test_02_08_1MB_serial(self, env: Env,