From: Stefan Eissing
Date: Mon, 12 May 2025 13:49:49 +0000 (+0200)
Subject: pytest-xdist: pytest in parallel
X-Git-Tag: curl-8_14_0~72
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=30ef79ed937ca0fc7592ff73d162398773c6a5aa;p=thirdparty%2Fcurl.git

pytest-xdist: pytest in parallel

Require pytest-xdist in tests/http/requirements.txt and run pytest in 'auto' parallel mode (one worker per CPU core). For CI runs, set the worker count to 4, overriding the core count of 2 exposed in the images.

- use Filelock to allocate ports for all workers at startup and have subsequent workers just read the file and take the ports recorded for their slot (a minimal sketch of this scheme follows the diff below)
- make httpd config clearing a function-scoped fixture so every test starts with a clean httpd config
- have fixture `configures_httpd` as a parameter of test cases that configure httpd themselves anyway, saving one reload
- add pytest-xdist and filelock to the required python modules
- add the installs to the ruff CI job
- give live checks waiting for a server to start up more time
- add fixtures to tests that rely on a server
- do not stop servers unnecessarily; a failing test may not start them properly again, leading to unexpected failures in whatever follows
- add an https: port to httpd that is *not* backed by QUIC, to allow failover tests without stopping the QUIC server

Closes #17295
---
diff --git a/.github/workflows/checksrc.yml b/.github/workflows/checksrc.yml index eef725d5bd..4083cc3c56 100644 --- a/.github/workflows/checksrc.yml +++ b/.github/workflows/checksrc.yml @@ -62,7 +62,7 @@ jobs: codespell python3-pip python3-networkx python3-pydot python3-yaml \ python3-toml python3-markupsafe python3-jinja2 python3-tabulate \ python3-typing-extensions python3-libcst python3-impacket \ - python3-websockets python3-pytest + python3-websockets python3-pytest python3-filelock python3-pytest-xdist python3 -m pip install --break-system-packages cmakelint==1.4.3 pytype==2024.10.11 ruff==0.11.9 - name: spellcheck diff --git a/.github/workflows/http3-linux.yml b/.github/workflows/http3-linux.yml index ffbc6c00da..80cae92d43 100644 --- a/.github/workflows/http3-linux.yml +++ b/.github/workflows/http3-linux.yml @@ -529,6 +529,7 @@ jobs: CURL_TEST_EVENT: 1 CURL_CI: github PYTEST_ADDOPTS: '--color=yes' + PYTEST_XDIST_AUTO_NUM_WORKERS: 4 run: | source $HOME/venv/bin/activate if [ -n '${{ matrix.build.generate }}' ]; then diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 8f86b2ec76..cd9083eab5 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -694,6 +694,7 @@ jobs: env: CURL_CI: github PYTEST_ADDOPTS: '--color=yes' + PYTEST_XDIST_AUTO_NUM_WORKERS: 4 run: | [ -x "$HOME/venv/bin/activate" ] && source $HOME/venv/bin/activate if [ -n '${{ matrix.build.generate }}' ]; then diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 8e8cf3a337..224662a027 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -337,6 +337,7 @@ jobs: env: CURL_CI: github PYTEST_ADDOPTS: '--color=yes' + PYTEST_XDIST_AUTO_NUM_WORKERS: 4 run: | source $HOME/venv/bin/activate if [ -n '${{ matrix.build.generate }}' ]; then diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 11736d1cd6..defc1b5eb2 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -111,5 +111,5 @@ curl_add_runtests(test-ci "-a -p ~flaky ~timing-dependent -r --retry=5 -j curl_add_runtests(test-torture "-a -t -j20") curl_add_runtests(test-event "-a -e") -curl_add_pytests(curl-pytest "") -curl_add_pytests(curl-pytest-ci "-v")
+curl_add_pytests(curl-pytest "-n auto") +curl_add_pytests(curl-pytest-ci "-n auto -v") diff --git a/tests/Makefile.am b/tests/Makefile.am index 0206135c63..3030b33500 100644 --- a/tests/Makefile.am +++ b/tests/Makefile.am @@ -189,7 +189,7 @@ event-test: perlcheck all default-pytest: ci-pytest ci-pytest: all - srcdir=$(srcdir) $(PYTEST) -v $(srcdir)/http + srcdir=$(srcdir) $(PYTEST) -n auto -v $(srcdir)/http checksrc: (cd libtest && $(MAKE) checksrc) diff --git a/tests/http/conftest.py b/tests/http/conftest.py index b29cf38bb7..678ef164ee 100644 --- a/tests/http/conftest.py +++ b/tests/http/conftest.py @@ -26,14 +26,17 @@ import logging import os import sys import platform -from typing import Generator +from typing import Generator, Union import pytest +from testenv.env import EnvConfig + sys.path.append(os.path.join(os.path.dirname(__file__), '.')) from testenv import Env, Nghttpx, Httpd, NghttpxQuic, NghttpxFwd + def pytest_report_header(config): # Env inits its base properties only once, we can report them here env = Env() @@ -43,20 +46,20 @@ def pytest_report_header(config): f' curl: Version: {env.curl_version_string()}', f' curl: Features: {env.curl_features_string()}', f' curl: Protocols: {env.curl_protocols_string()}', - f' httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}', - f' httpd-proxy: {env.httpd_version()}, http:{env.proxy_port} https:{env.proxys_port}' + f' httpd: {env.httpd_version()}', + f' httpd-proxy: {env.httpd_version()}' ] if env.have_h3(): report.extend([ - f' nghttpx: {env.nghttpx_version()}, h3:{env.https_port}' + f' nghttpx: {env.nghttpx_version()}' ]) if env.has_caddy(): report.extend([ - f' Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}' + f' Caddy: {env.caddy_version()}' ]) if env.has_vsftpd(): report.extend([ - f' VsFTPD: {env.vsftpd_version()}, ftp:{env.ftp_port}, ftps:{env.ftps_port}' + f' VsFTPD: {env.vsftpd_version()}' ]) buildinfo_fn = os.path.join(env.build_dir, 'buildinfo.txt') if os.path.exists(buildinfo_fn): @@ -67,14 +70,18 @@ def pytest_report_header(config): report.extend([line]) return '\n'.join(report) -# TODO: remove this and repeat argument everywhere, pytest-repeat can be used to repeat tests -def pytest_generate_tests(metafunc): - if "repeat" in metafunc.fixturenames: - metafunc.parametrize('repeat', [0]) -@pytest.fixture(scope="package") -def env(pytestconfig) -> Env: - env = Env(pytestconfig=pytestconfig) +@pytest.fixture(scope='session') +def env_config(pytestconfig, testrun_uid, worker_id) -> EnvConfig: + env_config = EnvConfig(pytestconfig=pytestconfig, + testrun_uid=testrun_uid, + worker_id=worker_id) + return env_config + + +@pytest.fixture(scope='session', autouse=True) +def env(pytestconfig, env_config) -> Env: + env = Env(pytestconfig=pytestconfig, env_config=env_config) level = logging.DEBUG if env.verbose > 0 else logging.INFO logging.getLogger('').setLevel(level=level) if not env.curl_has_protocol('http'): @@ -87,37 +94,52 @@ def env(pytestconfig) -> Env: env.setup() return env -@pytest.fixture(scope="package", autouse=True) -def log_global_env_facts(record_testsuite_property, env): - record_testsuite_property("http-port", env.http_port) - -@pytest.fixture(scope='package') +@pytest.fixture(scope='session') def httpd(env) -> Generator[Httpd, None, None]: httpd = Httpd(env=env) if not httpd.exists(): pytest.skip(f'httpd not found: {env.httpd}') httpd.clear_logs() - if not httpd.start(): - pytest.fail(f'failed to start httpd: {env.httpd}') + assert httpd.initial_start() 
yield httpd httpd.stop() -@pytest.fixture(scope='package') -def nghttpx(env, httpd) -> Generator[Nghttpx, None, None]: +@pytest.fixture(scope='session') +def nghttpx(env, httpd) -> Generator[Union[Nghttpx,bool], None, None]: nghttpx = NghttpxQuic(env=env) - if nghttpx.exists() and (env.have_h3() or nghttpx.https_port > 0): + if nghttpx.exists() and env.have_h3(): nghttpx.clear_logs() - assert nghttpx.start() - yield nghttpx - nghttpx.stop() + assert nghttpx.initial_start() + yield nghttpx + nghttpx.stop() + else: + yield False -@pytest.fixture(scope='package') -def nghttpx_fwd(env, httpd) -> Generator[Nghttpx, None, None]: + +@pytest.fixture(scope='session') +def nghttpx_fwd(env, httpd) -> Generator[Union[Nghttpx,bool], None, None]: nghttpx = NghttpxFwd(env=env) - if nghttpx.exists() and (env.have_h3() or nghttpx.https_port > 0): + if nghttpx.exists(): nghttpx.clear_logs() - assert nghttpx.start() - yield nghttpx - nghttpx.stop() + assert nghttpx.initial_start() + yield nghttpx + nghttpx.stop() + else: + yield False + + +@pytest.fixture(scope='session') +def configures_httpd(env, httpd) -> Generator[bool, None, None]: + # include this fixture as test parameter if the test configures httpd itself + yield True + + +@pytest.fixture(autouse=True, scope='function') +def server_reset(request, env, httpd): + # make sure httpd is in default configuration when a test starts + if 'configures_httpd' not in request.node._fixtureinfo.argnames: + httpd.clear_extra_configs() + httpd.set_proxy_auth(False) + httpd.reload_if_config_changed() diff --git a/tests/http/requirements.txt b/tests/http/requirements.txt index fb0e01383f..31fc479f6f 100644 --- a/tests/http/requirements.txt +++ b/tests/http/requirements.txt @@ -25,6 +25,8 @@ # pytest cryptography +filelock multipart websockets psutil +pytest-xdist diff --git a/tests/http/scorecard.py b/tests/http/scorecard.py index 7f8cd2a766..ba9f6d9eb4 100644 --- a/tests/http/scorecard.py +++ b/tests/http/scorecard.py @@ -776,11 +776,11 @@ def main(): f'httpd not found: {env.httpd}' httpd.clear_logs() server_docs = httpd.docs_dir - assert httpd.start() + assert httpd.initial_start() if protocol == 'h3': nghttpx = NghttpxQuic(env=env) nghttpx.clear_logs() - assert nghttpx.start() + assert nghttpx.initial_start() server_descr = f'nghttpx: https:{env.h3_port} [backend httpd: {env.httpd_version()}, https:{env.https_port}]' server_port = env.h3_port else: @@ -803,10 +803,10 @@ def main(): assert httpd.exists(), \ f'httpd not found: {env.httpd}' httpd.clear_logs() - assert httpd.start() + assert httpd.initial_start() caddy = Caddy(env=env) caddy.clear_logs() - assert caddy.start() + assert caddy.initial_start() server_descr = f'Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}{backend}' server_port = caddy.port server_docs = caddy.docs_dir diff --git a/tests/http/test_01_basic.py b/tests/http/test_01_basic.py index f8936f373f..dd593af8ea 100644 --- a/tests/http/test_01_basic.py +++ b/tests/http/test_01_basic.py @@ -36,13 +36,6 @@ log = logging.getLogger(__name__) class TestBasic: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - # simple http: GET def test_01_01_http_get(self, env: Env, httpd): curl = CurlClient(env=env) @@ -99,7 +92,8 @@ class TestBasic: r.check_stats(http_status=200, count=1, remote_port=env.port_for(alpn_proto=proto), remote_ip='127.0.0.1') - assert r.stats[0]['time_connect'] > 
0, f'{r.stats[0]}' + # there are cases where time_connect is reported as 0 + assert r.stats[0]['time_connect'] >= 0, f'{r.stats[0]}' assert r.stats[0]['time_appconnect'] > 0, f'{r.stats[0]}' # simple https: HEAD @@ -162,7 +156,7 @@ class TestBasic: pytest.skip("h3 not supported") curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?x-hd={48 * 1024}' + f'/curltest/tweak?x-hd={48 * 1024}' r = curl.http_get(url=url, alpn_proto=proto, extra_args=[]) r.check_exit_code(0) assert len(r.responses) == 1, f'{r.responses}' @@ -172,14 +166,14 @@ class TestBasic: @pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'), reason='httpd must be at least 2.4.64') @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) - def test_01_12_xlarge_resp_headers(self, env: Env, httpd, proto): + def test_01_12_xlarge_resp_headers(self, env: Env, httpd, configures_httpd, proto): httpd.set_extra_config('base', [ f'H2MaxHeaderBlockLen {130 * 1024}', ]) - httpd.reload() + httpd.reload_if_config_changed() curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?x-hd={128 * 1024}' + f'/curltest/tweak?x-hd={128 * 1024}' r = curl.http_get(url=url, alpn_proto=proto, extra_args=[]) r.check_exit_code(0) assert len(r.responses) == 1, f'{r.responses}' @@ -189,15 +183,15 @@ class TestBasic: @pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'), reason='httpd must be at least 2.4.64') @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) - def test_01_13_megalarge_resp_headers(self, env: Env, httpd, proto): + def test_01_13_megalarge_resp_headers(self, env: Env, httpd, configures_httpd, proto): httpd.set_extra_config('base', [ 'LogLevel http2:trace2', f'H2MaxHeaderBlockLen {130 * 1024}', ]) - httpd.reload() + httpd.reload_if_config_changed() curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?x-hd1={128 * 1024}' + f'/curltest/tweak?x-hd1={128 * 1024}' r = curl.http_get(url=url, alpn_proto=proto, extra_args=[]) if proto == 'h2': r.check_exit_code(16) # CURLE_HTTP2 @@ -209,15 +203,15 @@ class TestBasic: @pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'), reason='httpd must be at least 2.4.64') @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) - def test_01_14_gigalarge_resp_headers(self, env: Env, httpd, proto): + def test_01_14_gigalarge_resp_headers(self, env: Env, httpd, configures_httpd, proto): httpd.set_extra_config('base', [ 'LogLevel http2:trace2', f'H2MaxHeaderBlockLen {1024 * 1024}', ]) - httpd.reload() + httpd.reload_if_config_changed() curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?x-hd={256 * 1024}' + f'/curltest/tweak?x-hd={256 * 1024}' r = curl.http_get(url=url, alpn_proto=proto, extra_args=[]) if proto == 'h2': r.check_exit_code(16) # CURLE_HTTP2 @@ -228,15 +222,15 @@ class TestBasic: @pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.64'), reason='httpd must be at least 2.4.64') @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) - def test_01_15_gigalarge_resp_headers(self, env: Env, httpd, proto): + def test_01_15_gigalarge_resp_headers(self, env: Env, httpd, configures_httpd, proto): httpd.set_extra_config('base', [ 'LogLevel http2:trace2', f'H2MaxHeaderBlockLen {1024 * 1024}', ]) - httpd.reload() + httpd.reload_if_config_changed() curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?x-hd1={256 * 
1024}' + f'/curltest/tweak?x-hd1={256 * 1024}' r = curl.http_get(url=url, alpn_proto=proto, extra_args=[]) if proto == 'h2': r.check_exit_code(16) # CURLE_HTTP2 @@ -245,7 +239,7 @@ class TestBasic: # http: invalid request headers, GET, issue #16998 @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) - def test_01_16_inv_req_get(self, env: Env, httpd, proto): + def test_01_16_inv_req_get(self, env: Env, httpd, nghttpx, proto): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") curl = CurlClient(env=env) diff --git a/tests/http/test_02_download.py b/tests/http/test_02_download.py index b55f022338..d91e20488c 100644 --- a/tests/http/test_02_download.py +++ b/tests/http/test_02_download.py @@ -41,13 +41,6 @@ log = logging.getLogger(__name__) class TestDownload: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - @pytest.fixture(autouse=True, scope='class') def _class_scope(self, env, httpd): indir = httpd.docs_dir @@ -281,7 +274,7 @@ class TestDownload: remote_ip='127.0.0.1') @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests") - def test_02_20_h2_small_frames(self, env: Env, httpd): + def test_02_20_h2_small_frames(self, env: Env, httpd, configures_httpd): # Test case to reproduce content corruption as observed in # https://github.com/curl/curl/issues/10525 # To reliably reproduce, we need an Apache httpd that supports @@ -290,11 +283,7 @@ class TestDownload: httpd.set_extra_config(env.domain1, lines=[ 'H2MaxDataFrameLen 1024', ]) - assert httpd.stop() - if not httpd.start(): - # no, not supported, bail out - httpd.set_extra_config(env.domain1, lines=None) - assert httpd.start() + if not httpd.reload_if_config_changed(): pytest.skip('H2MaxDataFrameLen not supported') # ok, make 100 downloads with 2 parallel running and they # are expected to stumble into the issue when using `lib/http2.c` @@ -308,10 +297,6 @@ class TestDownload: r.check_response(count=count, http_status=200) srcfile = os.path.join(httpd.docs_dir, 'data-1m') self.check_downloads(curl, srcfile, count) - # restore httpd defaults - httpd.set_extra_config(env.domain1, lines=None) - assert httpd.stop() - assert httpd.start() # download serial via lib client, pause/resume at different offsets @pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000]) @@ -592,9 +577,10 @@ class TestDownload: '--parallel', '--http2' ]) r.check_response(http_status=200, count=count) - # we see 3 connections, because Apache only every serves a single - # request via Upgrade: and then closed the connection. - assert r.total_connects == 3, r.dump_logs() + # we see up to 3 connections, because Apache wants to serve only a single + # request via Upgrade: and then closes the connection. But if a new + # request comes in time, it might still get served. 
+ assert r.total_connects <= 3, r.dump_logs() # nghttpx is the only server we have that supports TLS early data @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx") diff --git a/tests/http/test_03_goaway.py b/tests/http/test_03_goaway.py index f7232ce64a..9a7ebbab4e 100644 --- a/tests/http/test_03_goaway.py +++ b/tests/http/test_03_goaway.py @@ -38,24 +38,18 @@ log = logging.getLogger(__name__) class TestGoAway: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - # download files sequentially with delay, reload server for GOAWAY def test_03_01_h2_goaway(self, env: Env, httpd, nghttpx): proto = 'h2' count = 3 self.r = None + def long_run(): curl = CurlClient(env=env) # send 10 chunks of 1024 bytes in a response body with 100ms delay in between urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=10&chunk_size=1024&chunk_delay=100ms' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=10&chunk_size=1024&chunk_delay=100ms' self.r = curl.http_download(urls=[urln], alpn_proto=proto) t = Thread(target=long_run) @@ -86,12 +80,13 @@ class TestGoAway: pytest.skip('OpenSSL QUIC fails here') count = 3 self.r = None + def long_run(): curl = CurlClient(env=env) # send 10 chunks of 1024 bytes in a response body with 100ms delay in between urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=10&chunk_size=1024&chunk_delay=100ms' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=10&chunk_size=1024&chunk_delay=100ms' self.r = curl.http_download(urls=[urln], alpn_proto=proto) t = Thread(target=long_run) @@ -99,7 +94,7 @@ class TestGoAway: # each request will take a second, reload the server in the middle # of the first one. 
time.sleep(1.5) - assert nghttpx.reload(timeout=timedelta(seconds=2)) + assert nghttpx.reload(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) t.join() r: ExecResult = self.r # this should take `count` seconds to retrieve, maybe a little less @@ -116,13 +111,14 @@ class TestGoAway: proto = 'http/1.1' count = 3 self.r = None + def long_run(): curl = CurlClient(env=env) # send 10 chunks of 1024 bytes in a response body with 100ms delay in between # pause 2 seconds between requests urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=10&chunk_size=1024&chunk_delay=100ms' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=10&chunk_size=1024&chunk_delay=100ms' self.r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[ '--rate', '30/m', ]) diff --git a/tests/http/test_04_stuttered.py b/tests/http/test_04_stuttered.py index f78e1c3f99..3ba5c6169c 100644 --- a/tests/http/test_04_stuttered.py +++ b/tests/http/test_04_stuttered.py @@ -38,13 +38,6 @@ log = logging.getLogger(__name__) @pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs") class TestStuttered: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - # download 1 file, check that delayed response works in general @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) def test_04_01_download_1(self, env: Env, httpd, nghttpx, proto): @@ -53,8 +46,8 @@ class TestStuttered: count = 1 curl = CurlClient(env=env) urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=100&chunk_size=100&chunk_delay=10ms' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=100&chunk_size=100&chunk_delay=10ms' r = curl.http_download(urls=[urln], alpn_proto=proto) r.check_response(count=1, http_status=200) @@ -70,8 +63,8 @@ class TestStuttered: curl = CurlClient(env=env) url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]' urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count-1}]'\ - '&chunks=100&chunk_size=100&chunk_delay=10ms' + f'/curltest/tweak?id=[0-{count-1}]'\ + '&chunks=100&chunk_size=100&chunk_delay=10ms' r = curl.http_download(urls=[url1, urln], alpn_proto=proto, extra_args=['--parallel']) r.check_response(count=warmups+count, http_status=200) @@ -92,8 +85,8 @@ class TestStuttered: curl = CurlClient(env=env) url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]' urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=1000&chunk_size=10&chunk_delay=100us' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=1000&chunk_size=10&chunk_delay=100us' r = curl.http_download(urls=[url1, urln], alpn_proto=proto, extra_args=['--parallel']) r.check_response(count=warmups+count, http_status=200) @@ -114,8 +107,8 @@ class TestStuttered: curl = CurlClient(env=env) url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]' urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=10000&chunk_size=1&chunk_delay=50us' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=10000&chunk_size=1&chunk_delay=50us' r = curl.http_download(urls=[url1, urln], alpn_proto=proto, extra_args=['--parallel']) r.check_response(count=warmups+count, http_status=200) diff --git 
a/tests/http/test_05_errors.py b/tests/http/test_05_errors.py index 74ff1d6a7c..10d8972df2 100644 --- a/tests/http/test_05_errors.py +++ b/tests/http/test_05_errors.py @@ -37,13 +37,6 @@ log = logging.getLogger(__name__) reason=f"httpd version too old for this: {Env.httpd_version()}") class TestErrors: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - # download 1 file, check that we get CURLE_PARTIAL_FILE @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) def test_05_01_partial_1(self, env: Env, httpd, nghttpx, proto): @@ -54,8 +47,8 @@ class TestErrors: count = 1 curl = CurlClient(env=env) urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=3&chunk_size=16000&body_error=reset' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=3&chunk_size=16000&body_error=reset' r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[ '--retry', '0' ]) @@ -71,13 +64,15 @@ class TestErrors: def test_05_02_partial_20(self, env: Env, httpd, nghttpx, proto): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") + if proto == 'h3' and env.curl_uses_ossl_quic(): + pytest.skip("openssl-quic is flaky in yielding proper error codes") if proto == 'h3' and env.curl_uses_lib('msh3'): pytest.skip("msh3 stalls here") count = 20 curl = CurlClient(env=env) urln = f'https://{env.authority_for(env.domain1, proto)}' \ - f'/curltest/tweak?id=[0-{count - 1}]'\ - '&chunks=5&chunk_size=16000&body_error=reset' + f'/curltest/tweak?id=[0-{count - 1}]'\ + '&chunks=5&chunk_size=16000&body_error=reset' r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[ '--retry', '0', '--parallel', ]) @@ -121,7 +116,7 @@ class TestErrors: count = 10 if proto == 'h2' else 1 curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}'\ - f'/curltest/shutdown_unclean?id=[0-{count-1}]&chunks=4' + f'/curltest/shutdown_unclean?id=[0-{count-1}]&chunks=4' r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[ '--parallel', ]) diff --git a/tests/http/test_06_eyeballs.py b/tests/http/test_06_eyeballs.py index cfe68595f0..038721fbf9 100644 --- a/tests/http/test_06_eyeballs.py +++ b/tests/http/test_06_eyeballs.py @@ -35,13 +35,6 @@ log = logging.getLogger(__name__) class TestEyeballs: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - # download using only HTTP/3 on working server @pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support") def test_06_01_h3_only(self, env: Env, httpd, nghttpx): @@ -54,18 +47,16 @@ class TestEyeballs: # download using only HTTP/3 on missing server @pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support") def test_06_02_h3_only(self, env: Env, httpd, nghttpx): - nghttpx.stop_if_running() curl = CurlClient(env=env) - urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json' + urln = f'https://{env.domain1}:{env.https_only_tcp_port}/data.json' r = curl.http_download(urls=[urln], extra_args=['--http3-only']) r.check_response(exitcode=7, http_status=None) # download using HTTP/3 on missing server with fallback on h2 @pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support") def test_06_03_h3_fallback_h2(self, env: Env, httpd, nghttpx): 
- nghttpx.stop_if_running() curl = CurlClient(env=env) - urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json' + urln = f'https://{env.domain1}:{env.https_only_tcp_port}/data.json' r = curl.http_download(urls=[urln], extra_args=['--http3']) r.check_response(count=1, http_status=200) assert r.stats[0]['http_version'] == '2' @@ -73,9 +64,8 @@ class TestEyeballs: # download using HTTP/3 on missing server with fallback on http/1.1 @pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support") def test_06_04_h3_fallback_h1(self, env: Env, httpd, nghttpx): - nghttpx.stop_if_running() curl = CurlClient(env=env) - urln = f'https://{env.authority_for(env.domain2, "h3")}/data.json' + urln = f'https://{env.domain2}:{env.https_only_tcp_port}/data.json' r = curl.http_download(urls=[urln], extra_args=['--http3']) r.check_response(count=1, http_status=200) assert r.stats[0]['http_version'] == '1.1' diff --git a/tests/http/test_07_upload.py b/tests/http/test_07_upload.py index ef1a0ed228..9bae7041e7 100644 --- a/tests/http/test_07_upload.py +++ b/tests/http/test_07_upload.py @@ -30,9 +30,9 @@ import logging import os import re import pytest -from typing import List +from typing import List, Union -from testenv import Env, CurlClient, LocalClient +from testenv import Env, CurlClient, LocalClient, ExecResult log = logging.getLogger(__name__) @@ -42,16 +42,12 @@ class TestUpload: @pytest.fixture(autouse=True, scope='class') def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() env.make_data_file(indir=env.gen_dir, fname="data-10k", fsize=10*1024) env.make_data_file(indir=env.gen_dir, fname="data-63k", fsize=63*1024) env.make_data_file(indir=env.gen_dir, fname="data-64k", fsize=64*1024) env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024) env.make_data_file(indir=env.gen_dir, fname="data-1m+", fsize=(1024*1024)+1) env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024) - httpd.clear_extra_configs() - httpd.reload() # upload small data, check that this is what was echoed @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) @@ -190,7 +186,7 @@ class TestUpload: '-n', f'{count}', '-S', f'{upload_size}', '-V', proto, url ]) r.check_exit_code(0) - self.check_downloads(client, [f"{upload_size}"], count) + self.check_downloads(client, r, [f"{upload_size}"], count) @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) def test_07_16_hx_put_reuse(self, env: Env, httpd, nghttpx, proto): @@ -206,7 +202,7 @@ class TestUpload: '-n', f'{count}', '-S', f'{upload_size}', '-R', '-V', proto, url ]) r.check_exit_code(0) - self.check_downloads(client, [f"{upload_size}"], count) + self.check_downloads(client, r, [f"{upload_size}"], count) @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) def test_07_17_hx_post_reuse(self, env: Env, httpd, nghttpx, proto): @@ -222,7 +218,7 @@ class TestUpload: '-n', f'{count}', '-M', 'POST', '-S', f'{upload_size}', '-R', '-V', proto, url ]) r.check_exit_code(0) - self.check_downloads(client, ["x" * upload_size], count) + self.check_downloads(client, r, ["x" * upload_size], count) # upload data parallel, check that they were echoed @pytest.mark.parametrize("proto", ['h2', 'h3']) @@ -258,7 +254,7 @@ class TestUpload: r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=['--parallel']) r.check_response(count=count, http_status=200) - self.check_download(count, fdata, curl) + self.check_download(r, count, fdata, curl) # upload large data 
parallel to a URL that denies uploads @pytest.mark.parametrize("proto", ['h2', 'h3']) @@ -275,8 +271,8 @@ class TestUpload: r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=['--parallel']) # depending on timing and protocol, we might get CURLE_PARTIAL_FILE or - # CURLE_HTTP3 or CURLE_HTTP2_STREAM - r.check_stats(count=count, exitcode=[18, 92, 95]) + # CURLE_SEND_ERROR or CURLE_HTTP3 or CURLE_HTTP2_STREAM + r.check_stats(count=count, exitcode=[18, 55, 92, 95]) # PUT 100k @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) @@ -290,7 +286,7 @@ class TestUpload: curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]' r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, - extra_args=['--parallel']) + extra_args=['--parallel']) r.check_stats(count=count, http_status=200, exitcode=0) exp_data = [f'{os.path.getsize(fdata)}'] r.check_response(count=count, http_status=200) @@ -310,7 +306,7 @@ class TestUpload: curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=2ms' r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, - extra_args=['--parallel']) + extra_args=['--parallel']) r.check_stats(count=count, http_status=200, exitcode=0) exp_data = [f'{os.path.getsize(fdata)}'] r.check_response(count=count, http_status=200) @@ -522,17 +518,17 @@ class TestUpload: respdata = open(curl.response_file(0)).readlines() assert respdata == indata - def check_download(self, count, srcfile, curl): + def check_download(self, r: ExecResult, count: int, srcfile: Union[str, os.PathLike], curl: CurlClient): for i in range(count): dfile = curl.download_file(i) - assert os.path.exists(dfile) + assert os.path.exists(dfile), f'download {dfile} missing\n{r.dump_logs()}' if not filecmp.cmp(srcfile, dfile, shallow=False): diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(), b=open(dfile).readlines(), fromfile=srcfile, tofile=dfile, n=1)) - assert False, f'download {dfile} differs:\n{diff}' + assert False, f'download {dfile} differs:\n{diff}\n{r.dump_logs()}' # upload data, pause, let connection die with an incomplete response # issues #11769 #13260 @@ -547,7 +543,7 @@ class TestUpload: pytest.skip(f'example client not built: {client.name}') url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&die_after=0' r = client.run(['-V', proto, url]) - if r.exit_code == 18: # PARTIAL_FILE is always ok + if r.exit_code == 18: # PARTIAL_FILE is always ok pass elif proto == 'h2': # CURLE_HTTP2, CURLE_HTTP2_STREAM @@ -595,6 +591,8 @@ class TestUpload: def test_07_43_upload_denied(self, env: Env, httpd, nghttpx, proto): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") + if proto == 'h3' and env.curl_uses_ossl_quic(): + pytest.skip("openssl-quic is flaky in filed PUTs") if proto == 'h3' and env.curl_uses_lib('msh3'): pytest.skip("msh3 fails here") fdata = os.path.join(env.gen_dir, 'data-10m') @@ -604,7 +602,7 @@ class TestUpload: url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?'\ f'id=[0-{count-1}]&max_upload={max_upload}' r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, - extra_args=['--trace-config', 'all']) + extra_args=['--trace-config', 'all']) r.check_stats(count=count, http_status=413, exitcode=0) # speed limited on put handler @@ -652,7 +650,7 @@ class TestUpload: read_delay = 1 curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, 
proto)}/curltest/put?id=[0-0]'\ - f'&read_delay={read_delay}s' + f'&read_delay={read_delay}s' r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, extra_args=[ '--expect100-timeout', f'{read_delay+1}' ]) @@ -665,7 +663,7 @@ class TestUpload: read_delay = 2 curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'\ - f'&read_delay={read_delay}s' + f'&read_delay={read_delay}s' r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, extra_args=[ '--expect100-timeout', f'{read_delay-1}' ]) @@ -730,24 +728,26 @@ class TestUpload: '-V', proto, url ]) r.check_exit_code(0) - self.check_downloads(client, [f"{upload_size}"], count) + self.check_downloads(client, r, [f"{upload_size}"], count) earlydata = {} for line in r.trace_lines: m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line) if m: earlydata[int(m.group(1))] = int(m.group(2)) - assert earlydata[0] == 0, f'{earlydata}' - assert earlydata[1] == exp_early, f'{earlydata}' + assert earlydata[0] == 0, f'{earlydata}\n{r.dump_logs()}' + # depending on cpu load, curl might not upload as much before + # the handshake starts and early data stops. + assert 102 <= earlydata[1] <= exp_early, f'{earlydata}\n{r.dump_logs()}' - def check_downloads(self, client, source: List[str], count: int, + def check_downloads(self, client, r, source: List[str], count: int, complete: bool = True): for i in range(count): dfile = client.download_file(i) - assert os.path.exists(dfile) + assert os.path.exists(dfile), f'download {dfile} missing\n{r.dump_logs()}' if complete: diff = "".join(difflib.unified_diff(a=source, b=open(dfile).readlines(), fromfile='-', tofile=dfile, n=1)) - assert not diff, f'download {dfile} differs:\n{diff}' + assert not diff, f'download {dfile} differs:\n{diff}\n{r.dump_logs()}' diff --git a/tests/http/test_08_caddy.py b/tests/http/test_08_caddy.py index cd16e0d954..5276612439 100644 --- a/tests/http/test_08_caddy.py +++ b/tests/http/test_08_caddy.py @@ -44,7 +44,7 @@ class TestCaddy: @pytest.fixture(autouse=True, scope='class') def caddy(self, env): caddy = Caddy(env=env) - assert caddy.start() + assert caddy.initial_start() yield caddy caddy.stop() @@ -152,8 +152,8 @@ class TestCaddy: if proto == 'h3' and env.curl_uses_lib('msh3'): pytest.skip("msh3 itself crashes") if proto == 'http/1.1' and env.curl_uses_lib('mbedtls'): - pytest.skip("mbedtls 3.6.0 fails on 50 connections with: "\ - "ssl_handshake returned: (-0x7F00) SSL - Memory allocation failed") + pytest.skip("mbedtls 3.6.0 fails on 50 connections with: " + "ssl_handshake returned: (-0x7F00) SSL - Memory allocation failed") count = 50 curl = CurlClient(env=env) urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]' diff --git a/tests/http/test_09_push.py b/tests/http/test_09_push.py index 122c74d8b7..af5b482a64 100644 --- a/tests/http/test_09_push.py +++ b/tests/http/test_09_push.py @@ -44,6 +44,8 @@ class TestPush: env.make_data_file(indir=push_dir, fname="data1", fsize=1*1024) env.make_data_file(indir=push_dir, fname="data2", fsize=1*1024) env.make_data_file(indir=push_dir, fname="data3", fsize=1*1024) + + def httpd_configure(self, env, httpd): httpd.set_extra_config(env.domain1, [ 'H2EarlyHints on', '', @@ -55,13 +57,11 @@ class TestPush: '', ]) # activate the new config - httpd.reload() - yield - httpd.clear_extra_configs() - httpd.reload() + httpd.reload_if_config_changed() # download a file that triggers a "103 Early Hints" response - def test_09_01_h2_early_hints(self, env: Env, httpd): + def 
test_09_01_h2_early_hints(self, env: Env, httpd, configures_httpd): + self.httpd_configure(env, httpd) curl = CurlClient(env=env) url = f'https://{env.domain1}:{env.https_port}/push/data1' r = curl.http_download(urls=[url], alpn_proto='h2', with_stats=False, @@ -72,7 +72,8 @@ class TestPush: assert 'link' in r.responses[0]['header'], f'{r.responses[0]}' assert r.responses[0]['header']['link'] == '; rel=preload', f'{r.responses[0]}' - def test_09_02_h2_push(self, env: Env, httpd): + def test_09_02_h2_push(self, env: Env, httpd, configures_httpd): + self.httpd_configure(env, httpd) # use localhost as we do not have resolve support in local client url = f'https://localhost:{env.https_port}/push/data1' client = LocalClient(name='h2-serverpush', env=env) diff --git a/tests/http/test_10_proxy.py b/tests/http/test_10_proxy.py index 190d155239..bf89a44311 100644 --- a/tests/http/test_10_proxy.py +++ b/tests/http/test_10_proxy.py @@ -47,8 +47,9 @@ class TestProxy: nghttpx_fwd.start_if_needed() env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024) env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024) - httpd.clear_extra_configs() - httpd.reload() + indir = httpd.docs_dir + env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024) + env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024) def get_tunnel_proto_used(self, r: ExecResult): for line in r.trace_lines: @@ -110,7 +111,7 @@ class TestProxy: assert respdata == indata # download http: via http: proxytunnel - def test_10_03_proxytunnel_http(self, env: Env, httpd): + def test_10_03_proxytunnel_http(self, env: Env, httpd, nghttpx_fwd): curl = CurlClient(env=env) url = f'http://localhost:{env.http_port}/data.json' xargs = curl.get_proxy_args(proxys=False, tunnel=True) @@ -133,7 +134,7 @@ class TestProxy: # download https: with proto via http: proxytunnel @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL") - def test_10_05_proxytunnel_http(self, env: Env, httpd, proto): + def test_10_05_proxytunnel_http(self, env: Env, httpd, nghttpx_fwd, proto): curl = CurlClient(env=env) url = f'https://localhost:{env.https_port}/data.json' xargs = curl.get_proxy_args(proxys=False, tunnel=True) @@ -258,7 +259,7 @@ class TestProxy: url = f'https://localhost:{env.https_port}/data.json' proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel) r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=proxy_args) + extra_args=proxy_args) r1.check_response(count=1, http_status=200) assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \ if tunnel == 'h2' else 'HTTP/1.1' @@ -267,7 +268,7 @@ class TestProxy: x2_args.append('--next') x2_args.extend(proxy_args) r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=x2_args) + extra_args=x2_args) r2.check_response(count=2, http_status=200) assert r2.total_connects == 1 @@ -283,7 +284,7 @@ class TestProxy: url = f'https://localhost:{env.https_port}/data.json' proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel) r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=proxy_args) + extra_args=proxy_args) r1.check_response(count=1, http_status=200) assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \ if tunnel == 'h2' else 'HTTP/1.1' @@ -293,7 +294,7 @@ class TestProxy: x2_args.extend(proxy_args) x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384']) r2 = 
curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=x2_args) + extra_args=x2_args) r2.check_response(count=2, http_status=200) assert r2.total_connects == 2 @@ -309,7 +310,7 @@ class TestProxy: url = f'http://localhost:{env.http_port}/data.json' proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel) r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=proxy_args) + extra_args=proxy_args) r1.check_response(count=1, http_status=200) assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \ if tunnel == 'h2' else 'HTTP/1.1' @@ -319,7 +320,7 @@ class TestProxy: x2_args.extend(proxy_args) x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384']) r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=x2_args) + extra_args=x2_args) r2.check_response(count=2, http_status=200) assert r2.total_connects == 2 @@ -335,7 +336,7 @@ class TestProxy: url = f'https://localhost:{env.https_port}/data.json' proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel) r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=proxy_args) + extra_args=proxy_args) r1.check_response(count=1, http_status=200) assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \ if tunnel == 'h2' else 'HTTP/1.1' @@ -345,7 +346,7 @@ class TestProxy: x2_args.extend(proxy_args) x2_args.extend(['--tls13-ciphers', 'TLS_AES_256_GCM_SHA384']) r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, - extra_args=x2_args) + extra_args=x2_args) r2.check_response(count=2, http_status=200) assert r2.total_connects == 2 @@ -364,7 +365,7 @@ class TestProxy: extra_args=xargs) if env.curl_uses_lib('mbedtls') and \ not env.curl_lib_version_at_least('mbedtls', '3.5.0'): - r.check_exit_code(60) # CURLE_PEER_FAILED_VERIFICATION + r.check_exit_code(60) # CURLE_PEER_FAILED_VERIFICATION else: r.check_response(count=1, http_status=200, protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1') diff --git a/tests/http/test_11_unix.py b/tests/http/test_11_unix.py index 7e921878d1..2f2db483f4 100644 --- a/tests/http/test_11_unix.py +++ b/tests/http/test_11_unix.py @@ -37,12 +37,14 @@ from testenv import Env, CurlClient log = logging.getLogger(__name__) + class UDSFaker: def __init__(self, path): self._uds_path = path self._done = False self._socket = None + self._thread = None @property def path(self): diff --git a/tests/http/test_12_reuse.py b/tests/http/test_12_reuse.py index 747e31eb58..bb813c5ee7 100644 --- a/tests/http/test_12_reuse.py +++ b/tests/http/test_12_reuse.py @@ -40,12 +40,12 @@ class TestReuse: # check if HTTP/1.1 handles 'Connection: close' correctly @pytest.mark.parametrize("proto", ['http/1.1']) - def test_12_01_h1_conn_close(self, env: Env, httpd, nghttpx, proto): + def test_12_01_h1_conn_close(self, env: Env, httpd, configures_httpd, nghttpx, proto): httpd.clear_extra_configs() httpd.set_extra_config('base', [ 'MaxKeepAliveRequests 1', ]) - httpd.reload() + httpd.reload_if_config_changed() count = 100 curl = CurlClient(env=env) urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]' @@ -59,12 +59,12 @@ class TestReuse: @pytest.mark.skipif(condition=Env.httpd_is_at_least('2.5.0'), reason="httpd 2.5+ handles KeepAlives different") @pytest.mark.parametrize("proto", ['http/1.1']) - def test_12_02_h1_conn_timeout(self, env: Env, httpd, nghttpx, proto): + def test_12_02_h1_conn_timeout(self, env: Env, httpd, configures_httpd, nghttpx, proto): httpd.clear_extra_configs() 
httpd.set_extra_config('base', [ 'KeepAliveTimeout 1', ]) - httpd.reload() + httpd.reload_if_config_changed() count = 5 curl = CurlClient(env=env) urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]' @@ -76,10 +76,7 @@ class TestReuse: assert r.total_connects == count @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") - def test_12_03_as_follow_h2h3(self, env: Env, httpd, nghttpx): - # Without '--http*` an Alt-Svc redirection from h2 to h3 is allowed - httpd.clear_extra_configs() - httpd.reload() + def test_12_03_as_follow_h2h3(self, env: Env, httpd, configures_httpd, nghttpx): # write a alt-svc file that advises h3 instead of h2 asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt') self.create_asfile(asfile, f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.h3_port}') @@ -92,10 +89,7 @@ class TestReuse: assert r.stats[0]['http_version'] == '3', f'{r.stats}' @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") - def test_12_04_as_follow_h3h2(self, env: Env, httpd, nghttpx): - # With '--http3` an Alt-Svc redirection from h3 to h2 is allowed - httpd.clear_extra_configs() - httpd.reload() + def test_12_04_as_follow_h3h2(self, env: Env, httpd, configures_httpd, nghttpx): count = 2 # write a alt-svc file the advises h2 instead of h3 asfile = os.path.join(env.gen_dir, 'alt-svc-12_04.txt') @@ -116,10 +110,8 @@ class TestReuse: assert s['http_version'] == '2', f'{s}' @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") - def test_12_05_as_follow_h3h1(self, env: Env, httpd, nghttpx): + def test_12_05_as_follow_h3h1(self, env: Env, httpd, configures_httpd, nghttpx): # With '--http3` an Alt-Svc redirection from h3 to h1 is allowed - httpd.clear_extra_configs() - httpd.reload() count = 2 # write a alt-svc file the advises h1 instead of h3 asfile = os.path.join(env.gen_dir, 'alt-svc-12_05.txt') @@ -140,10 +132,8 @@ class TestReuse: assert s['http_version'] == '1.1', f'{s}' @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") - def test_12_06_as_ignore_h3h1(self, env: Env, httpd, nghttpx): + def test_12_06_as_ignore_h3h1(self, env: Env, httpd, configures_httpd, nghttpx): # With '--http3-only` an Alt-Svc redirection from h3 to h1 is ignored - httpd.clear_extra_configs() - httpd.reload() count = 2 # write a alt-svc file the advises h1 instead of h3 asfile = os.path.join(env.gen_dir, 'alt-svc-12_05.txt') @@ -164,10 +154,8 @@ class TestReuse: assert s['http_version'] == '3', f'{s}' @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported") - def test_12_07_as_ignore_h2h3(self, env: Env, httpd, nghttpx): + def test_12_07_as_ignore_h2h3(self, env: Env, httpd, configures_httpd, nghttpx): # With '--http2` an Alt-Svc redirection from h2 to h3 is ignored - httpd.clear_extra_configs() - httpd.reload() # write a alt-svc file that advises h3 instead of h2 asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt') self.create_asfile(asfile, f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.h3_port}') diff --git a/tests/http/test_13_proxy_auth.py b/tests/http/test_13_proxy_auth.py index 07926e376a..6a9a7e6a84 100644 --- a/tests/http/test_13_proxy_auth.py +++ b/tests/http/test_13_proxy_auth.py @@ -39,16 +39,9 @@ log = logging.getLogger(__name__) reason=f"missing: {Env.incomplete_reason()}") class TestProxyAuth: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx_fwd): - if env.have_nghttpx(): - nghttpx_fwd.start_if_needed() - 
httpd.clear_extra_configs() + def httpd_configure(self, env, httpd): httpd.set_proxy_auth(True) - httpd.reload() - yield - httpd.set_proxy_auth(False) - httpd.reload() + httpd.reload_if_config_changed() def get_tunnel_proto_used(self, r: ExecResult): for line in r.trace_lines: @@ -59,7 +52,8 @@ class TestProxyAuth: return None # download via http: proxy (no tunnel), no auth - def test_13_01_proxy_no_auth(self, env: Env, httpd): + def test_13_01_proxy_no_auth(self, env: Env, httpd, configures_httpd): + self.httpd_configure(env, httpd) curl = CurlClient(env=env) url = f'http://localhost:{env.http_port}/data.json' r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True, @@ -67,7 +61,8 @@ class TestProxyAuth: r.check_response(count=1, http_status=407) # download via http: proxy (no tunnel), auth - def test_13_02_proxy_auth(self, env: Env, httpd): + def test_13_02_proxy_auth(self, env: Env, httpd, configures_httpd): + self.httpd_configure(env, httpd) curl = CurlClient(env=env) url = f'http://localhost:{env.http_port}/data.json' xargs = curl.get_proxy_args(proxys=False) @@ -79,7 +74,8 @@ class TestProxyAuth: @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'), reason='curl lacks HTTPS-proxy support') @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available") - def test_13_03_proxys_no_auth(self, env: Env, httpd, nghttpx_fwd): + def test_13_03_proxys_no_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd): + self.httpd_configure(env, httpd) curl = CurlClient(env=env) url = f'http://localhost:{env.http_port}/data.json' xargs = curl.get_proxy_args(proxys=True) @@ -90,7 +86,8 @@ class TestProxyAuth: @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'), reason='curl lacks HTTPS-proxy support') @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available") - def test_13_04_proxys_auth(self, env: Env, httpd, nghttpx_fwd): + def test_13_04_proxys_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd): + self.httpd_configure(env, httpd) curl = CurlClient(env=env) url = f'http://localhost:{env.http_port}/data.json' xargs = curl.get_proxy_args(proxys=True) @@ -99,7 +96,8 @@ class TestProxyAuth: extra_args=xargs) r.check_response(count=1, http_status=200) - def test_13_05_tunnel_http_no_auth(self, env: Env, httpd): + def test_13_05_tunnel_http_no_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd): + self.httpd_configure(env, httpd) curl = CurlClient(env=env) url = f'http://localhost:{env.http_port}/data.json' xargs = curl.get_proxy_args(proxys=False, tunnel=True) @@ -108,7 +106,8 @@ class TestProxyAuth: # expect "COULD_NOT_CONNECT" r.check_response(exitcode=56, http_status=None) - def test_13_06_tunnel_http_auth(self, env: Env, httpd): + def test_13_06_tunnel_http_auth(self, env: Env, httpd, configures_httpd): + self.httpd_configure(env, httpd) curl = CurlClient(env=env) url = f'http://localhost:{env.http_port}/data.json' xargs = curl.get_proxy_args(proxys=False, tunnel=True) @@ -122,7 +121,8 @@ class TestProxyAuth: reason='curl lacks HTTPS-proxy support') @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2']) - def test_13_07_tunnels_no_auth(self, env: Env, httpd, proto, tunnel): + def test_13_07_tunnels_no_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd, proto, tunnel): + self.httpd_configure(env, httpd) if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'): pytest.skip('only supported with nghttp2') curl = 
CurlClient(env=env) @@ -140,7 +140,8 @@ class TestProxyAuth: reason='curl lacks HTTPS-proxy support') @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2']) - def test_13_08_tunnels_auth(self, env: Env, httpd, proto, tunnel): + def test_13_08_tunnels_auth(self, env: Env, httpd, configures_httpd, nghttpx_fwd, proto, tunnel): + self.httpd_configure(env, httpd) if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'): pytest.skip('only supported with nghttp2') curl = CurlClient(env=env) @@ -156,7 +157,8 @@ class TestProxyAuth: @pytest.mark.skipif(condition=not Env.curl_has_feature('SPNEGO'), reason='curl lacks SPNEGO support') - def test_13_09_negotiate_http(self, env: Env, httpd): + def test_13_09_negotiate_http(self, env: Env, httpd, configures_httpd): + self.httpd_configure(env, httpd) run_env = os.environ.copy() run_env['https_proxy'] = f'http://127.0.0.1:{env.proxy_port}' curl = CurlClient(env=env, run_env=run_env) diff --git a/tests/http/test_14_auth.py b/tests/http/test_14_auth.py index 13193b53b8..d52b42fce4 100644 --- a/tests/http/test_14_auth.py +++ b/tests/http/test_14_auth.py @@ -38,11 +38,7 @@ class TestAuth: @pytest.fixture(autouse=True, scope='class') def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024) - httpd.clear_extra_configs() - httpd.reload() # download 1 file, not authenticated @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) @@ -71,6 +67,8 @@ class TestAuth: def test_14_03_digest_put_auth(self, env: Env, httpd, nghttpx, proto): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") + if proto == 'h3' and env.curl_uses_ossl_quic(): + pytest.skip("openssl-quic is flaky in retrying POST") data='0123456789' curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json' diff --git a/tests/http/test_15_tracing.py b/tests/http/test_15_tracing.py index 1054d0a26a..428fbd4194 100644 --- a/tests/http/test_15_tracing.py +++ b/tests/http/test_15_tracing.py @@ -89,8 +89,7 @@ class TestTracing: m = re.match(r'^([0-9:.]+) \[0-[0x]] .+ \[TCP].+', line) if m is not None: found_tcp = True - if not found_tcp: - assert False, f'TCP filter does not appear in trace "all": {r.stderr}' + assert found_tcp, f'TCP filter does not appear in trace "all": {r.stderr}' # trace all, no TCP, no time def test_15_05_trace_all(self, env: Env, httpd): diff --git a/tests/http/test_16_info.py b/tests/http/test_16_info.py index f69d1e3de0..ac4f4cc866 100644 --- a/tests/http/test_16_info.py +++ b/tests/http/test_16_info.py @@ -36,19 +36,13 @@ log = logging.getLogger(__name__) class TestInfo: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - @pytest.fixture(autouse=True, scope='class') def _class_scope(self, env, httpd): indir = httpd.docs_dir env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024) env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024) env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024) + env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024) # download plain file @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) @@ -131,6 +125,10 @@ class TestInfo: assert key in s, f'stat #{idx} "{key}" missing: {s}' assert s[key] > 0, f'stat #{idx} "{key}" not 
positive: {s}' + def check_stat_positive_or_0(self, s, idx, key): + assert key in s, f'stat #{idx} "{key}" missing: {s}' + assert s[key] >= 0, f'stat #{idx} "{key}" not positive: {s}' + def check_stat_zero(self, s, key): assert key in s, f'stat "{key}" missing: {s}' assert s[key] == 0, f'stat "{key}" not zero: {s}' @@ -138,15 +136,16 @@ class TestInfo: def check_stat_times(self, s, idx): # check timings reported on a transfer for consistency url = s['url_effective'] + # connect time is sometimes reported as 0 by openssl-quic (sigh) + self.check_stat_positive_or_0(s, idx, 'time_connect') # all stat keys which reporting timings all_keys = { - 'time_appconnect', 'time_connect', 'time_redirect', + 'time_appconnect', 'time_redirect', 'time_pretransfer', 'time_starttransfer', 'time_total' } # stat keys where we expect a positive value pos_keys = {'time_pretransfer', 'time_starttransfer', 'time_total', 'time_queue'} if s['num_connects'] > 0: - pos_keys.add('time_connect') if url.startswith('https:'): pos_keys.add('time_appconnect') if s['num_redirects'] > 0: diff --git a/tests/http/test_17_ssl_use.py b/tests/http/test_17_ssl_use.py index f0b5377a1d..8aa9ce2994 100644 --- a/tests/http/test_17_ssl_use.py +++ b/tests/http/test_17_ssl_use.py @@ -41,16 +41,8 @@ class TestSSLUse: @pytest.fixture(autouse=True, scope='class') def _class_scope(self, env, httpd, nghttpx): env.make_data_file(indir=httpd.docs_dir, fname="data-10k", fsize=10*1024) - if env.have_h3(): - nghttpx.start_if_needed() - @pytest.fixture(autouse=True, scope='function') - def _function_scope(self, request, env, httpd): - httpd.clear_extra_configs() - if 'httpd' not in request.node._fixtureinfo.argnames: - httpd.reload_if_config_changed() - - def test_17_01_sslinfo_plain(self, env: Env, nghttpx): + def test_17_01_sslinfo_plain(self, env: Env, httpd): proto = 'http/1.1' curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo' @@ -61,7 +53,7 @@ class TestSSLUse: assert r.json['SSL_SESSION_RESUMED'] == 'Initial', f'{r.json}' @pytest.mark.parametrize("tls_max", ['1.2', '1.3']) - def test_17_02_sslinfo_reconnect(self, env: Env, tls_max): + def test_17_02_sslinfo_reconnect(self, env: Env, tls_max, httpd): proto = 'http/1.1' count = 3 exp_resumed = 'Resumed' @@ -82,7 +74,7 @@ class TestSSLUse: curl = CurlClient(env=env, run_env=run_env) # tell the server to close the connection after each request urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo?'\ - f'id=[0-{count-1}]&close' + f'id=[0-{count-1}]&close' r = curl.http_download(urls=[urln], alpn_proto=proto, with_stats=True, extra_args=xargs) r.check_response(count=count, http_status=200) @@ -102,7 +94,7 @@ class TestSSLUse: # use host name with trailing dot, verify handshake @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) - def test_17_03_trailing_dot(self, env: Env, proto): + def test_17_03_trailing_dot(self, env: Env, proto, httpd, nghttpx): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") curl = CurlClient(env=env) @@ -117,7 +109,7 @@ class TestSSLUse: # use host name with double trailing dot, verify handshake @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) - def test_17_04_double_dot(self, env: Env, proto): + def test_17_04_double_dot(self, env: Env, proto, httpd, nghttpx): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") curl = CurlClient(env=env) @@ -139,7 +131,7 @@ class TestSSLUse: # use ip address for connect @pytest.mark.parametrize("proto", 
['http/1.1', 'h2', 'h3']) - def test_17_05_ip_addr(self, env: Env, proto): + def test_17_05_ip_addr(self, env: Env, proto, httpd, nghttpx): if env.curl_uses_lib('bearssl'): pytest.skip("BearSSL does not support cert verification with IP addresses") if env.curl_uses_lib('mbedtls'): @@ -158,7 +150,7 @@ class TestSSLUse: # use localhost for connect @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) - def test_17_06_localhost(self, env: Env, proto): + def test_17_06_localhost(self, env: Env, proto, httpd, nghttpx): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") curl = CurlClient(env=env) @@ -198,15 +190,19 @@ class TestSSLUse: ret.append(pytest.param(tls_proto, ciphers13, ciphers12, succeed13, succeed12, id=id)) return ret - @pytest.mark.parametrize("tls_proto, ciphers13, ciphers12, succeed13, succeed12", gen_test_17_07_list()) - def test_17_07_ssl_ciphers(self, env: Env, httpd, tls_proto, ciphers13, ciphers12, succeed13, succeed12): + @pytest.mark.parametrize( + "tls_proto, ciphers13, ciphers12, succeed13, succeed12", + gen_test_17_07_list()) + def test_17_07_ssl_ciphers(self, env: Env, httpd, configures_httpd, + tls_proto, ciphers13, ciphers12, + succeed13, succeed12): # to test setting cipher suites, the AES 256 ciphers are disabled in the test server httpd.set_extra_config('base', [ 'SSLCipherSuite SSL' - ' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256' - ':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305', + ' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256' + ':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305', 'SSLCipherSuite TLSv1.3' - ' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256', + ' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256', f'SSLProtocol {tls_proto}' ]) httpd.reload_if_config_changed() @@ -251,7 +247,7 @@ class TestSSLUse: assert r.exit_code != 0, r.dump_logs() @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) - def test_17_08_cert_status(self, env: Env, proto): + def test_17_08_cert_status(self, env: Env, proto, httpd, nghttpx): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") if not env.curl_uses_lib('openssl') and \ @@ -275,7 +271,7 @@ class TestSSLUse: for min_ver in range(-2, 4)] @pytest.mark.parametrize("tls_proto, max_ver, min_ver", gen_test_17_09_list()) - def test_17_09_ssl_min_max(self, env: Env, httpd, tls_proto, max_ver, min_ver): + def test_17_09_ssl_min_max(self, env: Env, httpd, configures_httpd, tls_proto, max_ver, min_ver): httpd.set_extra_config('base', [ f'SSLProtocol {tls_proto}', 'SSLCipherSuite ALL:@SECLEVEL=0', @@ -347,7 +343,7 @@ class TestSSLUse: # use host name server has no certificate for @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) - def test_17_11_wrong_host(self, env: Env, proto): + def test_17_11_wrong_host(self, env: Env, proto, httpd, nghttpx): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") curl = CurlClient(env=env) @@ -358,7 +354,7 @@ class TestSSLUse: # use host name server has no cert for with --insecure @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3']) - def test_17_12_insecure(self, env: Env, proto): + def test_17_12_insecure(self, env: Env, proto, httpd, nghttpx): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") curl = CurlClient(env=env) @@ -372,7 +368,7 @@ class TestSSLUse: # connect to an expired certificate @pytest.mark.parametrize("proto", ['http/1.1', 'h2']) - def test_17_14_expired_cert(self, env: Env, proto): + def 
test_17_14_expired_cert(self, env: Env, proto, httpd): if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") curl = CurlClient(env=env) @@ -399,7 +395,7 @@ class TestSSLUse: def test_17_15_session_export(self, env: Env, httpd): proto = 'http/1.1' if env.curl_uses_lib('libressl'): - pytest.skip('Libressl resumption does not work inTLSv1.3') + pytest.skip('Libressl resumption does not work inTLSv1.3') if env.curl_uses_lib('rustls-ffi'): pytest.skip('rustsls does not expose sessions') if env.curl_uses_lib('bearssl'): @@ -430,7 +426,7 @@ class TestSSLUse: # verify the ciphers are ignored when talking TLSv1.3 only # see issue #16232 - def test_17_16_h3_ignore_ciphers12(self, env: Env): + def test_17_16_h3_ignore_ciphers12(self, env: Env, httpd, nghttpx): proto = 'h3' if proto == 'h3' and not env.have_h3(): pytest.skip("h3 not supported") @@ -443,7 +439,7 @@ class TestSSLUse: ]) assert r.exit_code == 0, f'{r}' - def test_17_17_h1_ignore_ciphers13(self, env: Env): + def test_17_17_h1_ignore_ciphers13(self, env: Env, httpd): proto = 'http/1.1' curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo' @@ -471,14 +467,14 @@ class TestSSLUse: pytest.param("-GROUP-ALL:+GROUP-X25519", "TLSv1.3", ['TLS_CHACHA20_POLY1305_SHA256'], True, id='TLSv1.3-group-only-X25519'), pytest.param("-GROUP-ALL:+GROUP-SECP192R1", "", [], False, id='group-only-SECP192R1'), ]) - def test_17_18_gnutls_priority(self, env: Env, httpd, priority, tls_proto, ciphers, success): + def test_17_18_gnutls_priority(self, env: Env, httpd, configures_httpd, priority, tls_proto, ciphers, success): # to test setting cipher suites, the AES 256 ciphers are disabled in the test server httpd.set_extra_config('base', [ 'SSLCipherSuite SSL' - ' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256' - ':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305', + ' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256' + ':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305', 'SSLCipherSuite TLSv1.3' - ' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256', + ' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256', ]) httpd.reload_if_config_changed() proto = 'http/1.1' diff --git a/tests/http/test_18_methods.py b/tests/http/test_18_methods.py index a30f469048..afca5867ec 100644 --- a/tests/http/test_18_methods.py +++ b/tests/http/test_18_methods.py @@ -37,10 +37,6 @@ class TestMethods: @pytest.fixture(autouse=True, scope='class') def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload_if_config_changed() indir = httpd.docs_dir env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024) env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024) @@ -66,6 +62,6 @@ class TestMethods: count = 1 curl = CurlClient(env=env) url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]'\ - '&chunks=1&chunk_size=0&chunk_delay=10ms' + '&chunks=1&chunk_size=0&chunk_delay=10ms' r = curl.http_delete(urls=[url], alpn_proto=proto) r.check_stats(count=count, http_status=204, exitcode=0) diff --git a/tests/http/test_19_shutdown.py b/tests/http/test_19_shutdown.py index fad1314fc4..fe6d594547 100644 --- a/tests/http/test_19_shutdown.py +++ b/tests/http/test_19_shutdown.py @@ -37,13 +37,6 @@ log = logging.getLogger(__name__) class TestShutdown: - @pytest.fixture(autouse=True, scope='class') - def _class_scope(self, env, httpd, nghttpx): - if env.have_h3(): - 
nghttpx.start_if_needed() - httpd.clear_extra_configs() - httpd.reload() - @pytest.fixture(autouse=True, scope='class') def _class_scope(self, env, httpd): indir = httpd.docs_dir @@ -62,13 +55,14 @@ class TestShutdown: if 'CURL_DEBUG' in run_env: del run_env['CURL_DEBUG'] curl = CurlClient(env=env, run_env=run_env) - url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]' + port = env.port_for(alpn_proto=proto) + url = f'https://{env.domain1}:{port}/data.json?[0-1]' r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[ '--parallel' ]) r.check_response(http_status=200, count=2) assert r.tcpdump - assert len(r.tcpdump.stats) != 0, f'Expected TCP RSTs packets: {r.tcpdump.stderr}' + assert len(r.tcpdump.get_rsts(ports=[port])) != 0, f'Expected TCP RSTs packets: {r.tcpdump.stderr}' # check with `tcpdump` that we do NOT see TCP RST when CURL_GRACEFUL_SHUTDOWN set @pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available") @@ -82,13 +76,14 @@ class TestShutdown: 'CURL_DEBUG': 'ssl,tcp,lib-ids,multi' }) curl = CurlClient(env=env, run_env=run_env) - url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]' + port = env.port_for(alpn_proto=proto) + url = f'https://{env.domain1}:{port}/data.json?[0-1]' r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[ '--parallel' ]) r.check_response(http_status=200, count=2) assert r.tcpdump - assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets' + assert len(r.tcpdump.get_rsts(ports=[port])) == 0, 'Unexpected TCP RST packets' # run downloads where the server closes the connection after each request @pytest.mark.parametrize("proto", ['http/1.1']) @@ -105,7 +100,7 @@ class TestShutdown: r = curl.http_download(urls=[url], alpn_proto=proto) r.check_response(http_status=200, count=count) shutdowns = [line for line in r.trace_lines - if re.match(r'.*\[SHUTDOWN\] shutdown, done=1', line)] + if re.match(r'.*\[SHUTDOWN] shutdown, done=1', line)] assert len(shutdowns) == count, f'{shutdowns}' # run downloads with CURLOPT_FORBID_REUSE set, meaning *we* close @@ -128,7 +123,7 @@ class TestShutdown: ]) r.check_exit_code(0) shutdowns = [line for line in r.trace_lines - if re.match(r'.*SHUTDOWN\] shutdown, done=1', line)] + if re.match(r'.*SHUTDOWN] shutdown, done=1', line)] assert len(shutdowns) == count, f'{shutdowns}' # run event-based downloads with CURLOPT_FORBID_REUSE set, meaning *we* close @@ -153,7 +148,7 @@ class TestShutdown: r.check_response(http_status=200, count=count) # check that we closed all connections closings = [line for line in r.trace_lines - if re.match(r'.*SHUTDOWN\] (force )?closing', line)] + if re.match(r'.*SHUTDOWN] (force )?closing', line)] assert len(closings) == count, f'{closings}' # check that all connection sockets were removed from event removes = [line for line in r.trace_lines @@ -178,7 +173,7 @@ class TestShutdown: r.check_response(http_status=200, count=2) # check connection cache closings shutdowns = [line for line in r.trace_lines - if re.match(r'.*SHUTDOWN\] shutdown, done=1', line)] + if re.match(r'.*SHUTDOWN] shutdown, done=1', line)] assert len(shutdowns) == 1, f'{shutdowns}' # run connection pressure, many small transfers, not reusing connections, @@ -197,7 +192,7 @@ class TestShutdown: if not client.exists(): pytest.skip(f'example client not built: {client.name}') r = client.run(args=[ - '-n', f'{count}', #that many transfers + '-n', f'{count}', # that many transfers '-f', # forbid conn reuse '-m', '10', # 
max parallel '-T', '5', # max total conns at a time @@ -206,6 +201,6 @@ class TestShutdown: ]) r.check_exit_code(0) shutdowns = [line for line in r.trace_lines - if re.match(r'.*SHUTDOWN\] shutdown, done=1', line)] + if re.match(r'.*SHUTDOWN] shutdown, done=1', line)] # we see less clean shutdowns as total limit forces early closes assert len(shutdowns) < count, f'{shutdowns}' diff --git a/tests/http/test_20_websockets.py b/tests/http/test_20_websockets.py index 38222697b1..18aa0341a8 100644 --- a/tests/http/test_20_websockets.py +++ b/tests/http/test_20_websockets.py @@ -27,12 +27,15 @@ import logging import os import shutil +import socket import subprocess import time from datetime import datetime, timedelta +from typing import Dict import pytest from testenv import Env, CurlClient, LocalClient +from testenv.ports import alloc_ports_and_do log = logging.getLogger(__name__) @@ -42,9 +45,13 @@ log = logging.getLogger(__name__) reason='curl lacks ws protocol support') class TestWebsockets: - def check_alive(self, env, timeout=5): + PORT_SPECS = { + 'ws': socket.SOCK_STREAM, + } + + def check_alive(self, env, port, timeout=5): curl = CurlClient(env=env) - url = f'http://localhost:{env.ws_port}/' + url = f'http://localhost:{port}/' end = datetime.now() + timedelta(seconds=timeout) while datetime.now() < end: r = curl.http_download(urls=[url]) @@ -63,20 +70,36 @@ class TestWebsockets: @pytest.fixture(autouse=True, scope='class') def ws_echo(self, env): - run_dir = os.path.join(env.gen_dir, 'ws-echo-server') - err_file = os.path.join(run_dir, 'stderr') - self._rmrf(run_dir) - self._mkpath(run_dir) - - with open(err_file, 'w') as cerr: - cmd = os.path.join(env.project_dir, - 'tests/http/testenv/ws_echo_server.py') - args = [cmd, '--port', str(env.ws_port)] - p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr, - stdout=cerr) - assert self.check_alive(env) + self.run_dir = os.path.join(env.gen_dir, 'ws-echo-server') + err_file = os.path.join(self.run_dir, 'stderr') + self._rmrf(self.run_dir) + self._mkpath(self.run_dir) + self.cmd = os.path.join(env.project_dir, + 'tests/http/testenv/ws_echo_server.py') + self.wsproc = None + self.cerr = None + + def startup(ports: Dict[str, int]) -> bool: + wargs = [self.cmd, '--port', str(ports['ws'])] + log.info(f'start_ {wargs}') + self.wsproc = subprocess.Popen(args=wargs, + cwd=self.run_dir, + stderr=self.cerr, + stdout=self.cerr) + if self.check_alive(env, ports['ws']): + env.update_ports(ports) + return True + log.error(f'not alive {wargs}') + self.wsproc.terminate() + self.wsproc = None + return False + + with open(err_file, 'w') as self.cerr: + assert alloc_ports_and_do(TestWebsockets.PORT_SPECS, startup, + env.gen_root, max_tries=3) + assert self.wsproc yield - p.terminate() + self.wsproc.terminate() def test_20_01_basic(self, env: Env, ws_echo): curl = CurlClient(env=env) @@ -147,7 +170,6 @@ class TestWebsockets: pytest.skip(f'example client not built: {client.name}') url = f'ws://localhost:{env.ws_port}/' count = 10 - large = 512 * 1024 large = 20000 r = client.run(args=['-c', str(count), '-m', str(large), url]) r.check_exit_code(0) diff --git a/tests/http/test_30_vsftpd.py b/tests/http/test_30_vsftpd.py index eab10c1754..79fe335066 100644 --- a/tests/http/test_30_vsftpd.py +++ b/tests/http/test_30_vsftpd.py @@ -43,7 +43,7 @@ class TestVsFTPD: @pytest.fixture(autouse=True, scope='class') def vsftpd(self, env): vsftpd = VsFTPD(env=env) - assert vsftpd.start() + assert vsftpd.initial_start() yield vsftpd vsftpd.stop() @@ -148,7 +148,11 @@ class 
TestVsFTPD: r = curl.ftp_get(urls=[url], with_stats=True, with_tcpdump=True) r.check_stats(count=count, http_status=226) assert r.tcpdump - assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets' + # vsftp closes control connection without niceties, + # look only at ports from DATA connection. + data_ports = vsftpd.get_data_ports(r) + assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}' + assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets' # check with `tcpdump` if curl causes any TCP RST packets @pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available") @@ -163,7 +167,11 @@ class TestVsFTPD: r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True) r.check_stats(count=count, http_status=226) assert r.tcpdump - assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets' + # vsftp closes control connection without niceties, + # look only at ports from DATA connection. + data_ports = vsftpd.get_data_ports(r) + assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}' + assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets' def test_30_08_active_download(self, env: Env, vsftpd: VsFTPD): docname = 'data-10k' diff --git a/tests/http/test_31_vsftpds.py b/tests/http/test_31_vsftpds.py index f7f88491eb..bb10f4f14d 100644 --- a/tests/http/test_31_vsftpds.py +++ b/tests/http/test_31_vsftpds.py @@ -47,7 +47,7 @@ class TestVsFTPD: if not TestVsFTPD.SUPPORTS_SSL: pytest.skip('vsftpd does not seem to support SSL') vsftpds = VsFTPD(env=env, with_ssl=True) - if not vsftpds.start(): + if not vsftpds.initial_start(): vsftpds.stop() TestVsFTPD.SUPPORTS_SSL = False pytest.skip('vsftpd does not seem to support SSL') @@ -155,8 +155,10 @@ class TestVsFTPD: r = curl.ftp_ssl_get(urls=[url], with_stats=True, with_tcpdump=True) r.check_stats(count=count, http_status=226) # vsftp closes control connection without niceties, - # disregard RST packets it sent from its port to curl - assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets' + # look only at ports from DATA connection. + data_ports = vsftpds.get_data_ports(r) + assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}' + assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets' # check with `tcpdump` if curl causes any TCP RST packets @pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available") @@ -171,8 +173,10 @@ class TestVsFTPD: r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True) r.check_stats(count=count, http_status=226) # vsftp closes control connection without niceties, - # disregard RST packets it sent from its port to curl - assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets' + # look only at ports from DATA connection. 
+ data_ports = vsftpds.get_data_ports(r) + assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}' + assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets' def test_31_08_upload_ascii(self, env: Env, vsftpds: VsFTPD): docname = 'upload-ascii' diff --git a/tests/http/test_32_ftps_vsftpd.py b/tests/http/test_32_ftps_vsftpd.py index 8270d54edc..d44a47d208 100644 --- a/tests/http/test_32_ftps_vsftpd.py +++ b/tests/http/test_32_ftps_vsftpd.py @@ -47,7 +47,7 @@ class TestFtpsVsFTPD: if not TestFtpsVsFTPD.SUPPORTS_SSL: pytest.skip('vsftpd does not seem to support SSL') vsftpds = VsFTPD(env=env, with_ssl=True, ssl_implicit=True) - if not vsftpds.start(): + if not vsftpds.initial_start(): vsftpds.stop() TestFtpsVsFTPD.SUPPORTS_SSL = False pytest.skip('vsftpd does not seem to support SSL') @@ -167,8 +167,10 @@ class TestFtpsVsFTPD: r = curl.ftp_get(urls=[url], with_stats=True, with_tcpdump=True) r.check_stats(count=count, http_status=226) # vsftp closes control connection without niceties, - # disregard RST packets it sent from its port to curl - assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets' + # look only at ports from DATA connection. + data_ports = vsftpds.get_data_ports(r) + assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}' + assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets' # check with `tcpdump` if curl causes any TCP RST packets @pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available") @@ -183,8 +185,10 @@ class TestFtpsVsFTPD: r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True) r.check_stats(count=count, http_status=226) # vsftp closes control connection without niceties, - # disregard RST packets it sent from its port to curl - assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets' + # look only at ports from DATA connection. 
+ data_ports = vsftpds.get_data_ports(r) + assert len(data_ports), f'unable to find FTP data port connected to\n{r.dump_logs()}' + assert len(r.tcpdump.get_rsts(ports=data_ports)) == 0, 'Unexpected TCP RST packets' def test_32_08_upload_ascii(self, env: Env, vsftpds: VsFTPD): docname = 'upload-ascii' diff --git a/tests/http/testenv/caddy.py b/tests/http/testenv/caddy.py index 748ef3d5fe..c6f38b3416 100644 --- a/tests/http/testenv/caddy.py +++ b/tests/http/testenv/caddy.py @@ -26,20 +26,27 @@ # import logging import os +import socket import subprocess import time from datetime import timedelta, datetime from json import JSONEncoder +from typing import Dict from .curl import CurlClient from .env import Env - +from .ports import alloc_ports_and_do log = logging.getLogger(__name__) class Caddy: + PORT_SPECS = { + 'caddy': socket.SOCK_STREAM, + 'caddys': socket.SOCK_STREAM, + } + def __init__(self, env: Env): self.env = env self._caddy = os.environ['CADDY'] if 'CADDY' in os.environ else env.caddy @@ -49,6 +56,8 @@ class Caddy: self._error_log = os.path.join(self._caddy_dir, 'caddy.log') self._tmp_dir = os.path.join(self._caddy_dir, 'tmp') self._process = None + self._http_port = 0 + self._https_port = 0 self._rmf(self._error_log) @property @@ -57,7 +66,7 @@ class Caddy: @property def port(self) -> int: - return self.env.caddy_https_port + return self._https_port def clear_logs(self): self._rmf(self._error_log) @@ -73,7 +82,24 @@ class Caddy: return self.start() return True + def initial_start(self): + + def startup(ports: Dict[str, int]) -> bool: + self._http_port = ports['caddy'] + self._https_port = ports['caddys'] + if self.start(): + self.env.update_ports(ports) + return True + self.stop() + self._http_port = 0 + self._https_port = 0 + return False + + return alloc_ports_and_do(Caddy.PORT_SPECS, startup, + self.env.gen_root, max_tries=3) + def start(self, wait_live=True): + assert self._http_port > 0 and self._https_port > 0 self._mkpath(self._tmp_dir) if self._process: self.stop() @@ -85,12 +111,7 @@ class Caddy: self._process = subprocess.Popen(args=args, cwd=self._caddy_dir, stderr=caddyerr) if self._process.returncode is not None: return False - return not wait_live or self.wait_live(timeout=timedelta(seconds=5)) - - def stop_if_running(self): - if self.is_running(): - return self.stop() - return True + return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) def stop(self, wait_dead=True): self._mkpath(self._tmp_dir) @@ -155,22 +176,25 @@ class Caddy: with open(self._conf_file, 'w') as fd: conf = [ # base server config '{', - f' http_port {self.env.caddy_http_port}', - f' https_port {self.env.caddy_https_port}', - f' servers :{self.env.caddy_https_port} {{', + f' http_port {self._http_port}', + f' https_port {self._https_port}', + f' servers :{self._https_port} {{', ' protocols h3 h2 h1', ' }', '}', - f'{domain1}:{self.env.caddy_https_port} {{', + f'{domain1}:{self._https_port} {{', ' file_server * {', f' root {self._docs_dir}', ' }', f' tls {creds1.cert_file} {creds1.pkey_file}', '}', - f'{domain2} {{', - f' reverse_proxy /* http://localhost:{self.env.http_port} {{', - ' }', - f' tls {creds2.cert_file} {creds2.pkey_file}', - '}', ] + if self.env.http_port > 0: + conf.extend([ + f'{domain2} {{', + f' reverse_proxy /* http://localhost:{self.env.http_port} {{', + ' }', + f' tls {creds2.cert_file} {creds2.pkey_file}', + '}', + ]) fd.write("\n".join(conf)) diff --git a/tests/http/testenv/curl.py b/tests/http/testenv/curl.py index cb253c077d..eb4d5d327f 100644 
--- a/tests/http/testenv/curl.py +++ b/tests/http/testenv/curl.py @@ -117,20 +117,22 @@ class RunTcpDump: self._stdoutfile = os.path.join(self._run_dir, 'tcpdump.out') self._stderrfile = os.path.join(self._run_dir, 'tcpdump.err') - @property - def stats(self) -> Optional[List[str]]: + def get_rsts(self, ports: List[int]|None = None) -> Optional[List[str]]: if self._proc: raise Exception('tcpdump still running') - return [line - for line in open(self._stdoutfile) - if re.match(r'.* IP 127\.0\.0\.1\.\d+ [<>] 127\.0\.0\.1\.\d+:.*', line)] + lines = [] + for line in open(self._stdoutfile): + m = re.match(r'.* IP 127\.0\.0\.1\.(\d+) [<>] 127\.0\.0\.1\.(\d+):.*', line) + if m: + sport = int(m.group(1)) + dport = int(m.group(2)) + if ports is None or sport in ports or dport in ports: + lines.append(line) + return lines - def stats_excluding(self, src_port) -> Optional[List[str]]: - if self._proc: - raise Exception('tcpdump still running') - return [line - for line in self.stats - if not re.match(r'.* IP 127\.0\.0\.1\.' + str(src_port) + ' >.*', line)] + @property + def stats(self) -> Optional[List[str]]: + return self.get_rsts() @property def stderr(self) -> List[str]: diff --git a/tests/http/testenv/env.py b/tests/http/testenv/env.py index a4ab7a9801..4a73ca6fbb 100644 --- a/tests/http/testenv/env.py +++ b/tests/http/testenv/env.py @@ -29,15 +29,16 @@ import logging import os import re import shutil -import socket import subprocess import tempfile from configparser import ConfigParser, ExtendedInterpolation from datetime import timedelta -from typing import Optional +from typing import Optional, Dict + +import pytest +from filelock import FileLock from .certs import CertificateSpec, Credentials, TestCA -from .ports import alloc_ports log = logging.getLogger(__name__) @@ -59,9 +60,16 @@ CURL = os.path.join(TOP_PATH, 'src', 'curl') class EnvConfig: - def __init__(self): + def __init__(self, pytestconfig: Optional[pytest.Config] = None, + testrun_uid=None, + worker_id=None): + self.pytestconfig = pytestconfig + self.testrun_uid = testrun_uid + self.worker_id = worker_id if worker_id is not None else 'master' self.tests_dir = TESTS_HTTPD_PATH - self.gen_dir = os.path.join(self.tests_dir, 'gen') + self.gen_root = self.gen_dir = os.path.join(self.tests_dir, 'gen') + if self.worker_id != 'master': + self.gen_dir = os.path.join(self.gen_dir, self.worker_id) self.project_dir = os.path.dirname(os.path.dirname(self.tests_dir)) self.build_dir = TOP_PATH self.config = DEF_CONFIG @@ -114,19 +122,8 @@ class EnvConfig: prot.lower() for prot in line[11:].split(' ') } - self.ports = alloc_ports(port_specs={ - 'ftp': socket.SOCK_STREAM, - 'ftps': socket.SOCK_STREAM, - 'http': socket.SOCK_STREAM, - 'https': socket.SOCK_STREAM, - 'nghttpx_https': socket.SOCK_STREAM, - 'proxy': socket.SOCK_STREAM, - 'proxys': socket.SOCK_STREAM, - 'h2proxys': socket.SOCK_STREAM, - 'caddy': socket.SOCK_STREAM, - 'caddys': socket.SOCK_STREAM, - 'ws': socket.SOCK_STREAM, - }) + self.ports = {} + self.httpd = self.config['httpd']['httpd'] self.apxs = self.config['httpd']['apxs'] if len(self.apxs) == 0: @@ -287,9 +284,16 @@ class EnvConfig: def tcpdmp(self) -> Optional[str]: return self._tcpdump + def clear_locks(self): + ca_lock = os.path.join(self.gen_root, 'ca/ca.lock') + if os.path.exists(ca_lock): + os.remove(ca_lock) + class Env: + SERVER_TIMEOUT = 30 # seconds to wait for server to come up/reload + CONFIG = EnvConfig() @staticmethod @@ -434,7 +438,9 @@ class Env: def tcpdump() -> Optional[str]: return Env.CONFIG.tcpdmp - def 
__init__(self, pytestconfig=None): + def __init__(self, pytestconfig=None, env_config=None): + if env_config: + Env.CONFIG = env_config self._verbose = pytestconfig.option.verbose \ if pytestconfig is not None else 0 self._ca = None @@ -442,11 +448,14 @@ class Env: def issue_certs(self): if self._ca is None: - ca_dir = os.path.join(self.CONFIG.gen_dir, 'ca') - self._ca = TestCA.create_root(name=self.CONFIG.tld, - store_dir=ca_dir, - key_type="rsa2048") - self._ca.issue_certs(self.CONFIG.cert_specs) + ca_dir = os.path.join(self.CONFIG.gen_root, 'ca') + os.makedirs(ca_dir, exist_ok=True) + lock_file = os.path.join(ca_dir, 'ca.lock') + with FileLock(lock_file): + self._ca = TestCA.create_root(name=self.CONFIG.tld, + store_dir=ca_dir, + key_type="rsa2048") + self._ca.issue_certs(self.CONFIG.cert_specs) def setup(self): os.makedirs(self.gen_dir, exist_ok=True) @@ -475,6 +484,10 @@ class Env: def gen_dir(self) -> str: return self.CONFIG.gen_dir + @property + def gen_root(self) -> str: + return self.CONFIG.gen_root + @property def project_dir(self) -> str: return self.CONFIG.project_dir @@ -519,14 +532,25 @@ class Env: def expired_domain(self) -> str: return self.CONFIG.expired_domain + @property + def ports(self) -> Dict[str, int]: + return self.CONFIG.ports + + def update_ports(self, ports: Dict[str, int]): + self.CONFIG.ports.update(ports) + @property def http_port(self) -> int: - return self.CONFIG.ports['http'] + return self.CONFIG.ports.get('http', 0) @property def https_port(self) -> int: return self.CONFIG.ports['https'] + @property + def https_only_tcp_port(self) -> int: + return self.CONFIG.ports['https-tcp-only'] + @property def nghttpx_https_port(self) -> int: return self.CONFIG.ports['nghttpx_https'] diff --git a/tests/http/testenv/httpd.py b/tests/http/testenv/httpd.py index 28c7f6959a..34d0dd7b3c 100644 --- a/tests/http/testenv/httpd.py +++ b/tests/http/testenv/httpd.py @@ -27,16 +27,18 @@ import inspect import logging import os +import shutil +import socket import subprocess from datetime import timedelta, datetime from json import JSONEncoder import time -from typing import List, Union, Optional +from typing import List, Union, Optional, Dict import copy from .curl import CurlClient, ExecResult from .env import Env - +from .ports import alloc_ports_and_do log = logging.getLogger(__name__) @@ -61,6 +63,14 @@ class Httpd: MOD_CURLTEST = None + PORT_SPECS = { + 'http': socket.SOCK_STREAM, + 'https': socket.SOCK_STREAM, + 'https-tcp-only': socket.SOCK_STREAM, + 'proxy': socket.SOCK_STREAM, + 'proxys': socket.SOCK_STREAM, + } + def __init__(self, env: Env, proxy_auth: bool = False): self.env = env self._apache_dir = os.path.join(env.gen_dir, 'apache') @@ -79,6 +89,7 @@ class Httpd: self._proxy_auth_basic = proxy_auth self._extra_configs = {} self._loaded_extra_configs = None + self._loaded_proxy_auth = None assert env.apxs p = subprocess.run(args=[env.apxs, '-q', 'libexecdir'], capture_output=True, text=True) @@ -89,7 +100,8 @@ class Httpd: raise Exception('apache modules dir cannot be found') if not os.path.exists(self._mods_dir): raise Exception(f'apache modules dir does not exist: {self._mods_dir}') - self._process = None + self._maybe_running = False + self.ports = {} self._rmf(self._error_log) self._init_curltest() @@ -138,8 +150,25 @@ class Httpd: "-k", cmd] return self._run(args=args) + def initial_start(self): + + def startup(ports: Dict[str, int]) -> bool: + self.ports.update(ports) + if self.start(): + self.env.update_ports(ports) + return True + self.stop() + 
self.ports.clear() + return False + + return alloc_ports_and_do(Httpd.PORT_SPECS, startup, + self.env.gen_root, max_tries=3) + def start(self): - if self._process: + # assure ports are allocated + for key, _ in Httpd.PORT_SPECS.items(): + assert self.ports[key] is not None + if self._maybe_running: self.stop() self._write_config() with open(self._error_log, 'a') as fd: @@ -147,35 +176,41 @@ class Httpd: with open(os.path.join(self._apache_dir, 'xxx'), 'a') as fd: fd.write('start of server\n') r = self._cmd_httpd('start') - if r.exit_code != 0: + if r.exit_code != 0 or len(r.stderr): log.error(f'failed to start httpd: {r}') + self.stop() return False self._loaded_extra_configs = copy.deepcopy(self._extra_configs) - return self.wait_live(timeout=timedelta(seconds=5)) + self._loaded_proxy_auth = self._proxy_auth_basic + return self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) def stop(self): r = self._cmd_httpd('stop') self._loaded_extra_configs = None + self._loaded_proxy_auth = None if r.exit_code == 0: - return self.wait_dead(timeout=timedelta(seconds=5)) + return self.wait_dead(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) log.fatal(f'stopping httpd failed: {r}') return r.exit_code == 0 - def restart(self): - self.stop() - return self.start() - def reload(self): self._write_config() r = self._cmd_httpd("graceful") + if r.exit_code != 0: + log.error(f'failed to reload httpd: {r}') + return False self._loaded_extra_configs = None + self._loaded_proxy_auth = None if r.exit_code != 0: log.error(f'failed to reload httpd: {r}') self._loaded_extra_configs = copy.deepcopy(self._extra_configs) - return self.wait_live(timeout=timedelta(seconds=5)) + self._loaded_proxy_auth = self._proxy_auth_basic + return self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) def reload_if_config_changed(self): - if self._loaded_extra_configs == self._extra_configs: + if self._maybe_running and \ + self._loaded_extra_configs == self._extra_configs and \ + self._loaded_proxy_auth == self._proxy_auth_basic: return True return self.reload() @@ -183,8 +218,9 @@ class Httpd: curl = CurlClient(env=self.env, run_dir=self._tmp_dir) try_until = datetime.now() + timeout while datetime.now() < try_until: - r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/') + r = curl.http_get(url=f'http://{self.env.domain1}:{self.ports["http"]}/') if r.exit_code != 0: + self._maybe_running = False return True time.sleep(.1) log.debug(f"Server still responding after {timeout}") @@ -195,11 +231,12 @@ class Httpd: timeout=timeout.total_seconds()) try_until = datetime.now() + timeout while datetime.now() < try_until: - r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/') + r = curl.http_get(url=f'http://{self.env.domain1}:{self.ports["http"]}/') if r.exit_code == 0: + self._maybe_running = True return True time.sleep(.1) - log.debug(f"Server still not responding after {timeout}") + log.error(f"Server still not responding after {timeout}") return False def _rmf(self, path): @@ -225,6 +262,7 @@ class Httpd: proxy_creds = self.env.get_credentials(proxy_domain) assert proxy_creds # convince pytype this isn't None self._mkpath(self._conf_dir) + self._mkpath(self._docs_dir) self._mkpath(self._logs_dir) self._mkpath(self._tmp_dir) self._mkpath(os.path.join(self._docs_dir, 'two')) @@ -257,25 +295,24 @@ class Httpd: f'ServerRoot "{self._apache_dir}"', 'DefaultRuntimeDir logs', 'PidFile httpd.pid', + f'ServerName {self.env.tld}', f'ErrorLog {self._error_log}', f'LogLevel 
{self._get_log_level()}', 'StartServers 4', 'ReadBufferSize 16000', 'H2MinWorkers 16', 'H2MaxWorkers 256', - f'Listen {self.env.http_port}', - f'Listen {self.env.https_port}', - f'Listen {self.env.proxy_port}', - f'Listen {self.env.proxys_port}', f'TypesConfig "{self._conf_dir}/mime.types', 'SSLSessionCache "shmcb:ssl_gcache_data(32000)"', 'AddEncoding x-gzip .gz .tgz .gzip', 'AddHandler type-map .var', ] + conf.extend([f'Listen {port}' for _, port in self.ports.items()]) + if 'base' in self._extra_configs: conf.extend(self._extra_configs['base']) conf.extend([ # plain http host for domain1 - f'', + f'', f' ServerName {domain1}', ' ServerAlias localhost', f' DocumentRoot "{self._docs_dir}"', @@ -288,7 +325,24 @@ class Httpd: '', ]) conf.extend([ # https host for domain1, h1 + h2 - f'', + f'', + f' ServerName {domain1}', + ' ServerAlias localhost', + ' Protocols h2 http/1.1', + ' SSLEngine on', + f' SSLCertificateFile {creds1.cert_file}', + f' SSLCertificateKeyFile {creds1.pkey_file}', + f' DocumentRoot "{self._docs_dir}"', + ]) + conf.extend(self._curltest_conf(domain1)) + if domain1 in self._extra_configs: + conf.extend(self._extra_configs[domain1]) + conf.extend([ + '', + '', + ]) + conf.extend([ # https host for domain1, h1 + h2, tcp only + f'', f' ServerName {domain1}', ' ServerAlias localhost', ' Protocols h2 http/1.1', @@ -306,7 +360,7 @@ class Httpd: ]) # Alternate to domain1 with BROTLI compression conf.extend([ # https host for domain1, h1 + h2 - f'', + f'', f' ServerName {domain1brotli}', ' Protocols h2 http/1.1', ' SSLEngine on', @@ -323,7 +377,7 @@ class Httpd: '', ]) conf.extend([ # plain http host for domain2 - f'', + f'', f' ServerName {domain2}', ' ServerAlias localhost', f' DocumentRoot "{self._docs_dir}"', @@ -334,8 +388,25 @@ class Httpd: '', '', ]) + self._mkpath(os.path.join(self._docs_dir, 'two')) conf.extend([ # https host for domain2, no h2 - f'', + f'', + f' ServerName {domain2}', + ' Protocols http/1.1', + ' SSLEngine on', + f' SSLCertificateFile {creds2.cert_file}', + f' SSLCertificateKeyFile {creds2.pkey_file}', + f' DocumentRoot "{self._docs_dir}/two"', + ]) + conf.extend(self._curltest_conf(domain2)) + if domain2 in self._extra_configs: + conf.extend(self._extra_configs[domain2]) + conf.extend([ + '', + '', + ]) + conf.extend([ # https host for domain2, no h2, tcp only + f'', f' ServerName {domain2}', ' Protocols http/1.1', ' SSLEngine on', @@ -350,8 +421,9 @@ class Httpd: '', '', ]) + self._mkpath(os.path.join(self._docs_dir, 'expired')) conf.extend([ # https host for expired domain - f'', + f'', f' ServerName {exp_domain}', ' Protocols h2 http/1.1', ' SSLEngine on', @@ -367,13 +439,13 @@ class Httpd: '', ]) conf.extend([ # http forward proxy - f'', + f'', f' ServerName {proxy_domain}', ' Protocols h2c http/1.1', ' ProxyRequests On', ' H2ProxyRequests On', ' ProxyVia On', - f' AllowCONNECT {self.env.http_port} {self.env.https_port}', + f' AllowCONNECT {self.ports["http"]} {self.ports["https"]}', ]) conf.extend(self._get_proxy_conf()) conf.extend([ @@ -381,7 +453,7 @@ class Httpd: '', ]) conf.extend([ # https forward proxy - f'', + f'', f' ServerName {proxy_domain}', ' Protocols h2 http/1.1', ' SSLEngine on', @@ -390,7 +462,7 @@ class Httpd: ' ProxyRequests On', ' H2ProxyRequests On', ' ProxyVia On', - f' AllowCONNECT {self.env.http_port} {self.env.https_port}', + f' AllowCONNECT {self.ports["http"]} {self.ports["https"]}', ]) conf.extend(self._get_proxy_conf()) conf.extend([ @@ -486,12 +558,17 @@ class Httpd: if Httpd.MOD_CURLTEST is not None: return 
local_dir = os.path.dirname(inspect.getfile(Httpd)) - p = subprocess.run([self.env.apxs, '-c', 'mod_curltest.c'], - capture_output=True, - cwd=os.path.join(local_dir, 'mod_curltest')) + out_dir = os.path.join(self.env.gen_dir, 'mod_curltest') + out_source = os.path.join(out_dir, 'mod_curltest.c') + if not os.path.exists(out_dir): + os.mkdir(out_dir) + if not os.path.exists(out_source): + shutil.copy(os.path.join(local_dir, 'mod_curltest/mod_curltest.c'), out_source) + p = subprocess.run([ + self.env.apxs, '-c', out_source + ], capture_output=True, cwd=out_dir) rv = p.returncode if rv != 0: log.error(f"compiling mod_curltest failed: {p.stderr}") raise Exception(f"compiling mod_curltest failed: {p.stderr}") - Httpd.MOD_CURLTEST = os.path.join( - local_dir, 'mod_curltest/.libs/mod_curltest.so') + Httpd.MOD_CURLTEST = os.path.join(out_dir, '.libs/mod_curltest.so') diff --git a/tests/http/testenv/nghttpx.py b/tests/http/testenv/nghttpx.py index 03200beba9..650ced0f3b 100644 --- a/tests/http/testenv/nghttpx.py +++ b/tests/http/testenv/nghttpx.py @@ -27,25 +27,26 @@ import logging import os import signal +import socket import subprocess import time -from typing import Optional +from typing import Optional, Dict from datetime import datetime, timedelta from .env import Env from .curl import CurlClient - +from .ports import alloc_ports_and_do log = logging.getLogger(__name__) class Nghttpx: - def __init__(self, env: Env, port: int, https_port: int, name: str): + def __init__(self, env: Env, name: str): self.env = env self._name = name - self._port = port - self._https_port = https_port + self._port = 0 + self._https_port = 0 self._cmd = env.nghttpx self._run_dir = os.path.join(env.gen_dir, name) self._pid_file = os.path.join(self._run_dir, 'nghttpx.pid') @@ -81,13 +82,11 @@ class Nghttpx: return self.start() return True - def start(self, wait_live=True): + def initial_start(self): pass - def stop_if_running(self): - if self.is_running(): - return self.stop() - return True + def start(self, wait_live=True): + pass def stop(self, wait_dead=True): self._mkpath(self._tmp_dir) @@ -125,7 +124,7 @@ class Nghttpx: os.kill(running.pid, signal.SIGKILL) running.terminate() running.wait(1) - return self.wait_live(timeout=timedelta(seconds=5)) + return self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) return False def wait_dead(self, timeout: timedelta): @@ -169,7 +168,6 @@ class Nghttpx: ]) if r.exit_code == 0: return True - log.debug(f'waiting for nghttpx to become responsive: {r}') time.sleep(.1) log.error(f"Server still not responding after {timeout}") return False @@ -192,9 +190,27 @@ class Nghttpx: class NghttpxQuic(Nghttpx): + PORT_SPECS = { + 'nghttpx_https': socket.SOCK_STREAM, + } + def __init__(self, env: Env): - super().__init__(env=env, name='nghttpx-quic', port=env.h3_port, - https_port=env.nghttpx_https_port) + super().__init__(env=env, name='nghttpx-quic') + self._https_port = env.https_port + + def initial_start(self): + + def startup(ports: Dict[str, int]) -> bool: + self._port = ports['nghttpx_https'] + if self.start(): + self.env.update_ports(ports) + return True + self.stop() + self._port = 0 + return False + + return alloc_ports_and_do(NghttpxQuic.PORT_SPECS, startup, + self.env.gen_root, max_tries=3) def start(self, wait_live=True): self._mkpath(self._tmp_dir) @@ -206,7 +222,7 @@ class NghttpxQuic(Nghttpx): self._cmd, f'--frontend=*,{self.env.h3_port};quic', '--frontend-quic-early-data', - f'--frontend=*,{self.env.nghttpx_https_port};tls', + f'--frontend=*,{self._port};tls', 
f'--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls', f'--backend=127.0.0.1,{self.env.http_port}', '--log-level=INFO', @@ -226,16 +242,34 @@ class NghttpxQuic(Nghttpx): self._process = subprocess.Popen(args=args, stderr=ngerr) if self._process.returncode is not None: return False - return not wait_live or self.wait_live(timeout=timedelta(seconds=5)) + return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) class NghttpxFwd(Nghttpx): + PORT_SPECS = { + 'h2proxys': socket.SOCK_STREAM, + } + def __init__(self, env: Env): - super().__init__(env=env, name='nghttpx-fwd', port=env.h2proxys_port, - https_port=0) + super().__init__(env=env, name='nghttpx-fwd') + + def initial_start(self): + + def startup(ports: Dict[str, int]) -> bool: + self._port = ports['h2proxys'] + if self.start(): + self.env.update_ports(ports) + return True + self.stop() + self._port = 0 + return False + + return alloc_ports_and_do(NghttpxFwd.PORT_SPECS, startup, + self.env.gen_root, max_tries=3) def start(self, wait_live=True): + assert self._port > 0 self._mkpath(self._tmp_dir) if self._process: self.stop() @@ -244,7 +278,7 @@ class NghttpxFwd(Nghttpx): args = [ self._cmd, '--http2-proxy', - f'--frontend=*,{self.env.h2proxys_port}', + f'--frontend=*,{self._port}', f'--backend=127.0.0.1,{self.env.proxy_port}', '--log-level=INFO', f'--pid-file={self._pid_file}', @@ -258,13 +292,13 @@ class NghttpxFwd(Nghttpx): self._process = subprocess.Popen(args=args, stderr=ngerr) if self._process.returncode is not None: return False - return not wait_live or self.wait_live(timeout=timedelta(seconds=5)) + return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) def wait_dead(self, timeout: timedelta): curl = CurlClient(env=self.env, run_dir=self._tmp_dir) try_until = datetime.now() + timeout while datetime.now() < try_until: - check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/' + check_url = f'https://{self.env.proxy_domain}:{self._port}/' r = curl.http_get(url=check_url) if r.exit_code != 0: return True @@ -277,13 +311,12 @@ class NghttpxFwd(Nghttpx): curl = CurlClient(env=self.env, run_dir=self._tmp_dir) try_until = datetime.now() + timeout while datetime.now() < try_until: - check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/' + check_url = f'https://{self.env.proxy_domain}:{self._port}/' r = curl.http_get(url=check_url, extra_args=[ '--trace', 'curl.trace', '--trace-time' ]) if r.exit_code == 0: return True - log.debug(f'waiting for nghttpx-fwd to become responsive: {r}') time.sleep(.1) log.error(f"Server still not responding after {timeout}") return False diff --git a/tests/http/testenv/ports.py b/tests/http/testenv/ports.py index d6f9a3c218..97159eb0cc 100644 --- a/tests/http/testenv/ports.py +++ b/tests/http/testenv/ports.py @@ -25,15 +25,19 @@ ########################################################################### # import logging +import os import socket +from collections.abc import Callable from typing import Dict +from filelock import FileLock + log = logging.getLogger(__name__) -def alloc_ports(port_specs: Dict[str, int]) -> Dict[str, int]: - ports = {} +def alloc_port_set(port_specs: Dict[str, int]) -> Dict[str, int]: socks = [] + ports = {} for name, ptype in port_specs.items(): try: s = socket.socket(type=ptype) @@ -45,3 +49,15 @@ def alloc_ports(port_specs: Dict[str, int]) -> Dict[str, int]: for s in socks: s.close() return ports + + +def alloc_ports_and_do(port_spec: 
Dict[str, int], + do_func: Callable[[Dict[str, int]], bool], + gen_dir, max_tries=1) -> bool: + lock_file = os.path.join(gen_dir, 'ports.lock') + with FileLock(lock_file): + for _ in range(max_tries): + port_set = alloc_port_set(port_spec) + if do_func(port_set): + return True + return False diff --git a/tests/http/testenv/vsftpd.py b/tests/http/testenv/vsftpd.py index 1829962d28..38aacd99a0 100644 --- a/tests/http/testenv/vsftpd.py +++ b/tests/http/testenv/vsftpd.py @@ -26,14 +26,17 @@ # import logging import os +import re +import socket import subprocess import time from datetime import datetime, timedelta +from typing import List, Dict -from .curl import CurlClient +from .curl import CurlClient, ExecResult from .env import Env - +from .ports import alloc_ports_and_do log = logging.getLogger(__name__) @@ -43,16 +46,23 @@ class VsFTPD: def __init__(self, env: Env, with_ssl=False, ssl_implicit=False): self.env = env self._cmd = env.vsftpd + self._port = 0 self._with_ssl = with_ssl self._ssl_implicit = ssl_implicit and with_ssl self._scheme = 'ftps' if self._ssl_implicit else 'ftp' if self._with_ssl: - self._port = self.env.ftps_port - name = 'vsftpds' + self.name = 'vsftpds' + self._port_skey = 'ftps' + self._port_specs = { + 'ftps': socket.SOCK_STREAM, + } else: - self._port = self.env.ftp_port - name = 'vsftpd' - self._vsftpd_dir = os.path.join(env.gen_dir, name) + self.name = 'vsftpd' + self._port_skey = 'ftp' + self._port_specs = { + 'ftp': socket.SOCK_STREAM, + } + self._vsftpd_dir = os.path.join(env.gen_dir, self.name) self._run_dir = os.path.join(self._vsftpd_dir, 'run') self._docs_dir = os.path.join(self._vsftpd_dir, 'docs') self._tmp_dir = os.path.join(self._vsftpd_dir, 'tmp') @@ -92,11 +102,6 @@ class VsFTPD: return self.start() return True - def stop_if_running(self): - if self.is_running(): - return self.stop() - return True - def stop(self, wait_dead=True): self._mkpath(self._tmp_dir) if self._process: @@ -110,7 +115,22 @@ class VsFTPD: self.stop() return self.start() + def initial_start(self): + + def startup(ports: Dict[str, int]) -> bool: + self._port = ports[self._port_skey] + if self.start(): + self.env.update_ports(ports) + return True + self.stop() + self._port = 0 + return False + + return alloc_ports_and_do(self._port_specs, startup, + self.env.gen_root, max_tries=3) + def start(self, wait_live=True): + assert self._port > 0 self._mkpath(self._tmp_dir) if self._process: self.stop() @@ -123,7 +143,7 @@ class VsFTPD: self._process = subprocess.Popen(args=args, stderr=procerr) if self._process.returncode is not None: return False - return not wait_live or self.wait_live(timeout=timedelta(seconds=5)) + return not wait_live or self.wait_live(timeout=timedelta(seconds=Env.SERVER_TIMEOUT)) def wait_dead(self, timeout: timedelta): curl = CurlClient(env=self.env, run_dir=self._tmp_dir) @@ -148,7 +168,6 @@ class VsFTPD: ]) if r.exit_code == 0: return True - log.debug(f'waiting for vsftpd to become responsive: {r}') time.sleep(.1) log.error(f"Server still not responding after {timeout}") return False @@ -199,3 +218,7 @@ class VsFTPD: ]) with open(self._conf_file, 'w') as fd: fd.write("\n".join(conf)) + + def get_data_ports(self, r: ExecResult) -> List[int]: + return [int(m.group(1)) for line in r.trace_lines if + (m := re.match(r'.*Connected 2nd connection to .* port (\d+)', line))]
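Several tests above (test_13_08, test_13_09, test_17_07, test_17_09, test_17_18) now request a `configures_httpd` fixture instead of relying on a class-scoped clear/reload. The fixture itself lives in conftest.py and is not part of the hunks shown here; a minimal sketch of what a function-scoped variant could look like, assuming its only job is to restore a clean httpd configuration after a test that changed it:

    import pytest

    @pytest.fixture(scope='function')
    def configures_httpd(env, httpd):
        # the test itself calls httpd.set_extra_config()/reload_if_config_changed()
        yield
        # restore the default configuration so the next test starts clean
        httpd.clear_extra_configs()
        httpd.reload_if_config_changed()

The proxy-auth tests additionally call self.httpd_configure(env, httpd), a class helper that is likewise outside these hunks.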
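check_stat_times and the new check_stat_positive_or_0 in test_16_info.py assert on per-transfer statistics dictionaries with keys such as num_connects, time_connect and time_appconnect. Those are the same fields curl can emit itself through its --write-out machinery, which is a convenient way to look at the data these helpers check outside the suite (sketch; the URL is a placeholder):

    import json
    import subprocess

    # '%{json}' makes curl print one JSON object with all write-out variables
    out = subprocess.run(
        ['curl', '-s', '-o', '/dev/null', '-w', '%{json}', 'https://example.org/'],
        capture_output=True, text=True).stdout
    s = json.loads(out)
    print(s['num_connects'], s['time_connect'], s['time_appconnect'], s['time_total'])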
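The shutdown tests in test_19_shutdown.py now derive the port from env.port_for(alpn_proto=proto) so that the tcpdump RST check is limited to the connection the test actually opened. port_for is not visible in this patch; a plausible sketch, assuming it merely maps the negotiated protocol to the matching allocated port:

    def port_for(self, alpn_proto: str) -> int:
        # sketch only: h3 is served by the QUIC frontend, everything else by httpd's TLS port
        if alpn_proto == 'h3':
            return self.h3_port
        return self.https_port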
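The ws_echo fixture in test_20_websockets.py is the first caller of the new alloc_ports_and_do() helper from testenv/ports.py: it allocates a set of free ports under a file lock, hands them to a startup callback, and retries with a fresh set (up to max_tries) whenever the callback reports failure. A minimal standalone use, with a hypothetical bring-up function:

    import socket
    import tempfile

    from testenv.ports import alloc_ports_and_do

    def bring_up(ports):                      # hypothetical callback
        print(f"would start an echo server on port {ports['echo']}")
        return True                           # return False to retry with new ports

    # real callers pass env.gen_root so all workers share gen_root/ports.lock
    ok = alloc_ports_and_do({'echo': socket.SOCK_STREAM}, bring_up,
                            tempfile.mkdtemp(), max_tries=3)
    assert ok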
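alloc_port_set() (the renamed alloc_ports) is the piece that actually picks the port numbers. The usual technique, which the helper appears to rely on, is binding a throwaway socket to port 0 and reading back the number the kernel assigned; that socket is closed again before the server starts, which is exactly why the startup callback can fail and needs the retry loop:

    import socket

    def free_tcp_port() -> int:
        s = socket.socket(type=socket.SOCK_STREAM)
        s.bind(('localhost', 0))      # port 0: let the kernel choose a free port
        port = s.getsockname()[1]
        s.close()                     # released again - someone else may grab it
        return port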
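EnvConfig now receives pytest-xdist's testrun_uid and worker_id and gives every worker a private generation directory below the shared gen_root, so concurrent workers do not overwrite each other's server directories while still sharing the certificate store and the port lock. Roughly (worker ids 'gw0', 'gw1', ... are pytest-xdist defaults, 'master' when xdist is not active):

    # gen_root: tests/http/gen        shared: ca/, ports.lock
    # gen_dir:  tests/http/gen/gw0    per worker: apache/, nghttpx-quic/, vsftpd/, ...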
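Env.issue_certs() now creates the root CA in gen_root/ca while holding a FileLock, so only one worker generates the CA and leaf certificates and the others reuse the same store; EnvConfig.clear_locks() can drop a stale ca.lock. The same idiom from the filelock package works for any one-time setup shared between xdist workers; a generic sketch with hypothetical names:

    import os

    from filelock import FileLock

    def shared_setup_once(shared_dir, build):
        os.makedirs(shared_dir, exist_ok=True)
        marker = os.path.join(shared_dir, 'ready')
        with FileLock(os.path.join(shared_dir, 'setup.lock')):
            if not os.path.exists(marker):    # first worker in does the work
                build(shared_dir)
                open(marker, 'w').close()     # later workers see the marker and skip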
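The vsftpd tests no longer filter RST packets by the control connection's source port; they ask the server object for the ports of the FTP DATA connections and pass those to RunTcpDump.get_rsts(ports=...). VsFTPD.get_data_ports() obtains them by scanning curl's verbose trace for the second-connection line, using the regex spelled out at the end of this patch; the same idea in isolation:

    import re

    def data_ports_from_trace(trace_lines):
        # curl logs 'Connected 2nd connection to <ip> port <n>' for the FTP data connection
        ports = []
        for line in trace_lines:
            m = re.match(r'.*Connected 2nd connection to .* port (\d+)', line)
            if m:
                ports.append(int(m.group(1)))
        return ports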
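httpd's generated configuration gains extra vhosts for domain1 and domain2 on a separate 'https-tcp-only' port: TLS-over-TCP endpoints that the nghttpx QUIC frontend does not mirror, exposed to tests as env.https_only_tcp_port. A test that must stay on TCP even when HTTP/3 is available could target it roughly like this (sketch; the test name and document are placeholders):

    from testenv import Env, CurlClient

    def test_xx_tcp_only(env: Env, httpd):
        curl = CurlClient(env=env)
        url = f'https://{env.domain1}:{env.https_only_tcp_port}/data.json'
        r = curl.http_download(urls=[url], alpn_proto='h2')
        r.check_response(count=1, http_status=200)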