git.ipfire.org Git - thirdparty/apache/httpd.git/commitdiff
tests, sync tests from trunk with skips for features/fixes awaiting backport
author     Stefan Eissing <icing@apache.org>
           Thu, 1 Jun 2023 14:01:51 +0000 (14:01 +0000)
committer  Stefan Eissing <icing@apache.org>
           Thu, 1 Jun 2023 14:01:51 +0000 (14:01 +0000)
git-svn-id: https://svn.apache.org/repos/asf/httpd/httpd/branches/2.4.x@1910167 13f79535-47bb-0310-9956-ffa450edef68

17 files changed:
test/modules/http2/env.py
test/modules/http2/htdocs/cgi/ssi/include.inc [new file with mode: 0644]
test/modules/http2/htdocs/cgi/ssi/test.html [new file with mode: 0644]
test/modules/http2/mod_h2test/mod_h2test.c
test/modules/http2/test_004_post.py
test/modules/http2/test_007_ssi.py [new file with mode: 0644]
test/modules/http2/test_008_ranges.py [new file with mode: 0644]
test/modules/http2/test_009_timing.py [new file with mode: 0644]
test/modules/http2/test_105_timeout.py
test/modules/http2/test_107_frame_lengths.py [new file with mode: 0644]
test/modules/http2/test_200_header_invalid.py
test/modules/http2/test_401_early_hints.py
test/modules/http2/test_500_proxy.py
test/modules/http2/test_601_h2proxy_twisted.py [new file with mode: 0644]
test/pyhttpd/conf/httpd.conf.template
test/pyhttpd/curl.py
test/pyhttpd/env.py

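The "skips for features/fixes awaiting backport" mentioned in the commit message are plain pytest version guards: tests that need a change still living on trunk skip themselves on 2.4.x. A minimal sketch of the pattern the synced tests use, assuming the pyhttpd test framework in this tree (the env fixture with its httpd_is_at_least() and apache_restart() helpers):

    import pytest

    def test_needs_trunk_feature(env):
        # 2.4.x lacks the fix until the trunk revision is backported
        if not env.httpd_is_at_least('2.5.0'):
            pytest.skip('awaiting backport from trunk')
        assert env.apache_restart() == 0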
diff --git a/test/modules/http2/env.py b/test/modules/http2/env.py
index e00aef439d5fbeb7bac84e30b894cc2324c5fd2c..34d196d6bd63a6aa99a730070a087efcab9f024a 100644 (file)
@@ -17,7 +17,7 @@ class H2TestSetup(HttpdTestSetup):
     def __init__(self, env: 'HttpdTestEnv'):
         super().__init__(env=env)
         self.add_source_dir(os.path.dirname(inspect.getfile(H2TestSetup)))
-        self.add_modules(["http2", "proxy_http2", "cgid", "autoindex", "ssl"])
+        self.add_modules(["http2", "proxy_http2", "cgid", "autoindex", "ssl", "include"])
 
     def make(self):
         super().make()
diff --git a/test/modules/http2/htdocs/cgi/ssi/include.inc b/test/modules/http2/htdocs/cgi/ssi/include.inc
new file mode 100644 (file)
index 0000000..8bd8689
--- /dev/null
@@ -0,0 +1 @@
+Hello include<br>
diff --git a/test/modules/http2/htdocs/cgi/ssi/test.html b/test/modules/http2/htdocs/cgi/ssi/test.html
new file mode 100644 (file)
index 0000000..1782358
--- /dev/null
@@ -0,0 +1,9 @@
+<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+    test<br>
+    <!--#include virtual="./include.inc"-->
+    hello<br>
+</body>
+</html>
diff --git a/test/modules/http2/mod_h2test/mod_h2test.c b/test/modules/http2/mod_h2test/mod_h2test.c
index b5ee8ad6e4e2eba8062454012a4427e4cdf68018..f20b9547e7045c87ac587fb2a2452143806b2ac2 100644 (file)
@@ -138,7 +138,12 @@ static int h2test_echo_handler(request_rec *r)
     char buffer[8192];
     const char *ct;
     long l;
-    
+    int i;
+    apr_time_t chunk_delay = 0;
+    apr_array_header_t *args = NULL;
+    apr_size_t blen, fail_after = 0;
+    int fail_requested = 0, error_bucket = 1;
+
     if (strcmp(r->handler, "h2test-echo")) {
         return DECLINED;
     }
@@ -146,6 +151,40 @@ static int h2test_echo_handler(request_rec *r)
         return DECLINED;
     }
 
+    if(r->args) {
+        args = apr_cstr_split(r->args, "&", 1, r->pool);
+        for(i = 0; i < args->nelts; ++i) {
+            char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*);
+            s = strchr(arg, '=');
+            if(s) {
+                *s = '\0';
+                val = s + 1;
+                if(!strcmp("id", arg)) {
+                    /* accepted, but not processed */
+                    continue;
+                }
+                else if(!strcmp("chunk_delay", arg)) {
+                    rv = duration_parse(&chunk_delay, val, "s");
+                    if(APR_SUCCESS == rv) {
+                        continue;
+                    }
+                }
+                else if(!strcmp("fail_after", arg)) {
+                    fail_after = (int)apr_atoi64(val);
+                    if(fail_after >= 0) {
+                      fail_requested = 1;
+                      continue;
+                    }
+                }
+            }
+            ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
+                          "understood: '%s' in %s",
+                          arg, r->args);
+            ap_die(HTTP_BAD_REQUEST, r);
+            return OK;
+        }
+    }
+
     ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: processing request");
     r->status = 200;
     r->clength = -1;
@@ -166,12 +205,26 @@ static int h2test_echo_handler(request_rec *r)
         while (0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) {
             ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
                           "echo_handler: copying %ld bytes from request body", l);
-            rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+            blen = (apr_size_t)l;
+            if (fail_requested) {
+              if (blen > fail_after) {
+                blen = fail_after;
+              }
+              fail_after -= blen;
+            }
+            rv = apr_brigade_write(bb, NULL, NULL, buffer, blen);
             if (APR_SUCCESS != rv) goto cleanup;
+            if (chunk_delay) {
+                apr_sleep(chunk_delay);
+            }
             rv = ap_pass_brigade(r->output_filters, bb);
             if (APR_SUCCESS != rv) goto cleanup;
             ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
                           "echo_handler: passed %ld bytes from request body", l);
+            if (fail_requested && fail_after == 0) {
+              rv = APR_EINVAL;
+              goto cleanup;
+            }
         }
     }
     /* we are done */
@@ -195,6 +248,12 @@ cleanup:
         ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler: request handled");
         return OK;
     }
+    else if (error_bucket) {
+        int status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
+        b = ap_bucket_error_create(status, NULL, r->pool, c->bucket_alloc);
+        APR_BRIGADE_INSERT_TAIL(bb, b);
+        ap_pass_brigade(r->output_filters, bb);
+    }
     else {
         /* no way to know what type of error occurred */
         ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "h2test_echo_handler failed");
@@ -419,18 +478,20 @@ static int h2test_error_handler(request_rec *r)
                     }
                 }
                 else if (!strcmp("delay", arg)) {
-                    rv = duration_parse(&delay, r->args, "s");
+                    rv = duration_parse(&delay, val, "s");
                     if (APR_SUCCESS == rv) {
                         continue;
                     }
                 }
                 else if (!strcmp("body_delay", arg)) {
-                    rv = duration_parse(&body_delay, r->args, "s");
+                    rv = duration_parse(&body_delay, val, "s");
                     if (APR_SUCCESS == rv) {
                         continue;
                     }
                 }
             }
+            ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: "
+                  "did not understand '%s'", arg);
             ap_die(HTTP_BAD_REQUEST, r);
             return OK;
         }
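The echo handler in mod_h2test gains two query parameters that the new h2proxy tests below exercise: chunk_delay=<duration> sleeps after each request-body chunk it copies, and fail_after=<bytes> aborts the response with an error bucket once that many body bytes have been echoed. A sketch of how a test can drive this, following the env helpers used elsewhere in this suite (the file name here is illustrative):

    url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?chunk_delay=10ms&fail_after=512")
    r = env.curl_upload(url, os.path.join(env.gen_dir, "data-10k"), options=[])
    # the upstream aborts mid-body: curl reports stream error 92 or the proxy answers 502
    assert r.exit_code == 92 or r.response["status"] == 502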
diff --git a/test/modules/http2/test_004_post.py b/test/modules/http2/test_004_post.py
index 9a5560c83de14dfed96cca81e8958917243943c5..cefdef586216026595b5b06d53d41522872666e7 100644 (file)
@@ -18,7 +18,8 @@ class TestPost:
     @pytest.fixture(autouse=True, scope='class')
     def _class_scope(self, env):
         TestPost._local_dir = os.path.dirname(inspect.getfile(TestPost))
-        H2Conf(env).add_vhost_cgi().install()
+        conf = H2Conf(env)
+        conf.add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
         assert env.apache_restart() == 0
 
     def local_src(self, fname):
@@ -59,10 +60,11 @@ class TestPost:
         self.curl_upload_and_verify(env, "data-1k", ["-v", "--http1.1", "-H", "Expect: 100-continue"])
         self.curl_upload_and_verify(env, "data-1k", ["-v", "--http2", "-H", "Expect: 100-continue"])
 
-    @pytest.mark.skipif(True, reason="python3 regresses in chunked inputs to cgi")
     def test_h2_004_06(self, env):
-        self.curl_upload_and_verify(env, "data-1k", ["--http1.1", "-H", "Content-Length: "])
-        self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length: "])
+        self.curl_upload_and_verify(env, "data-1k", [
+            "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+        ])
+        self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
 
     @pytest.mark.parametrize("name, value", [
         ("HTTP2", "on"),
@@ -152,46 +154,6 @@ class TestPost:
     def test_h2_004_25(self, env, name, repeat):
         self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
 
-    def test_h2_004_30(self, env):
-        # issue: #203
-        resource = "data-1k"
-        full_length = 1000
-        chunk = 200
-        self.curl_upload_and_verify(env, resource, ["-v", "--http2"])
-        logfile = os.path.join(env.server_logs_dir, "test_004_30")
-        if os.path.isfile(logfile):
-            os.remove(logfile)
-        H2Conf(env).add("""
-LogFormat "{ \\"request\\": \\"%r\\", \\"status\\": %>s, \\"bytes_resp_B\\": %B, \\"bytes_tx_O\\": %O, \\"bytes_rx_I\\": %I, \\"bytes_rx_tx_S\\": %S }" issue_203
-CustomLog logs/test_004_30 issue_203
-        """).add_vhost_cgi().install()
-        assert env.apache_restart() == 0
-        url = env.mkurl("https", "cgi", "/files/{0}".format(resource))
-        r = env.curl_get(url, 5, options=["--http2"])
-        assert r.response["status"] == 200
-        r = env.curl_get(url, 5, options=["--http1.1", "-H", "Range: bytes=0-{0}".format(chunk-1)])
-        assert 206 == r.response["status"]
-        assert chunk == len(r.response["body"].decode('utf-8'))
-        r = env.curl_get(url, 5, options=["--http2", "-H", "Range: bytes=0-{0}".format(chunk-1)])
-        assert 206 == r.response["status"]
-        assert chunk == len(r.response["body"].decode('utf-8'))
-        # Wait for log completeness
-        time.sleep(1)
-        # now check what response lengths have actually been reported
-        lines = open(logfile).readlines()
-        log_h2_full = json.loads(lines[-3])
-        log_h1 = json.loads(lines[-2])
-        log_h2 = json.loads(lines[-1])
-        assert log_h2_full['bytes_rx_I'] > 0
-        assert log_h2_full['bytes_resp_B'] == full_length
-        assert log_h2_full['bytes_tx_O'] > full_length
-        assert log_h1['bytes_rx_I'] > 0         # input bytes received
-        assert log_h1['bytes_resp_B'] == chunk  # response bytes sent (payload)
-        assert log_h1['bytes_tx_O'] > chunk     # output bytes sent
-        assert log_h2['bytes_rx_I'] > 0
-        assert log_h2['bytes_resp_B'] == chunk
-        assert log_h2['bytes_tx_O'] > chunk
-        
     def test_h2_004_40(self, env):
         # echo content using h2test_module "echo" handler
         def post_and_verify(fname, options=None):
@@ -218,3 +180,27 @@ CustomLog logs/test_004_30 issue_203
             assert src == filepart.get_payload(decode=True)
         
         post_and_verify("data-1k", [])
+
+    def test_h2_004_41(self, env):
+        # reproduce PR66597, double chunked encoding on redirects
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1909932+r1909982 from trunk')
+        conf = H2Conf(env, extras={
+            f'cgi.{env.http_tld}': [
+                f'<Directory {env.server_docs_dir}/cgi/xxx>',
+                '  RewriteEngine On',
+                '  RewriteRule .* /proxy/echo.py [QSA]',
+                '</Directory>',
+            ]
+        })
+        conf.add_vhost_cgi(proxy_self=True).install()
+        assert env.apache_restart() == 0
+        url = env.mkurl("https", "cgi", "/xxx/test.json")
+        r = env.curl_post_data(url, data="0123456789", options=[])
+        assert r.exit_code == 0
+        assert 200 <= r.response["status"] < 300
+        assert r.response['body'] == b'0123456789'
+        r = env.curl_post_data(url, data="0123456789", options=["-H", "Content-Length:"])
+        assert r.exit_code == 0
+        assert 200 <= r.response["status"] < 300
+        assert r.response['body'] == b'0123456789'
diff --git a/test/modules/http2/test_007_ssi.py b/test/modules/http2/test_007_ssi.py
new file mode 100644 (file)
index 0000000..97e38df
--- /dev/null
@@ -0,0 +1,43 @@
+import re
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestSSI:
+
+    @pytest.fixture(autouse=True, scope='class')
+    def _class_scope(self, env):
+        conf = H2Conf(env, extras={
+            f'cgi.{env.http_tld}': [
+                'AddOutputFilter INCLUDES .html',
+                '<Location "/ssi">',
+                '  Options +Includes',
+                '</Location>',
+            ],
+        })
+        conf.add_vhost_cgi(
+            proxy_self=True, h2proxy_self=True
+        ).add_vhost_test1(
+            proxy_self=True, h2proxy_self=True
+        ).install()
+        assert env.apache_restart() == 0
+
+    # SSI test from https://bz.apache.org/bugzilla/show_bug.cgi?id=66483
+    def test_h2_007_01(self, env):
+        url = env.mkurl("https", "cgi", "/ssi/test.html")
+        r = env.curl_get(url, 5)
+        assert r.response["status"] == 200
+        assert r.stdout == '''<!doctype html>
+<html>
+<head><meta charset="UTF-8"></head>
+<body>
+    test<br>
+    Hello include<br>
+
+    hello<br>
+</body>
+</html>
+''' , f'{r}'
+
diff --git a/test/modules/http2/test_008_ranges.py b/test/modules/http2/test_008_ranges.py
new file mode 100644 (file)
index 0000000..c0b0fa1
--- /dev/null
@@ -0,0 +1,146 @@
+import inspect
+import json
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestRanges:
+
+    LOGFILE = ""
+
+    @pytest.fixture(autouse=True, scope='class')
+    def _class_scope(self, env):
+        TestRanges.LOGFILE = os.path.join(env.server_logs_dir, "test_008")
+        TestRanges.SRCDIR = os.path.dirname(inspect.getfile(TestRanges))
+        if os.path.isfile(TestRanges.LOGFILE):
+            os.remove(TestRanges.LOGFILE)
+        destdir = os.path.join(env.gen_dir, 'apache/htdocs/test1')
+        env.make_data_file(indir=destdir, fname="data-100m", fsize=100*1024*1024)
+        conf = H2Conf(env=env)
+        conf.add([
+            "CustomLog logs/test_008 combined"
+        ])
+        conf.add_vhost_cgi()
+        conf.add_vhost_test1()
+        conf.install()
+        assert env.apache_restart() == 0
+
+    def test_h2_008_01(self, env):
+        # issue: #203
+        resource = "data-1k"
+        full_length = 1000
+        chunk = 200
+        self.curl_upload_and_verify(env, resource, ["-v", "--http2"])
+        assert env.apache_restart() == 0
+        url = env.mkurl("https", "cgi", f"/files/{resource}?01full")
+        r = env.curl_get(url, 5, options=["--http2"])
+        assert r.response["status"] == 200
+        url = env.mkurl("https", "cgi", f"/files/{resource}?01range")
+        r = env.curl_get(url, 5, options=["--http1.1", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+        assert 206 == r.response["status"]
+        assert chunk == len(r.response["body"].decode('utf-8'))
+        r = env.curl_get(url, 5, options=["--http2", "-H", "Range: bytes=0-{0}".format(chunk-1)])
+        assert 206 == r.response["status"]
+        assert chunk == len(r.response["body"].decode('utf-8'))
+        # Restart for logs to be flushed out
+        assert env.apache_restart() == 0
+        # now check what response lengths have actually been reported
+        detected = {}
+        for line in open(TestRanges.LOGFILE).readlines():
+            e = json.loads(line)
+            if e['request'] == f'GET /files/{resource}?01full HTTP/2.0':
+                assert e['bytes_rx_I'] > 0
+                assert e['bytes_resp_B'] == full_length
+                assert e['bytes_tx_O'] > full_length
+                detected['h2full'] = 1
+            elif e['request'] == f'GET /files/{resource}?01range HTTP/2.0':
+                assert e['bytes_rx_I'] > 0
+                assert e['bytes_resp_B'] == chunk
+                assert e['bytes_tx_O'] > chunk
+                assert e['bytes_tx_O'] < chunk + 256 # response + frame stuff
+                detected['h2range'] = 1
+            elif e['request'] == f'GET /files/{resource}?01range HTTP/1.1':
+                assert e['bytes_rx_I'] > 0         # input bytes received
+                assert e['bytes_resp_B'] == chunk  # response bytes sent (payload)
+                assert e['bytes_tx_O'] > chunk     # output bytes sent
+                detected['h1range'] = 1
+        assert 'h1range' in detected, f'HTTP/1.1 range request not found in {TestRanges.LOGFILE}'
+        assert 'h2range' in detected, f'HTTP/2 range request not found in {TestRanges.LOGFILE}'
+        assert 'h2full' in detected, f'HTTP/2 full request not found in {TestRanges.LOGFILE}'
+
+    def test_h2_008_02(self, env, repeat):
+        path = '/002.jpg'
+        res_len = 90364
+        url = env.mkurl("https", "test1", f'{path}?02full')
+        r = env.curl_get(url, 5)
+        assert r.response["status"] == 200
+        assert "HTTP/2" == r.response["protocol"]
+        h = r.response["header"]
+        assert "accept-ranges" in h
+        assert "bytes" == h["accept-ranges"]
+        assert "content-length" in h
+        clen = h["content-length"]
+        assert int(clen) == res_len
+        # get the first 1024 bytes of the resource, 206 status, but content-length as original
+        url = env.mkurl("https", "test1", f'{path}?02range')
+        r = env.curl_get(url, 5, options=["-H", "range: bytes=0-1023"])
+        assert 206 == r.response["status"]
+        assert "HTTP/2" == r.response["protocol"]
+        assert 1024 == len(r.response["body"])
+        assert "content-length" in h
+        assert clen == h["content-length"]
+        # Restart for logs to be flushed out
+        assert env.apache_restart() == 0
+        # now check what response lengths have actually been reported
+        found = False
+        for line in open(TestRanges.LOGFILE).readlines():
+            e = json.loads(line)
+            if e['request'] == f'GET {path}?02range HTTP/2.0':
+                assert e['bytes_rx_I'] > 0
+                assert e['bytes_resp_B'] == 1024
+                assert e['bytes_tx_O'] > 1024
+                assert e['bytes_tx_O'] < 1024 + 256  # response  and frame stuff
+                found = True
+                break
+        assert found, f'request not found in {self.LOGFILE}'
+
+    # send a paced curl download that aborts in the middle of the transfer
+    def test_h2_008_03(self, env, repeat):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1909769 from trunk')
+        path = '/data-100m'
+        url = env.mkurl("https", "test1", f'{path}?03broken')
+        r = env.curl_get(url, 5, options=[
+            '--limit-rate', '2k', '-m', '2'
+        ])
+        assert r.exit_code != 0, f'{r}'
+        found = False
+        for line in open(TestRanges.LOGFILE).readlines():
+            e = json.loads(line)
+            if e['request'] == f'GET {path}?03broken HTTP/2.0':
+                assert e['bytes_rx_I'] > 0
+                assert e['bytes_resp_B'] == 100*1024*1024
+                assert e['bytes_tx_O'] > 1024
+                assert e['bytes_tx_O'] < 10*1024*1024  # curl buffers, but not that much
+                found = True
+                break
+        assert found, f'request not found in {self.LOGFILE}'
+
+    # upload and GET again using curl, compare to original content
+    def curl_upload_and_verify(self, env, fname, options=None):
+        url = env.mkurl("https", "cgi", "/upload.py")
+        fpath = os.path.join(env.gen_dir, fname)
+        r = env.curl_upload(url, fpath, options=options)
+        assert r.exit_code == 0, f"{r}"
+        assert 200 <= r.response["status"] < 300
+
+        r2 = env.curl_get(r.response["header"]["location"])
+        assert r2.exit_code == 0
+        assert r2.response["status"] == 200
+        with open(os.path.join(TestRanges.SRCDIR, fpath), mode='rb') as file:
+            src = file.read()
+        assert src == r2.response["body"]
+
diff --git a/test/modules/http2/test_009_timing.py b/test/modules/http2/test_009_timing.py
new file mode 100644 (file)
index 0000000..eca28bc
--- /dev/null
@@ -0,0 +1,76 @@
+import inspect
+import json
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestTiming:
+
+    LOGFILE = ""
+
+    @pytest.fixture(autouse=True, scope='class')
+    def _class_scope(self, env):
+        TestTiming.LOGFILE = os.path.join(env.server_logs_dir, "test_009")
+        if os.path.isfile(TestTiming.LOGFILE):
+            os.remove(TestTiming.LOGFILE)
+        conf = H2Conf(env=env)
+        conf.add([
+            "CustomLog logs/test_009 combined"
+        ])
+        conf.add_vhost_cgi()
+        conf.add_vhost_test1()
+        conf.install()
+        assert env.apache_restart() == 0
+
+    # check that we get a positive time_taken reported on a simple GET
+    def test_h2_009_01(self, env):
+        path = '/002.jpg'
+        url = env.mkurl("https", "test1", f'{path}?01')
+        args = [
+            env.h2load, "-n", "1", "-c", "1", "-m", "1",
+            f"--connect-to=localhost:{env.https_port}",
+            f"--base-uri={url}", url
+        ]
+        r = env.run(args)
+        # Restart for logs to be flushed out
+        assert env.apache_restart() == 0
+        found = False
+        for line in open(TestTiming.LOGFILE).readlines():
+            e = json.loads(line)
+            if e['request'] == f'GET {path}?01 HTTP/2.0':
+                assert e['time_taken'] > 0
+                found = True
+        assert found, f'request not found in {TestTiming.LOGFILE}'
+
+    # test issue #253, where time_taken in a keepalive situation is not
+    # reported until the next request arrives
+    def test_h2_009_02(self, env):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1909769 from trunk')
+        baseurl = env.mkurl("https", "test1", '/')
+        tscript = os.path.join(env.gen_dir, 'h2load-timing-009_02')
+        with open(tscript, 'w') as fd:
+            fd.write('\n'.join([
+                f'0.0\t/002.jpg?02a',        # 1st request right away
+                f'1000.0\t/002.jpg?02b',     # 2nd a second later
+            ]))
+        args = [
+            env.h2load,
+            f'--timing-script-file={tscript}',
+            f"--connect-to=localhost:{env.https_port}",
+            f"--base-uri={baseurl}"
+        ]
+        r = env.run(args)
+        # Restart for logs to be flushed out
+        assert env.apache_restart() == 0
+        found = False
+        for line in open(TestTiming.LOGFILE).readlines():
+            e = json.loads(line)
+            if e['request'] == f'GET /002.jpg?02a HTTP/2.0':
+                assert e['time_taken'] > 0
+                assert e['time_taken'] < 500 * 1000, f'time for 1st request not reported correctly'
+                found = True
+        assert found, f'request not found in {TestTiming.LOGFILE}'
diff --git a/test/modules/http2/test_105_timeout.py b/test/modules/http2/test_105_timeout.py
index 13aa8ed07afd765eae8fc783bdd6805ba6c8518c..f7d3859cafb412fd436569741a9715f7c5e79bf6 100644 (file)
@@ -128,22 +128,25 @@ class TestTimeout:
     def test_h2_105_12(self, env):
         # long connection timeout, short stream timeout
         # sending a slow POST
-        if env.httpd_is_at_least("2.5.0"):
-            conf = H2Conf(env)
-            conf.add_vhost_cgi()
-            conf.add("Timeout 10")
-            conf.add("H2StreamTimeout 1")
-            conf.install()
-            assert env.apache_restart() == 0
-            url = env.mkurl("https", "cgi", "/h2test/delay?5")
-            piper = CurlPiper(env=env, url=url)
-            piper.start()
-            for _ in range(3):
-                time.sleep(2)
-                try:
-                    piper.send("0123456789\n")
-                except BrokenPipeError:
-                    break
-            piper.close()
-            assert piper.response
-            assert piper.response['status'] == 408, f"{piper.response}"
+        if not env.curl_is_at_least('8.0.0'):
+            pytest.skip(f'need at least curl v8.0.0 for this')
+        if not env.httpd_is_at_least("2.5.0"):
+            pytest.skip(f'need at least httpd 2.5.0 for this')
+        conf = H2Conf(env)
+        conf.add_vhost_cgi()
+        conf.add("Timeout 10")
+        conf.add("H2StreamTimeout 1")
+        conf.install()
+        assert env.apache_restart() == 0
+        url = env.mkurl("https", "cgi", "/h2test/delay?5")
+        piper = CurlPiper(env=env, url=url)
+        piper.start()
+        for _ in range(3):
+            time.sleep(2)
+            try:
+                piper.send("0123456789\n")
+            except BrokenPipeError:
+                break
+        piper.close()
+        assert piper.response, f'{piper}'
+        assert piper.response['status'] == 408, f"{piper.response}"
diff --git a/test/modules/http2/test_107_frame_lengths.py b/test/modules/http2/test_107_frame_lengths.py
new file mode 100644 (file)
index 0000000..f53f8eb
--- /dev/null
@@ -0,0 +1,53 @@
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+def mk_text_file(fpath: str, lines: int):
+    t110 = ""
+    for _ in range(11):
+        t110 += "0123456789"
+    with open(fpath, "w") as fd:
+        for i in range(lines):
+            fd.write("{0:015d}: ".format(i))  # total 128 bytes per line
+            fd.write(t110)
+            fd.write("\n")
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestFrameLengths:
+
+    URI_PATHS = []
+
+    @pytest.fixture(autouse=True, scope='class')
+    def _class_scope(self, env):
+        docs_a = os.path.join(env.server_docs_dir, "cgi/files")
+        for fsize in [10, 100]:
+            fname = f'0-{fsize}k.txt'
+            mk_text_file(os.path.join(docs_a, fname), 8 * fsize)
+            self.URI_PATHS.append(f"/files/{fname}")
+
+    @pytest.mark.parametrize("data_frame_len", [
+        99, 1024, 8192
+    ])
+    def test_h2_107_01(self, env, data_frame_len):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1907696+r1907697 from trunk')
+        conf = H2Conf(env, extras={
+            f'cgi.{env.http_tld}': [
+                f'H2MaxDataFrameLen {data_frame_len}',
+            ]
+        })
+        conf.add_vhost_cgi()
+        conf.install()
+        assert env.apache_restart() == 0
+        for p in self.URI_PATHS:
+            url = env.mkurl("https", "cgi", p)
+            r = env.nghttp().get(url, options=[
+                '--header=Accept-Encoding: none',
+            ])
+            assert r.response["status"] == 200
+            assert len(r.results["data_lengths"]) > 0, f'{r}'
+            too_large = [ x for x in r.results["data_lengths"] if x > data_frame_len]
+            assert len(too_large) == 0, f'{p}: {r.results["data_lengths"]}'
diff --git a/test/modules/http2/test_200_header_invalid.py b/test/modules/http2/test_200_header_invalid.py
index 2e94c509a4b5510d2270a69d1052ba7eead712b2..fe9448784059f359b2c68041b1c0fd9b2c853681 100644 (file)
@@ -12,24 +12,27 @@ class TestInvalidHeaders:
         assert env.apache_restart() == 0
 
     # let the hecho.py CGI echo chars < 0x20 in field name
-    # for almost all such characters, the stream gets aborted with a h2 error and 
-    # there will be no http status, cr and lf are handled special
+    # for almost all such characters, the stream returns a 500
+    # or in httpd >= 2.5.0 gets aborted with a h2 error
+    # cr is handled special
     def test_h2_200_01(self, env):
         url = env.mkurl("https", "cgi", "/hecho.py")
         for x in range(1, 32):
-            r = env.curl_post_data(url, "name=x%%%02xx&value=yz" % x)
-            if x in [10]:
-                assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
-                assert 500 == r.response["status"], "unexpected status for char 0x%02x" % x
-            elif x in [13]:
-                assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
-                assert 200 == r.response["status"], "unexpected status for char 0x%02x" % x
+            data = f'name=x%{x:02x}x&value=yz'
+            r = env.curl_post_data(url, data)
+            if x in [13]:
+                assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+                assert 200 == r.response["status"], f'unexpected status for char 0x{x:02}'
+            elif x in [10] or env.httpd_is_at_least('2.5.0'):
+                assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+                assert 500 == r.response["status"], f'unexpected status for char 0x{x:02}'
             else:
-                assert 0 != r.exit_code, "unexpected exit code for char 0x%02x" % x
+                assert 0 != r.exit_code, f'unexpected exit code for char 0x{x:02}'
 
     # let the hecho.py CGI echo chars < 0x20 in field value
-    # for almost all such characters, the stream gets aborted with a h2 error and 
-    # there will be no http status, cr and lf are handled special
+    # for almost all such characters, the stream returns a 500
+    # or in httpd >= 2.5.0 gets aborted with a h2 error
+    # cr and lf are handled special
     def test_h2_200_02(self, env):
         url = env.mkurl("https", "cgi", "/hecho.py")
         for x in range(1, 32):
@@ -38,6 +41,9 @@ class TestInvalidHeaders:
                 if x in [10, 13]:
                     assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
                     assert 200 == r.response["status"], "unexpected status for char 0x%02x" % x
+                elif env.httpd_is_at_least('2.5.0'):
+                    assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+                    assert 500 == r.response["status"], f'unexpected status for char 0x{x:02}'
                 else:
                     assert 0 != r.exit_code, "unexpected exit code for char 0x%02x" % x
 
@@ -46,10 +52,18 @@ class TestInvalidHeaders:
         url = env.mkurl("https", "cgi", "/hecho.py")
         for h in ["10", "7f"]:
             r = env.curl_post_data(url, "name=x%%%s&value=yz" % h)
-            assert 0 != r.exit_code
+            if env.httpd_is_at_least('2.5.0'):
+                assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+                assert 500 == r.response["status"], f"unexpected exit code for char 0x{h:02}"
+            else:
+                assert 0 != r.exit_code
             r = env.curl_post_data(url, "name=x&value=y%%%sz" % h)
-            assert 0 != r.exit_code
-    
+            if env.httpd_is_at_least('2.5.0'):
+                assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+                assert 500 == r.response["status"], f"unexpected exit code for char 0x{h:02}"
+            else:
+                assert 0 != r.exit_code
+
     # test header field lengths check, LimitRequestLine (default 8190)
     def test_h2_200_10(self, env):
         url = env.mkurl("https", "cgi", "/")
diff --git a/test/modules/http2/test_401_early_hints.py b/test/modules/http2/test_401_early_hints.py
index f73dcc4c8c8f6adc0f8454aa9645cefd5e54f87a..0991d94e983ea24fe7f24665491581374eb6cc54 100644 (file)
@@ -9,6 +9,8 @@ class TestEarlyHints:
 
     @pytest.fixture(autouse=True, scope='class')
     def _class_scope(self, env):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1909769 from trunk')
         H2Conf(env).start_vhost(domains=[f"hints.{env.http_tld}"],
                                 port=env.https_port, doc_root="htdocs/test1"
         ).add("""
@@ -21,6 +23,13 @@ class TestEarlyHints:
         <Location /006-nohints.html>
             Header add Link "</006/006.css>;rel=preload"
         </Location>
+        <Location /006-early.html>
+            H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+        </Location>
+        <Location /006-early-no-push.html>
+            H2Push off
+            H2EarlyHint Link "</006/006.css>;rel=preload;as=style"
+        </Location>
         """).end_vhost(
         ).install()
         assert env.apache_restart() == 0
@@ -45,3 +54,30 @@ class TestEarlyHints:
         promises = r.results["streams"][r.response["id"]]["promises"]
         assert 1 == len(promises)
         assert "previous" not in r.response
+
+    # H2EarlyHints enabled in general, check that it works for H2EarlyHint
+    def test_h2_401_33(self, env, repeat):
+        url = env.mkurl("https", "hints", "/006-early.html")
+        r = env.nghttp().get(url)
+        assert r.response["status"] == 200
+        promises = r.results["streams"][r.response["id"]]["promises"]
+        assert 1 == len(promises)
+        early = r.response["previous"]
+        assert early
+        assert 103 == int(early["header"][":status"])
+        assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
+    # H2EarlyHints enabled, no PUSH, check that it works for H2EarlyHint
+    def test_h2_401_34(self, env, repeat):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1909769 from trunk')
+        url = env.mkurl("https", "hints", "/006-early-no-push.html")
+        r = env.nghttp().get(url)
+        assert r.response["status"] == 200
+        promises = r.results["streams"][r.response["id"]]["promises"]
+        assert 0 == len(promises)
+        early = r.response["previous"]
+        assert early
+        assert 103 == int(early["header"][":status"])
+        assert early["header"]["link"] == '</006/006.css>;rel=preload;as=style'
+
diff --git a/test/modules/http2/test_500_proxy.py b/test/modules/http2/test_500_proxy.py
index 2e6141574c2ab8ec53daf911fe1284a4afa7a2bf..88a8ece3f6e98667b91a3da1ef95f080db6cf1bf 100644 (file)
@@ -49,11 +49,17 @@ class TestProxy:
             src = file.read()
         assert r2.response["body"] == src
 
-    def test_h2_500_10(self, env, repeat):
-        self.curl_upload_and_verify(env, "data-1k", ["--http2"])
-        self.curl_upload_and_verify(env, "data-10k", ["--http2"])
-        self.curl_upload_and_verify(env, "data-100k", ["--http2"])
-        self.curl_upload_and_verify(env, "data-1m", ["--http2"])
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_500_10(self, env, name, repeat):
+        self.curl_upload_and_verify(env, name, ["--http2"])
+
+    def test_h2_500_11(self, env):
+        self.curl_upload_and_verify(env, "data-1k", [
+            "--http1.1", "-H", "Content-Length:", "-H", "Transfer-Encoding: chunked"
+        ])
+        self.curl_upload_and_verify(env, "data-1k", ["--http2", "-H", "Content-Length:"])
 
     # POST some data using nghttp and see it echo'ed properly back
     def nghttp_post_and_verify(self, env, fname, options=None):
@@ -71,17 +77,17 @@ class TestProxy:
                 fd.write(r.stderr)
             assert r.response["body"] == src
 
-    def test_h2_500_20(self, env, repeat):
-        self.nghttp_post_and_verify(env, "data-1k", [])
-        self.nghttp_post_and_verify(env, "data-10k", [])
-        self.nghttp_post_and_verify(env, "data-100k", [])
-        self.nghttp_post_and_verify(env, "data-1m", [])
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_500_20(self, env, name, repeat):
+        self.nghttp_post_and_verify(env, name, [])
 
-    def test_h2_500_21(self, env, repeat):
-        self.nghttp_post_and_verify(env, "data-1k", ["--no-content-length"])
-        self.nghttp_post_and_verify(env, "data-10k", ["--no-content-length"])
-        self.nghttp_post_and_verify(env, "data-100k", ["--no-content-length"])
-        self.nghttp_post_and_verify(env, "data-1m", ["--no-content-length"])
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_500_21(self, env, name, repeat):
+        self.nghttp_post_and_verify(env, name, ["--no-content-length"])
 
     # upload and GET again using nghttp, compare to original content
     def nghttp_upload_and_verify(self, env, fname, options=None):
@@ -101,17 +107,17 @@ class TestProxy:
             src = file.read()
         assert src == r2.response["body"]
 
-    def test_h2_500_22(self, env):
-        self.nghttp_upload_and_verify(env, "data-1k", [])
-        self.nghttp_upload_and_verify(env, "data-10k", [])
-        self.nghttp_upload_and_verify(env, "data-100k", [])
-        self.nghttp_upload_and_verify(env, "data-1m", [])
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_500_22(self, env, name):
+        self.nghttp_upload_and_verify(env, name, [])
 
-    def test_h2_500_23(self, env):
-        self.nghttp_upload_and_verify(env, "data-1k", ["--no-content-length"])
-        self.nghttp_upload_and_verify(env, "data-10k", ["--no-content-length"])
-        self.nghttp_upload_and_verify(env, "data-100k", ["--no-content-length"])
-        self.nghttp_upload_and_verify(env, "data-1m", ["--no-content-length"])
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_500_23(self, env, name):
+        self.nghttp_upload_and_verify(env, name, ["--no-content-length"])
 
     # upload using nghttp and check returned status
     def nghttp_upload_stat(self, env, fname, options=None):
@@ -124,7 +130,7 @@ class TestProxy:
         assert r.response["header"]["location"]
 
     def test_h2_500_24(self, env):
-        for i in range(100):
+        for i in range(50):
             self.nghttp_upload_stat(env, "data-1k", ["--no-content-length"])
 
     # lets do some error tests
diff --git a/test/modules/http2/test_601_h2proxy_twisted.py b/test/modules/http2/test_601_h2proxy_twisted.py
new file mode 100644 (file)
index 0000000..276558e
--- /dev/null
@@ -0,0 +1,91 @@
+import json
+import logging
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestH2ProxyTwisted:
+
+    @pytest.fixture(autouse=True, scope='class')
+    def _class_scope(self, env):
+        H2Conf(env).add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+        assert env.apache_restart() == 0
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_01_echo_uploads(self, env, name):
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
+        r = env.curl_upload(url, fpath, options=[])
+        assert r.exit_code == 0
+        assert 200 <= r.response["status"] < 300
+        # we POST a form, so echoed input is larger than the file itself
+        assert len(r.response["body"]) > os.path.getsize(fpath)
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_02_echo_delayed(self, env, name):
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?chunk_delay=10ms")
+        r = env.curl_upload(url, fpath, options=[])
+        assert r.exit_code == 0
+        assert 200 <= r.response["status"] < 300
+        # we POST a form, so echoed input is larger than the file itself
+        assert len(r.response["body"]) > os.path.getsize(fpath)
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_03_echo_fail_early(self, env, name):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1910157 from trunk')
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?fail_after=512")
+        r = env.curl_upload(url, fpath, options=[])
+        # 92 is curl's CURLE_HTTP2_STREAM
+        assert r.exit_code == 92 or r.response["status"] == 502
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_04_echo_fail_late(self, env, name):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1910157 from trunk')
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?fail_after={os.path.getsize(fpath)}")
+        r = env.curl_upload(url, fpath, options=[])
+        # 92 is curl's CURLE_HTTP2_STREAM
+        assert r.exit_code == 92 or r.response["status"] == 502
+
+    def test_h2_601_05_echo_fail_many(self, env):
+        if not env.httpd_is_at_least('2.5.0'):
+            pytest.skip(f'needs r1910157 from trunk')
+        if not env.curl_is_at_least('8.0.0'):
+            pytest.skip(f'need at least curl v8.0.0 for this')
+        count = 200
+        fpath = os.path.join(env.gen_dir, "data-100k")
+        args = [env.curl, '--parallel', '--parallel-max', '20']
+        for i in range(count):
+            if i > 0:
+                args.append('--next')
+            url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?id={i}&fail_after={os.path.getsize(fpath)}")
+            args.extend(env.curl_resolve_args(url=url))
+            args.extend([
+                '-o', '/dev/null', '-w', '%{json}\\n', '--form', f'file=@{fpath}', url
+            ])
+        log.error(f'run: {args}')
+        r = env.run(args)
+        stats = []
+        for line in r.stdout.splitlines():
+            stats.append(json.loads(line))
+        assert len(stats) == count
+        for st in stats:
+            assert st['exitcode'] == 92 or st['http_code'] == 502, f'unexpected: {st}'
diff --git a/test/pyhttpd/conf/httpd.conf.template b/test/pyhttpd/conf/httpd.conf.template
index f44935e68c8db4476afac2f337e920af387de2f3..255b88ad05ff5af1348e5a90034979d016136d30 100644 (file)
@@ -6,7 +6,7 @@ Include "conf/modules.conf"
 DocumentRoot "${server_dir}/htdocs"
 
 <IfModule log_config_module>
-    LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\" %k" combined
+    LogFormat "{ \"request\": \"%r\", \"status\": %>s, \"bytes_resp_B\": %B, \"bytes_tx_O\": %O, \"bytes_rx_I\": %I, \"bytes_rx_tx_S\": %S, \"time_taken\": %D }" combined
     LogFormat "%h %l %u %t \"%r\" %>s %b" common
     CustomLog "logs/access_log" combined
 
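The suite's "combined" access-log format changes from the classic combined format to one JSON object per line, so the new range and timing tests can read byte counts (%B payload bytes, %O/%I/%S output/input/total bytes from mod_logio) and the request duration in microseconds (%D) straight from the log. A minimal sketch of consuming such a log, assuming a per-test CustomLog written with this format as the new tests configure it:

    import json

    # one JSON object per access-log line, as produced by the LogFormat above
    with open('logs/test_008') as fd:
        for e in (json.loads(line) for line in fd):
            assert e['bytes_rx_I'] > 0      # request bytes received (mod_logio %I)
            assert e['time_taken'] >= 0     # microseconds (%D)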
diff --git a/test/pyhttpd/curl.py b/test/pyhttpd/curl.py
index 2b6840b1611869df68e31b5461279b100ae49888..84ef9c8161a40569e20b4a9b75a55a38ab4aa706 100644 (file)
@@ -31,9 +31,14 @@ class CurlPiper:
     def response(self):
         return self._r.response if self._r else None
 
+    def __repr__(self):
+        return f'CurlPiper[exitcode={self._exitcode}, stderr={self._stderr}, stdout={self._stdout}]'
+
     def start(self):
         self.args, self.headerfile = self.env.curl_complete_args([self.url], timeout=5, options=[
-            "-T", "-", "-X", "POST", "--trace-ascii", "%", "--trace-time"])
+            "-T", "-", "-X", "POST", "--trace-ascii", "%", "--trace-time"
+        ])
+        self.args.append(self.url)
         sys.stderr.write("starting: {0}\n".format(self.args))
         self.proc = subprocess.Popen(self.args, stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
diff --git a/test/pyhttpd/env.py b/test/pyhttpd/env.py
index 818f18a5c495ee1e9421c268f98f1269911da53e..842e369cbced47a8e4e15f283aa291a80667e670 100644 (file)
@@ -237,6 +237,8 @@ class HttpdTestEnv:
         if HttpdTestEnv.LIBEXEC_DIR is None:
             HttpdTestEnv.LIBEXEC_DIR = self._libexec_dir = self.get_apxs_var('LIBEXECDIR')
         self._curl = self.config.get('global', 'curl_bin')
+        if 'CURL' in os.environ:
+            self._curl = os.environ['CURL']
         self._nghttp = self.config.get('global', 'nghttp')
         if self._nghttp is None:
             self._nghttp = 'nghttp'
@@ -320,6 +322,10 @@ class HttpdTestEnv:
             for name in self._httpd_log_modules:
                 self._log_interesting += f" {name}:{log_level}"
 
+    @property
+    def curl(self) -> str:
+        return self._curl
+
     @property
     def apxs(self) -> str:
         return self._apxs
@@ -665,19 +671,11 @@ class HttpdTestEnv:
                 os.remove(os.path.join(self.gen_dir, fname))
         self._curl_headerfiles_n = 0
 
-    def curl_complete_args(self, urls, stdout_list=False,
-                           timeout=None, options=None,
-                           insecure=False, force_resolve=True):
-        u = urlparse(urls[0])
-        #assert u.hostname, f"hostname not in url: {urls[0]}"
-        headerfile = f"{self.gen_dir}/curl.headers.{self._curl_headerfiles_n}"
-        self._curl_headerfiles_n += 1
+    def curl_resolve_args(self, url, insecure=False, force_resolve=True, options=None):
+        u = urlparse(url)
 
         args = [
-            self._curl, "-s", "--path-as-is", "-D", headerfile,
         ]
-        if stdout_list:
-            args.extend(['-w', '%{stdout}' + HttpdTestSetup.CURL_STDOUT_SEPARATOR])
         if u.scheme == 'http':
             pass
         elif insecure:
@@ -689,19 +687,33 @@ class HttpdTestEnv:
             if ca_pem:
                 args.extend(["--cacert", ca_pem])
 
-        if self._current_test is not None:
-            args.extend(["-H", f'AP-Test-Name: {self._current_test}'])
-
         if force_resolve and u.hostname and u.hostname != 'localhost' \
                 and u.hostname != self._httpd_addr \
                 and not re.match(r'^(\d+|\[|:).*', u.hostname):
-            assert u.port, f"port not in url: {urls[0]}"
+            assert u.port, f"port not in url: {url}"
             args.extend(["--resolve", f"{u.hostname}:{u.port}:{self._httpd_addr}"])
+        return args
+
+    def curl_complete_args(self, urls, stdout_list=False,
+                           timeout=None, options=None,
+                           insecure=False, force_resolve=True):
+        headerfile = f"{self.gen_dir}/curl.headers.{self._curl_headerfiles_n}"
+        self._curl_headerfiles_n += 1
+
+        args = [
+            self._curl, "-s", "--path-as-is", "-D", headerfile,
+        ]
+        args.extend(self.curl_resolve_args(urls[0], insecure=insecure,
+                                           force_resolve=force_resolve,
+                                           options=options))
+        if stdout_list:
+            args.extend(['-w', '%{stdout}' + HttpdTestSetup.CURL_STDOUT_SEPARATOR])
+        if self._current_test is not None:
+            args.extend(["-H", f'AP-Test-Name: {self._current_test}'])
         if timeout is not None and int(timeout) > 0:
             args.extend(["--connect-timeout", str(int(timeout))])
         if options:
             args.extend(options)
-        args += urls
         return args, headerfile
 
     def curl_parse_headerfile(self, headerfile: str, r: ExecResult = None) -> ExecResult:
@@ -769,6 +781,7 @@ class HttpdTestEnv:
             urls=urls, stdout_list=stdout_list,
             timeout=timeout, options=options, insecure=insecure,
             force_resolve=force_resolve)
+        args += urls
         r = self.run(args, stdout_list=stdout_list)
         if r.exit_code == 0:
             self.curl_parse_headerfile(headerfile, r=r)
@@ -837,3 +850,18 @@ class HttpdTestEnv:
                 }
             run.add_results({"h2load": stats})
         return run
+
+    def make_data_file(self, indir: str, fname: str, fsize: int) -> str:
+        fpath = os.path.join(indir, fname)
+        s10 = "0123456789"
+        s = (101 * s10) + s10[0:3]
+        with open(fpath, 'w') as fd:
+            for i in range(int(fsize / 1024)):
+                fd.write(f"{i:09d}-{s}\n")
+            remain = int(fsize % 1024)
+            if remain != 0:
+                i = int(fsize / 1024) + 1
+                s = f"{i:09d}-{s}\n"
+                fd.write(s[0:remain])
+        return fpath
+