git.ipfire.org Git - thirdparty/apache/httpd.git/commitdiff
tests: synch with recent changes from mod_h2 repository
author: Stefan Eissing <icing@apache.org>
Thu, 1 Jun 2023 10:13:28 +0000 (10:13 +0000)
committer: Stefan Eissing <icing@apache.org>
Thu, 1 Jun 2023 10:13:28 +0000 (10:13 +0000)
git-svn-id: https://svn.apache.org/repos/asf/httpd/httpd/trunk@1910156 13f79535-47bb-0310-9956-ffa450edef68

test/modules/http2/mod_h2test/mod_h2test.c
test/modules/http2/test_004_post.py
test/modules/http2/test_105_timeout.py
test/modules/http2/test_200_header_invalid.py
test/modules/http2/test_500_proxy.py
test/modules/http2/test_601_h2proxy_twisted.py [new file with mode: 0644]
test/pyhttpd/curl.py
test/pyhttpd/env.py

index b5ee8ad6e4e2eba8062454012a4427e4cdf68018..f20b9547e7045c87ac587fb2a2452143806b2ac2 100644 (file)
@@ -138,7 +138,12 @@ static int h2test_echo_handler(request_rec *r)
     char buffer[8192];
     const char *ct;
     long l;
-    
+    int i;
+    apr_time_t chunk_delay = 0;
+    apr_array_header_t *args = NULL;
+    apr_size_t blen, fail_after = 0;
+    int fail_requested = 0, error_bucket = 1;
+
     if (strcmp(r->handler, "h2test-echo")) {
         return DECLINED;
     }
@@ -146,6 +151,40 @@ static int h2test_echo_handler(request_rec *r)
         return DECLINED;
     }
 
+    if(r->args) {
+        args = apr_cstr_split(r->args, "&", 1, r->pool);
+        for(i = 0; i < args->nelts; ++i) {
+            char *s, *val, *arg = APR_ARRAY_IDX(args, i, char*);
+            s = strchr(arg, '=');
+            if(s) {
+                *s = '\0';
+                val = s + 1;
+                if(!strcmp("id", arg)) {
+                    /* accepted, but not processed */
+                    continue;
+                }
+                else if(!strcmp("chunk_delay", arg)) {
+                    rv = duration_parse(&chunk_delay, val, "s");
+                    if(APR_SUCCESS == rv) {
+                        continue;
+                    }
+                }
+                else if(!strcmp("fail_after", arg)) {
+                    fail_after = (int)apr_atoi64(val);
+                    if(fail_after >= 0) {
+                      fail_requested = 1;
+                      continue;
+                    }
+                }
+            }
+            ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, r, "query parameter not "
+                          "understood: '%s' in %s",
+                          arg, r->args);
+            ap_die(HTTP_BAD_REQUEST, r);
+            return OK;
+        }
+    }
+
     ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "echo_handler: processing request");
     r->status = 200;
     r->clength = -1;
@@ -166,12 +205,26 @@ static int h2test_echo_handler(request_rec *r)
         while (0 < (l = ap_get_client_block(r, &buffer[0], sizeof(buffer)))) {
             ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
                           "echo_handler: copying %ld bytes from request body", l);
-            rv = apr_brigade_write(bb, NULL, NULL, buffer, l);
+            blen = (apr_size_t)l;
+            if (fail_requested) {
+              if (blen > fail_after) {
+                blen = fail_after;
+              }
+              fail_after -= blen;
+            }
+            rv = apr_brigade_write(bb, NULL, NULL, buffer, blen);
             if (APR_SUCCESS != rv) goto cleanup;
+            if (chunk_delay) {
+                apr_sleep(chunk_delay);
+            }
             rv = ap_pass_brigade(r->output_filters, bb);
             if (APR_SUCCESS != rv) goto cleanup;
             ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r,
                           "echo_handler: passed %ld bytes from request body", l);
+            if (fail_requested && fail_after == 0) {
+              rv = APR_EINVAL;
+              goto cleanup;
+            }
         }
     }
     /* we are done */
@@ -195,6 +248,12 @@ cleanup:
         ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "echo_handler: request handled");
         return OK;
     }
+    else if (error_bucket) {
+        int status = ap_map_http_request_error(rv, HTTP_BAD_REQUEST);
+        b = ap_bucket_error_create(status, NULL, r->pool, c->bucket_alloc);
+        APR_BRIGADE_INSERT_TAIL(bb, b);
+        ap_pass_brigade(r->output_filters, bb);
+    }
     else {
         /* no way to know what type of error occurred */
         ap_log_rerror(APLOG_MARK, APLOG_TRACE1, rv, r, "h2test_echo_handler failed");
@@ -419,18 +478,20 @@ static int h2test_error_handler(request_rec *r)
                     }
                 }
                 else if (!strcmp("delay", arg)) {
-                    rv = duration_parse(&delay, r->args, "s");
+                    rv = duration_parse(&delay, val, "s");
                     if (APR_SUCCESS == rv) {
                         continue;
                     }
                 }
                 else if (!strcmp("body_delay", arg)) {
-                    rv = duration_parse(&body_delay, r->args, "s");
+                    rv = duration_parse(&body_delay, val, "s");
                     if (APR_SUCCESS == rv) {
                         continue;
                     }
                 }
             }
+            ap_log_rerror(APLOG_MARK, APLOG_TRACE1, 0, r, "error_handler: "
+                  "did not understand '%s'", arg);
             ap_die(HTTP_BAD_REQUEST, r);
             return OK;
         }
index e7938f007bdb247441c3439e131b392689ecfeb6..0095e69990e44b5221ceb4d12f39d830f5995d94 100644 (file)
@@ -19,7 +19,7 @@ class TestPost:
     def _class_scope(self, env):
         TestPost._local_dir = os.path.dirname(inspect.getfile(TestPost))
         conf = H2Conf(env)
-        conf.add_vhost_cgi(proxy_self=True).install()
+        conf.add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
         assert env.apache_restart() == 0
 
     def local_src(self, fname):
index 13aa8ed07afd765eae8fc783bdd6805ba6c8518c..f7d3859cafb412fd436569741a9715f7c5e79bf6 100644 (file)
@@ -128,22 +128,25 @@ class TestTimeout:
     def test_h2_105_12(self, env):
         # long connection timeout, short stream timeout
         # sending a slow POST
-        if env.httpd_is_at_least("2.5.0"):
-            conf = H2Conf(env)
-            conf.add_vhost_cgi()
-            conf.add("Timeout 10")
-            conf.add("H2StreamTimeout 1")
-            conf.install()
-            assert env.apache_restart() == 0
-            url = env.mkurl("https", "cgi", "/h2test/delay?5")
-            piper = CurlPiper(env=env, url=url)
-            piper.start()
-            for _ in range(3):
-                time.sleep(2)
-                try:
-                    piper.send("0123456789\n")
-                except BrokenPipeError:
-                    break
-            piper.close()
-            assert piper.response
-            assert piper.response['status'] == 408, f"{piper.response}"
+        if not env.curl_is_at_least('8.0.0'):
+            pytest.skip(f'need at least curl v8.0.0 for this')
+        if not env.httpd_is_at_least("2.5.0"):
+            pytest.skip(f'need at least httpd 2.5.0 for this')
+        conf = H2Conf(env)
+        conf.add_vhost_cgi()
+        conf.add("Timeout 10")
+        conf.add("H2StreamTimeout 1")
+        conf.install()
+        assert env.apache_restart() == 0
+        url = env.mkurl("https", "cgi", "/h2test/delay?5")
+        piper = CurlPiper(env=env, url=url)
+        piper.start()
+        for _ in range(3):
+            time.sleep(2)
+            try:
+                piper.send("0123456789\n")
+            except BrokenPipeError:
+                break
+        piper.close()
+        assert piper.response, f'{piper}'
+        assert piper.response['status'] == 408, f"{piper.response}"
index 80ad5a16501dd0eb97ea94a4622abc44f9207db1..fe9448784059f359b2c68041b1c0fd9b2c853681 100644 (file)
@@ -13,6 +13,7 @@ class TestInvalidHeaders:
 
     # let the hecho.py CGI echo chars < 0x20 in field name
     # for almost all such characters, the stream returns a 500
+    # or in httpd >= 2.5.0 gets aborted with a h2 error
     # cr is handled special
     def test_h2_200_01(self, env):
         url = env.mkurl("https", "cgi", "/hecho.py")
@@ -22,12 +23,15 @@ class TestInvalidHeaders:
             if x in [13]:
                 assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
                 assert 200 == r.response["status"], f'unexpected status for char 0x{x:02}'
+            elif x in [10] or env.httpd_is_at_least('2.5.0'):
+                assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+                assert 500 == r.response["status"], f'unexpected status for char 0x{x:02}'
             else:
-                assert 0 == r.exit_code, f'"unexpected exit code for char 0x{x:02}'
-                assert 500 == r.response["status"], f'posting "{data}" unexpected status, {r}'
+                assert 0 != r.exit_code, f'unexpected exit code for char 0x{x:02}'
 
     # let the hecho.py CGI echo chars < 0x20 in field value
     # for almost all such characters, the stream returns a 500
+    # or in httpd >= 2.5.0 gets aborted with a h2 error
     # cr and lf are handled special
     def test_h2_200_02(self, env):
         url = env.mkurl("https", "cgi", "/hecho.py")
@@ -37,20 +41,28 @@ class TestInvalidHeaders:
                 if x in [10, 13]:
                     assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
                     assert 200 == r.response["status"], "unexpected status for char 0x%02x" % x
+                elif env.httpd_is_at_least('2.5.0'):
+                    assert 0 == r.exit_code, f'unexpected exit code for char 0x{x:02}'
+                    assert 500 == r.response["status"], f'unexpected status for char 0x{x:02}'
                 else:
-                    assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % x
-                    assert 500 == r.response["status"], "unexpected status for char 0x%02x" % x
+                    assert 0 != r.exit_code, "unexpected exit code for char 0x%02x" % x
 
     # let the hecho.py CGI echo 0x10 and 0x7f in field name and value
     def test_h2_200_03(self, env):
         url = env.mkurl("https", "cgi", "/hecho.py")
         for h in ["10", "7f"]:
             r = env.curl_post_data(url, "name=x%%%s&value=yz" % h)
-            assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % h
-            assert 500 == r.response["status"], "unexpected status for char 0x%02x" % h
+            if env.httpd_is_at_least('2.5.0'):
+                assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+                assert 500 == r.response["status"], f"unexpected exit code for char 0x{h:02}"
+            else:
+                assert 0 != r.exit_code
             r = env.curl_post_data(url, "name=x&value=y%%%sz" % h)
-            assert 0 == r.exit_code, "unexpected exit code for char 0x%02x" % h
-            assert 500 == r.response["status"], "unexpected status for char 0x%02x" % h
+            if env.httpd_is_at_least('2.5.0'):
+                assert 0 == r.exit_code, f"unexpected exit code for char 0x{h:02}"
+                assert 500 == r.response["status"], f"unexpected exit code for char 0x{h:02}"
+            else:
+                assert 0 != r.exit_code
 
     # test header field lengths check, LimitRequestLine (default 8190)
     def test_h2_200_10(self, env):
index 6ab8275b11dba0c6caf352abdd402b2c95825159..306568e2d5a5c3ea1c1b4a6812c21ca9eaf1010f 100644 (file)
@@ -146,14 +146,16 @@ class TestProxy:
 
     # produce an error during response body
     def test_h2_500_31(self, env, repeat):
-        pytest.skip("needs fix in core protocol handling")
+        if env.httpd_is_at_least("2.5.0"):
+            pytest.skip("needs fix in core protocol handling")
         url = env.mkurl("https", "cgi", "/proxy/h2test/error?body_error=timeout")
         r = env.curl_get(url)
         assert r.exit_code != 0, r
 
     # produce an error, fail to generate an error bucket
     def test_h2_500_32(self, env, repeat):
-        pytest.skip("needs fix in core protocol handling")
+        if env.httpd_is_at_least("2.5.0"):
+            pytest.skip("needs fix in core protocol handling")
         url = env.mkurl("https", "cgi", "/proxy/h2test/error?body_error=timeout&error_bucket=0")
         r = env.curl_get(url)
         assert r.exit_code != 0, r
diff --git a/test/modules/http2/test_601_h2proxy_twisted.py b/test/modules/http2/test_601_h2proxy_twisted.py
new file mode 100644 (file)
index 0000000..748a494
--- /dev/null
@@ -0,0 +1,89 @@
+import json
+import logging
+import os
+import pytest
+
+from .env import H2Conf, H2TestEnv
+
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
+class TestH2ProxyTwisted:
+
+    @pytest.fixture(autouse=True, scope='class')
+    def _class_scope(self, env):
+        H2Conf(env).add_vhost_cgi(proxy_self=True, h2proxy_self=True).install()
+        assert env.apache_restart() == 0
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_01_echo_uploads(self, env, name):
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo")
+        r = env.curl_upload(url, fpath, options=[])
+        assert r.exit_code == 0
+        assert 200 <= r.response["status"] < 300
+        # we POST a form, so echoed input is larger than the file itself
+        assert len(r.response["body"]) > os.path.getsize(fpath)
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_02_echo_delayed(self, env, name):
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?chunk_delay=10ms")
+        r = env.curl_upload(url, fpath, options=[])
+        assert r.exit_code == 0
+        assert 200 <= r.response["status"] < 300
+        # we POST a form, so echoed input is larger than the file itself
+        assert len(r.response["body"]) > os.path.getsize(fpath)
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_03_echo_fail_early(self, env, name):
+        if env.httpd_is_at_least("2.5.0"):
+            pytest.skip("needs mod_proxy_http2 fix")
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", "/h2proxy/h2test/echo?fail_after=512")
+        r = env.curl_upload(url, fpath, options=[])
+        # 92 is curl's CURLE_HTTP2_STREAM
+        assert r.exit_code == 92 or r.response["status"] == 502
+
+    @pytest.mark.parametrize("name", [
+        "data-1k", "data-10k", "data-100k", "data-1m",
+    ])
+    def test_h2_601_04_echo_fail_late(self, env, name):
+        if env.httpd_is_at_least("2.5.0"):
+            pytest.skip("needs mod_proxy_http2 fix")
+        fpath = os.path.join(env.gen_dir, name)
+        url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?fail_after={os.path.getsize(fpath)}")
+        r = env.curl_upload(url, fpath, options=[])
+        # 92 is curl's CURLE_HTTP2_STREAM
+        assert r.exit_code == 92 or r.response["status"] == 502
+
+    def test_h2_601_05_echo_fail_many(self, env):
+        if env.httpd_is_at_least("2.5.0"):
+            pytest.skip("needs mod_proxy_http2 fix")
+        count = 200
+        fpath = os.path.join(env.gen_dir, "data-100k")
+        args = [env.curl, '--parallel', '--parallel-max', '20']
+        for i in range(count):
+            if i > 0:
+                args.append('--next')
+            url = env.mkurl("https", "cgi", f"/h2proxy/h2test/echo?id={i}&fail_after={os.path.getsize(fpath)}")
+            args.extend(env.curl_resolve_args(url=url))
+            args.extend([
+                '-o', '/dev/null', '-w', '%{json}\\n', '--form', f'file=@{fpath}', url
+            ])
+        log.error(f'run: {args}')
+        r = env.run(args)
+        stats = []
+        for line in r.stdout.splitlines():
+            stats.append(json.loads(line))
+        assert len(stats) == count
+        for st in stats:
+            assert st['exitcode'] == 92 or st['http_code'] == 502, f'unexpected: {st}'
index 2b6840b1611869df68e31b5461279b100ae49888..84ef9c8161a40569e20b4a9b75a55a38ab4aa706 100644 (file)
@@ -31,9 +31,14 @@ class CurlPiper:
     def response(self):
         return self._r.response if self._r else None
 
+    def __repr__(self):
+        return f'CurlPiper[exitcode={self._exitcode}, stderr={self._stderr}, stdout={self._stdout}]'
+
     def start(self):
         self.args, self.headerfile = self.env.curl_complete_args([self.url], timeout=5, options=[
-            "-T", "-", "-X", "POST", "--trace-ascii", "%", "--trace-time"])
+            "-T", "-", "-X", "POST", "--trace-ascii", "%", "--trace-time"
+        ])
+        self.args.append(self.url)
         sys.stderr.write("starting: {0}\n".format(self.args))
         self.proc = subprocess.Popen(self.args, stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
index 2f31de91c864c69a682fb2405a719cf988c1ddd5..842e369cbced47a8e4e15f283aa291a80667e670 100644 (file)
@@ -322,6 +322,10 @@ class HttpdTestEnv:
             for name in self._httpd_log_modules:
                 self._log_interesting += f" {name}:{log_level}"
 
+    @property
+    def curl(self) -> str:
+        return self._curl
+
     @property
     def apxs(self) -> str:
         return self._apxs
@@ -667,19 +671,11 @@ class HttpdTestEnv:
                 os.remove(os.path.join(self.gen_dir, fname))
         self._curl_headerfiles_n = 0
 
-    def curl_complete_args(self, urls, stdout_list=False,
-                           timeout=None, options=None,
-                           insecure=False, force_resolve=True):
-        u = urlparse(urls[0])
-        #assert u.hostname, f"hostname not in url: {urls[0]}"
-        headerfile = f"{self.gen_dir}/curl.headers.{self._curl_headerfiles_n}"
-        self._curl_headerfiles_n += 1
+    def curl_resolve_args(self, url, insecure=False, force_resolve=True, options=None):
+        u = urlparse(url)
 
         args = [
-            self._curl, "-s", "--path-as-is", "-D", headerfile,
         ]
-        if stdout_list:
-            args.extend(['-w', '%{stdout}' + HttpdTestSetup.CURL_STDOUT_SEPARATOR])
         if u.scheme == 'http':
             pass
         elif insecure:
@@ -691,19 +687,33 @@ class HttpdTestEnv:
             if ca_pem:
                 args.extend(["--cacert", ca_pem])
 
-        if self._current_test is not None:
-            args.extend(["-H", f'AP-Test-Name: {self._current_test}'])
-
         if force_resolve and u.hostname and u.hostname != 'localhost' \
                 and u.hostname != self._httpd_addr \
                 and not re.match(r'^(\d+|\[|:).*', u.hostname):
-            assert u.port, f"port not in url: {urls[0]}"
+            assert u.port, f"port not in url: {url}"
             args.extend(["--resolve", f"{u.hostname}:{u.port}:{self._httpd_addr}"])
+        return args
+
+    def curl_complete_args(self, urls, stdout_list=False,
+                           timeout=None, options=None,
+                           insecure=False, force_resolve=True):
+        headerfile = f"{self.gen_dir}/curl.headers.{self._curl_headerfiles_n}"
+        self._curl_headerfiles_n += 1
+
+        args = [
+            self._curl, "-s", "--path-as-is", "-D", headerfile,
+        ]
+        args.extend(self.curl_resolve_args(urls[0], insecure=insecure,
+                                           force_resolve=force_resolve,
+                                           options=options))
+        if stdout_list:
+            args.extend(['-w', '%{stdout}' + HttpdTestSetup.CURL_STDOUT_SEPARATOR])
+        if self._current_test is not None:
+            args.extend(["-H", f'AP-Test-Name: {self._current_test}'])
         if timeout is not None and int(timeout) > 0:
             args.extend(["--connect-timeout", str(int(timeout))])
         if options:
             args.extend(options)
-        args += urls
         return args, headerfile
 
     def curl_parse_headerfile(self, headerfile: str, r: ExecResult = None) -> ExecResult:
@@ -771,6 +781,7 @@ class HttpdTestEnv:
             urls=urls, stdout_list=stdout_list,
             timeout=timeout, options=options, insecure=insecure,
             force_resolve=force_resolve)
+        args += urls
         r = self.run(args, stdout_list=stdout_list)
         if r.exit_code == 0:
             self.curl_parse_headerfile(headerfile, r=r)