From: Stefan Eissing
Date: Wed, 23 Aug 2023 07:52:33 +0000 (+0000)
Subject: Merge r1911291 from trunk:
X-Git-Tag: 2.4.58-rc1-candidate~51
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=c973637478a6880b569e8574d0486fb6688bf93a;p=thirdparty%2Fapache%2Fhttpd.git

Merge r1911291 from trunk:

  *) mod_http2: Fix reporting of `Total Accesses` in server-status to not count
     HTTP/2 requests twice. Fixes PR 66801.


git-svn-id: https://svn.apache.org/repos/asf/httpd/httpd/branches/2.4.x@1911858 13f79535-47bb-0310-9956-ffa450edef68
---

diff --git a/changes-entries/pr66801.txt b/changes-entries/pr66801.txt
new file mode 100644
index 00000000000..5fee4bce913
--- /dev/null
+++ b/changes-entries/pr66801.txt
@@ -0,0 +1,3 @@
+  *) mod_http2: Fix reporting of `Total Accesses` in server-status to not count
+     HTTP/2 requests twice. Fixes PR 66801.
+     [Stefan Eissing]
diff --git a/modules/http2/h2_mplx.c b/modules/http2/h2_mplx.c
index 6e2d5776f2f..f3c40b539a3 100644
--- a/modules/http2/h2_mplx.c
+++ b/modules/http2/h2_mplx.c
@@ -333,7 +333,6 @@ h2_mplx *h2_mplx_c1_create(int child_num, apr_uint32_t id, h2_stream *stream0,
         apr_pollset_add(m->pollset, &conn_ctx->pfd);
     }
 
-    m->scratch_r = apr_pcalloc(m->pool, sizeof(*m->scratch_r));
     m->max_spare_transits = 3;
     m->c2_transits = apr_array_make(m->pool, (int)m->max_spare_transits,
                                     sizeof(h2_c2_transit*));
@@ -546,16 +545,6 @@ const h2_stream *h2_mplx_c2_stream_get(h2_mplx *m, int stream_id)
 }
 
 
-static void c1_update_scoreboard(h2_mplx *m, h2_stream *stream)
-{
-    if (stream->c2) {
-        m->scratch_r->connection = stream->c2;
-        m->scratch_r->bytes_sent = stream->out_frame_octets;
-        ap_increment_counts(m->c1->sbh, m->scratch_r);
-        m->scratch_r->connection = NULL;
-    }
-}
-
 static void c1_purge_streams(h2_mplx *m)
 {
     h2_stream *stream;
@@ -565,8 +554,6 @@ static void c1_purge_streams(h2_mplx *m)
         stream = APR_ARRAY_IDX(m->spurge, i, h2_stream*);
         ap_assert(stream->state == H2_SS_CLEANUP);
 
-        c1_update_scoreboard(m, stream);
-
         if (stream->input) {
             h2_beam_destroy(stream->input, m->c1);
             stream->input = NULL;
diff --git a/modules/http2/h2_mplx.h b/modules/http2/h2_mplx.h
index ecb4de9cc6f..781ddf2f4d5 100644
--- a/modules/http2/h2_mplx.h
+++ b/modules/http2/h2_mplx.h
@@ -99,8 +99,6 @@ struct h2_mplx {
 
     struct h2_workers *workers; /* h2 workers process wide instance */
 
-    request_rec *scratch_r; /* pseudo request_rec for scoreboard reporting */
-
     apr_uint32_t max_spare_transits; /* max number of transit pools idling */
     apr_array_header_t *c2_transits; /* base pools for running c2 connections */
 };
diff --git a/test/modules/http2/test_008_ranges.py b/test/modules/http2/test_008_ranges.py
index ce86399a5d4..2d4d7e5be72 100644
--- a/test/modules/http2/test_008_ranges.py
+++ b/test/modules/http2/test_008_ranges.py
@@ -1,6 +1,7 @@
 import inspect
 import json
 import os
+import re
 import pytest
 
 from .env import H2Conf, H2TestEnv
@@ -19,10 +20,16 @@ class TestRanges:
             os.remove(TestRanges.LOGFILE)
         destdir = os.path.join(env.gen_dir, 'apache/htdocs/test1')
         env.make_data_file(indir=destdir, fname="data-100m", fsize=100*1024*1024)
-        conf = H2Conf(env=env)
-        conf.add([
-            "CustomLog logs/test_008 combined"
-        ])
+        conf = H2Conf(env=env, extras={
+            'base': [
+                'CustomLog logs/test_008 combined'
+            ],
+            f'test1.{env.http_tld}': [
+                '<Location /status>',
+                '  SetHandler server-status',
+                '</Location>',
+            ]
+        })
         conf.add_vhost_cgi()
         conf.add_vhost_test1()
         conf.install()
@@ -127,151 +134,38 @@ class TestRanges:
                 break
         assert found, f'request not found in {self.LOGFILE}'
 
-    # upload and GET again using curl, compare to original content
-    def curl_upload_and_verify(self, env, fname, options=None):
-        url = env.mkurl("https", "cgi", "/upload.py")
-        fpath = os.path.join(env.gen_dir, fname)
-        r = env.curl_upload(url, fpath, options=options)
-        assert r.exit_code == 0, f"{r}"
-        assert 200 <= r.response["status"] < 300
-
-        r2 = env.curl_get(r.response["header"]["location"])
-        assert r2.exit_code == 0
-        assert r2.response["status"] == 200
-        with open(os.path.join(TestRanges.SRCDIR, fpath), mode='rb') as file:
-            src = file.read()
-        assert src == r2.response["body"]
-
-import inspect
-import json
-import os
-import pytest
-
-from .env import H2Conf, H2TestEnv
-
-
-@pytest.mark.skipif(condition=H2TestEnv.is_unsupported, reason="mod_http2 not supported here")
-class TestRanges:
-
-    LOGFILE = ""
-
-    @pytest.fixture(autouse=True, scope='class')
-    def _class_scope(self, env):
-        TestRanges.LOGFILE = os.path.join(env.server_logs_dir, "test_008")
-        TestRanges.SRCDIR = os.path.dirname(inspect.getfile(TestRanges))
-        if os.path.isfile(TestRanges.LOGFILE):
-            os.remove(TestRanges.LOGFILE)
-        destdir = os.path.join(env.gen_dir, 'apache/htdocs/test1')
-        env.make_data_file(indir=destdir, fname="data-100m", fsize=100*1024*1024)
-        conf = H2Conf(env=env)
-        conf.add([
-            "CustomLog logs/test_008 combined"
-        ])
-        conf.add_vhost_cgi()
-        conf.add_vhost_test1()
-        conf.install()
-        assert env.apache_restart() == 0
-
-    def test_h2_008_01(self, env):
-        # issue: #203
-        resource = "data-1k"
-        full_length = 1000
-        chunk = 200
-        self.curl_upload_and_verify(env, resource, ["-v", "--http2"])
-        assert env.apache_restart() == 0
-        url = env.mkurl("https", "cgi", f"/files/{resource}?01full")
-        r = env.curl_get(url, 5, options=["--http2"])
-        assert r.response["status"] == 200
-        url = env.mkurl("https", "cgi", f"/files/{resource}?01range")
-        r = env.curl_get(url, 5, options=["--http1.1", "-H", "Range: bytes=0-{0}".format(chunk-1)])
-        assert 206 == r.response["status"]
-        assert chunk == len(r.response["body"].decode('utf-8'))
-        r = env.curl_get(url, 5, options=["--http2", "-H", "Range: bytes=0-{0}".format(chunk-1)])
-        assert 206 == r.response["status"]
-        assert chunk == len(r.response["body"].decode('utf-8'))
-        # Restart for logs to be flushed out
-        assert env.apache_restart() == 0
-        # now check what response lengths have actually been reported
-        detected = {}
-        for line in open(TestRanges.LOGFILE).readlines():
-            e = json.loads(line)
-            if e['request'] == f'GET /files/{resource}?01full HTTP/2.0':
-                assert e['bytes_rx_I'] > 0
-                assert e['bytes_resp_B'] == full_length
-                assert e['bytes_tx_O'] > full_length
-                detected['h2full'] = 1
-            elif e['request'] == f'GET /files/{resource}?01range HTTP/2.0':
-                assert e['bytes_rx_I'] > 0
-                assert e['bytes_resp_B'] == chunk
-                assert e['bytes_tx_O'] > chunk
-                assert e['bytes_tx_O'] < chunk + 256  # response + frame stuff
-                detected['h2range'] = 1
-            elif e['request'] == f'GET /files/{resource}?01range HTTP/1.1':
-                assert e['bytes_rx_I'] > 0  # input bytes received
-                assert e['bytes_resp_B'] == chunk  # response bytes sent (payload)
-                assert e['bytes_tx_O'] > chunk  # output bytes sent
-                detected['h1range'] = 1
-        assert 'h1range' in detected, f'HTTP/1.1 range request not found in {TestRanges.LOGFILE}'
-        assert 'h2range' in detected, f'HTTP/2 range request not found in {TestRanges.LOGFILE}'
-        assert 'h2full' in detected, f'HTTP/2 full request not found in {TestRanges.LOGFILE}'
-
-    def test_h2_008_02(self, env, repeat):
-        path = '/002.jpg'
-        res_len = 90364
-        url = env.mkurl("https", "test1", f'{path}?02full')
-        r = env.curl_get(url, 5)
-        assert r.response["status"] == 200
-        assert "HTTP/2" == r.response["protocol"]
-        h = r.response["header"]
-        assert "accept-ranges" in h
-        assert "bytes" == h["accept-ranges"]
-        assert "content-length" in h
-        clen = h["content-length"]
-        assert int(clen) == res_len
-        # get the first 1024 bytes of the resource, 206 status, but content-length as original
-        url = env.mkurl("https", "test1", f'{path}?02range')
-        r = env.curl_get(url, 5, options=["-H", "range: bytes=0-1023"])
-        assert 206 == r.response["status"]
-        assert "HTTP/2" == r.response["protocol"]
-        assert 1024 == len(r.response["body"])
-        assert "content-length" in h
-        assert clen == h["content-length"]
-        # Restart for logs to be flushed out
-        assert env.apache_restart() == 0
-        # now check what response lengths have actually been reported
-        found = False
-        for line in open(TestRanges.LOGFILE).readlines():
-            e = json.loads(line)
-            if e['request'] == f'GET {path}?02range HTTP/2.0':
-                assert e['bytes_rx_I'] > 0
-                assert e['bytes_resp_B'] == 1024
-                assert e['bytes_tx_O'] > 1024
-                assert e['bytes_tx_O'] < 1024 + 256  # response and frame stuff
-                found = True
-                break
-        assert found, f'request not found in {self.LOGFILE}'
-
-    # send a paced curl download that aborts in the middle of the transfer
-    def test_h2_008_03(self, env, repeat):
-        if not env.httpd_is_at_least('2.5.0'):
-            pytest.skip(f'needs r1909769 from trunk')
+    # test server-status reporting
+    # see 
+    def test_h2_008_04(self, env, repeat):
         path = '/data-100m'
-        url = env.mkurl("https", "test1", f'{path}?03broken')
-        r = env.curl_get(url, 5, options=[
-            '--limit-rate', '2k', '-m', '2'
-        ])
-        assert r.exit_code != 0, f'{r}'
-        found = False
-        for line in open(TestRanges.LOGFILE).readlines():
-            e = json.loads(line)
-            if e['request'] == f'GET {path}?03broken HTTP/2.0':
-                assert e['bytes_rx_I'] > 0
-                assert e['bytes_resp_B'] == 100*1024*1024
-                assert e['bytes_tx_O'] > 1024
-                assert e['bytes_tx_O'] < 10*1024*1024  # curl buffers, but not that much
-                found = True
-                break
-        assert found, f'request not found in {self.LOGFILE}'
+        assert env.apache_restart() == 0
+        stats = self.get_server_status(env)
+        # we see the server uptime check request here
+        assert 1 == int(stats['Total Accesses']), f'{stats}'
+        assert 1 == int(stats['Total kBytes']), f'{stats}'
+        count = 10
+        url = env.mkurl("https", "test1", f'/data-100m?[0-{count-1}]')
+        r = env.curl_get(url, 5, options=['--http2', '-H', f'Range: bytes=0-{4096}'])
+        assert r.exit_code == 0, f'{r}'
+        stats = self.get_server_status(env)
+        # amount reported is larger than (count * 4k), the net payload,
+        # but does not exceed an additional 4k
+        assert (4*count)+1 <= int(stats['Total kBytes'])
+        assert (4*(count+1))+1 > int(stats['Total kBytes'])
+        # total requests is now at 1 from the start, plus the stat check,
+        # plus the count transfers we did.
+        assert (2+count) == int(stats['Total Accesses'])
+
+    def get_server_status(self, env):
+        status_url = env.mkurl("https", "test1", '/status?auto')
+        r = env.curl_get(status_url, 5)
+        assert r.exit_code == 0, f'{r}'
+        stats = {}
+        for line in r.stdout.splitlines():
+            m = re.match(r'([^:]+): (.*)', line)
+            if m:
+                stats[m.group(1)] = m.group(2)
+        return stats
 
     # upload and GET again using curl, compare to original content
     def curl_upload_and_verify(self, env, fname, options=None):
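
Illustration, not part of the patch: the new get_server_status() helper above verifies the fix by fetching mod_status's machine-readable `?auto` output and parsing its `Key: value` lines. The same check can be run by hand against a server carrying the fix. The sketch below assumes httpd is reachable at https://localhost:443 with a self-signed test certificate, that mod_status is loaded and mapped to /status as in the test configuration above, and that a curl with HTTP/2 support is on the PATH; adjust those to your own setup.

    #!/usr/bin/env python3
    # Standalone sketch: check that an HTTP/2 request raises server-status
    # "Total Accesses" by one, not two. BASE and the /status location are
    # assumptions about the local test setup.
    import re
    import subprocess

    BASE = 'https://localhost:443'  # assumption: local test server

    def curl(url):
        # -s: silent, -k: accept a self-signed cert, --http2: negotiate h2
        p = subprocess.run(['curl', '-sk', '--http2', url],
                           capture_output=True, text=True, check=True)
        return p.stdout

    def server_status():
        # mod_status ?auto output is machine-readable "Key: value" lines
        stats = {}
        for line in curl(f'{BASE}/status?auto').splitlines():
            m = re.match(r'([^:]+): (.*)', line)
            if m:
                stats[m.group(1)] = m.group(2)
        return stats

    before = int(server_status()['Total Accesses'])
    curl(f'{BASE}/')                  # one HTTP/2 request to any resource
    after = int(server_status()['Total Accesses'])
    # with the fix, the counter grows by 2: the request above plus the
    # second status query itself
    print(f'Total Accesses: {before} -> {after} (delta {after - before})')

With the fix, one HTTP/2 request plus the follow-up status query raise `Total Accesses` by exactly 2; before it, the removed c1_update_scoreboard() called ap_increment_counts() a second time when a stream was purged, so each HTTP/2 request was reported twice.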