pytests: scorecard upload tests
author    Stefan Eissing <stefan@eissing.org>
          Wed, 17 Jul 2024 10:26:40 +0000 (12:26 +0200)
committer Daniel Stenberg <daniel@haxx.se>
          Thu, 18 Jul 2024 12:45:40 +0000 (14:45 +0200)
- add upload tests to scorecard, invoke with
  > python3 tests/http/scorecard.py -u h1|h2|h3
- add a reverse proxy setup from Caddy to httpd for the
  upload tests, since Caddy has no PUT/POST handling of its own
- add caddy tests in test_08 for POST/PUT
- increase the read buffer in mod_curltest to allow larger reads

Closes #14208
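
For reference, the new options added below (-u/--uploads, --upload,
--upload-count) combine with the protocol argument like this
(hypothetical sizes; assumes parse_size accepts k/m/g suffixes):

  > python3 tests/http/scorecard.py -u --upload 1m,10m --upload-count 20 h2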

tests/http/scorecard.py
tests/http/test_08_caddy.py
tests/http/testenv/caddy.py
tests/http/testenv/httpd.py
tests/http/testenv/mod_curltest/mod_curltest.c

diff --git a/tests/http/scorecard.py b/tests/http/scorecard.py
index 8b615179205f60d0e43f9f4337c5032e3edf4fff..4e4c2c8cb1e7f4e49e360ee13c9a453abf63cb3a 100644
--- a/tests/http/scorecard.py
+++ b/tests/http/scorecard.py
@@ -106,7 +106,7 @@ class ScoreCard:
             while flen < fsize:
                 fd.write(data1k)
                 flen += len(data1k)
-        return flen
+        return fpath
 
     def _check_downloads(self, r: ExecResult, count: int):
         error = ''
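
The one-line change above makes the helper return the generated file's
path instead of the number of bytes written, so the new upload functions
can hand it to curl as the request body. A standalone sketch of the full
helper, reconstructed around the visible loop (the signature and the
data1k fill pattern are assumptions):

    import os

    def make_docs_file(docs_dir: str, fname: str, fsize: int) -> str:
        fpath = os.path.join(docs_dir, fname)   # assumed path construction
        data1k = 1024 * 'x'                     # assumed 1KB fill pattern
        flen = 0
        with open(fpath, 'w') as fd:
            while flen < fsize:                 # loop as shown in the hunk
                fd.write(data1k)
                flen += len(data1k)
        return fpath                            # the fix: path, not length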
@@ -261,6 +261,163 @@ class ScoreCard:
                 scores[via][label] = results
         return scores
 
+    def _check_uploads(self, r: ExecResult, count: int):
+        error = ''
+        if r.exit_code != 0:
+            error += f'exit={r.exit_code} '
+        if r.exit_code != 0 or len(r.stats) != count:
+            error += f'stats={len(r.stats)}/{count} '
+        fails = [s for s in r.stats if s['response_code'] != 200]
+        if len(fails) > 0:
+            error += f'{len(fails)} failed'
+        for f in fails:
+            error += f'[{f["response_code"]}]'
+        return error if len(error) > 0 else None
+
+    def upload_single(self, url: str, proto: str, fpath: str, count: int):
+        sample_size = count
+        count = 1
+        samples = []
+        errors = []
+        profiles = []
+        self.info(f'single...')
+        for i in range(sample_size):
+            curl = CurlClient(env=self.env, silent=self._silent_curl)
+            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=proto,
+                              with_headers=False, with_profile=True)
+            err = self._check_uploads(r, count)
+            if err:
+                errors.append(err)
+            else:
+                total_size = sum([s['size_upload'] for s in r.stats])
+                samples.append(total_size / r.duration.total_seconds())
+                profiles.append(r.profile)
+        return {
+            'count': count,
+            'samples': sample_size,
+            'max-parallel': 1,
+            'speed': mean(samples) if len(samples) else -1,
+            'errors': errors,
+            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
+        }
+
+    def upload_serial(self, url: str, proto: str, fpath: str, count: int):
+        sample_size = 1
+        samples = []
+        errors = []
+        profiles = []
+        url = f'{url}?id=[0-{count - 1}]'
+        self.info(f'serial...')
+        for i in range(sample_size):
+            curl = CurlClient(env=self.env, silent=self._silent_curl)
+            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=proto,
+                              with_headers=False, with_profile=True)
+            err = self._check_uploads(r, count)
+            if err:
+                errors.append(err)
+            else:
+                total_size = sum([s['size_upload'] for s in r.stats])
+                samples.append(total_size / r.duration.total_seconds())
+                profiles.append(r.profile)
+        return {
+            'count': count,
+            'samples': sample_size,
+            'max-parallel': 1,
+            'speed': mean(samples) if len(samples) else -1,
+            'errors': errors,
+            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
+        }
+
+    def upload_parallel(self, url: str, proto: str, fpath: str, count: int):
+        sample_size = 1
+        samples = []
+        errors = []
+        profiles = []
+        max_parallel = count
+        url = f'{url}?id=[0-{count - 1}]'
+        self.info(f'parallel...')
+        for i in range(sample_size):
+            curl = CurlClient(env=self.env, silent=self._silent_curl)
+            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=proto,
+                              with_headers=False, with_profile=True,
+                              extra_args=[
+                                  '--parallel',
+                                  '--parallel-max', str(max_parallel)
+                              ])
+            err = self._check_uploads(r, count)
+            if err:
+                errors.append(err)
+            else:
+                total_size = sum([s['size_upload'] for s in r.stats])
+                samples.append(total_size / r.duration.total_seconds())
+                profiles.append(r.profile)
+        return {
+            'count': count,
+            'samples': sample_size,
+            'max-parallel': max_parallel,
+            'speed': mean(samples) if len(samples) else -1,
+            'errors': errors,
+            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
+        }
+
+    def upload_url(self, label: str, url: str, fpath: str, proto: str, count: int):
+        self.info(f'  {count}x{label}: ')
+        props = {
+            'single': self.upload_single(url=url, proto=proto, fpath=fpath,
+                                         count=10),
+        }
+        if count > 1:
+            props['serial'] = self.upload_serial(url=url, proto=proto,
+                                                 fpath=fpath, count=count)
+            props['parallel'] = self.upload_parallel(url=url, proto=proto,
+                                                     fpath=fpath, count=count)
+        self.info(f'ok.\n')
+        return props
+
+    def uploads(self, proto: str, count: int,
+                fsizes: List[int]) -> Dict[str, Any]:
+        scores = {}
+        if self.httpd:
+            if proto == 'h3':
+                port = self.env.h3_port
+                via = 'nghttpx'
+                descr = f'port {port}, proxying httpd'
+            else:
+                port = self.env.https_port
+                via = 'httpd'
+                descr = f'port {port}'
+            self.info(f'{via} uploads\n')
+            scores[via] = {
+                'description': descr,
+            }
+            for fsize in fsizes:
+                label = self.fmt_size(fsize)
+                fname = f'upload{label}.data'
+                fpath = self._make_docs_file(docs_dir=self.env.gen_dir,
+                                             fname=fname, fsize=fsize)
+                url = f'https://{self.env.domain1}:{port}/curltest/put'
+                results = self.upload_url(label=label, url=url, fpath=fpath,
+                                          proto=proto, count=count)
+                scores[via][label] = results
+        if self.caddy:
+            port = self.caddy.port
+            via = 'caddy'
+            descr = f'port {port}'
+            self.info('caddy uploads\n')
+            scores[via] = {
+                'description': descr,
+            }
+            for fsize in fsizes:
+                label = self.fmt_size(fsize)
+                fname = f'upload{label}.data'
+                fpath = self._make_docs_file(docs_dir=self.env.gen_dir,
+                                             fname=fname, fsize=fsize)
+                url = f'https://{self.env.domain2}:{port}/curltest/put'
+                results = self.upload_url(label=label, url=url, fpath=fpath,
+                                          proto=proto, count=count)
+                scores[via][label] = results
+        return scores
+
     def do_requests(self, url: str, proto: str, count: int,
                     max_parallel: int = 1):
         sample_size = 1
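
For orientation, the nested dict that uploads() builds has the following
shape; the keys mirror the code above, while the port, label and numbers
are made up ('speed' is the mean upload rate in bytes/second over the
samples):

    scores = {
        'httpd': {
            'description': 'port 443',   # hypothetical port
            '1MB': {                     # label from fmt_size(fsize)
                'single':   {'count': 1,  'samples': 10, 'max-parallel': 1,
                             'speed': 5.2e7, 'errors': [], 'stats': {}},
                'serial':   {'count': 50, 'samples': 1,  'max-parallel': 1,
                             'speed': 6.0e7, 'errors': [], 'stats': {}},
                'parallel': {'count': 50, 'samples': 1,  'max-parallel': 50,
                             'speed': 9.1e7, 'errors': [], 'stats': {}},
            },
        },
    }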
@@ -346,6 +503,8 @@ class ScoreCard:
                     handshakes: bool = True,
                     downloads: Optional[List[int]] = None,
                     download_count: int = 50,
+                    uploads: Optional[List[int]] = None,
+                    upload_count: int = 50,
                     req_count=5000,
                     requests: bool = True):
         self.info(f"scoring {proto}\n")
@@ -389,6 +548,10 @@ class ScoreCard:
             score['downloads'] = self.downloads(proto=proto,
                                                 count=download_count,
                                                 fsizes=downloads)
+        if uploads and len(uploads) > 0:
+            score['uploads'] = self.uploads(proto=proto,
+                                            count=upload_count,
+                                            fsizes=uploads)
         if requests:
             score['requests'] = self.requests(proto=proto, req_count=req_count)
         self.info("\n")
@@ -470,6 +633,53 @@ class ScoreCard:
                     else:
                         print(f' {"-":^20}')
 
+        if 'uploads' in score:
+            # get the key names of all sizes and measurements made
+            sizes = []
+            measures = []
+            m_names = {}
+            mcol_width = 12
+            mcol_sw = 17
+            for server, server_score in score['uploads'].items():
+                for sskey, ssval in server_score.items():
+                    if isinstance(ssval, str):
+                        continue
+                    if sskey not in sizes:
+                        sizes.append(sskey)
+                    for mkey, mval in server_score[sskey].items():
+                        if mkey not in measures:
+                            measures.append(mkey)
+                            m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'
+
+            print('Uploads')
+            print(f'  {"Server":<8} {"Size":>8}', end='')
+            for m in measures: print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
+            print(f' {"Errors":^20}')
+
+            for server in score['uploads']:
+                for size in sizes:
+                    size_score = score['uploads'][server][size]
+                    print(f'  {server:<8} {size:>8}', end='')
+                    errors = []
+                    for key, val in size_score.items():
+                        if 'errors' in val:
+                            errors.extend(val['errors'])
+                    for m in measures:
+                        if m in size_score:
+                            print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
+                            stats = size_score[m]["stats"]
+                            if 'cpu' in stats:
+                                s = f'[{stats["cpu"]:>.1f}%/{self.fmt_size(stats["rss"])}]'
+                            else:
+                                s = '[???/???]'
+                            print(f' {s:<{mcol_sw}}', end='')
+                        else:
+                            print(' '*mcol_width, end='')
+                    if len(errors):
+                        print(f' {"/".join(errors):<20}')
+                    else:
+                        print(f' {"-":^20}')
+
         if 'requests' in score:
             sizes = []
             measures = []
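
Rendered, the block above prints a table roughly like the following
(values, column widths and the MB/s formatting via fmt_mbs() are
illustrative only):

    Uploads
      Server       Size  single(1x1) [cpu/rss]      serial(50x1) [cpu/rss]      parallel(50x50) [cpu/rss]      Errors
      httpd         1MB    50.0 MB/s [12.3%/24MB]     57.1 MB/s [14.8%/25MB]        91.4 MB/s [31.0%/28MB]        -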
@@ -551,6 +761,12 @@ def main():
                         default=50, help="perform that many downloads")
     parser.add_argument("--download-parallel", action='store', type=int,
                         default=0, help="perform that many downloads in parallel (default all)")
+    parser.add_argument("-u", "--uploads", action='store_true',
+                        default=False, help="evaluate uploads")
+    parser.add_argument("--upload", action='append', type=str,
+                        default=None, help="evaluate upload size")
+    parser.add_argument("--upload-count", action='store', type=int,
+                        default=50, help="perform that many uploads")
     parser.add_argument("-r", "--requests", action='store_true',
                         default=False, help="evaluate requests")
     parser.add_argument("--request-count", action='store', type=int,
@@ -578,11 +794,20 @@ def main():
         downloads = []
         for x in args.download:
             downloads.extend([parse_size(s) for s in x.split(',')])
+
+    uploads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
+    if args.upload is not None:
+        uploads = []
+        for x in args.upload:
+            uploads.extend([parse_size(s) for s in x.split(',')])
+
     requests = True
-    if args.downloads or args.requests or args.handshakes:
+    if args.downloads or args.uploads or args.requests or args.handshakes:
         handshakes = args.handshakes
         if not args.downloads:
             downloads = None
+        if not args.uploads:
+            uploads = None
         requests = args.requests
 
     test_httpd = protocol != 'h3'
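
The repeatable --upload option takes comma-separated sizes; the loop
above flattens them into a byte list. A runnable sketch, with a stand-in
for parse_size (the real one lives in the test environment; its suffix
handling is an assumption here):

    def parse_size(s: str) -> int:   # stand-in, not the real implementation
        units = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3}
        return int(s[:-1]) * units[s[-1]] if s[-1] in units else int(s)

    # hypothetical: --upload 1m --upload 10m,100m
    arg_values = ['1m', '10m,100m']
    uploads = []
    for x in arg_values:
        uploads.extend([parse_size(s) for s in x.split(',')])
    # uploads == [1048576, 10485760, 104857600], the built-in default list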
@@ -599,14 +824,14 @@ def main():
     nghttpx = None
     caddy = None
     try:
-        if test_httpd:
+        if test_httpd or (test_caddy and uploads):
             print(f'httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}')
             httpd = Httpd(env=env)
             assert httpd.exists(), \
                 f'httpd not found: {env.httpd}'
             httpd.clear_logs()
             assert httpd.start()
-            if 'h3' == protocol:
+            if test_httpd and 'h3' == protocol:
                 nghttpx = NghttpxQuic(env=env)
                 nghttpx.clear_logs()
                 assert nghttpx.start()
@@ -616,13 +841,16 @@ def main():
             caddy.clear_logs()
             assert caddy.start()
 
-        card = ScoreCard(env=env, httpd=httpd, nghttpx=nghttpx, caddy=caddy,
+        card = ScoreCard(env=env, httpd=httpd if test_httpd else None,
+                         nghttpx=nghttpx, caddy=caddy if test_caddy else None,
                          verbose=args.verbose, curl_verbose=args.curl_verbose,
                          download_parallel=args.download_parallel)
         score = card.score_proto(proto=protocol,
                                  handshakes=handshakes,
                                  downloads=downloads,
                                  download_count=args.download_count,
+                                 uploads=uploads,
+                                 upload_count=args.upload_count,
                                  req_count=args.request_count,
                                  requests=requests)
         if args.json:
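
In short: httpd now also has to run for caddy-only upload scoring, since
the new Caddy site block reverse-proxies PUT/POST to it, while nghttpx
keeps fronting httpd for h3 only. A condensed restatement with sample
values (the caddy branch is assumed from the unchanged surrounding code):

    protocol, uploads = 'h3', [1048576]      # hypothetical run
    test_httpd = protocol != 'h3'            # as set above
    test_caddy = True                        # assumed default
    start_httpd   = test_httpd or (test_caddy and bool(uploads))
    start_nghttpx = test_httpd and protocol == 'h3'
    start_caddy   = test_caddy
    # h3 + caddy + uploads -> httpd starts as proxy backend, nghttpx does not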
diff --git a/tests/http/test_08_caddy.py b/tests/http/test_08_caddy.py
index 4a75ef552367832b665b799ff55121d72d5f6569..1e07664124890278d6df2d7a5835817f6ee6e611 100644
--- a/tests/http/test_08_caddy.py
+++ b/tests/http/test_08_caddy.py
@@ -61,6 +61,7 @@ class TestCaddy:
         self._make_docs_file(docs_dir=caddy.docs_dir, fname='data5.data', fsize=5*1024*1024)
         self._make_docs_file(docs_dir=caddy.docs_dir, fname='data10.data', fsize=10*1024*1024)
         self._make_docs_file(docs_dir=caddy.docs_dir, fname='data100.data', fsize=100*1024*1024)
+        env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
 
     # download 1 file
     @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -167,10 +168,9 @@ class TestCaddy:
         else:
             assert r.total_connects == 1, r.dump_logs()
 
-    # upload data parallel, check that they were echoed
-    @pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
-    @pytest.mark.parametrize("proto", ['h2', 'h3'])
-    def test_08_06_upload_parallel(self, env: Env, caddy, repeat, proto):
+    # post data in parallel, check that the data is echoed
+    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    def test_08_06_post_parallel(self, env: Env, httpd, caddy, repeat, proto):
         if proto == 'h3' and not env.have_h3():
             pytest.skip("h3 not supported")
         if proto == 'h3' and env.curl_uses_lib('msh3'):
@@ -179,8 +179,29 @@ class TestCaddy:
         count = 20
         data = '0123456789'
         curl = CurlClient(env=env)
-        url = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
+        url = f'https://{env.domain2}:{caddy.port}/curltest/echo?id=[0-{count-1}]'
         r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
                              extra_args=['--parallel'])
-        exp_status = 405 if env.caddy_is_at_least('2.7.0') else 200
-        r.check_stats(count=count, http_status=exp_status, exitcode=0)
+        r.check_stats(count=count, http_status=200, exitcode=0)
+        for i in range(0,count):
+            respdata = open(curl.response_file(i)).readlines()
+            assert respdata == [data]
+
+    # put a large file, check that its length is echoed
+    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+    def test_08_07_put_large(self, env: Env, httpd, caddy, repeat, proto):
+        if proto == 'h3' and not env.have_h3():
+            pytest.skip("h3 not supported")
+        if proto == 'h3' and env.curl_uses_lib('msh3'):
+            pytest.skip("msh3 stalls here")
+        # limit since we use a separate connection in h1
+        count = 1
+        fdata = os.path.join(env.gen_dir, 'data-10m')
+        curl = CurlClient(env=env)
+        url = f'https://{env.domain2}:{caddy.port}/curltest/put?id=[0-{count-1}]'
+        r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto)
+        exp_data = [f'{os.path.getsize(fdata)}']
+        r.check_response(count=count, http_status=200)
+        for i in range(count):
+            respdata = open(curl.response_file(i)).readlines()
+            assert respdata == exp_data
diff --git a/tests/http/testenv/caddy.py b/tests/http/testenv/caddy.py
index ea1343a950fb51395933c5a93e2e21bf9e8e1564..12f38a75cb2758748fa35acdea5af505bf949b71 100644
--- a/tests/http/testenv/caddy.py
+++ b/tests/http/testenv/caddy.py
@@ -141,6 +141,8 @@ class Caddy:
     def _write_config(self):
         domain1 = self.env.domain1
         creds1 = self.env.get_credentials(domain1)
+        domain2 = self.env.domain2
+        creds2 = self.env.get_credentials(domain2)
         self._mkpath(self._docs_dir)
         self._mkpath(self._tmp_dir)
         with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
@@ -163,5 +165,10 @@ class Caddy:
                 f'  }}',
                 f'  tls {creds1.cert_file} {creds1.pkey_file}',
                 f'}}',
+                f'{domain2} {{',
+                f'  reverse_proxy /* http://localhost:{self.env.http_port} {{',
+                f'  }}',
+                f'  tls {creds2.cert_file} {creds2.pkey_file}',
+                f'}}',
             ]
             fd.write("\n".join(conf))
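
Instantiated, the added template lines yield a second site block in the
generated Caddyfile that forwards everything for domain2 to the plain-HTTP
httpd instance; the hostname, port and file paths below are placeholders:

    two.example.org {
      reverse_proxy /* http://localhost:8080 {
      }
      tls /path/to/two.crt /path/to/two.key
    }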
diff --git a/tests/http/testenv/httpd.py b/tests/http/testenv/httpd.py
index a8247de4b2fa0c6bff0afb566cc6eb4bc023406e..4771ea36069c65b05d4dc4161610c81e3b8f4d1f 100644
--- a/tests/http/testenv/httpd.py
+++ b/tests/http/testenv/httpd.py
@@ -313,6 +313,18 @@
                 f'</VirtualHost>',
                 f'',
             ])
+            conf.extend([  # plain http host for domain2
+                f'<VirtualHost *:{self.env.http_port}>',
+                f'    ServerName {domain2}',
+                f'    ServerAlias localhost',
+                f'    DocumentRoot "{self._docs_dir}"',
+                f'    Protocols h2c http/1.1',
+            ])
+            conf.extend(self._curltest_conf(domain2))
+            conf.extend([
+                f'</VirtualHost>',
+                f'',
+            ])
             conf.extend([  # https host for domain2, no h2
                 f'<VirtualHost *:{self.env.https_port}>',
                 f'    ServerName {domain2}',
diff --git a/tests/http/testenv/mod_curltest/mod_curltest.c b/tests/http/testenv/mod_curltest/mod_curltest.c
index 1c495a2c033276732047c642dc7341b8113dbe51..58b9525631c9fc58192ffaec677dd309b9d4e848 100644
--- a/tests/http/testenv/mod_curltest/mod_curltest.c
+++ b/tests/http/testenv/mod_curltest/mod_curltest.c
@@ -522,7 +522,7 @@ static int curltest_put_handler(request_rec *r)
   apr_bucket_brigade *bb;
   apr_bucket *b;
   apr_status_t rv;
-  char buffer[16*1024];
+  char buffer[128*1024];
   const char *ct;
   apr_off_t rbody_len = 0;
   const char *s_rbody_len;