import json
import logging
import os
+import re
import sys
-from datetime import datetime
from statistics import mean
-from typing import Dict, Any
+from typing import Dict, Any, Optional, List
from testenv import Env, Httpd, Nghttpx, CurlClient, Caddy, ExecResult
class ScoreCard:
- def __init__(self):
- self.verbose = 0
- self.env = None
- self.httpd = None
- self.nghttpx = None
- self.caddy = None
+    def __init__(self, env: Env,
+                 httpd: Optional[Httpd],
+                 nghttpx: Optional[Nghttpx],
+                 caddy: Optional[Caddy],
+                 verbose: int,
+                 curl_verbose: int):
+        """Create a scorecard runner for an already prepared test setup.
+
+        httpd, nghttpx and caddy may each be None when that server is
+        not part of the scoring run.
+        """
+        self.verbose = verbose
+        self.env = env
+        self.httpd = httpd
+        self.nghttpx = nghttpx
+        self.caddy = caddy
+        # curl runs silently unless --curl-verbose was given on the CLI
+        self._silent_curl = not curl_verbose
def info(self, msg):
if self.verbose > 0:
hs_samples = []
errors = []
for i in range(sample_size):
- curl = CurlClient(env=self.env, silent=True)
+ curl = CurlClient(env=self.env, silent=self._silent_curl)
url = f'https://{authority}/'
- r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True)
+ r = curl.http_download(urls=[url], alpn_proto=proto,
+ no_save=True)
if r.exit_code == 0 and len(r.stats) == 1:
c_samples.append(r.stats[0]['time_connect'])
hs_samples.append(r.stats[0]['time_appconnect'])
hs_samples = []
errors = []
for i in range(sample_size):
- curl = CurlClient(env=self.env, silent=True)
+ curl = CurlClient(env=self.env, silent=self._silent_curl)
args = [
'--http3-only' if proto == 'h3' else '--http2',
f'--{ipv}', f'https://{authority}/'
hs_samples.append(r.stats[0]['time_appconnect'])
else:
errors.append(f'exit={r.exit_code}')
- props[authority][f'{ipv}-connect'] = mean(c_samples) if len(c_samples) else -1
- props[authority][f'{ipv}-handshake'] = mean(hs_samples) if len(hs_samples) else -1
+ props[authority][f'{ipv}-connect'] = mean(c_samples) \
+ if len(c_samples) else -1
+ props[authority][f'{ipv}-handshake'] = mean(hs_samples) \
+ if len(hs_samples) else -1
props[authority][f'{ipv}-errors'] = errors
self.info('ok.\n')
return props
errors = []
self.info(f'single...')
for i in range(sample_size):
- curl = CurlClient(env=self.env, silent=True)
+ curl = CurlClient(env=self.env, silent=self._silent_curl)
r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
with_headers=False)
err = self._check_downloads(r, count)
url = f'{url}?[0-{count - 1}]'
self.info(f'serial...')
for i in range(sample_size):
- curl = CurlClient(env=self.env, silent=True)
+ curl = CurlClient(env=self.env, silent=self._silent_curl)
r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
with_headers=False)
err = self._check_downloads(r, count)
url = f'{url}?[0-{count - 1}]'
self.info(f'parallel...')
for i in range(sample_size):
- curl = CurlClient(env=self.env, silent=True)
+ curl = CurlClient(env=self.env, silent=self._silent_curl)
r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
with_headers=False,
- extra_args=['--parallel', '--parallel-max', str(count)])
+ extra_args=['--parallel',
+ '--parallel-max', str(count)])
err = self._check_downloads(r, count)
if err:
errors.append(err)
'errors': errors
}
- def download_url(self, url: str, proto: str, count: int):
- self.info(f' {url}: ')
+ def download_url(self, label: str, url: str, proto: str, count: int):
+ self.info(f' {count}x{label}: ')
props = {
'single': self.transfer_single(url=url, proto=proto, count=10),
'serial': self.transfer_serial(url=url, proto=proto, count=count),
- 'parallel': self.transfer_parallel(url=url, proto=proto, count=count),
+ 'parallel': self.transfer_parallel(url=url, proto=proto,
+ count=count),
}
self.info(f'ok.\n')
return props
- def downloads(self, proto: str, test_httpd: bool = True,
- test_caddy: bool = True) -> Dict[str, Any]:
+ def downloads(self, proto: str, count: int,
+ fsizes: List[int]) -> Dict[str, Any]:
scores = {}
- if test_httpd:
+ if self.httpd:
if proto == 'h3':
port = self.env.h3_port
via = 'nghttpx'
via = 'httpd'
descr = f'port {port}'
self.info(f'{via} downloads\n')
- self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='score1.data', fsize=1024*1024)
- url1 = f'https://{self.env.domain1}:{port}/score1.data'
- self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='score10.data', fsize=10*1024*1024)
- url10 = f'https://{self.env.domain1}:{port}/score10.data'
- self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='score100.data', fsize=100*1024*1024)
- url100 = f'https://{self.env.domain1}:{port}/score100.data'
scores[via] = {
'description': descr,
- '1MB': self.download_url(url=url1, proto=proto, count=50),
- '10MB': self.download_url(url=url10, proto=proto, count=50),
- '100MB': self.download_url(url=url100, proto=proto, count=50),
}
- if test_caddy and self.caddy:
+ for fsize in fsizes:
+ label = f'{int(fsize / 1024)}KB' if fsize < 1024*1024 else \
+ f'{int(fsize / (1024 * 1024))}MB'
+ fname = f'score{label}.data'
+ self._make_docs_file(docs_dir=self.httpd.docs_dir,
+ fname=fname, fsize=fsize)
+ url = f'https://{self.env.domain1}:{port}/{fname}'
+ results = self.download_url(label=label, url=url,
+ proto=proto, count=count)
+ scores[via][label] = results
+ if self.caddy:
port = self.caddy.port
via = 'caddy'
descr = f'port {port}'
self.info('caddy downloads\n')
- self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='score1.data', fsize=1024 * 1024)
- url1 = f'https://{self.env.domain1}:{port}/score1.data'
- self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='score10.data', fsize=10 * 1024 * 1024)
- url10 = f'https://{self.env.domain1}:{port}/score10.data'
- self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='score100.data', fsize=100 * 1024 * 1024)
- url100 = f'https://{self.env.domain1}:{port}/score100.data'
scores[via] = {
'description': descr,
- '1MB': self.download_url(url=url1, proto=proto, count=50),
- '10MB': self.download_url(url=url10, proto=proto, count=50),
- '100MB': self.download_url(url=url100, proto=proto, count=50),
}
+ for fsize in fsizes:
+ label = f'{int(fsize / 1024)}KB' if fsize < 1024*1024 else \
+ f'{int(fsize / (1024 * 1024))}MB'
+ fname = f'score{label}.data'
+ self._make_docs_file(docs_dir=self.caddy.docs_dir,
+ fname=fname, fsize=fsize)
+ url = f'https://{self.env.domain1}:{port}/{fname}'
+ results = self.download_url(label=label, url=url,
+ proto=proto, count=count)
+ scores[via][label] = results
return scores
- def do_requests(self, url: str, proto: str, count: int, max_parallel: int = 1):
+ def do_requests(self, url: str, proto: str, count: int,
+ max_parallel: int = 1):
sample_size = 1
samples = []
errors = []
url = f'{url}?[0-{count - 1}]'
- extra_args = ['--parallel', '--parallel-max', str(max_parallel)] if max_parallel > 1 else []
+ extra_args = ['--parallel', '--parallel-max', str(max_parallel)] \
+ if max_parallel > 1 else []
self.info(f'{max_parallel}...')
for i in range(sample_size):
curl = CurlClient(env=self.env)
if err:
errors.append(err)
else:
- for s in r.stats:
+ for _ in r.stats:
samples.append(count / r.duration.total_seconds())
return {
'count': count,
self.info(f' {url}: ')
props = {
'serial': self.do_requests(url=url, proto=proto, count=count),
- 'par-6': self.do_requests(url=url, proto=proto, count=count, max_parallel=6),
- 'par-25': self.do_requests(url=url, proto=proto, count=count, max_parallel=25),
- 'par-50': self.do_requests(url=url, proto=proto, count=count, max_parallel=50),
- 'par-100': self.do_requests(url=url, proto=proto, count=count, max_parallel=100),
+ 'par-6': self.do_requests(url=url, proto=proto, count=count,
+ max_parallel=6),
+ 'par-25': self.do_requests(url=url, proto=proto, count=count,
+ max_parallel=25),
+ 'par-50': self.do_requests(url=url, proto=proto, count=count,
+ max_parallel=50),
+ 'par-100': self.do_requests(url=url, proto=proto, count=count,
+ max_parallel=100),
}
self.info(f'ok.\n')
return props
- def requests(self, proto: str, test_httpd: bool = True,
- test_caddy: bool = True) -> Dict[str, Any]:
+ def requests(self, proto: str) -> Dict[str, Any]:
scores = {}
- if test_httpd:
+ if self.httpd:
if proto == 'h3':
port = self.env.h3_port
via = 'nghttpx'
via = 'httpd'
descr = f'port {port}'
self.info(f'{via} requests\n')
- self._make_docs_file(docs_dir=self.httpd.docs_dir, fname='reqs10.data', fsize=10*1024)
+ self._make_docs_file(docs_dir=self.httpd.docs_dir,
+ fname='reqs10.data', fsize=10*1024)
url1 = f'https://{self.env.domain1}:{port}/reqs10.data'
scores[via] = {
'description': descr,
'10KB': self.requests_url(url=url1, proto=proto, count=10000),
}
- if test_caddy and self.caddy:
+ if self.caddy:
port = self.caddy.port
via = 'caddy'
descr = f'port {port}'
self.info('caddy requests\n')
- self._make_docs_file(docs_dir=self.caddy.docs_dir, fname='req10.data', fsize=10 * 1024)
+ self._make_docs_file(docs_dir=self.caddy.docs_dir,
+ fname='req10.data', fsize=10 * 1024)
url1 = f'https://{self.env.domain1}:{port}/req10.data'
scores[via] = {
'description': descr,
def score_proto(self, proto: str,
handshakes: bool = True,
- downloads: bool = True,
- requests: bool = True,
- test_httpd: bool = True,
- test_caddy: bool = True):
+ downloads: Optional[List[int]] = None,
+ download_count: int = 50,
+ requests: bool = True):
self.info(f"scoring {proto}\n")
p = {}
if proto == 'h3':
elif proto == 'h1' or proto == 'http/1.1':
proto = 'http/1.1'
p['name'] = proto
- p['implementation'] = 'hyper' if self.env.curl_uses_lib('hyper') else 'native'
+ p['implementation'] = 'hyper' if self.env.curl_uses_lib('hyper')\
+ else 'native'
else:
raise ScoreCardException(f"unknown protocol: {proto}")
p['version'] = Env.curl_lib_version(p['implementation'])
score = {
- 'curl': self.env.curl_version(),
+ 'curl': self.env.curl_fullname(),
'os': self.env.curl_os(),
'protocol': p,
}
if handshakes:
score['handshakes'] = self.handshakes(proto=proto)
- if downloads:
+ if downloads and len(downloads) > 0:
score['downloads'] = self.downloads(proto=proto,
- test_httpd=test_httpd,
- test_caddy=test_caddy)
+ count=download_count,
+ fsizes=downloads)
if requests:
- score['requests'] = self.requests(proto=proto,
- test_httpd=test_httpd,
- test_caddy=test_caddy)
+ score['requests'] = self.requests(proto=proto)
self.info("\n")
return score
return f'{val:0.000f} r/s' if val >= 0 else '--'
def print_score(self, score):
- print(f'{score["protocol"]["name"].upper()} in curl {score["curl"]} ({score["os"]}) via '
- f'{score["protocol"]["implementation"]}/{score["protocol"]["version"]} ')
+ print(f'{score["protocol"]["name"].upper()} in {score["curl"]}')
if 'handshakes' in score:
print(f'{"Handshakes":<24} {"ipv4":25} {"ipv6":28}')
print(f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '
print(f' {key:<17} {self.fmt_ms(val["ipv4-connect"]):>12} '
f'{self.fmt_ms(val["ipv4-handshake"]):>12} '
f'{self.fmt_ms(val["ipv6-connect"]):>12} '
- f'{self.fmt_ms(val["ipv6-handshake"]):>12} {"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
+ f'{self.fmt_ms(val["ipv6-handshake"]):>12} '
+ f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
)
if 'downloads' in score:
print('Downloads')
- print(f' {"Server":<8} {"Size":>8} '
- f'{"Single":>12} {"Serial":>12} {"Parallel":>12} {"Errors":<20}')
+ print(f' {"Server":<8} {"Size":>8} {"Single":>12} {"Serial":>12}'
+ f' {"Parallel":>12} {"Errors":<20}')
skeys = {}
for dkey, dval in score["downloads"].items():
for k in dval.keys():
f'{self.fmt_reqs(sval["par-100"]["speed"]):>12} '
f' {"/".join(errors):<20}')
- def main(self):
- parser = argparse.ArgumentParser(prog='scorecard', description="""
- Run a range of tests to give a scorecard for a HTTP protocol
- 'h3' or 'h2' implementation in curl.
- """)
- parser.add_argument("-v", "--verbose", action='count', default=1,
- help="log more output on stderr")
- parser.add_argument("-j", "--json", action='store_true', default=False,
- help="print json instead of text")
- parser.add_argument("-H", "--handshakes", action='store_true', default=False,
- help="evaluate handshakes only")
- parser.add_argument("-d", "--downloads", action='store_true', default=False,
- help="evaluate downloads only")
- parser.add_argument("-r", "--requests", action='store_true', default=False,
- help="evaluate requests only")
- parser.add_argument("--httpd", action='store_true', default=False,
- help="evaluate httpd server only")
- parser.add_argument("--caddy", action='store_true', default=False,
- help="evaluate caddy server only")
- parser.add_argument("protocol", default='h2', nargs='?', help="Name of protocol to score")
- args = parser.parse_args()
-
- self.verbose = args.verbose
- if args.verbose > 0:
- console = logging.StreamHandler()
- console.setLevel(logging.INFO)
- console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
- logging.getLogger('').addHandler(console)
-
- protocol = args.protocol
- handshakes = True
- downloads = True
- requests = True
- test_httpd = protocol != 'h3'
+
+def parse_size(s):
+ m = re.match(r'(\d+)(mb|kb|gb)?', s, re.IGNORECASE)
+ if m is None:
+ raise Exception(f'unrecognized size: {s}')
+ size = int(m.group(1))
+ if m.group(2).lower() == 'kb':
+ size *= 1024
+ elif m.group(2).lower() == 'mb':
+ size *= 1024 * 1024
+ elif m.group(2).lower() == 'gb':
+ size *= 1024 * 1024 * 1024
+ return size
+
+
+def main():
+    """Command line entry point: score a curl HTTP protocol implementation.
+
+    Parses arguments, starts the selected test servers (httpd/nghttpx/caddy),
+    runs the requested measurements and prints the result as text or JSON.
+    """
+    parser = argparse.ArgumentParser(prog='scorecard', description="""
+        Run a range of tests to give a scorecard for a HTTP protocol
+        'h3' or 'h2' implementation in curl.
+        """)
+    parser.add_argument("-v", "--verbose", action='count', default=1,
+                        help="log more output on stderr")
+    parser.add_argument("-j", "--json", action='store_true',
+                        default=False, help="print json instead of text")
+    parser.add_argument("-H", "--handshakes", action='store_true',
+                        default=False, help="evaluate handshakes only")
+    parser.add_argument("-d", "--downloads", action='store_true',
+                        default=False, help="evaluate downloads only")
+    parser.add_argument("--download", action='append', type=str,
+                        default=None, help="evaluate download size")
+    parser.add_argument("--download-count", action='store', type=int,
+                        default=50, help="perform that many downloads")
+    parser.add_argument("-r", "--requests", action='store_true',
+                        default=False, help="evaluate requests only")
+    parser.add_argument("--httpd", action='store_true', default=False,
+                        help="evaluate httpd server only")
+    parser.add_argument("--caddy", action='store_true', default=False,
+                        help="evaluate caddy server only")
+    parser.add_argument("--curl-verbose", action='store_true',
+                        default=False, help="run curl with `-v`")
+    parser.add_argument("protocol", default='h2', nargs='?',
+                        help="Name of protocol to score")
+    args = parser.parse_args()
+
+    if args.verbose > 0:
+        console = logging.StreamHandler()
+        console.setLevel(logging.INFO)
+        console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
+        logging.getLogger('').addHandler(console)
+
+    protocol = args.protocol
+    # defaults: score everything, downloads at 1MB/10MB/100MB
+    handshakes = True
+    downloads = [1024*1024, 10*1024*1024, 100*1024*1024]
+    requests = True
+    test_httpd = protocol != 'h3'
+    test_caddy = True
+    # -H/-d/--download/-r narrow the run to a single measurement category
+    if args.handshakes:
+        downloads = None
+        requests = False
+    if args.downloads:
+        handshakes = False
+        requests = False
+    if args.download:
+        downloads = sorted([parse_size(x) for x in args.download])
+        handshakes = False
+        requests = False
+    if args.requests:
+        handshakes = False
+        downloads = None
+    if args.caddy:
        test_caddy = True
-        if args.handshakes:
-            downloads = False
-            requests = False
-        if args.downloads:
-            handshakes = False
-            requests = False
-        if args.requests:
-            handshakes = False
-            downloads = False
-        if args.caddy:
-            test_caddy = True
-            test_httpd = False
-        if args.httpd:
-            test_caddy = False
-            test_httpd = True
-
-        rv = 0
-        self.env = Env()
-        self.env.setup()
-        self.env.test_timeout = None
-        self.httpd = None
-        self.nghttpx = None
-        self.caddy = None
-        try:
-            self.httpd = Httpd(env=self.env)
-            assert self.httpd.exists(), f'httpd not found: {self.env.httpd}'
-            self.httpd.clear_logs()
-            assert self.httpd.start()
+        test_httpd = False
+    if args.httpd:
+        test_caddy = False
+        test_httpd = True
+
+    rv = 0
+    # set up the test environment and start only the selected servers
+    env = Env()
+    env.setup()
+    env.test_timeout = None
+    httpd = None
+    nghttpx = None
+    caddy = None
+    try:
+        if test_httpd:
+            httpd = Httpd(env=env)
+            assert httpd.exists(), \
+                f'httpd not found: {env.httpd}'
+            httpd.clear_logs()
+            assert httpd.start()
        if 'h3' == protocol:
-            self.nghttpx = Nghttpx(env=self.env)
-            self.nghttpx.clear_logs()
-            assert self.nghttpx.start()
-        if self.env.caddy:
-            self.caddy = Caddy(env=self.env)
-            self.caddy.clear_logs()
-            assert self.caddy.start()
-
-        score = self.score_proto(proto=protocol, handshakes=handshakes,
-                                 downloads=downloads,
-                                 requests=requests,
-                                 test_caddy=test_caddy,
-                                 test_httpd=test_httpd)
-        if args.json:
-            print(json.JSONEncoder(indent=2).encode(score))
-        else:
-            self.print_score(score)
-
-        except ScoreCardException as ex:
-            sys.stderr.write(f"ERROR: {str(ex)}\n")
-            rv = 1
-        except KeyboardInterrupt:
-            log.warning("aborted")
-            rv = 1
-        finally:
-            if self.caddy:
-                self.caddy.stop()
-                self.caddy = None
-            if self.nghttpx:
-                self.nghttpx.stop(wait_dead=False)
-            if self.httpd:
-                self.httpd.stop()
-                self.httpd = None
-            sys.exit(rv)
+            nghttpx = Nghttpx(env=env)
+            nghttpx.clear_logs()
+            assert nghttpx.start()
+        if test_caddy and env.caddy:
+            caddy = Caddy(env=env)
+            caddy.clear_logs()
+            assert caddy.start()
+
+        card = ScoreCard(env=env, httpd=httpd, nghttpx=nghttpx, caddy=caddy,
+                         verbose=args.verbose, curl_verbose=args.curl_verbose)
+        score = card.score_proto(proto=protocol,
+                                 handshakes=handshakes,
+                                 downloads=downloads,
+                                 download_count=args.download_count,
+                                 requests=requests)
+        if args.json:
+            print(json.JSONEncoder(indent=2).encode(score))
+        else:
+            card.print_score(score)
+
+    except ScoreCardException as ex:
+        sys.stderr.write(f"ERROR: {str(ex)}\n")
+        rv = 1
+    except KeyboardInterrupt:
+        log.warning("aborted")
+        rv = 1
+    finally:
+        # stop servers even after errors, regardless of which were started
+        if caddy:
+            caddy.stop()
+        if nghttpx:
+            nghttpx.stop(wait_dead=False)
+        if httpd:
+            httpd.stop()
+    sys.exit(rv)
if __name__ == "__main__":
-    ScoreCard().main()
+    main()