There is a lot of [`pytest` documentation](https://docs.pytest.org/) with examples. No use in repeating that here. Assuming you are somewhat familiar with it, it is useful to know how *this* general test suite is set up, especially if you want to add test cases.
+Please run [black](https://pypi.org/project/black/) when adding new tests.
+
### Servers
In `conftest.py` 3 "fixtures" are defined that are used by all test cases:
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
import pytest
-sys.path.append(os.path.join(os.path.dirname(__file__), '.'))
+sys.path.append(os.path.join(os.path.dirname(__file__), "."))
from testenv import Env, Nghttpx, Httpd, NghttpxQuic, NghttpxFwd
+
def pytest_report_header(config):
# Env inits its base properties only once, we can report them here
env = Env()
report = [
- f'Testing curl {env.curl_version()}',
- f' platform: {platform.platform()}',
- f' curl: Version: {env.curl_version_string()}',
- f' curl: Features: {env.curl_features_string()}',
- f' curl: Protocols: {env.curl_protocols_string()}',
- f' httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}',
- f' httpd-proxy: {env.httpd_version()}, http:{env.proxy_port} https:{env.proxys_port}'
+ f"Testing curl {env.curl_version()}",
+ f" platform: {platform.platform()}",
+ f" curl: Version: {env.curl_version_string()}",
+ f" curl: Features: {env.curl_features_string()}",
+ f" curl: Protocols: {env.curl_protocols_string()}",
+ f" httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}",
+ f" httpd-proxy: {env.httpd_version()}, http:{env.proxy_port} https:{env.proxys_port}",
]
if env.have_h3():
- report.extend([
- f' nghttpx: {env.nghttpx_version()}, h3:{env.https_port}'
- ])
+ report.extend([f" nghttpx: {env.nghttpx_version()}, h3:{env.https_port}"])
if env.has_caddy():
- report.extend([
- f' Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}'
- ])
+ report.extend(
+ [
+ f" Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}"
+ ]
+ )
if env.has_vsftpd():
- report.extend([
- f' VsFTPD: {env.vsftpd_version()}, ftp:{env.ftp_port}, ftps:{env.ftps_port}'
- ])
- buildinfo_fn = os.path.join(env.build_dir, 'buildinfo.txt')
+ report.extend(
+ [
+ f" VsFTPD: {env.vsftpd_version()}, ftp:{env.ftp_port}, ftps:{env.ftps_port}"
+ ]
+ )
+ buildinfo_fn = os.path.join(env.build_dir, "buildinfo.txt")
if os.path.exists(buildinfo_fn):
- with open(buildinfo_fn, 'r') as file_in:
+ with open(buildinfo_fn, "r") as file_in:
for line in file_in:
line = line.strip()
- if line and not line.startswith('#'):
+ if line and not line.startswith("#"):
report.extend([line])
- return '\n'.join(report)
+ return "\n".join(report)
+
# TODO: remove this and repeat argument everywhere, pytest-repeat can be used to repeat tests
def pytest_generate_tests(metafunc):
if "repeat" in metafunc.fixturenames:
- metafunc.parametrize('repeat', [0])
+ metafunc.parametrize("repeat", [0])
+
@pytest.fixture(scope="package")
def env(pytestconfig) -> Env:
env = Env(pytestconfig=pytestconfig)
level = logging.DEBUG if env.verbose > 0 else logging.INFO
- logging.getLogger('').setLevel(level=level)
- if not env.curl_has_protocol('http'):
+ logging.getLogger("").setLevel(level=level)
+ if not env.curl_has_protocol("http"):
pytest.skip("curl built without HTTP support")
- if not env.curl_has_protocol('https'):
+ if not env.curl_has_protocol("https"):
pytest.skip("curl built without HTTPS support")
if env.setup_incomplete():
pytest.skip(env.incomplete_reason())
env.setup()
return env
+
@pytest.fixture(scope="package", autouse=True)
def log_global_env_facts(record_testsuite_property, env):
record_testsuite_property("http-port", env.http_port)
-@pytest.fixture(scope='package')
+@pytest.fixture(scope="package")
def httpd(env) -> Generator[Httpd, None, None]:
httpd = Httpd(env=env)
if not httpd.exists():
- pytest.skip(f'httpd not found: {env.httpd}')
+ pytest.skip(f"httpd not found: {env.httpd}")
httpd.clear_logs()
if not httpd.start():
- pytest.fail(f'failed to start httpd: {env.httpd}')
+ pytest.fail(f"failed to start httpd: {env.httpd}")
yield httpd
httpd.stop()
-@pytest.fixture(scope='package')
+@pytest.fixture(scope="package")
def nghttpx(env, httpd) -> Generator[Nghttpx, None, None]:
nghttpx = NghttpxQuic(env=env)
if nghttpx.exists() and (env.have_h3() or nghttpx.https_port > 0):
yield nghttpx
nghttpx.stop()
-@pytest.fixture(scope='package')
+
+@pytest.fixture(scope="package")
def nghttpx_fwd(env, httpd) -> Generator[Nghttpx, None, None]:
nghttpx = NghttpxFwd(env=env)
if nghttpx.exists() and (env.have_h3() or nghttpx.https_port > 0):
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class ScoreCard:
-
- def __init__(self, env: Env,
- protocol: str,
- server_descr: str,
- server_port: int,
- verbose: int,
- curl_verbose: int,
- download_parallel: int = 0,
- server_addr: Optional[str] = None):
+ def __init__(
+ self,
+ env: Env,
+ protocol: str,
+ server_descr: str,
+ server_port: int,
+ verbose: int,
+ curl_verbose: int,
+ download_parallel: int = 0,
+ server_addr: Optional[str] = None,
+ ):
self.verbose = verbose
self.env = env
self.protocol = protocol
def handshakes(self) -> Dict[str, Any]:
props = {}
sample_size = 5
- self.info('TLS Handshake\n')
- for authority in [
- 'curl.se', 'google.com', 'cloudflare.com', 'nghttp2.org'
- ]:
- self.info(f' {authority}...')
+ self.info("TLS Handshake\n")
+ for authority in ["curl.se", "google.com", "cloudflare.com", "nghttp2.org"]:
+ self.info(f" {authority}...")
props[authority] = {}
- for ipv in ['ipv4', 'ipv6']:
- self.info(f'{ipv}...')
+ for ipv in ["ipv4", "ipv6"]:
+ self.info(f"{ipv}...")
c_samples = []
hs_samples = []
errors = []
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
+ curl = CurlClient(
+ env=self.env,
+ silent=self._silent_curl,
+ server_addr=self.server_addr,
+ )
args = [
- '--http3-only' if self.protocol == 'h3' else '--http2',
- f'--{ipv}', f'https://{authority}/'
+ "--http3-only" if self.protocol == "h3" else "--http2",
+ f"--{ipv}",
+ f"https://{authority}/",
]
r = curl.run_direct(args=args, with_stats=True)
if r.exit_code == 0 and len(r.stats) == 1:
- c_samples.append(r.stats[0]['time_connect'])
- hs_samples.append(r.stats[0]['time_appconnect'])
+ c_samples.append(r.stats[0]["time_connect"])
+ hs_samples.append(r.stats[0]["time_appconnect"])
else:
- errors.append(f'exit={r.exit_code}')
- props[authority][f'{ipv}-connect'] = mean(c_samples) \
- if len(c_samples) else -1
- props[authority][f'{ipv}-handshake'] = mean(hs_samples) \
- if len(hs_samples) else -1
- props[authority][f'{ipv}-errors'] = errors
- self.info('ok.\n')
+ errors.append(f"exit={r.exit_code}")
+ props[authority][f"{ipv}-connect"] = (
+ mean(c_samples) if len(c_samples) else -1
+ )
+ props[authority][f"{ipv}-handshake"] = (
+ mean(hs_samples) if len(hs_samples) else -1
+ )
+ props[authority][f"{ipv}-errors"] = errors
+ self.info("ok.\n")
return props
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
- data1k = 1024*'x'
+ data1k = 1024 * "x"
flen = 0
- with open(fpath, 'w') as fd:
+ with open(fpath, "w") as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return fpath
- def setup_resources(self, server_docs: str,
- downloads: Optional[List[int]] = None):
+ def setup_resources(self, server_docs: str, downloads: Optional[List[int]] = None):
for fsize in downloads:
label = self.fmt_size(fsize)
- fname = f'score{label}.data'
- self._make_docs_file(docs_dir=server_docs,
- fname=fname, fsize=fsize)
- self._make_docs_file(docs_dir=server_docs,
- fname='reqs10.data', fsize=10*1024)
+ fname = f"score{label}.data"
+ self._make_docs_file(docs_dir=server_docs, fname=fname, fsize=fsize)
+ self._make_docs_file(docs_dir=server_docs, fname="reqs10.data", fsize=10 * 1024)
def _check_downloads(self, r: ExecResult, count: int):
- error = ''
+ error = ""
if r.exit_code != 0:
- error += f'exit={r.exit_code} '
+ error += f"exit={r.exit_code} "
if r.exit_code != 0 or len(r.stats) != count:
- error += f'stats={len(r.stats)}/{count} '
- fails = [s for s in r.stats if s['response_code'] != 200]
+ error += f"stats={len(r.stats)}/{count} "
+ fails = [s for s in r.stats if s["response_code"] != 200]
if len(fails) > 0:
- error += f'{len(fails)} failed'
+ error += f"{len(fails)} failed"
return error if len(error) > 0 else None
def transfer_single(self, url: str, count: int):
samples = []
errors = []
profiles = []
- self.info('single...')
+ self.info("single...")
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
- r = curl.http_download(urls=[url], alpn_proto=self.protocol,
- no_save=True, with_headers=False,
- with_profile=True)
+ curl = CurlClient(
+ env=self.env, silent=self._silent_curl, server_addr=self.server_addr
+ )
+ r = curl.http_download(
+ urls=[url],
+ alpn_proto=self.protocol,
+ no_save=True,
+ with_headers=False,
+ with_profile=True,
+ )
err = self._check_downloads(r, count)
if err:
errors.append(err)
else:
- total_size = sum([s['size_download'] for s in r.stats])
+ total_size = sum([s["size_download"] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return {
- 'count': count,
- 'samples': sample_size,
- 'max-parallel': 1,
- 'speed': mean(samples) if len(samples) else -1,
- 'errors': errors,
- 'stats': RunProfile.AverageStats(profiles),
+ "count": count,
+ "samples": sample_size,
+ "max-parallel": 1,
+ "speed": mean(samples) if len(samples) else -1,
+ "errors": errors,
+ "stats": RunProfile.AverageStats(profiles),
}
def transfer_serial(self, url: str, count: int):
samples = []
errors = []
profiles = []
- url = f'{url}?[0-{count - 1}]'
- self.info('serial...')
+ url = f"{url}?[0-{count - 1}]"
+ self.info("serial...")
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
- r = curl.http_download(urls=[url], alpn_proto=self.protocol,
- no_save=True,
- with_headers=False, with_profile=True)
+ curl = CurlClient(
+ env=self.env, silent=self._silent_curl, server_addr=self.server_addr
+ )
+ r = curl.http_download(
+ urls=[url],
+ alpn_proto=self.protocol,
+ no_save=True,
+ with_headers=False,
+ with_profile=True,
+ )
err = self._check_downloads(r, count)
if err:
errors.append(err)
else:
- total_size = sum([s['size_download'] for s in r.stats])
+ total_size = sum([s["size_download"] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return {
- 'count': count,
- 'samples': sample_size,
- 'max-parallel': 1,
- 'speed': mean(samples) if len(samples) else -1,
- 'errors': errors,
- 'stats': RunProfile.AverageStats(profiles),
+ "count": count,
+ "samples": sample_size,
+ "max-parallel": 1,
+ "speed": mean(samples) if len(samples) else -1,
+ "errors": errors,
+ "stats": RunProfile.AverageStats(profiles),
}
def transfer_parallel(self, url: str, count: int):
errors = []
profiles = []
max_parallel = self._download_parallel if self._download_parallel > 0 else count
- url = f'{url}?[0-{count - 1}]'
- self.info('parallel...')
+ url = f"{url}?[0-{count - 1}]"
+ self.info("parallel...")
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
- r = curl.http_download(urls=[url], alpn_proto=self.protocol,
- no_save=True,
- with_headers=False,
- with_profile=True,
- extra_args=[
- '--parallel',
- '--parallel-max', str(max_parallel)
- ])
+ curl = CurlClient(
+ env=self.env, silent=self._silent_curl, server_addr=self.server_addr
+ )
+ r = curl.http_download(
+ urls=[url],
+ alpn_proto=self.protocol,
+ no_save=True,
+ with_headers=False,
+ with_profile=True,
+ extra_args=["--parallel", "--parallel-max", str(max_parallel)],
+ )
err = self._check_downloads(r, count)
if err:
errors.append(err)
else:
- total_size = sum([s['size_download'] for s in r.stats])
+ total_size = sum([s["size_download"] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return {
- 'count': count,
- 'samples': sample_size,
- 'max-parallel': max_parallel,
- 'speed': mean(samples) if len(samples) else -1,
- 'errors': errors,
- 'stats': RunProfile.AverageStats(profiles),
+ "count": count,
+ "samples": sample_size,
+ "max-parallel": max_parallel,
+ "speed": mean(samples) if len(samples) else -1,
+ "errors": errors,
+ "stats": RunProfile.AverageStats(profiles),
}
def download_url(self, label: str, url: str, count: int):
- self.info(f' {count}x{label}: ')
+ self.info(f" {count}x{label}: ")
props = {
- 'single': self.transfer_single(url=url, count=10),
+ "single": self.transfer_single(url=url, count=10),
}
if count > 1:
- props['serial'] = self.transfer_serial(url=url, count=count)
- props['parallel'] = self.transfer_parallel(url=url, count=count)
- self.info('ok.\n')
+ props["serial"] = self.transfer_serial(url=url, count=count)
+ props["parallel"] = self.transfer_parallel(url=url, count=count)
+ self.info("ok.\n")
return props
def downloads(self, count: int, fsizes: List[int]) -> Dict[str, Any]:
scores = {}
for fsize in fsizes:
label = self.fmt_size(fsize)
- fname = f'score{label}.data'
- url = f'https://{self.env.domain1}:{self.server_port}/{fname}'
+ fname = f"score{label}.data"
+ url = f"https://{self.env.domain1}:{self.server_port}/{fname}"
scores[label] = self.download_url(label=label, url=url, count=count)
return scores
def _check_uploads(self, r: ExecResult, count: int):
- error = ''
+ error = ""
if r.exit_code != 0:
- error += f'exit={r.exit_code} '
+ error += f"exit={r.exit_code} "
if r.exit_code != 0 or len(r.stats) != count:
- error += f'stats={len(r.stats)}/{count} '
- fails = [s for s in r.stats if s['response_code'] != 200]
+ error += f"stats={len(r.stats)}/{count} "
+ fails = [s for s in r.stats if s["response_code"] != 200]
if len(fails) > 0:
- error += f'{len(fails)} failed'
+ error += f"{len(fails)} failed"
for f in fails:
error += f'[{f["response_code"]}]'
return error if len(error) > 0 else None
samples = []
errors = []
profiles = []
- self.info('single...')
+ self.info("single...")
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
- r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
- with_headers=False, with_profile=True)
+ curl = CurlClient(
+ env=self.env, silent=self._silent_curl, server_addr=self.server_addr
+ )
+ r = curl.http_put(
+ urls=[url],
+ fdata=fpath,
+ alpn_proto=self.protocol,
+ with_headers=False,
+ with_profile=True,
+ )
err = self._check_uploads(r, count)
if err:
errors.append(err)
else:
- total_size = sum([s['size_upload'] for s in r.stats])
+ total_size = sum([s["size_upload"] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return {
- 'count': count,
- 'samples': sample_size,
- 'max-parallel': 1,
- 'speed': mean(samples) if len(samples) else -1,
- 'errors': errors,
- 'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
+ "count": count,
+ "samples": sample_size,
+ "max-parallel": 1,
+ "speed": mean(samples) if len(samples) else -1,
+ "errors": errors,
+ "stats": RunProfile.AverageStats(profiles) if len(profiles) else {},
}
def upload_serial(self, url: str, fpath: str, count: int):
samples = []
errors = []
profiles = []
- url = f'{url}?id=[0-{count - 1}]'
- self.info('serial...')
+ url = f"{url}?id=[0-{count - 1}]"
+ self.info("serial...")
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
- r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
- with_headers=False, with_profile=True)
+ curl = CurlClient(
+ env=self.env, silent=self._silent_curl, server_addr=self.server_addr
+ )
+ r = curl.http_put(
+ urls=[url],
+ fdata=fpath,
+ alpn_proto=self.protocol,
+ with_headers=False,
+ with_profile=True,
+ )
err = self._check_uploads(r, count)
if err:
errors.append(err)
else:
- total_size = sum([s['size_upload'] for s in r.stats])
+ total_size = sum([s["size_upload"] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return {
- 'count': count,
- 'samples': sample_size,
- 'max-parallel': 1,
- 'speed': mean(samples) if len(samples) else -1,
- 'errors': errors,
- 'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
+ "count": count,
+ "samples": sample_size,
+ "max-parallel": 1,
+ "speed": mean(samples) if len(samples) else -1,
+ "errors": errors,
+ "stats": RunProfile.AverageStats(profiles) if len(profiles) else {},
}
def upload_parallel(self, url: str, fpath: str, count: int):
errors = []
profiles = []
max_parallel = count
- url = f'{url}?id=[0-{count - 1}]'
- self.info('parallel...')
+ url = f"{url}?id=[0-{count - 1}]"
+ self.info("parallel...")
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
- r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
- with_headers=False, with_profile=True,
- extra_args=[
- '--parallel',
- '--parallel-max', str(max_parallel)
- ])
+ curl = CurlClient(
+ env=self.env, silent=self._silent_curl, server_addr=self.server_addr
+ )
+ r = curl.http_put(
+ urls=[url],
+ fdata=fpath,
+ alpn_proto=self.protocol,
+ with_headers=False,
+ with_profile=True,
+ extra_args=["--parallel", "--parallel-max", str(max_parallel)],
+ )
err = self._check_uploads(r, count)
if err:
errors.append(err)
else:
- total_size = sum([s['size_upload'] for s in r.stats])
+ total_size = sum([s["size_upload"] for s in r.stats])
samples.append(total_size / r.duration.total_seconds())
profiles.append(r.profile)
return {
- 'count': count,
- 'samples': sample_size,
- 'max-parallel': max_parallel,
- 'speed': mean(samples) if len(samples) else -1,
- 'errors': errors,
- 'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
+ "count": count,
+ "samples": sample_size,
+ "max-parallel": max_parallel,
+ "speed": mean(samples) if len(samples) else -1,
+ "errors": errors,
+ "stats": RunProfile.AverageStats(profiles) if len(profiles) else {},
}
def upload_url(self, label: str, url: str, fpath: str, count: int):
- self.info(f' {count}x{label}: ')
+ self.info(f" {count}x{label}: ")
props = {
- 'single': self.upload_single(url=url, fpath=fpath, count=10),
+ "single": self.upload_single(url=url, fpath=fpath, count=10),
}
if count > 1:
- props['serial'] = self.upload_serial(url=url, fpath=fpath, count=count)
- props['parallel'] = self.upload_parallel(url=url, fpath=fpath, count=count)
- self.info('ok.\n')
+ props["serial"] = self.upload_serial(url=url, fpath=fpath, count=count)
+ props["parallel"] = self.upload_parallel(url=url, fpath=fpath, count=count)
+ self.info("ok.\n")
return props
def uploads(self, count: int, fsizes: List[int]) -> Dict[str, Any]:
scores = {}
- url = f'https://{self.env.domain2}:{self.server_port}/curltest/put'
+ url = f"https://{self.env.domain2}:{self.server_port}/curltest/put"
fpaths = {}
for fsize in fsizes:
label = self.fmt_size(fsize)
- fname = f'upload{label}.data'
- fpaths[label] = self._make_docs_file(docs_dir=self.env.gen_dir,
- fname=fname, fsize=fsize)
+ fname = f"upload{label}.data"
+ fpaths[label] = self._make_docs_file(
+ docs_dir=self.env.gen_dir, fname=fname, fsize=fsize
+ )
for label, fpath in fpaths.items():
- scores[label] = self.upload_url(label=label, url=url, fpath=fpath,
- count=count)
+ scores[label] = self.upload_url(
+ label=label, url=url, fpath=fpath, count=count
+ )
return scores
def do_requests(self, url: str, count: int, max_parallel: int = 1):
samples = []
errors = []
profiles = []
- url = f'{url}?[0-{count - 1}]'
+ url = f"{url}?[0-{count - 1}]"
extra_args = [
- '-w', '%{response_code},\\n',
+ "-w",
+ "%{response_code},\\n",
]
if max_parallel > 1:
- extra_args.extend([
- '--parallel', '--parallel-max', str(max_parallel)
- ])
- self.info(f'{max_parallel}...')
+ extra_args.extend(["--parallel", "--parallel-max", str(max_parallel)])
+ self.info(f"{max_parallel}...")
for _ in range(sample_size):
- curl = CurlClient(env=self.env, silent=self._silent_curl,
- server_addr=self.server_addr)
- r = curl.http_download(urls=[url], alpn_proto=self.protocol, no_save=True,
- with_headers=False, with_profile=True,
- with_stats=False, extra_args=extra_args)
+ curl = CurlClient(
+ env=self.env, silent=self._silent_curl, server_addr=self.server_addr
+ )
+ r = curl.http_download(
+ urls=[url],
+ alpn_proto=self.protocol,
+ no_save=True,
+ with_headers=False,
+ with_profile=True,
+ with_stats=False,
+ extra_args=extra_args,
+ )
if r.exit_code != 0:
- errors.append(f'exit={r.exit_code}')
+ errors.append(f"exit={r.exit_code}")
else:
samples.append(count / r.duration.total_seconds())
non_200s = 0
for line in r.stdout.splitlines():
- if not line.startswith('200,'):
+ if not line.startswith("200,"):
non_200s += 1
if non_200s > 0:
- errors.append(f'responses != 200: {non_200s}')
+ errors.append(f"responses != 200: {non_200s}")
profiles.append(r.profile)
return {
- 'count': count,
- 'samples': sample_size,
- 'speed': mean(samples) if len(samples) else -1,
- 'errors': errors,
- 'stats': RunProfile.AverageStats(profiles),
+ "count": count,
+ "samples": sample_size,
+ "speed": mean(samples) if len(samples) else -1,
+ "errors": errors,
+ "stats": RunProfile.AverageStats(profiles),
}
def requests_url(self, url: str, count: int):
- self.info(f' {url}: ')
+ self.info(f" {url}: ")
props = {}
# 300 is max in curl, see tool_main.h
for m in [1, 6, 25, 50, 100, 300]:
props[str(m)] = self.do_requests(url=url, count=count, max_parallel=m)
- self.info('ok.\n')
+ self.info("ok.\n")
return props
def requests(self, req_count) -> Dict[str, Any]:
- url = f'https://{self.env.domain1}:{self.server_port}/reqs10.data'
+ url = f"https://{self.env.domain1}:{self.server_port}/reqs10.data"
return {
- 'count': req_count,
- '10KB': self.requests_url(url=url, count=req_count),
+ "count": req_count,
+ "10KB": self.requests_url(url=url, count=req_count),
}
- def score(self,
- handshakes: bool = True,
- downloads: Optional[List[int]] = None,
- download_count: int = 50,
- uploads: Optional[List[int]] = None,
- upload_count: int = 50,
- req_count=5000,
- requests: bool = True):
+ def score(
+ self,
+ handshakes: bool = True,
+ downloads: Optional[List[int]] = None,
+ download_count: int = 50,
+ uploads: Optional[List[int]] = None,
+ upload_count: int = 50,
+ req_count=5000,
+ requests: bool = True,
+ ):
self.info(f"scoring {self.protocol} against {self.server_descr}\n")
p = {}
- if self.protocol == 'h3':
- p['name'] = 'h3'
+ if self.protocol == "h3":
+ p["name"] = "h3"
if not self.env.have_h3_curl():
- raise ScoreCardError('curl does not support HTTP/3')
- for lib in ['ngtcp2', 'quiche', 'msh3', 'nghttp3']:
+ raise ScoreCardError("curl does not support HTTP/3")
+ for lib in ["ngtcp2", "quiche", "msh3", "nghttp3"]:
if self.env.curl_uses_lib(lib):
- p['implementation'] = lib
+ p["implementation"] = lib
break
- elif self.protocol == 'h2':
- p['name'] = 'h2'
+ elif self.protocol == "h2":
+ p["name"] = "h2"
if not self.env.have_h2_curl():
- raise ScoreCardError('curl does not support HTTP/2')
- for lib in ['nghttp2']:
+ raise ScoreCardError("curl does not support HTTP/2")
+ for lib in ["nghttp2"]:
if self.env.curl_uses_lib(lib):
- p['implementation'] = lib
+ p["implementation"] = lib
break
- elif self.protocol == 'h1' or self.protocol == 'http/1.1':
- proto = 'http/1.1'
- p['name'] = proto
- p['implementation'] = 'native'
+ elif self.protocol == "h1" or self.protocol == "http/1.1":
+ proto = "http/1.1"
+ p["name"] = proto
+ p["implementation"] = "native"
else:
raise ScoreCardError(f"unknown protocol: {self.protocol}")
- if 'implementation' not in p:
- raise ScoreCardError(f'did not recognized {p} lib')
- p['version'] = Env.curl_lib_version(p['implementation'])
+ if "implementation" not in p:
+ raise ScoreCardError(f"did not recognized {p} lib")
+ p["version"] = Env.curl_lib_version(p["implementation"])
score = {
- 'curl': self.env.curl_fullname(),
- 'os': self.env.curl_os(),
- 'protocol': p,
- 'server': self.server_descr,
+ "curl": self.env.curl_fullname(),
+ "os": self.env.curl_os(),
+ "protocol": p,
+ "server": self.server_descr,
}
if handshakes:
- score['handshakes'] = self.handshakes()
+ score["handshakes"] = self.handshakes()
if downloads and len(downloads) > 0:
- score['downloads'] = self.downloads(count=download_count,
- fsizes=downloads)
+ score["downloads"] = self.downloads(count=download_count, fsizes=downloads)
if uploads and len(uploads) > 0:
- score['uploads'] = self.uploads(count=upload_count,
- fsizes=uploads)
+ score["uploads"] = self.uploads(count=upload_count, fsizes=uploads)
if requests:
- score['requests'] = self.requests(req_count=req_count)
+ score["requests"] = self.requests(req_count=req_count)
self.info("\n")
return score
def fmt_ms(self, tval):
- return f'{int(tval*1000)} ms' if tval >= 0 else '--'
+ return f"{int(tval*1000)} ms" if tval >= 0 else "--"
def fmt_size(self, val):
- if val >= (1024*1024*1024):
- return f'{val / (1024*1024*1024):0.000f}GB'
+ if val >= (1024 * 1024 * 1024):
+ return f"{val / (1024*1024*1024):0.000f}GB"
elif val >= (1024 * 1024):
- return f'{val / (1024*1024):0.000f}MB'
+ return f"{val / (1024*1024):0.000f}MB"
elif val >= 1024:
- return f'{val / 1024:0.000f}KB'
+ return f"{val / 1024:0.000f}KB"
else:
- return f'{val:0.000f}B'
+ return f"{val:0.000f}B"
def fmt_mbs(self, val):
- return f'{val/(1024*1024):0.000f} MB/s' if val >= 0 else '--'
+ return f"{val/(1024*1024):0.000f} MB/s" if val >= 0 else "--"
def fmt_reqs(self, val):
- return f'{val:0.000f} r/s' if val >= 0 else '--'
+ return f"{val:0.000f} r/s" if val >= 0 else "--"
def print_score(self, score):
print(f'{score["protocol"]["name"].upper()} in {score["curl"]}')
- if 'handshakes' in score:
+ if "handshakes" in score:
print(f'{"Handshakes":<24} {"ipv4":25} {"ipv6":28}')
- print(f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '
- f'{"Connect":>12} {"Handshake":>12} {"Errors":<20}')
+ print(
+ f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '
+ f'{"Connect":>12} {"Handshake":>12} {"Errors":<20}'
+ )
for key, val in score["handshakes"].items():
- print(f' {key:<17} {self.fmt_ms(val["ipv4-connect"]):>12} '
- f'{self.fmt_ms(val["ipv4-handshake"]):>12} '
- f'{self.fmt_ms(val["ipv6-connect"]):>12} '
- f'{self.fmt_ms(val["ipv6-handshake"]):>12} '
- f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
- )
- if 'downloads' in score:
+ print(
+ f' {key:<17} {self.fmt_ms(val["ipv4-connect"]):>12} '
+ f'{self.fmt_ms(val["ipv4-handshake"]):>12} '
+ f'{self.fmt_ms(val["ipv6-connect"]):>12} '
+ f'{self.fmt_ms(val["ipv6-handshake"]):>12} '
+ f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
+ )
+ if "downloads" in score:
# get the key names of all sizes and measurements made
sizes = []
measures = []
m_names = {}
mcol_width = 12
mcol_sw = 17
- for sskey, ssval in score['downloads'].items():
+ for sskey, ssval in score["downloads"].items():
if isinstance(ssval, str):
continue
if sskey not in sizes:
sizes.append(sskey)
- for mkey, mval in score['downloads'][sskey].items():
+ for mkey, mval in score["downloads"][sskey].items():
if mkey not in measures:
measures.append(mkey)
- m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'
+ m_names[
+ mkey
+ ] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'
print(f'Downloads from {score["server"]}')
- print(f' {"Size":>8}', end='')
+ print(f' {"Size":>8}', end="")
for m in measures:
- print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
+ print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end="")
print(f' {"Errors":^20}')
- for size in score['downloads']:
- size_score = score['downloads'][size]
- print(f' {size:>8}', end='')
+ for size in score["downloads"]:
+ size_score = score["downloads"][size]
+ print(f" {size:>8}", end="")
errors = []
for val in size_score.values():
- if 'errors' in val:
- errors.extend(val['errors'])
+ if "errors" in val:
+ errors.extend(val["errors"])
for m in measures:
if m in size_score:
- print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
- s = f'[{size_score[m]["stats"]["cpu"]:>.1f}%'\
+ print(
+ f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}',
+ end="",
+ )
+ s = (
+ f'[{size_score[m]["stats"]["cpu"]:>.1f}%'
f'/{self.fmt_size(size_score[m]["stats"]["rss"])}]'
- print(f' {s:<{mcol_sw}}', end='')
+ )
+ print(f" {s:<{mcol_sw}}", end="")
else:
- print(' '*mcol_width, end='')
+ print(" " * mcol_width, end="")
if len(errors):
print(f' {"/".join(errors):<20}')
else:
print(f' {"-":^20}')
- if 'uploads' in score:
+ if "uploads" in score:
# get the key names of all sizes and measurements made
sizes = []
measures = []
m_names = {}
mcol_width = 12
mcol_sw = 17
- for sskey, ssval in score['uploads'].items():
+ for sskey, ssval in score["uploads"].items():
if isinstance(ssval, str):
continue
if sskey not in sizes:
for mkey, mval in ssval.items():
if mkey not in measures:
measures.append(mkey)
- m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'
+ m_names[
+ mkey
+ ] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'
print(f'Uploads to {score["server"]}')
- print(f' {"Size":>8}', end='')
+ print(f' {"Size":>8}', end="")
for m in measures:
- print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
+ print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end="")
print(f' {"Errors":^20}')
for size in sizes:
- size_score = score['uploads'][size]
- print(f' {size:>8}', end='')
+ size_score = score["uploads"][size]
+ print(f" {size:>8}", end="")
errors = []
for val in size_score.values():
- if 'errors' in val:
- errors.extend(val['errors'])
+ if "errors" in val:
+ errors.extend(val["errors"])
for m in measures:
if m in size_score:
- print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
+ print(
+ f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}',
+ end="",
+ )
stats = size_score[m]["stats"]
- if 'cpu' in stats:
+ if "cpu" in stats:
s = f'[{stats["cpu"]:>.1f}%/{self.fmt_size(stats["rss"])}]'
else:
- s = '[???/???]'
- print(f' {s:<{mcol_sw}}', end='')
+ s = "[???/???]"
+ print(f" {s:<{mcol_sw}}", end="")
else:
- print(' '*mcol_width, end='')
+ print(" " * mcol_width, end="")
if len(errors):
print(f' {"/".join(errors):<20}')
else:
print(f' {"-":^20}')
- if 'requests' in score:
+ if "requests" in score:
sizes = []
measures = []
m_names = {}
mcol_width = 9
mcol_sw = 13
- for sskey, ssval in score['requests'].items():
+ for sskey, ssval in score["requests"].items():
if isinstance(ssval, (str, int)):
continue
if sskey not in sizes:
sizes.append(sskey)
- for mkey in score['requests'][sskey]:
+ for mkey in score["requests"][sskey]:
if mkey not in measures:
measures.append(mkey)
- m_names[mkey] = f'{mkey}'
+ m_names[mkey] = f"{mkey}"
print('Requests (max parallel) to {score["server"]}')
- print(f' {"Size":>6} {"Reqs":>6}', end='')
+ print(f' {"Size":>6} {"Reqs":>6}', end="")
for m in measures:
- print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
+ print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end="")
print(f' {"Errors":^10}')
for size in sizes:
- size_score = score['requests'][size]
- count = score['requests']['count']
- print(f' {size:>6} {count:>6}', end='')
+ size_score = score["requests"][size]
+ count = score["requests"]["count"]
+ print(f" {size:>6} {count:>6}", end="")
errors = []
for val in size_score.values():
- if 'errors' in val:
- errors.extend(val['errors'])
+ if "errors" in val:
+ errors.extend(val["errors"])
for m in measures:
if m in size_score:
- print(f' {self.fmt_reqs(size_score[m]["speed"]):>{mcol_width}}', end='')
- s = f'[{size_score[m]["stats"]["cpu"]:>.1f}%'\
+ print(
+ f' {self.fmt_reqs(size_score[m]["speed"]):>{mcol_width}}',
+ end="",
+ )
+ s = (
+ f'[{size_score[m]["stats"]["cpu"]:>.1f}%'
f'/{self.fmt_size(size_score[m]["stats"]["rss"])}]'
- print(f' {s:<{mcol_sw}}', end='')
+ )
+ print(f" {s:<{mcol_sw}}", end="")
else:
- print(' '*mcol_width, end='')
+ print(" " * mcol_width, end="")
if len(errors):
print(f' {"/".join(errors):<10}')
else:
def parse_size(s):
- m = re.match(r'(\d+)(mb|kb|gb)?', s, re.IGNORECASE)
+ m = re.match(r"(\d+)(mb|kb|gb)?", s, re.IGNORECASE)
if m is None:
- raise Exception(f'unrecognized size: {s}')
+ raise Exception(f"unrecognized size: {s}")
size = int(m.group(1))
if not m.group(2):
pass
- elif m.group(2).lower() == 'kb':
+ elif m.group(2).lower() == "kb":
size *= 1024
- elif m.group(2).lower() == 'mb':
+ elif m.group(2).lower() == "mb":
size *= 1024 * 1024
- elif m.group(2).lower() == 'gb':
+ elif m.group(2).lower() == "gb":
size *= 1024 * 1024 * 1024
return size
def main():
- parser = argparse.ArgumentParser(prog='scorecard', description="""
+ parser = argparse.ArgumentParser(
+ prog="scorecard",
+ description="""
Run a range of tests to give a scorecard for a HTTP protocol
'h3' or 'h2' implementation in curl.
- """)
- parser.add_argument("-v", "--verbose", action='count', default=1,
- help="log more output on stderr")
- parser.add_argument("-j", "--json", action='store_true',
- default=False, help="print json instead of text")
- parser.add_argument("-H", "--handshakes", action='store_true',
- default=False, help="evaluate handshakes only")
- parser.add_argument("-d", "--downloads", action='store_true',
- default=False, help="evaluate downloads")
- parser.add_argument("--download", action='append', type=str,
- default=None, help="evaluate download size")
- parser.add_argument("--download-count", action='store', type=int,
- default=50, help="perform that many downloads")
- parser.add_argument("--download-parallel", action='store', type=int,
- default=0, help="perform that many downloads in parallel (default all)")
- parser.add_argument("-u", "--uploads", action='store_true',
- default=False, help="evaluate uploads")
- parser.add_argument("--upload", action='append', type=str,
- default=None, help="evaluate upload size")
- parser.add_argument("--upload-count", action='store', type=int,
- default=50, help="perform that many uploads")
- parser.add_argument("-r", "--requests", action='store_true',
- default=False, help="evaluate requests")
- parser.add_argument("--request-count", action='store', type=int,
- default=5000, help="perform that many requests")
- parser.add_argument("--httpd", action='store_true', default=False,
- help="evaluate httpd server only")
- parser.add_argument("--caddy", action='store_true', default=False,
- help="evaluate caddy server only")
- parser.add_argument("--curl-verbose", action='store_true',
- default=False, help="run curl with `-v`")
- parser.add_argument("protocol", default='h2', nargs='?',
- help="Name of protocol to score")
- parser.add_argument("--start-only", action='store_true', default=False,
- help="only start the servers")
- parser.add_argument("--remote", action='store', type=str,
- default=None, help="score against the remote server at <ip>:<port>")
+ """,
+ )
+ parser.add_argument(
+ "-v", "--verbose", action="count", default=1, help="log more output on stderr"
+ )
+ parser.add_argument(
+ "-j",
+ "--json",
+ action="store_true",
+ default=False,
+ help="print json instead of text",
+ )
+ parser.add_argument(
+ "-H",
+ "--handshakes",
+ action="store_true",
+ default=False,
+ help="evaluate handshakes only",
+ )
+ parser.add_argument(
+ "-d",
+ "--downloads",
+ action="store_true",
+ default=False,
+ help="evaluate downloads",
+ )
+ parser.add_argument(
+ "--download",
+ action="append",
+ type=str,
+ default=None,
+ help="evaluate download size",
+ )
+ parser.add_argument(
+ "--download-count",
+ action="store",
+ type=int,
+ default=50,
+ help="perform that many downloads",
+ )
+ parser.add_argument(
+ "--download-parallel",
+ action="store",
+ type=int,
+ default=0,
+ help="perform that many downloads in parallel (default all)",
+ )
+ parser.add_argument(
+ "-u", "--uploads", action="store_true", default=False, help="evaluate uploads"
+ )
+ parser.add_argument(
+ "--upload", action="append", type=str, default=None, help="evaluate upload size"
+ )
+ parser.add_argument(
+ "--upload-count",
+ action="store",
+ type=int,
+ default=50,
+ help="perform that many uploads",
+ )
+ parser.add_argument(
+ "-r", "--requests", action="store_true", default=False, help="evaluate requests"
+ )
+ parser.add_argument(
+ "--request-count",
+ action="store",
+ type=int,
+ default=5000,
+ help="perform that many requests",
+ )
+ parser.add_argument(
+ "--httpd", action="store_true", default=False, help="evaluate httpd server only"
+ )
+ parser.add_argument(
+ "--caddy", action="store_true", default=False, help="evaluate caddy server only"
+ )
+ parser.add_argument(
+ "--curl-verbose", action="store_true", default=False, help="run curl with `-v`"
+ )
+ parser.add_argument(
+ "protocol", default="h2", nargs="?", help="Name of protocol to score"
+ )
+ parser.add_argument(
+ "--start-only",
+ action="store_true",
+ default=False,
+ help="only start the servers",
+ )
+ parser.add_argument(
+ "--remote",
+ action="store",
+ type=str,
+ default=None,
+ help="score against the remote server at <ip>:<port>",
+ )
args = parser.parse_args()
if args.verbose > 0:
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
- logging.getLogger('').addHandler(console)
+ logging.getLogger("").addHandler(console)
protocol = args.protocol
handshakes = True
if args.download is not None:
downloads = []
for x in args.download:
- downloads.extend([parse_size(s) for s in x.split(',')])
+ downloads.extend([parse_size(s) for s in x.split(",")])
uploads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
if args.upload is not None:
uploads = []
for x in args.upload:
- uploads.extend([parse_size(s) for s in x.split(',')])
+ uploads.extend([parse_size(s) for s in x.split(",")])
requests = True
if args.downloads or args.uploads or args.requests or args.handshakes:
uploads = None
requests = args.requests
- test_httpd = protocol != 'h3'
+ test_httpd = protocol != "h3"
test_caddy = True
if args.caddy or args.httpd:
test_caddy = args.caddy
cards = []
if args.remote:
- m = re.match(r'^(.+):(\d+)$', args.remote)
+ m = re.match(r"^(.+):(\d+)$", args.remote)
if m is None:
- raise ScoreCardError(f'unable to parse ip:port from --remote {args.remote}')
+ raise ScoreCardError(
+ f"unable to parse ip:port from --remote {args.remote}"
+ )
test_httpd = False
test_caddy = False
remote_addr = m.group(1)
remote_port = int(m.group(2))
- card = ScoreCard(env=env,
- protocol=protocol,
- server_descr=f'Server at {args.remote}',
- server_addr=remote_addr,
- server_port=remote_port,
- verbose=args.verbose, curl_verbose=args.curl_verbose,
- download_parallel=args.download_parallel)
+ card = ScoreCard(
+ env=env,
+ protocol=protocol,
+ server_descr=f"Server at {args.remote}",
+ server_addr=remote_addr,
+ server_port=remote_port,
+ verbose=args.verbose,
+ curl_verbose=args.curl_verbose,
+ download_parallel=args.download_parallel,
+ )
cards.append(card)
if test_httpd:
httpd = Httpd(env=env)
- assert httpd.exists(), \
- f'httpd not found: {env.httpd}'
+ assert httpd.exists(), f"httpd not found: {env.httpd}"
httpd.clear_logs()
server_docs = httpd.docs_dir
assert httpd.start()
- if protocol == 'h3':
+ if protocol == "h3":
nghttpx = NghttpxQuic(env=env)
nghttpx.clear_logs()
assert nghttpx.start()
- server_descr = f'nghttpx: https:{env.h3_port} [backend httpd: {env.httpd_version()}, https:{env.https_port}]'
+ server_descr = f"nghttpx: https:{env.h3_port} [backend httpd: {env.httpd_version()}, https:{env.https_port}]"
server_port = env.h3_port
else:
- server_descr = f'httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}'
+ server_descr = f"httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}"
server_port = env.https_port
- card = ScoreCard(env=env,
- protocol=protocol,
- server_descr=server_descr,
- server_port=server_port,
- verbose=args.verbose, curl_verbose=args.curl_verbose,
- download_parallel=args.download_parallel)
+ card = ScoreCard(
+ env=env,
+ protocol=protocol,
+ server_descr=server_descr,
+ server_port=server_port,
+ verbose=args.verbose,
+ curl_verbose=args.curl_verbose,
+ download_parallel=args.download_parallel,
+ )
card.setup_resources(server_docs, downloads)
cards.append(card)
if test_caddy and env.caddy:
- backend = ''
+ backend = ""
if uploads and httpd is None:
- backend = f' [backend httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}]'
+ backend = f" [backend httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}]"
httpd = Httpd(env=env)
- assert httpd.exists(), \
- f'httpd not found: {env.httpd}'
+ assert httpd.exists(), f"httpd not found: {env.httpd}"
httpd.clear_logs()
assert httpd.start()
caddy = Caddy(env=env)
caddy.clear_logs()
assert caddy.start()
- server_descr = f'Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}{backend}'
+ server_descr = f"Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}{backend}"
server_port = caddy.port
server_docs = caddy.docs_dir
- card = ScoreCard(env=env,
- protocol=protocol,
- server_descr=server_descr,
- server_port=server_port,
- verbose=args.verbose, curl_verbose=args.curl_verbose,
- download_parallel=args.download_parallel)
+ card = ScoreCard(
+ env=env,
+ protocol=protocol,
+ server_descr=server_descr,
+ server_port=server_port,
+ verbose=args.verbose,
+ curl_verbose=args.curl_verbose,
+ download_parallel=args.download_parallel,
+ )
card.setup_resources(server_docs, downloads)
cards.append(card)
if args.start_only:
- print('started servers:')
+ print("started servers:")
for card in cards:
- print(f'{card.server_descr}')
- sys.stderr.write('press [RETURN] to finish')
+ print(f"{card.server_descr}")
+ sys.stderr.write("press [RETURN] to finish")
sys.stderr.flush()
sys.stdin.readline()
else:
for card in cards:
- score = card.score(handshakes=handshakes,
- downloads=downloads,
- download_count=args.download_count,
- uploads=uploads,
- upload_count=args.upload_count,
- req_count=args.request_count,
- requests=requests)
+ score = card.score(
+ handshakes=handshakes,
+ downloads=downloads,
+ download_count=args.download_count,
+ uploads=uploads,
+ upload_count=args.upload_count,
+ req_count=args.request_count,
+ requests=requests,
+ )
if args.json:
print(json.JSONEncoder(indent=2).encode(score))
else:
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestBasic:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
# simple http: GET
def test_01_01_http_get(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
r = curl.http_get(url=url)
r.check_response(http_status=200)
- assert r.json['server'] == env.domain1
+ assert r.json["server"] == env.domain1
# simple https: GET, any http version
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_02_https_get(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'https://{env.domain1}:{env.https_port}/data.json'
+ url = f"https://{env.domain1}:{env.https_port}/data.json"
r = curl.http_get(url=url)
r.check_response(http_status=200)
- assert r.json['server'] == env.domain1
+ assert r.json["server"] == env.domain1
# simple https: GET, h2 wanted and got
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_03_h2_get(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'https://{env.domain1}:{env.https_port}/data.json'
- r = curl.http_get(url=url, extra_args=['--http2'])
- r.check_response(http_status=200, protocol='HTTP/2')
- assert r.json['server'] == env.domain1
+ url = f"https://{env.domain1}:{env.https_port}/data.json"
+ r = curl.http_get(url=url, extra_args=["--http2"])
+ r.check_response(http_status=200, protocol="HTTP/2")
+ assert r.json["server"] == env.domain1
# simple https: GET, h2 unsupported, fallback to h1
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_04_h2_unsupported(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'https://{env.domain2}:{env.https_port}/data.json'
- r = curl.http_get(url=url, extra_args=['--http2'])
- r.check_response(http_status=200, protocol='HTTP/1.1')
- assert r.json['server'] == env.domain2
+ url = f"https://{env.domain2}:{env.https_port}/data.json"
+ r = curl.http_get(url=url, extra_args=["--http2"])
+ r.check_response(http_status=200, protocol="HTTP/1.1")
+ assert r.json["server"] == env.domain2
# simple h3: GET, want h3 and get it
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_01_05_h3_get(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
- url = f'https://{env.domain1}:{env.h3_port}/data.json'
- r = curl.http_get(url=url, extra_args=['--http3-only'])
- r.check_response(http_status=200, protocol='HTTP/3')
- assert r.json['server'] == env.domain1
+ url = f"https://{env.domain1}:{env.h3_port}/data.json"
+ r = curl.http_get(url=url, extra_args=["--http3-only"])
+ r.check_response(http_status=200, protocol="HTTP/3")
+ assert r.json["server"] == env.domain1
# simple download, check connect/handshake timings
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_01_06_timings(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json"
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
- r.check_stats(http_status=200, count=1,
- remote_port=env.port_for(alpn_proto=proto),
- remote_ip='127.0.0.1')
- assert r.stats[0]['time_connect'] > 0, f'{r.stats[0]}'
- assert r.stats[0]['time_appconnect'] > 0, f'{r.stats[0]}'
+ r.check_stats(
+ http_status=200,
+ count=1,
+ remote_port=env.port_for(alpn_proto=proto),
+ remote_ip="127.0.0.1",
+ )
+ assert r.stats[0]["time_connect"] > 0, f"{r.stats[0]}"
+ assert r.stats[0]["time_appconnect"] > 0, f"{r.stats[0]}"
# simple https: HEAD
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_01_07_head(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
- r = curl.http_download(urls=[url], with_stats=True, with_headers=True,
- extra_args=['-I'])
- r.check_stats(http_status=200, count=1, exitcode=0,
- remote_port=env.port_for(alpn_proto=proto),
- remote_ip='127.0.0.1')
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json"
+ r = curl.http_download(
+ urls=[url], with_stats=True, with_headers=True, extra_args=["-I"]
+ )
+ r.check_stats(
+ http_status=200,
+ count=1,
+ exitcode=0,
+ remote_port=env.port_for(alpn_proto=proto),
+ remote_ip="127.0.0.1",
+ )
# got the Content-Length: header, but did not download anything
- assert r.responses[0]['header']['content-length'] == '30', f'{r.responses[0]}'
- assert r.stats[0]['size_download'] == 0, f'{r.stats[0]}'
+ assert r.responses[0]["header"]["content-length"] == "30", f"{r.responses[0]}"
+ assert r.stats[0]["size_download"] == 0, f"{r.stats[0]}"
# http: GET for HTTP/2, see Upgrade:, 101 switch
def test_01_08_h2_upgrade(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_get(url=url, extra_args=['--http2'])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_get(url=url, extra_args=["--http2"])
r.check_exit_code(0)
- assert len(r.responses) == 2, f'{r.responses}'
- assert r.responses[0]['status'] == 101, f'{r.responses[0]}'
- assert r.responses[1]['status'] == 200, f'{r.responses[1]}'
- assert r.responses[1]['protocol'] == 'HTTP/2', f'{r.responses[1]}'
- assert r.json['server'] == env.domain1
+ assert len(r.responses) == 2, f"{r.responses}"
+ assert r.responses[0]["status"] == 101, f"{r.responses[0]}"
+ assert r.responses[1]["status"] == 200, f"{r.responses[1]}"
+ assert r.responses[1]["protocol"] == "HTTP/2", f"{r.responses[1]}"
+ assert r.json["server"] == env.domain1
# http: GET for HTTP/2 with prior knowledge
def test_01_09_h2_prior_knowledge(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_get(url=url, extra_args=['--http2-prior-knowledge'])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_get(url=url, extra_args=["--http2-prior-knowledge"])
r.check_exit_code(0)
- assert len(r.responses) == 1, f'{r.responses}'
- assert r.response['status'] == 200, f'{r.responsw}'
- assert r.response['protocol'] == 'HTTP/2', f'{r.response}'
- assert r.json['server'] == env.domain1
+ assert len(r.responses) == 1, f"{r.responses}"
+ assert r.response["status"] == 200, f"{r.response}"
+ assert r.response["protocol"] == "HTTP/2", f"{r.response}"
+ assert r.json["server"] == env.domain1
# http: strip TE header in HTTP/2 requests
def test_01_10_te_strip(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
- r = curl.http_get(url=url, extra_args=['--http2', '-H', 'TE: gzip'])
+ r = curl.http_get(url=url, extra_args=["--http2", "-H", "TE: gzip"])
r.check_exit_code(0)
- assert len(r.responses) == 1, f'{r.responses}'
- assert r.responses[0]['status'] == 200, f'{r.responses[1]}'
- assert r.responses[0]['protocol'] == 'HTTP/2', f'{r.responses[1]}'
+ assert len(r.responses) == 1, f"{r.responses}"
+ assert r.responses[0]["status"] == 200, f"{r.responses[0]}"
+ assert r.responses[0]["protocol"] == "HTTP/2", f"{r.responses[0]}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestDownload:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload()
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
- env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
- env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
- env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
- env.make_data_file(indir=indir, fname="data-10m", fsize=10*1024*1024)
- env.make_data_file(indir=indir, fname="data-50m", fsize=50*1024*1024)
+ env.make_data_file(indir=indir, fname="data-10k", fsize=10 * 1024)
+ env.make_data_file(indir=indir, fname="data-100k", fsize=100 * 1024)
+ env.make_data_file(indir=indir, fname="data-1m", fsize=1024 * 1024)
+ env.make_data_file(indir=indir, fname="data-10m", fsize=10 * 1024 * 1024)
+ env.make_data_file(indir=indir, fname="data-50m", fsize=50 * 1024 * 1024)
# download 1 file
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_01_download_1(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json"
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=200)
# download 2 files
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_02_download_2(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]"
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=200, count=2)
# download 100 files sequentially
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_03_download_sequential(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 10
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
+ )
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(http_status=200, count=count, connect_count=1)
# download 100 files parallel
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_04_download_parallel(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 10
max_parallel = 5
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--parallel', '--parallel-max', f'{max_parallel}'
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
+ )
+ r = curl.http_download(
+ urls=[urln],
+ alpn_proto=proto,
+ extra_args=["--parallel", "--parallel-max", f"{max_parallel}"],
+ )
r.check_response(http_status=200, count=count)
- if proto == 'http/1.1':
+ if proto == "http/1.1":
# http/1.1 parallel transfers will open multiple connections
assert r.total_connects > 1, r.dump_logs()
else:
assert r.total_connects == 1, r.dump_logs()
# download 500 files sequential
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_05_download_many_sequential(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 shaky here")
count = 200
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
+ )
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(http_status=200, count=count)
- if proto == 'http/1.1':
+ if proto == "http/1.1":
# http/1.1 parallel transfers will open multiple connections
assert r.total_connects > 1, r.dump_logs()
else:
assert r.total_connects == 1, r.dump_logs()
# download 500 files parallel
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_02_06_download_many_parallel(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 200
max_parallel = 50
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[000-{count-1}]'
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--parallel', '--parallel-max', f'{max_parallel}'
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[000-{count-1}]"
+ )
+ r = curl.http_download(
+ urls=[urln],
+ alpn_proto=proto,
+ extra_args=["--parallel", "--parallel-max", f"{max_parallel}"],
+ )
r.check_response(http_status=200, count=count, connect_count=1)
# download files parallel, check connection reuse/multiplex
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_02_07_download_reuse(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 200
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], alpn_proto=proto,
- with_stats=True, extra_args=[
- '--parallel', '--parallel-max', '200'
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
+ )
+ r = curl.http_download(
+ urls=[urln],
+ alpn_proto=proto,
+ with_stats=True,
+ extra_args=["--parallel", "--parallel-max", "200"],
+ )
r.check_response(http_status=200, count=count)
# should have used at most 2 connections only (test servers allow 100 req/conn)
# it may be just 1 on slow systems where requests are answered faster than
assert r.total_connects <= 2, "h2 should use fewer connections here"
# download files parallel with http/1.1, check connection not reused
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_02_07b_download_reuse(self, env: Env, httpd, nghttpx, proto):
count = 6
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], alpn_proto=proto,
- with_stats=True, extra_args=[
- '--parallel'
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
+ )
+ r = curl.http_download(
+ urls=[urln], alpn_proto=proto, with_stats=True, extra_args=["--parallel"]
+ )
r.check_response(count=count, http_status=200)
# http/1.1 should have used count connections
assert r.total_connects == count, "http/1.1 should use this many connections"
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_08_1MB_serial(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
- urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
+ urln = f"https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]"
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200)
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_02_09_1MB_parallel(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
- urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
+ urln = f"https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]"
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--parallel'
- ])
+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--parallel"])
r.check_response(count=count, http_status=200)
- @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
+ @pytest.mark.skipif(
+ condition=Env().slow_network, reason="not suitable for slow network tests"
+ )
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_10_10MB_serial(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 3
- urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
+ urln = f"https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]"
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200)
- @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
+ @pytest.mark.skipif(
+ condition=Env().slow_network, reason="not suitable for slow network tests"
+ )
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_02_11_10MB_parallel(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
count = 3
- urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
+ urln = f"https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]"
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--parallel'
- ])
+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--parallel"])
r.check_response(count=count, http_status=200)
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_02_12_head_serial_https(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
- urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
+ urln = f"https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]"
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--head'
- ])
+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--head"])
r.check_response(count=count, http_status=200)
- @pytest.mark.parametrize("proto", ['h2'])
+ @pytest.mark.parametrize("proto", ["h2"])
def test_02_13_head_serial_h2c(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 5
- urln = f'http://{env.domain1}:{env.http_port}/data-10m?[0-{count-1}]'
+ urln = f"http://{env.domain1}:{env.http_port}/data-10m?[0-{count-1}]"
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--head', '--http2-prior-knowledge', '--fail-early'
- ])
+ r = curl.http_download(
+ urls=[urln],
+ alpn_proto=proto,
+ extra_args=["--head", "--http2-prior-knowledge", "--fail-early"],
+ )
r.check_response(count=count, http_status=200)
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_02_14_not_found(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
count = 5
- urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]"
+ )
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--parallel'
- ])
- r.check_stats(count=count, http_status=404, exitcode=0,
- remote_port=env.port_for(alpn_proto=proto),
- remote_ip='127.0.0.1')
-
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--parallel"])
+ r.check_stats(
+ count=count,
+ http_status=404,
+ exitcode=0,
+ remote_port=env.port_for(alpn_proto=proto),
+ remote_ip="127.0.0.1",
+ )
+
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_02_15_fail_not_found(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
count = 5
- urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]"
+ )
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--fail'
- ])
- r.check_stats(count=count, http_status=404, exitcode=22,
- remote_port=env.port_for(alpn_proto=proto),
- remote_ip='127.0.0.1')
-
- @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--fail"])
+ r.check_stats(
+ count=count,
+ http_status=404,
+ exitcode=22,
+ remote_port=env.port_for(alpn_proto=proto),
+ remote_ip="127.0.0.1",
+ )
+
+ @pytest.mark.skipif(
+ condition=Env().slow_network, reason="not suitable for slow network tests"
+ )
def test_02_20_h2_small_frames(self, env: Env, httpd):
# Test case to reproduce content corruption as observed in
# https://github.com/curl/curl/issues/10525
# To reliably reproduce, we need an Apache httpd that supports
# setting smaller frame sizes. This is not released yet, we
# test if it works and back out if not.
- httpd.set_extra_config(env.domain1, lines=[
- 'H2MaxDataFrameLen 1024',
- ])
+ httpd.set_extra_config(
+ env.domain1,
+ lines=[
+ "H2MaxDataFrameLen 1024",
+ ],
+ )
assert httpd.stop()
if not httpd.start():
# no, not supported, bail out
httpd.set_extra_config(env.domain1, lines=None)
assert httpd.start()
- pytest.skip('H2MaxDataFrameLen not supported')
+ pytest.skip("H2MaxDataFrameLen not supported")
# ok, make 100 downloads with 2 parallel running and they
# are expected to stumble into the issue when using `lib/http2.c`
# from curl 7.88.0
count = 5
urln = f'https://{env.authority_for(env.domain1, "h2")}/data-1m?[0-{count-1}]'
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto="h2", extra_args=[
- '--parallel', '--parallel-max', '2'
- ])
+ r = curl.http_download(
+ urls=[urln],
+ alpn_proto="h2",
+ extra_args=["--parallel", "--parallel-max", "2"],
+ )
r.check_response(count=count, http_status=200)
- srcfile = os.path.join(httpd.docs_dir, 'data-1m')
+ srcfile = os.path.join(httpd.docs_dir, "data-1m")
self.check_downloads(curl, srcfile, count)
# restore httpd defaults
httpd.set_extra_config(env.domain1, lines=None)
assert httpd.start()
# download via lib client, 1 at a time, pause/resume at different offsets
- @pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("pause_offset", [0, 10 * 1024, 100 * 1023, 640000])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_21_lib_serial(self, env: Env, httpd, nghttpx, proto, pause_offset):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
- docname = 'data-10m'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data-10m"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=["-n", f"{count}", "-P", f"{pause_offset}", "-V", proto, url]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
# download via lib client, several at a time, pause/resume
- @pytest.mark.parametrize("pause_offset", [100*1023])
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
- def test_02_22_lib_parallel_resume(self, env: Env, httpd, nghttpx, proto, pause_offset):
- if proto == 'h3' and not env.have_h3():
+ @pytest.mark.parametrize("pause_offset", [100 * 1023])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
+ def test_02_22_lib_parallel_resume(
+ self, env: Env, httpd, nghttpx, proto, pause_offset
+ ):
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
max_parallel = 5
- docname = 'data-10m'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data-10m"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-m', f'{max_parallel}',
- '-P', f'{pause_offset}', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-m",
+ f"{max_parallel}",
+ "-P",
+ f"{pause_offset}",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
# download, several at a time, pause and abort paused
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_23a_lib_abort_paused(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_ossl_quic():
- pytest.skip('OpenSSL QUIC fails here')
- if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
+ if proto == "h3" and env.curl_uses_ossl_quic():
+ pytest.skip("OpenSSL QUIC fails here")
+ if proto == "h3" and env.ci_run and env.curl_uses_lib("quiche"):
pytest.skip("fails in CI, but works locally for unknown reasons")
count = 10
max_parallel = 5
- if proto in ['h2', 'h3']:
+ if proto in ["h2", "h3"]:
pause_offset = 64 * 1024
else:
pause_offset = 12 * 1024
- docname = 'data-1m'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data-1m"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-m', f'{max_parallel}', '-a',
- '-P', f'{pause_offset}', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-m",
+ f"{max_parallel}",
+ "-a",
+ "-P",
+ f"{pause_offset}",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
# downloads should be there, but not necessarily complete
self.check_downloads(client, srcfile, count, complete=False)
# download, several at a time, abort after n bytes
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_23b_lib_abort_offset(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_ossl_quic():
- pytest.skip('OpenSSL QUIC fails here')
- if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
+ if proto == "h3" and env.curl_uses_ossl_quic():
+ pytest.skip("OpenSSL QUIC fails here")
+ if proto == "h3" and env.ci_run and env.curl_uses_lib("quiche"):
pytest.skip("fails in CI, but works locally for unknown reasons")
count = 10
max_parallel = 5
- if proto in ['h2', 'h3']:
+ if proto in ["h2", "h3"]:
abort_offset = 64 * 1024
else:
abort_offset = 12 * 1024
- docname = 'data-1m'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data-1m"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-m', f'{max_parallel}', '-a',
- '-A', f'{abort_offset}', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-m",
+ f"{max_parallel}",
+ "-a",
+ "-A",
+ f"{abort_offset}",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
# downloads should be there, but not necessarily complete
self.check_downloads(client, srcfile, count, complete=False)
# download, several at a time, abort after n bytes
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_23c_lib_fail_offset(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_ossl_quic():
- pytest.skip('OpenSSL QUIC fails here')
- if proto == 'h3' and env.ci_run and env.curl_uses_lib('quiche'):
+ if proto == "h3" and env.curl_uses_ossl_quic():
+ pytest.skip("OpenSSL QUIC fails here")
+ if proto == "h3" and env.ci_run and env.curl_uses_lib("quiche"):
pytest.skip("fails in CI, but works locally for unknown reasons")
count = 10
max_parallel = 5
- if proto in ['h2', 'h3']:
+ if proto in ["h2", "h3"]:
fail_offset = 64 * 1024
else:
fail_offset = 12 * 1024
- docname = 'data-1m'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data-1m"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-m', f'{max_parallel}', '-a',
- '-F', f'{fail_offset}', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-m",
+ f"{max_parallel}",
+ "-a",
+ "-F",
+ f"{fail_offset}",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
# downloads should be there, but not necessarily complete
self.check_downloads(client, srcfile, count, complete=False)
# speed limited download
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_24_speed_limit(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
- url = f'https://{env.authority_for(env.domain1, proto)}/data-1m'
+ url = f"https://{env.authority_for(env.domain1, proto)}/data-1m"
curl = CurlClient(env=env)
speed_limit = 384 * 1024
- min_duration = math.floor((1024 * 1024)/speed_limit)
- r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
- '--limit-rate', f'{speed_limit}'
- ])
+ min_duration = math.floor((1024 * 1024) / speed_limit)
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, extra_args=["--limit-rate", f"{speed_limit}"]
+ )
r.check_response(count=count, http_status=200)
- assert r.duration > timedelta(seconds=min_duration), \
- f'rate limited transfer should take more than {min_duration}s, '\
- f'not {r.duration}'
+ assert r.duration > timedelta(seconds=min_duration), (
+ f"rate limited transfer should take more than {min_duration}s, "
+ f"not {r.duration}"
+ )
# make extreme parallel h2 upgrades, check invalid conn reuse
# before protocol switch has happened
def test_02_25_h2_upgrade_x(self, env: Env, httpd):
- url = f'http://localhost:{env.http_port}/data-100k'
- client = LocalClient(name='h2-upgrade-extreme', env=env, timeout=15)
+ url = f"http://localhost:{env.http_port}/data-100k"
+ client = LocalClient(name="h2-upgrade-extreme", env=env, timeout=15)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
+ pytest.skip(f"example client not built: {client.name}")
r = client.run(args=[url])
- assert r.exit_code == 0, f'{client.dump_logs()}'
+ assert r.exit_code == 0, f"{client.dump_logs()}"
# Special client that tests TLS session reuse in parallel transfers
# TODO: just uses a single connection for h2/h3. Not sure how to prevent that
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_26_session_shared_reuse(self, env: Env, proto, httpd, nghttpx):
- url = f'https://{env.authority_for(env.domain1, proto)}/data-100k'
- client = LocalClient(name='tls-session-reuse', env=env)
+ url = f"https://{env.authority_for(env.domain1, proto)}/data-100k"
+ client = LocalClient(name="tls-session-reuse", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
+ pytest.skip(f"example client not built: {client.name}")
r = client.run(args=[proto, url])
r.check_exit_code(0)
# test on paused transfers, based on issue #11982
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_27a_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
- url = f'https://{env.authority_for(env.domain1, proto)}' \
- '/curltest/tweak/?&chunks=6&chunk_size=8000'
- client = LocalClient(env=env, name='h2-pausing')
- r = client.run(args=['-V', proto, url])
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ "/curltest/tweak/?&chunks=6&chunk_size=8000"
+ )
+ client = LocalClient(env=env, name="h2-pausing")
+ r = client.run(args=["-V", proto, url])
r.check_exit_code(0)
# test on paused transfers, based on issue #11982
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_27b_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
- url = f'https://{env.authority_for(env.domain1, proto)}' \
- '/curltest/tweak/?error=502'
- client = LocalClient(env=env, name='h2-pausing')
- r = client.run(args=['-V', proto, url])
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ "/curltest/tweak/?error=502"
+ )
+ client = LocalClient(env=env, name="h2-pausing")
+ r = client.run(args=["-V", proto, url])
r.check_exit_code(0)
# test on paused transfers, based on issue #11982
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_27c_paused_no_cl(self, env: Env, httpd, nghttpx, proto):
- url = f'https://{env.authority_for(env.domain1, proto)}' \
- '/curltest/tweak/?status=200&chunks=1&chunk_size=100'
- client = LocalClient(env=env, name='h2-pausing')
- r = client.run(args=['-V', proto, url])
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ "/curltest/tweak/?status=200&chunks=1&chunk_size=100"
+ )
+ client = LocalClient(env=env, name="h2-pausing")
+ r = client.run(args=["-V", proto, url])
r.check_exit_code(0)
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_28_get_compressed(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
- urln = f'https://{env.authority_for(env.domain1brotli, proto)}/data-100k?[0-{count-1}]'
+ urln = f"https://{env.authority_for(env.domain1brotli, proto)}/data-100k?[0-{count-1}]"
curl = CurlClient(env=env)
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--compressed'
- ])
+ r = curl.http_download(
+ urls=[urln], alpn_proto=proto, extra_args=["--compressed"]
+ )
r.check_exit_code(code=0)
r.check_response(count=count, http_status=200)
- def check_downloads(self, client, srcfile: str, count: int,
- complete: bool = True):
+ def check_downloads(self, client, srcfile: str, count: int, complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
- diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
- b=open(dfile).readlines(),
- fromfile=srcfile,
- tofile=dfile,
- n=1))
- assert False, f'download {dfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=open(srcfile).readlines(),
+ b=open(dfile).readlines(),
+ fromfile=srcfile,
+ tofile=dfile,
+ n=1,
+ )
+ )
+ assert False, f"download {dfile} differs:\n{diff}"
# download via lib client, 1 at a time, pause/resume at different offsets
- @pytest.mark.parametrize("pause_offset", [0, 10*1024, 100*1023, 640000])
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("pause_offset", [0, 10 * 1024, 100 * 1023, 640000])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_29_h2_lib_serial(self, env: Env, httpd, nghttpx, proto, pause_offset):
count = 2
- docname = 'data-10m'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data-10m"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-P', f'{pause_offset}', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=["-n", f"{count}", "-P", f"{pause_offset}", "-V", proto, url]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
def test_02_30_parallel_prior_knowledge(self, env: Env, httpd):
count = 3
curl = CurlClient(env=env)
- urln = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], extra_args=[
- '--parallel', '--http2-prior-knowledge'
- ])
+ urln = f"http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]"
+ r = curl.http_download(
+ urls=[urln], extra_args=["--parallel", "--http2-prior-knowledge"]
+ )
r.check_response(http_status=200, count=count)
assert r.total_connects == 1, r.dump_logs()
def test_02_31_parallel_upgrade(self, env: Env, httpd, nghttpx):
count = 3
curl = CurlClient(env=env)
- urln = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], extra_args=[
- '--parallel', '--http2'
- ])
+ urln = f"http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]"
+ r = curl.http_download(urls=[urln], extra_args=["--parallel", "--http2"])
r.check_response(http_status=200, count=count)
# we see 3 connections, because Apache only every serves a single
# request via Upgrade: and then closed the connection.
# nghttpx is the only server we have that supports TLS early data
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_02_32_earlydata(self, env: Env, httpd, nghttpx, proto):
- if not env.curl_uses_lib('gnutls'):
- pytest.skip('TLS earlydata only implemented in GnuTLS')
- if proto == 'h3' and not env.have_h3():
+ if not env.curl_uses_lib("gnutls"):
+ pytest.skip("TLS earlydata only implemented in GnuTLS")
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
- docname = 'data-10k'
+ docname = "data-10k"
# we want this test to always connect to nghttpx, since it is
# the only server we have that supports TLS earlydata
port = env.port_for(proto)
- if proto != 'h3':
+ if proto != "h3":
port = env.nghttpx_https_port
- url = f'https://{env.domain1}:{port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ url = f"https://{env.domain1}:{port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}',
- '-e', # use TLS earlydata
- '-f', # forbid reuse of connections
- '-r', f'{env.domain1}:{port}:127.0.0.1',
- '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-e", # use TLS earlydata
+ "-f", # forbid reuse of connections
+ "-r",
+ f"{env.domain1}:{port}:127.0.0.1",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
earlydata = {}
reused_session = False
for line in r.trace_lines:
- m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
+ m = re.match(r"^\[t-(\d+)] EarlyData: (-?\d+)", line)
if m:
earlydata[int(m.group(1))] = int(m.group(2))
continue
- m = re.match(r'\[1-1] \* SSL reusing session.*', line)
+ m = re.match(r"\[1-1] \* SSL reusing session.*", line)
if m:
reused_session = True
- assert reused_session, 'session was not reused for 2nd transfer'
- assert earlydata[0] == 0, f'{earlydata}'
- if proto == 'http/1.1':
- assert earlydata[1] == 69, f'{earlydata}'
- elif proto == 'h2':
- assert earlydata[1] == 107, f'{earlydata}'
- elif proto == 'h3':
- assert earlydata[1] == 67, f'{earlydata}'
-
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ assert reused_session, "session was not reused for 2nd transfer"
+ assert earlydata[0] == 0, f"{earlydata}"
+ if proto == "http/1.1":
+ assert earlydata[1] == 69, f"{earlydata}"
+ elif proto == "h2":
+ assert earlydata[1] == 107, f"{earlydata}"
+ elif proto == "h3":
+ assert earlydata[1] == 67, f"{earlydata}"
+
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
@pytest.mark.parametrize("max_host_conns", [0, 1, 5])
- def test_02_33_max_host_conns(self, env: Env, httpd, nghttpx, proto, max_host_conns):
- if proto == 'h3' and not env.have_h3():
+ def test_02_33_max_host_conns(
+ self, env: Env, httpd, nghttpx, proto, max_host_conns
+ ):
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
max_parallel = 50
- docname = 'data-10k'
+ docname = "data-10k"
port = env.port_for(proto)
- url = f'https://{env.domain1}:{port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ url = f"https://{env.domain1}:{port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}',
- '-m', f'{max_parallel}',
- '-x', # always use a fresh connection
- '-M', str(max_host_conns), # limit conns per host
- '-r', f'{env.domain1}:{port}:127.0.0.1',
- '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-m",
+ f"{max_parallel}",
+ "-x", # always use a fresh connection
+ "-M",
+ str(max_host_conns), # limit conns per host
+ "-r",
+ f"{env.domain1}:{port}:127.0.0.1",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
if max_host_conns > 0:
matched_lines = 0
for line in r.trace_lines:
- m = re.match(r'.*The cache now contains (\d+) members.*', line)
+ m = re.match(r".*The cache now contains (\d+) members.*", line)
if m:
matched_lines += 1
n = int(m.group(1))
assert n <= max_host_conns
assert matched_lines > 0
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
@pytest.mark.parametrize("max_total_conns", [0, 1, 5])
- def test_02_34_max_total_conns(self, env: Env, httpd, nghttpx, proto, max_total_conns):
- if proto == 'h3' and not env.have_h3():
+ def test_02_34_max_total_conns(
+ self, env: Env, httpd, nghttpx, proto, max_total_conns
+ ):
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
max_parallel = 50
- docname = 'data-10k'
+ docname = "data-10k"
port = env.port_for(proto)
- url = f'https://{env.domain1}:{port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ url = f"https://{env.domain1}:{port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}',
- '-m', f'{max_parallel}',
- '-x', # always use a fresh connection
- '-T', str(max_total_conns), # limit total connections
- '-r', f'{env.domain1}:{port}:127.0.0.1',
- '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-m",
+ f"{max_parallel}",
+ "-x", # always use a fresh connection
+ "-T",
+ str(max_total_conns), # limit total connections
+ "-r",
+ f"{env.domain1}:{port}:127.0.0.1",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(httpd.docs_dir, docname)
self.check_downloads(client, srcfile, count)
if max_total_conns > 0:
matched_lines = 0
for line in r.trace_lines:
- m = re.match(r'.*The cache now contains (\d+) members.*', line)
+ m = re.match(r".*The cache now contains (\d+) members.*", line)
if m:
matched_lines += 1
n = int(m.group(1))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestGoAway:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
# download files sequentially with delay, reload server for GOAWAY
def test_03_01_h2_goaway(self, env: Env, httpd, nghttpx):
- proto = 'h2'
+ proto = "h2"
count = 3
self.r = None
+
def long_run():
curl = CurlClient(env=env)
# send 10 chunks of 1024 bytes in a response body with 100ms delay in between
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=10&chunk_size=1024&chunk_delay=100ms'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=10&chunk_size=1024&chunk_delay=100ms"
+ )
self.r = curl.http_download(urls=[urln], alpn_proto=proto)
t = Thread(target=long_run)
# we expect to see a second connection opened afterwards
assert r.total_connects == 2
for idx, s in enumerate(r.stats):
- if s['num_connects'] > 0:
- log.debug(f'request {idx} connected')
+ if s["num_connects"] > 0:
+ log.debug(f"request {idx} connected")
# this should take `count` seconds to retrieve
assert r.duration >= timedelta(seconds=count)
# download files sequentially with delay, reload server for GOAWAY
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_03_02_h3_goaway(self, env: Env, httpd, nghttpx):
- proto = 'h3'
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ proto = "h3"
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
- if proto == 'h3' and env.curl_uses_ossl_quic():
- pytest.skip('OpenSSL QUIC fails here')
+ if proto == "h3" and env.curl_uses_ossl_quic():
+ pytest.skip("OpenSSL QUIC fails here")
count = 3
self.r = None
+
def long_run():
curl = CurlClient(env=env)
# send 10 chunks of 1024 bytes in a response body with 100ms delay in between
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=10&chunk_size=1024&chunk_delay=100ms'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=10&chunk_size=1024&chunk_delay=100ms"
+ )
self.r = curl.http_download(urls=[urln], alpn_proto=proto)
t = Thread(target=long_run)
t.join()
r: ExecResult = self.r
# this should take `count` seconds to retrieve, maybe a little less
- assert r.duration >= timedelta(seconds=count-1)
+ assert r.duration >= timedelta(seconds=count - 1)
r.check_response(count=count, http_status=200, connect_count=2)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
for idx, s in enumerate(r.stats):
- if s['num_connects'] > 0:
- log.debug(f'request {idx} connected')
+ if s["num_connects"] > 0:
+ log.debug(f"request {idx} connected")
# download files sequentially with delay, reload server for GOAWAY
def test_03_03_h1_goaway(self, env: Env, httpd, nghttpx):
- proto = 'http/1.1'
+ proto = "http/1.1"
count = 3
self.r = None
+
def long_run():
curl = CurlClient(env=env)
# send 10 chunks of 1024 bytes in a response body with 100ms delay in between
# pause 2 seconds between requests
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=10&chunk_size=1024&chunk_delay=100ms'
- self.r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--rate', '30/m',
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=10&chunk_size=1024&chunk_delay=100ms"
+ )
+ self.r = curl.http_download(
+ urls=[urln],
+ alpn_proto=proto,
+ extra_args=[
+ "--rate",
+ "30/m",
+ ],
+ )
t = Thread(target=long_run)
t.start()
# reload will shut down the connection gracefully
# we expect to see a second connection opened afterwards
for idx, s in enumerate(r.stats):
- if s['num_connects'] > 0:
- log.debug(f'request {idx} connected')
+ if s["num_connects"] > 0:
+ log.debug(f"request {idx} connected")
# this should take `count` seconds to retrieve
assert r.duration >= timedelta(seconds=count)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
+@pytest.mark.skipif(
+ condition=Env().slow_network, reason="not suitable for slow network tests"
+)
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
class TestStuttered:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.reload()
# download 1 file, check that delayed response works in general
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_04_01_download_1(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=100&chunk_size=100&chunk_delay=10ms'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=100&chunk_size=100&chunk_delay=10ms"
+ )
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=1, http_status=200)
# download 50 files in 100 chunks a 100 bytes with 10ms delay between
# prepend 100 file requests to warm up connection processing limits
# (Apache2 increases # of parallel processed requests after successes)
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_04_02_100_100_10(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
warmups = 100
curl = CurlClient(env=env)
- url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count-1}]'\
- '&chunks=100&chunk_size=100&chunk_delay=10ms'
- r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
- extra_args=['--parallel'])
- r.check_response(count=warmups+count, http_status=200)
+ url1 = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]"
+ )
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count-1}]"
+ "&chunks=100&chunk_size=100&chunk_delay=10ms"
+ )
+ r = curl.http_download(
+ urls=[url1, urln], alpn_proto=proto, extra_args=["--parallel"]
+ )
+ r.check_response(count=warmups + count, http_status=200)
assert r.total_connects == 1
- t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
+ t_avg, i_min, t_min, i_max, t_max = self.stats_spread(
+ r.stats[warmups:], "time_total"
+ )
if t_max < (5 * t_min) and t_min < 2:
- log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
+ log.warning(
+ f"avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]"
+ )
# download 50 files in 1000 chunks a 10 bytes with 1ms delay between
# prepend 100 file requests to warm up connection processing limits
# (Apache2 increases # of parallel processed requests after successes)
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_04_03_1000_10_1(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
warmups = 100
curl = CurlClient(env=env)
- url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=1000&chunk_size=10&chunk_delay=100us'
- r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
- extra_args=['--parallel'])
- r.check_response(count=warmups+count, http_status=200)
+ url1 = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]"
+ )
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=1000&chunk_size=10&chunk_delay=100us"
+ )
+ r = curl.http_download(
+ urls=[url1, urln], alpn_proto=proto, extra_args=["--parallel"]
+ )
+ r.check_response(count=warmups + count, http_status=200)
assert r.total_connects == 1
- t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
+ t_avg, i_min, t_min, i_max, t_max = self.stats_spread(
+ r.stats[warmups:], "time_total"
+ )
if t_max < (5 * t_min):
- log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
+ log.warning(
+ f"avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]"
+ )
# download 50 files in 10000 chunks a 1 byte with 10us delay between
# prepend 100 file requests to warm up connection processing limits
# (Apache2 increases # of parallel processed requests after successes)
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_04_04_1000_10_1(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 50
warmups = 100
curl = CurlClient(env=env)
- url1 = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]'
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=10000&chunk_size=1&chunk_delay=50us'
- r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
- extra_args=['--parallel'])
- r.check_response(count=warmups+count, http_status=200)
+ url1 = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{warmups-1}]"
+ )
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=10000&chunk_size=1&chunk_delay=50us"
+ )
+ r = curl.http_download(
+ urls=[url1, urln], alpn_proto=proto, extra_args=["--parallel"]
+ )
+ r.check_response(count=warmups + count, http_status=200)
assert r.total_connects == 1
- t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
+ t_avg, i_min, t_min, i_max, t_max = self.stats_spread(
+ r.stats[warmups:], "time_total"
+ )
if t_max < (5 * t_min):
- log.warning(f'avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]')
+ log.warning(
+ f"avg time of transfer: {t_avg} [{i_min}={t_min}, {i_max}={t_max}]"
+ )
- def stats_spread(self, stats: List[Dict], key: str) -> Tuple[float, int, float, int, float]:
+ def stats_spread(
+ self, stats: List[Dict], key: str
+ ) -> Tuple[float, int, float, int, float]:
stotals = 0.0
s_min = 100.0
i_min = -1
if val < s_min:
s_min = val
i_min = idx
- return stotals/len(stats), i_min, s_min, i_max, s_max
+ return stotals / len(stats), i_min, s_min, i_max, s_max
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.55'),
- reason=f"httpd version too old for this: {Env.httpd_version()}")
+@pytest.mark.skipif(
+ condition=not Env.httpd_is_at_least("2.4.55"),
+ reason=f"httpd version too old for this: {Env.httpd_version()}",
+)
class TestErrors:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.reload()
# download 1 file, check that we get CURLE_PARTIAL_FILE
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_05_01_partial_1(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
count = 1
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=3&chunk_size=16000&body_error=reset'
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--retry', '0'
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=3&chunk_size=16000&body_error=reset"
+ )
+ r = curl.http_download(
+ urls=[urln], alpn_proto=proto, extra_args=["--retry", "0"]
+ )
r.check_exit_code(False)
invalid_stats = []
for idx, s in enumerate(r.stats):
- if 'exitcode' not in s or s['exitcode'] not in [18, 56, 92, 95]:
+ if "exitcode" not in s or s["exitcode"] not in [18, 56, 92, 95]:
invalid_stats.append(f'request {idx} exit with {s["exitcode"]}')
- assert len(invalid_stats) == 0, f'failed: {invalid_stats}'
+ assert len(invalid_stats) == 0, f"failed: {invalid_stats}"
# download files, check that we get CURLE_PARTIAL_FILE for all
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_05_02_partial_20(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
count = 20
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}' \
- f'/curltest/tweak?id=[0-{count - 1}]'\
- '&chunks=5&chunk_size=16000&body_error=reset'
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--retry', '0', '--parallel',
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?id=[0-{count - 1}]"
+ "&chunks=5&chunk_size=16000&body_error=reset"
+ )
+ r = curl.http_download(
+ urls=[urln],
+ alpn_proto=proto,
+ extra_args=[
+ "--retry",
+ "0",
+ "--parallel",
+ ],
+ )
r.check_exit_code(False)
- assert len(r.stats) == count, f'did not get all stats: {r}'
+ assert len(r.stats) == count, f"did not get all stats: {r}"
invalid_stats = []
for idx, s in enumerate(r.stats):
- if 'exitcode' not in s or s['exitcode'] not in [18, 55, 56, 92, 95]:
+ if "exitcode" not in s or s["exitcode"] not in [18, 55, 56, 92, 95]:
invalid_stats.append(f'request {idx} exit with {s["exitcode"]}\n{s}')
- assert len(invalid_stats) == 0, f'failed: {invalid_stats}'
+ assert len(invalid_stats) == 0, f"failed: {invalid_stats}"
# access a resource that, on h2, RST the stream with HTTP_1_1_REQUIRED
def test_05_03_required(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
- proto = 'http/1.1'
- urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/1_1'
+ proto = "http/1.1"
+ urln = f"https://{env.authority_for(env.domain1, proto)}/curltest/1_1"
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_exit_code(0)
r.check_response(http_status=200, count=1)
- proto = 'h2'
- urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/1_1'
+ proto = "h2"
+ urln = f"https://{env.authority_for(env.domain1, proto)}/curltest/1_1"
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_exit_code(0)
r.check_response(http_status=200, count=1)
# check that we did a downgrade
- assert r.stats[0]['http_version'] == '1.1', r.dump_logs()
+ assert r.stats[0]["http_version"] == "1.1", r.dump_logs()
# On the URL used here, Apache is doing an "unclean" TLS shutdown,
# meaning it sends no shutdown notice and just closes TCP.
# and stop receiving when that signals the end
# - h2 to work since it will signal the end of the response before
# and not see the "unclean" close either
- @pytest.mark.parametrize("proto", ['http/1.0', 'http/1.1', 'h2'])
+ @pytest.mark.parametrize("proto", ["http/1.0", "http/1.1", "h2"])
def test_05_04_unclean_tls_shutdown(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- count = 10 if proto == 'h2' else 1
+ count = 10 if proto == "h2" else 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}'\
- f'/curltest/shutdown_unclean?id=[0-{count-1}]&chunks=4'
- r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
- '--parallel',
- ])
- if proto == 'http/1.0' and not env.curl_uses_lib('wolfssl') and \
- (env.curl_is_debug() or not env.curl_uses_lib('openssl')):
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/shutdown_unclean?id=[0-{count-1}]&chunks=4"
+ )
+ r = curl.http_download(
+ urls=[url],
+ alpn_proto=proto,
+ extra_args=[
+ "--parallel",
+ ],
+ )
+ if (
+ proto == "http/1.0"
+ and not env.curl_uses_lib("wolfssl")
+ and (env.curl_is_debug() or not env.curl_uses_lib("openssl"))
+ ):
# we are inconsistent if we fail or not in missing TLS shutdown
# openssl code ignore such errors intentionally in non-debug builds
r.check_exit_code(56)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestEyeballs:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
def test_06_01_h3_only(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
- r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
+ r = curl.http_download(urls=[urln], extra_args=["--http3-only"])
r.check_response(count=1, http_status=200)
- assert r.stats[0]['http_version'] == '3'
+ assert r.stats[0]["http_version"] == "3"
# download using only HTTP/3 on missing server
@pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
nghttpx.stop_if_running()
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
- r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
+ r = curl.http_download(urls=[urln], extra_args=["--http3-only"])
r.check_response(exitcode=7, http_status=None)
# download using HTTP/3 on missing server with fallback on h2
nghttpx.stop_if_running()
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
- r = curl.http_download(urls=[urln], extra_args=['--http3'])
+ r = curl.http_download(urls=[urln], extra_args=["--http3"])
r.check_response(count=1, http_status=200)
- assert r.stats[0]['http_version'] == '2'
+ assert r.stats[0]["http_version"] == "2"
# download using HTTP/3 on missing server with fallback on http/1.1
@pytest.mark.skipif(condition=not Env.have_h3(), reason="missing HTTP/3 support")
nghttpx.stop_if_running()
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain2, "h3")}/data.json'
- r = curl.http_download(urls=[urln], extra_args=['--http3'])
+ r = curl.http_download(urls=[urln], extra_args=["--http3"])
r.check_response(count=1, http_status=200)
- assert r.stats[0]['http_version'] == '1.1'
+ assert r.stats[0]["http_version"] == "1.1"
# make a successful https: transfer and observer the timer stats
def test_06_10_stats_success(self, env: Env, httpd, nghttpx):
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_download(urls=[urln])
r.check_response(count=1, http_status=200)
- assert r.stats[0]['time_connect'] > 0.0
- assert r.stats[0]['time_appconnect'] > 0.0
+ assert r.stats[0]["time_connect"] > 0.0
+ assert r.stats[0]["time_appconnect"] > 0.0
# make https: to a hostname that tcp connects, but will not verify
def test_06_11_stats_fail_verify(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
- urln = f'https://not-valid.com:{env.https_port}/data.json'
- r = curl.http_download(urls=[urln], extra_args=[
- '--resolve', f'not-valid.com:{env.https_port}:127.0.0.1'
- ])
+ urln = f"https://not-valid.com:{env.https_port}/data.json"
+ r = curl.http_download(
+ urls=[urln],
+ extra_args=["--resolve", f"not-valid.com:{env.https_port}:127.0.0.1"],
+ )
r.check_response(count=1, http_status=0, exitcode=False)
- assert r.stats[0]['time_connect'] > 0.0 # was tcp connected
- assert r.stats[0]['time_appconnect'] == 0 # but not SSL verified
+ assert r.stats[0]["time_connect"] > 0.0 # was tcp connected
+ assert r.stats[0]["time_appconnect"] == 0 # but not SSL verified
# make https: to an invalid address
def test_06_12_stats_fail_tcp(self, env: Env, httpd, nghttpx):
curl = CurlClient(env=env)
- urln = 'https://not-valid.com:1/data.json'
- r = curl.http_download(urls=[urln], extra_args=[
- '--resolve', f'not-valid.com:{1}:127.0.0.1'
- ])
+ urln = "https://not-valid.com:1/data.json"
+ r = curl.http_download(
+ urls=[urln], extra_args=["--resolve", f"not-valid.com:{1}:127.0.0.1"]
+ )
r.check_response(count=1, http_status=None, exitcode=False)
- assert r.stats[0]['time_connect'] == 0 # no one should have listened
- assert r.stats[0]['time_appconnect'] == 0 # did not happen either
+ assert r.stats[0]["time_connect"] == 0 # no one should have listened
+ assert r.stats[0]["time_appconnect"] == 0 # did not happen either
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestUpload:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
- env.make_data_file(indir=env.gen_dir, fname="data-10k", fsize=10*1024)
- env.make_data_file(indir=env.gen_dir, fname="data-63k", fsize=63*1024)
- env.make_data_file(indir=env.gen_dir, fname="data-64k", fsize=64*1024)
- env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
- env.make_data_file(indir=env.gen_dir, fname="data-1m+", fsize=(1024*1024)+1)
- env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-10k", fsize=10 * 1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-63k", fsize=63 * 1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-64k", fsize=64 * 1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100 * 1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-1m+", fsize=(1024 * 1024) + 1)
+ env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10 * 1024 * 1024)
httpd.clear_extra_configs()
httpd.reload()
# upload small data, check that this is what was echoed
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_01_upload_1_small(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- data = '0123456789'
+ data = "0123456789"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
r.check_stats(count=1, http_status=200, exitcode=0)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [data]
# upload large data, check that this is what was echoed
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_02_upload_1_large(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
+ r = curl.http_upload(urls=[url], data=f"@{fdata}", alpn_proto=proto)
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
# upload data sequentially, check that they were echoed
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_10_upload_sequential(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
count = 20
- data = '0123456789'
+ data = "0123456789"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
assert respdata == [data]
# upload data parallel, check that they were echoed
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_07_11_upload_parallel(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
# limit since we use a separate connection in h1
count = 20
- data = '0123456789'
+ data = "0123456789"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
- extra_args=['--parallel'])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
+ r = curl.http_upload(
+ urls=[url], data=data, alpn_proto=proto, extra_args=["--parallel"]
+ )
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
# upload large data sequentially, check that this is what was echoed
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_12_upload_seq_large(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
count = 10
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
+ r = curl.http_upload(urls=[url], data=f"@{fdata}", alpn_proto=proto)
r.check_response(count=count, http_status=200)
indata = open(fdata).readlines()
r.check_stats(count=count, http_status=200, exitcode=0)
assert respdata == indata
# upload very large data sequentially, check that this is what was echoed
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_13_upload_seq_large(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ fdata = os.path.join(env.gen_dir, "data-10m")
count = 2
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
+ r = curl.http_upload(urls=[url], data=f"@{fdata}", alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
indata = open(fdata).readlines()
for i in range(count):
assert respdata == indata
# upload from stdin, issue #14870
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
- @pytest.mark.parametrize("indata", [
- '', '1', '123\n456andsomething\n\n'
- ])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
+ @pytest.mark.parametrize("indata", ["", "1", "123\n456andsomething\n\n"])
def test_07_14_upload_stdin(self, env: Env, httpd, nghttpx, proto, indata):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
count = 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]"
r = curl.http_put(urls=[url], data=indata, alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
- assert respdata == [f'{len(indata)}']
+ assert respdata == [f"{len(indata)}"]
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_15_hx_put(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
- upload_size = 128*1024
- url = f'https://localhost:{env.https_port}/curltest/put?id=[0-{count-1}]'
- client = LocalClient(name='hx-upload', env=env)
+ upload_size = 128 * 1024
+ url = f"https://localhost:{env.https_port}/curltest/put?id=[0-{count-1}]"
+ client = LocalClient(name="hx-upload", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-S', f'{upload_size}', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=["-n", f"{count}", "-S", f"{upload_size}", "-V", proto, url]
+ )
r.check_exit_code(0)
self.check_downloads(client, [f"{upload_size}"], count)
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_16_hx_put_reuse(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
- upload_size = 128*1024
- url = f'https://localhost:{env.https_port}/curltest/put?id=[0-{count-1}]'
- client = LocalClient(name='hx-upload', env=env)
+ upload_size = 128 * 1024
+ url = f"https://localhost:{env.https_port}/curltest/put?id=[0-{count-1}]"
+ client = LocalClient(name="hx-upload", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-S', f'{upload_size}', '-R', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=["-n", f"{count}", "-S", f"{upload_size}", "-R", "-V", proto, url]
+ )
r.check_exit_code(0)
self.check_downloads(client, [f"{upload_size}"], count)
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_17_hx_post_reuse(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
- upload_size = 128*1024
- url = f'https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]'
- client = LocalClient(name='hx-upload', env=env)
+ upload_size = 128 * 1024
+ url = f"https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]"
+ client = LocalClient(name="hx-upload", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-M', 'POST', '-S', f'{upload_size}', '-R', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-M",
+ "POST",
+ "-S",
+ f"{upload_size}",
+ "-R",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
self.check_downloads(client, ["x" * upload_size], count)
# upload data parallel, check that they were echoed
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_07_20_upload_parallel(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
# limit since we use a separate connection in h1
count = 10
- data = '0123456789'
+ data = "0123456789"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
- extra_args=['--parallel'])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
+ r = curl.http_upload(
+ urls=[url], data=data, alpn_proto=proto, extra_args=["--parallel"]
+ )
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
# upload large data parallel, check that this is what was echoed
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_07_21_upload_parallel_large(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
# limit since we use a separate connection in h1
count = 10
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
- extra_args=['--parallel'])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
+ r = curl.http_upload(
+ urls=[url], data=f"@{fdata}", alpn_proto=proto, extra_args=["--parallel"]
+ )
r.check_response(count=count, http_status=200)
self.check_download(count, fdata, curl)
# upload large data parallel to a URL that denies uploads
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_07_22_upload_parallel_fail(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ fdata = os.path.join(env.gen_dir, "data-10m")
count = 20
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}'\
- f'/curltest/tweak?status=400&delay=5ms&chunks=1&body_error=reset&id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
- extra_args=['--parallel'])
- exp_exit = 92 if proto == 'h2' else 95
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}"
+ f"/curltest/tweak?status=400&delay=5ms&chunks=1&body_error=reset&id=[0-{count-1}]"
+ )
+ r = curl.http_upload(
+ urls=[url], data=f"@{fdata}", alpn_proto=proto, extra_args=["--parallel"]
+ )
+ exp_exit = 92 if proto == "h2" else 95
r.check_stats(count=count, exitcode=exp_exit)
# PUT 100k
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_30_put_100k(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
count = 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
- r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
- extra_args=['--parallel'])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]"
+ r = curl.http_put(
+ urls=[url], fdata=fdata, alpn_proto=proto, extra_args=["--parallel"]
+ )
r.check_stats(count=count, http_status=200, exitcode=0)
- exp_data = [f'{os.path.getsize(fdata)}']
+ exp_data = [f"{os.path.getsize(fdata)}"]
r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
# PUT 10m
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_31_put_10m(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ fdata = os.path.join(env.gen_dir, "data-10m")
count = 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=2ms'
- r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
- extra_args=['--parallel'])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=2ms"
+ r = curl.http_put(
+ urls=[url], fdata=fdata, alpn_proto=proto, extra_args=["--parallel"]
+ )
r.check_stats(count=count, http_status=200, exitcode=0)
- exp_data = [f'{os.path.getsize(fdata)}']
+ exp_data = [f"{os.path.getsize(fdata)}"]
r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
# issue #10591
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_32_issue_10591(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ fdata = os.path.join(env.gen_dir, "data-10m")
count = 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]"
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto)
r.check_stats(count=count, http_status=200, exitcode=0)
# issue #11157, upload that is 404'ed by server, needs to terminate
# correctly and not time out on sending
def test_07_33_issue_11157a(self, env: Env, httpd, nghttpx):
- proto = 'h2'
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ proto = "h2"
+ fdata = os.path.join(env.gen_dir, "data-10m")
# send a POST to our PUT handler which will send immediately a 404 back
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put"
curl = CurlClient(env=env)
- r = curl.run_direct(with_stats=True, args=[
- '--resolve', f'{env.authority_for(env.domain1, proto)}:127.0.0.1',
- '--cacert', env.ca.cert_file,
- '--request', 'POST',
- '--max-time', '5', '-v',
- '--url', url,
- '--form', 'idList=12345678',
- '--form', 'pos=top',
- '--form', 'name=mr_test',
- '--form', f'fileSource=@{fdata};type=application/pdf',
- ])
- assert r.exit_code == 0, f'{r}'
+ r = curl.run_direct(
+ with_stats=True,
+ args=[
+ "--resolve",
+ f"{env.authority_for(env.domain1, proto)}:127.0.0.1",
+ "--cacert",
+ env.ca.cert_file,
+ "--request",
+ "POST",
+ "--max-time",
+ "5",
+ "-v",
+ "--url",
+ url,
+ "--form",
+ "idList=12345678",
+ "--form",
+ "pos=top",
+ "--form",
+ "name=mr_test",
+ "--form",
+ f"fileSource=@{fdata};type=application/pdf",
+ ],
+ )
+ assert r.exit_code == 0, f"{r}"
r.check_stats(1, 404)
# issue #11157, send upload that is slowly read in
def test_07_33_issue_11157b(self, env: Env, httpd, nghttpx):
- proto = 'h2'
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ proto = "h2"
+ fdata = os.path.join(env.gen_dir, "data-10m")
# tell our test PUT handler to read the upload more slowly, so
# that the send buffering and transfer loop needs to wait
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?chunk_delay=2ms'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put?chunk_delay=2ms"
curl = CurlClient(env=env)
- r = curl.run_direct(with_stats=True, args=[
- '--verbose', '--trace-config', 'ids,time',
- '--resolve', f'{env.authority_for(env.domain1, proto)}:127.0.0.1',
- '--cacert', env.ca.cert_file,
- '--request', 'PUT',
- '--max-time', '10', '-v',
- '--url', url,
- '--form', 'idList=12345678',
- '--form', 'pos=top',
- '--form', 'name=mr_test',
- '--form', f'fileSource=@{fdata};type=application/pdf',
- ])
+ r = curl.run_direct(
+ with_stats=True,
+ args=[
+ "--verbose",
+ "--trace-config",
+ "ids,time",
+ "--resolve",
+ f"{env.authority_for(env.domain1, proto)}:127.0.0.1",
+ "--cacert",
+ env.ca.cert_file,
+ "--request",
+ "PUT",
+ "--max-time",
+ "10",
+ "-v",
+ "--url",
+ url,
+ "--form",
+ "idList=12345678",
+ "--form",
+ "pos=top",
+ "--form",
+ "name=mr_test",
+ "--form",
+ f"fileSource=@{fdata};type=application/pdf",
+ ],
+ )
assert r.exit_code == 0, r.dump_logs()
r.check_stats(1, 200)
def test_07_34_issue_11194(self, env: Env, httpd, nghttpx):
- proto = 'h2'
+ proto = "h2"
# tell our test PUT handler to read the upload more slowly, so
# that the send buffering and transfer loop needs to wait
- fdata = os.path.join(env.gen_dir, 'data-100k')
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put'
+ fdata = os.path.join(env.gen_dir, "data-100k")
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put"
curl = CurlClient(env=env)
- r = curl.run_direct(with_stats=True, args=[
- '--verbose', '--trace-config', 'ids,time',
- '--resolve', f'{env.authority_for(env.domain1, proto)}:127.0.0.1',
- '--cacert', env.ca.cert_file,
- '--request', 'PUT',
- '--digest', '--user', 'test:test',
- '--data-binary', f'@{fdata}',
- '--url', url,
- ])
+ r = curl.run_direct(
+ with_stats=True,
+ args=[
+ "--verbose",
+ "--trace-config",
+ "ids,time",
+ "--resolve",
+ f"{env.authority_for(env.domain1, proto)}:127.0.0.1",
+ "--cacert",
+ env.ca.cert_file,
+ "--request",
+ "PUT",
+ "--digest",
+ "--user",
+ "test:test",
+ "--data-binary",
+ f"@{fdata}",
+ "--url",
+ url,
+ ],
+ )
assert r.exit_code == 0, r.dump_logs()
r.check_stats(1, 200)
# upload large data on a h1 to h2 upgrade
def test_07_35_h1_h2_upgrade_upload(self, env: Env, httpd, nghttpx):
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/curltest/echo?id=[0-0]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', extra_args=[
- '--http2'
- ])
+ url = f"http://{env.domain1}:{env.http_port}/curltest/echo?id=[0-0]"
+ r = curl.http_upload(urls=[url], data=f"@{fdata}", extra_args=["--http2"])
r.check_response(count=1, http_status=200)
# apache does not Upgrade on request with a body
- assert r.stats[0]['http_version'] == '1.1', f'{r}'
+ assert r.stats[0]["http_version"] == "1.1", f"{r}"
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
# upload to a 301,302,303 response
- @pytest.mark.parametrize("redir", ['301', '302', '303'])
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("redir", ["301", "302", "303"])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_36_upload_30x(self, env: Env, httpd, nghttpx, redir, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- data = '0123456789' * 10
+ data = "0123456789" * 10
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo{redir}?id=[0-0]'
- r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
- '-L', '--trace-config', 'http/2,http/3'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo{redir}?id=[0-0]"
+ r = curl.http_upload(
+ urls=[url],
+ data=data,
+ alpn_proto=proto,
+ extra_args=["-L", "--trace-config", "http/2,http/3"],
+ )
r.check_response(count=1, http_status=200)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [] # was transformed to a GET
# upload to a 307 response
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_37_upload_307(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- data = '0123456789' * 10
+ data = "0123456789" * 10
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo307?id=[0-0]'
- r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
- '-L', '--trace-config', 'http/2,http/3'
- ])
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/echo307?id=[0-0]"
+ )
+ r = curl.http_upload(
+ urls=[url],
+ data=data,
+ alpn_proto=proto,
+ extra_args=["-L", "--trace-config", "http/2,http/3"],
+ )
r.check_response(count=1, http_status=200)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [data] # was POST again
# POST form data, yet another code path in transfer
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_38_form_small(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
- r = curl.http_form(urls=[url], alpn_proto=proto, form={
- 'name1': 'value1',
- })
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
+ r = curl.http_form(
+ urls=[url],
+ alpn_proto=proto,
+ form={
+ "name1": "value1",
+ },
+ )
r.check_stats(count=1, http_status=200, exitcode=0)
# POST data urlencoded, small enough to be sent with request headers
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_39_post_urlenc_small(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- fdata = os.path.join(env.gen_dir, 'data-63k')
+ fdata = os.path.join(env.gen_dir, "data-63k")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
- '--trace-config', 'http/2,http/3'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
+ r = curl.http_upload(
+ urls=[url],
+ data=f"@{fdata}",
+ alpn_proto=proto,
+ extra_args=["--trace-config", "http/2,http/3"],
+ )
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
# POST data urlencoded, large enough to be sent separate from request headers
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_40_post_urlenc_large(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- fdata = os.path.join(env.gen_dir, 'data-64k')
+ fdata = os.path.join(env.gen_dir, "data-64k")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
- '--trace-config', 'http/2,http/3'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
+ r = curl.http_upload(
+ urls=[url],
+ data=f"@{fdata}",
+ alpn_proto=proto,
+ extra_args=["--trace-config", "http/2,http/3"],
+ )
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
# than our default upload buffer length (64KB).
# Unfixed, this will fail when run with CURL_DBG_SOCK_WBLOCK=80 most
# of the time
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_41_post_urlenc_small(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- if proto == 'h3' and env.curl_uses_lib('quiche'):
+ if proto == "h3" and env.curl_uses_lib("quiche"):
pytest.skip("quiche has CWND issues with large requests")
- fdata = os.path.join(env.gen_dir, 'data-63k')
+ fdata = os.path.join(env.gen_dir, "data-63k")
curl = CurlClient(env=env)
- extra_args = ['--trace-config', 'http/2,http/3']
+ extra_args = ["--trace-config", "http/2,http/3"]
# add enough headers so that the first send chunk is > 64KB
for i in range(63):
- extra_args.extend(['-H', f'x{i:02d}: {"y"*1019}'])
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=extra_args)
+ extra_args.extend(["-H", f'x{i:02d}: {"y"*1019}'])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
+ r = curl.http_upload(
+ urls=[url], data=f"@{fdata}", alpn_proto=proto, extra_args=extra_args
+ )
r.check_stats(count=1, http_status=200, exitcode=0)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
dfile = curl.download_file(i)
assert os.path.exists(dfile)
if not filecmp.cmp(srcfile, dfile, shallow=False):
- diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
- b=open(dfile).readlines(),
- fromfile=srcfile,
- tofile=dfile,
- n=1))
- assert False, f'download {dfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=open(srcfile).readlines(),
+ b=open(dfile).readlines(),
+ fromfile=srcfile,
+ tofile=dfile,
+ n=1,
+ )
+ )
+ assert False, f"download {dfile} differs:\n{diff}"
# upload data, pause, let connection die with an incomplete response
# issues #11769 #13260
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_42a_upload_disconnect(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- client = LocalClient(name='upload-pausing', env=env, timeout=60)
+ client = LocalClient(name="upload-pausing", env=env, timeout=60)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&die_after=0'
- r = client.run(['-V', proto, url])
- if r.exit_code == 18: # PARTIAL_FILE is always ok
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&die_after=0"
+ r = client.run(["-V", proto, url])
+ if r.exit_code == 18: # PARTIAL_FILE is always ok
pass
- elif proto == 'h2':
+ elif proto == "h2":
r.check_exit_code(92) # CURLE_HTTP2_STREAM also ok
- elif proto == 'h3':
+ elif proto == "h3":
r.check_exit_code(95) # CURLE_HTTP3 also ok
else:
r.check_exit_code(18) # will fail as it should
# upload data, pause, let connection die without any response at all
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_42b_upload_disconnect(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- client = LocalClient(name='upload-pausing', env=env, timeout=60)
+ client = LocalClient(name="upload-pausing", env=env, timeout=60)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&just_die=1'
- r = client.run(['-V', proto, url])
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&just_die=1"
+ r = client.run(["-V", proto, url])
exp_code = 52 # GOT_NOTHING
- if proto == 'h2' or proto == 'h3':
+ if proto == "h2" or proto == "h3":
exp_code = 0 # we get a 500 from the server
r.check_exit_code(exp_code) # GOT_NOTHING
# upload data, pause, let connection die after 100 continue
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_42c_upload_disconnect(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- client = LocalClient(name='upload-pausing', env=env, timeout=60)
+ client = LocalClient(name="upload-pausing", env=env, timeout=60)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&die_after_100=1'
- r = client.run(['-V', proto, url])
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]&die_after_100=1"
+ r = client.run(["-V", proto, url])
exp_code = 52 # GOT_NOTHING
- if proto == 'h2' or proto == 'h3':
+ if proto == "h2" or proto == "h3":
exp_code = 0 # we get a 500 from the server
r.check_exit_code(exp_code) # GOT_NOTHING
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_43_upload_denied(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 fails here")
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ fdata = os.path.join(env.gen_dir, "data-10m")
count = 1
max_upload = 128 * 1024
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?'\
- f'id=[0-{count-1}]&max_upload={max_upload}'
- r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
- extra_args=['--trace-config', 'all'])
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/put?"
+ f"id=[0-{count-1}]&max_upload={max_upload}"
+ )
+ r = curl.http_put(
+ urls=[url],
+ fdata=fdata,
+ alpn_proto=proto,
+ extra_args=["--trace-config", "all"],
+ )
r.check_stats(count=count, http_status=413, exitcode=0)
# speed limited on put handler
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_50_put_speed_limit(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
up_len = 100 * 1024
speed_limit = 50 * 1024
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'
- r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
- with_headers=True, extra_args=[
- '--limit-rate', f'{speed_limit}'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]"
+ r = curl.http_put(
+ urls=[url],
+ fdata=fdata,
+ alpn_proto=proto,
+ with_headers=True,
+ extra_args=["--limit-rate", f"{speed_limit}"],
+ )
r.check_response(count=count, http_status=200)
- assert r.responses[0]['header']['received-length'] == f'{up_len}', f'{r.responses[0]}'
- up_speed = r.stats[0]['speed_upload']
- assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f'{r.stats[0]}'
+ assert (
+ r.responses[0]["header"]["received-length"] == f"{up_len}"
+ ), f"{r.responses[0]}"
+ up_speed = r.stats[0]["speed_upload"]
+ assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f"{r.stats[0]}"
# speed limited on echo handler
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_07_51_echo_speed_limit(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
speed_limit = 50 * 1024
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
- with_headers=True, extra_args=[
- '--limit-rate', f'{speed_limit}'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
+ r = curl.http_upload(
+ urls=[url],
+ data=f"@{fdata}",
+ alpn_proto=proto,
+ with_headers=True,
+ extra_args=["--limit-rate", f"{speed_limit}"],
+ )
r.check_response(count=count, http_status=200)
- up_speed = r.stats[0]['speed_upload']
- assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f'{r.stats[0]}'
+ up_speed = r.stats[0]["speed_upload"]
+ assert (speed_limit * 0.5) <= up_speed <= (speed_limit * 1.5), f"{r.stats[0]}"
# upload larger data, triggering "Expect: 100-continue" code paths
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_07_60_upload_exp100(self, env: Env, httpd, nghttpx, proto):
- fdata = os.path.join(env.gen_dir, 'data-1m+')
+ fdata = os.path.join(env.gen_dir, "data-1m+")
read_delay = 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'\
- f'&read_delay={read_delay}s'
- r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, extra_args=[
- '--expect100-timeout', f'{read_delay+1}'
- ])
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]"
+ f"&read_delay={read_delay}s"
+ )
+ r = curl.http_put(
+ urls=[url],
+ fdata=fdata,
+ alpn_proto=proto,
+ extra_args=["--expect100-timeout", f"{read_delay+1}"],
+ )
r.check_stats(count=1, http_status=200, exitcode=0)
# upload larger data, triggering "Expect: 100-continue" code paths
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_07_61_upload_exp100_timeout(self, env: Env, httpd, nghttpx, proto):
- fdata = os.path.join(env.gen_dir, 'data-1m+')
+ fdata = os.path.join(env.gen_dir, "data-1m+")
read_delay = 2
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]'\
- f'&read_delay={read_delay}s'
- r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto, extra_args=[
- '--expect100-timeout', f'{read_delay-1}'
- ])
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-0]"
+ f"&read_delay={read_delay}s"
+ )
+ r = curl.http_put(
+ urls=[url],
+ fdata=fdata,
+ alpn_proto=proto,
+ extra_args=["--expect100-timeout", f"{read_delay-1}"],
+ )
r.check_stats(count=1, http_status=200, exitcode=0)
# issue #15688 when posting a form and cr_mime_read() is called with
# length < 4, we did not progress
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_07_62_upload_issue_15688(self, env: Env, httpd, proto):
# this length leads to (including multipart formatting) to a
# client reader invocation with length 1.
upload_len = 196169
- fname = f'data-{upload_len}'
+ fname = f"data-{upload_len}"
env.make_data_file(indir=env.gen_dir, fname=fname, fsize=upload_len)
fdata = os.path.join(env.gen_dir, fname)
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
- r = curl.http_form(urls=[url], form={
- 'file': f'@{fdata}',
- }, alpn_proto=proto, extra_args=[
- '--max-time', '10'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]"
+ r = curl.http_form(
+ urls=[url],
+ form={
+ "file": f"@{fdata}",
+ },
+ alpn_proto=proto,
+ extra_args=["--max-time", "10"],
+ )
r.check_stats(count=1, http_status=200, exitcode=0)
# nghttpx is the only server we have that supports TLS early data and
# has a limit of 16k it announces
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx")
- @pytest.mark.parametrize("proto,upload_size,exp_early", [
- ['http/1.1', 100, 203], # headers+body
- ['http/1.1', 10*1024, 10345], # headers+body
- ['http/1.1', 32*1024, 16384], # headers+body, limited by server max
- ['h2', 10*1024, 10378], # headers+body
- ['h2', 32*1024, 16384], # headers+body, limited by server max
- ['h3', 1024, 1126], # headers+body (app data)
- ['h3', 1024 * 1024, 131177], # headers+body (long app data). The 0RTT
- # size is limited by our sendbuf size
- # of 128K.
- ])
- def test_07_70_put_earlydata(self, env: Env, httpd, nghttpx, proto, upload_size, exp_early):
- if not env.curl_uses_lib('gnutls'):
- pytest.skip('TLS earlydata only implemented in GnuTLS')
- if proto == 'h3' and not env.have_h3():
+ @pytest.mark.parametrize(
+ "proto,upload_size,exp_early",
+ [
+ ["http/1.1", 100, 203], # headers+body
+ ["http/1.1", 10 * 1024, 10345], # headers+body
+ ["http/1.1", 32 * 1024, 16384], # headers+body, limited by server max
+ ["h2", 10 * 1024, 10378], # headers+body
+ ["h2", 32 * 1024, 16384], # headers+body, limited by server max
+ ["h3", 1024, 1126], # headers+body (app data)
+ ["h3", 1024 * 1024, 131177], # headers+body (long app data). The 0RTT
+ # size is limited by our sendbuf size
+ # of 128K.
+ ],
+ )
+ def test_07_70_put_earlydata(
+ self, env: Env, httpd, nghttpx, proto, upload_size, exp_early
+ ):
+ if not env.curl_uses_lib("gnutls"):
+ pytest.skip("TLS earlydata only implemented in GnuTLS")
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
# we want this test to always connect to nghttpx, since it is
# the only server we have that supports TLS earlydata
port = env.port_for(proto)
- if proto != 'h3':
+ if proto != "h3":
port = env.nghttpx_https_port
- url = f'https://{env.domain1}:{port}/curltest/put?id=[0-{count-1}]'
- client = LocalClient(name='hx-upload', env=env)
+ url = f"https://{env.domain1}:{port}/curltest/put?id=[0-{count-1}]"
+ client = LocalClient(name="hx-upload", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}',
- '-e', # use TLS earlydata
- '-f', # forbid reuse of connections
- '-l', # announce upload length, no 'Expect: 100'
- '-S', f'{upload_size}',
- '-r', f'{env.domain1}:{port}:127.0.0.1',
- '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-e", # use TLS earlydata
+ "-f", # forbid reuse of connections
+ "-l", # announce upload length, no 'Expect: 100'
+ "-S",
+ f"{upload_size}",
+ "-r",
+ f"{env.domain1}:{port}:127.0.0.1",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
self.check_downloads(client, [f"{upload_size}"], count)
earlydata = {}
for line in r.trace_lines:
- m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
+ m = re.match(r"^\[t-(\d+)] EarlyData: (-?\d+)", line)
if m:
earlydata[int(m.group(1))] = int(m.group(2))
- assert earlydata[0] == 0, f'{earlydata}'
- assert earlydata[1] == exp_early, f'{earlydata}'
+ assert earlydata[0] == 0, f"{earlydata}"
+ assert earlydata[1] == exp_early, f"{earlydata}"
- def check_downloads(self, client, source: List[str], count: int,
- complete: bool = True):
+ def check_downloads(
+ self, client, source: List[str], count: int, complete: bool = True
+ ):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete:
- diff = "".join(difflib.unified_diff(a=source,
- b=open(dfile).readlines(),
- fromfile='-',
- tofile=dfile,
- n=1))
- assert not diff, f'download {dfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=source,
+ b=open(dfile).readlines(),
+ fromfile="-",
+ tofile=dfile,
+ n=1,
+ )
+ )
+ assert not diff, f"download {dfile} differs:\n{diff}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@pytest.mark.skipif(condition=not Env.has_caddy(), reason="missing caddy")
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
class TestCaddy:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def caddy(self, env):
caddy = Caddy(env=env)
assert caddy.start()
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
- data1k = 1024*'x'
+ data1k = 1024 * "x"
flen = 0
- with open(fpath, 'w') as fd:
+ with open(fpath, "w") as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, caddy):
- self._make_docs_file(docs_dir=caddy.docs_dir, fname='data10k.data', fsize=10*1024)
- self._make_docs_file(docs_dir=caddy.docs_dir, fname='data1.data', fsize=1024*1024)
- self._make_docs_file(docs_dir=caddy.docs_dir, fname='data5.data', fsize=5*1024*1024)
- self._make_docs_file(docs_dir=caddy.docs_dir, fname='data10.data', fsize=10*1024*1024)
- self._make_docs_file(docs_dir=caddy.docs_dir, fname='data100.data', fsize=100*1024*1024)
- env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
+ self._make_docs_file(
+ docs_dir=caddy.docs_dir, fname="data10k.data", fsize=10 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=caddy.docs_dir, fname="data1.data", fsize=1024 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=caddy.docs_dir, fname="data5.data", fsize=5 * 1024 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=caddy.docs_dir, fname="data10.data", fsize=10 * 1024 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=caddy.docs_dir, fname="data100.data", fsize=100 * 1024 * 1024
+ )
+ env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10 * 1024 * 1024)
# download 1 file
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_08_01_download_1(self, env: Env, caddy: Caddy, proto):
- if proto == 'h3' and not env.have_h3_curl():
+ if proto == "h3" and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 itself crashes")
curl = CurlClient(env=env)
- url = f'https://{env.domain1}:{caddy.port}/data.json'
+ url = f"https://{env.domain1}:{caddy.port}/data.json"
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(count=1, http_status=200)
# download 1MB files sequentially
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_08_02_download_1mb_sequential(self, env: Env, caddy: Caddy, proto):
- if proto == 'h3' and not env.have_h3_curl():
+ if proto == "h3" and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 itself crashes")
count = 50
curl = CurlClient(env=env)
- urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
+ urln = f"https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]"
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200, connect_count=1)
# download 1MB files parallel
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_08_03_download_1mb_parallel(self, env: Env, caddy: Caddy, proto):
- if proto == 'h3' and not env.have_h3_curl():
+ if proto == "h3" and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 itself crashes")
count = 20
curl = CurlClient(env=env)
- urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--parallel'
- ])
+ urln = f"https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]"
+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--parallel"])
r.check_response(count=count, http_status=200)
- if proto == 'http/1.1':
+ if proto == "http/1.1":
# http/1.1 parallel transfers will open multiple connections
assert r.total_connects > 1, r.dump_logs()
else:
assert r.total_connects == 1, r.dump_logs()
# download 5MB files sequentially
- @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
+ @pytest.mark.skipif(
+ condition=Env().slow_network, reason="not suitable for slow network tests"
+ )
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_08_04a_download_10mb_sequential(self, env: Env, caddy: Caddy, proto):
- if proto == 'h3' and not env.have_h3_curl():
+ if proto == "h3" and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 itself crashes")
count = 40
curl = CurlClient(env=env)
- urln = f'https://{env.domain1}:{caddy.port}/data5.data?[0-{count-1}]'
+ urln = f"https://{env.domain1}:{caddy.port}/data5.data?[0-{count-1}]"
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200, connect_count=1)
# download 10MB files sequentially
- @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
+ @pytest.mark.skipif(
+ condition=Env().slow_network, reason="not suitable for slow network tests"
+ )
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_08_04b_download_10mb_sequential(self, env: Env, caddy: Caddy, proto):
- if proto == 'h3' and not env.have_h3_curl():
+ if proto == "h3" and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 itself crashes")
count = 20
curl = CurlClient(env=env)
- urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
+ urln = f"https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]"
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200, connect_count=1)
# download 10MB files parallel
- @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.skipif(
+ condition=Env().slow_network, reason="not suitable for slow network tests"
+ )
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
@pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
def test_08_05_download_1mb_parallel(self, env: Env, caddy: Caddy, proto):
- if proto == 'h3' and not env.have_h3_curl():
+ if proto == "h3" and not env.have_h3_curl():
pytest.skip("h3 not supported in curl")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 itself crashes")
- if proto == 'http/1.1' and env.curl_uses_lib('mbedtls'):
- pytest.skip("mbedtls 3.6.0 fails on 50 connections with: "\
- "ssl_handshake returned: (-0x7F00) SSL - Memory allocation failed")
+ if proto == "http/1.1" and env.curl_uses_lib("mbedtls"):
+ pytest.skip(
+ "mbedtls 3.6.0 fails on 50 connections with: "
+ "ssl_handshake returned: (-0x7F00) SSL - Memory allocation failed"
+ )
count = 50
curl = CurlClient(env=env)
- urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--parallel'
- ])
+ urln = f"https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]"
+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=["--parallel"])
r.check_response(count=count, http_status=200)
- if proto == 'http/1.1':
+ if proto == "http/1.1":
# http/1.1 parallel transfers will open multiple connections
assert r.total_connects > 1, r.dump_logs()
else:
assert r.total_connects == 1, r.dump_logs()
# post data parallel, check that they were echoed
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_08_06_post_parallel(self, env: Env, httpd, caddy, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
# limit since we use a separate connection in h1
count = 20
- data = '0123456789'
+ data = "0123456789"
curl = CurlClient(env=env)
- url = f'https://{env.domain2}:{caddy.port}/curltest/echo?id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
- extra_args=['--parallel'])
+ url = f"https://{env.domain2}:{caddy.port}/curltest/echo?id=[0-{count-1}]"
+ r = curl.http_upload(
+ urls=[url], data=data, alpn_proto=proto, extra_args=["--parallel"]
+ )
r.check_stats(count=count, http_status=200, exitcode=0)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
# put large file, check that they length were echoed
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_08_07_put_large(self, env: Env, httpd, caddy, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('msh3'):
+ if proto == "h3" and env.curl_uses_lib("msh3"):
pytest.skip("msh3 stalls here")
# limit since we use a separate connection in h1
count = 1
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ fdata = os.path.join(env.gen_dir, "data-10m")
curl = CurlClient(env=env)
- url = f'https://{env.domain2}:{caddy.port}/curltest/put?id=[0-{count-1}]'
+ url = f"https://{env.domain2}:{caddy.port}/curltest/put?id=[0-{count-1}]"
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto)
- exp_data = [f'{os.path.getsize(fdata)}']
+ exp_data = [f"{os.path.getsize(fdata)}"]
r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_08_08_earlydata(self, env: Env, httpd, caddy, proto):
- if not env.curl_uses_lib('gnutls'):
- pytest.skip('TLS earlydata only implemented in GnuTLS')
- if proto == 'h3' and not env.have_h3():
+ if not env.curl_uses_lib("gnutls"):
+ pytest.skip("TLS earlydata only implemented in GnuTLS")
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
- docname = 'data10k.data'
- url = f'https://{env.domain1}:{caddy.port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data10k.data"
+ url = f"https://{env.domain1}:{caddy.port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}',
- '-e', # use TLS earlydata
- '-f', # forbid reuse of connections
- '-r', f'{env.domain1}:{caddy.port}:127.0.0.1',
- '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-e", # use TLS earlydata
+ "-f", # forbid reuse of connections
+ "-r",
+ f"{env.domain1}:{caddy.port}:127.0.0.1",
+ "-V",
+ proto,
+ url,
+ ]
+ )
r.check_exit_code(0)
srcfile = os.path.join(caddy.docs_dir, docname)
self.check_downloads(client, srcfile, count)
earlydata = {}
for line in r.trace_lines:
- m = re.match(r'^\[t-(\d+)] EarlyData: (-?\d+)', line)
+ m = re.match(r"^\[t-(\d+)] EarlyData: (-?\d+)", line)
if m:
earlydata[int(m.group(1))] = int(m.group(2))
- assert earlydata[0] == 0, f'{earlydata}'
- if proto == 'h3':
- assert earlydata[1] == 71, f'{earlydata}'
+ assert earlydata[0] == 0, f"{earlydata}"
+ if proto == "h3":
+ assert earlydata[1] == 71, f"{earlydata}"
else:
# Caddy does not support early data on TCP
- assert earlydata[1] == 0, f'{earlydata}'
+ assert earlydata[1] == 0, f"{earlydata}"
- def check_downloads(self, client, srcfile: str, count: int,
- complete: bool = True):
+ def check_downloads(self, client, srcfile: str, count: int, complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
- diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
- b=open(dfile).readlines(),
- fromfile=srcfile,
- tofile=dfile,
- n=1))
- assert False, f'download {dfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=open(srcfile).readlines(),
+ b=open(dfile).readlines(),
+ fromfile=srcfile,
+ tofile=dfile,
+ n=1,
+ )
+ )
+ assert False, f"download {dfile} differs:\n{diff}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestPush:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd):
- push_dir = os.path.join(httpd.docs_dir, 'push')
+ push_dir = os.path.join(httpd.docs_dir, "push")
if not os.path.exists(push_dir):
os.makedirs(push_dir)
- env.make_data_file(indir=push_dir, fname="data1", fsize=1*1024)
- env.make_data_file(indir=push_dir, fname="data2", fsize=1*1024)
- env.make_data_file(indir=push_dir, fname="data3", fsize=1*1024)
- httpd.set_extra_config(env.domain1, [
- 'H2EarlyHints on',
- '<Location /push/data1>',
- ' H2PushResource /push/data2',
- '</Location>',
- '<Location /push/data2>',
- ' H2PushResource /push/data1',
- ' H2PushResource /push/data3',
- '</Location>',
- ])
+ env.make_data_file(indir=push_dir, fname="data1", fsize=1 * 1024)
+ env.make_data_file(indir=push_dir, fname="data2", fsize=1 * 1024)
+ env.make_data_file(indir=push_dir, fname="data3", fsize=1 * 1024)
+ httpd.set_extra_config(
+ env.domain1,
+ [
+ "H2EarlyHints on",
+ "<Location /push/data1>",
+ " H2PushResource /push/data2",
+ "</Location>",
+ "<Location /push/data2>",
+ " H2PushResource /push/data1",
+ " H2PushResource /push/data3",
+ "</Location>",
+ ],
+ )
# activate the new config
httpd.reload()
yield
# download a file that triggers a "103 Early Hints" response
def test_09_01_h2_early_hints(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'https://{env.domain1}:{env.https_port}/push/data1'
- r = curl.http_download(urls=[url], alpn_proto='h2', with_stats=False,
- with_headers=True)
+ url = f"https://{env.domain1}:{env.https_port}/push/data1"
+ r = curl.http_download(
+ urls=[url], alpn_proto="h2", with_stats=False, with_headers=True
+ )
r.check_exit_code(0)
- assert len(r.responses) == 2, f'{r.responses}'
- assert r.responses[0]['status'] == 103, f'{r.responses}'
- assert 'link' in r.responses[0]['header'], f'{r.responses[0]}'
- assert r.responses[0]['header']['link'] == '</push/data2>; rel=preload', f'{r.responses[0]}'
+ assert len(r.responses) == 2, f"{r.responses}"
+ assert r.responses[0]["status"] == 103, f"{r.responses}"
+ assert "link" in r.responses[0]["header"], f"{r.responses[0]}"
+ assert (
+ r.responses[0]["header"]["link"] == "</push/data2>; rel=preload"
+ ), f"{r.responses[0]}"
def test_09_02_h2_push(self, env: Env, httpd):
# use localhost as we do not have resolve support in local client
- url = f'https://localhost:{env.https_port}/push/data1'
- client = LocalClient(name='h2-serverpush', env=env)
+ url = f"https://localhost:{env.https_port}/push/data1"
+ client = LocalClient(name="h2-serverpush", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
+ pytest.skip(f"example client not built: {client.name}")
r = client.run(args=[url])
r.check_exit_code(0)
assert os.path.exists(client.download_file(0))
- assert os.path.exists(os.path.join(client.run_dir, 'push0')), r.dump_logs()
+ assert os.path.exists(os.path.join(client.run_dir, "push0")), r.dump_logs()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestProxy:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx_fwd):
- push_dir = os.path.join(httpd.docs_dir, 'push')
+ push_dir = os.path.join(httpd.docs_dir, "push")
if not os.path.exists(push_dir):
os.makedirs(push_dir)
if env.have_nghttpx():
nghttpx_fwd.start_if_needed()
- env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
- env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100 * 1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10 * 1024 * 1024)
httpd.clear_extra_configs()
httpd.reload()
def get_tunnel_proto_used(self, r: ExecResult):
for line in r.trace_lines:
- m = re.match(r'.* CONNECT tunnel: (\S+) negotiated$', line)
+ m = re.match(r".* CONNECT tunnel: (\S+) negotiated$", line)
if m:
return m.group(1)
assert False, f'tunnel protocol not found in:\n{"".join(r.trace_lines)}'
# download via http: proxy (no tunnel)
def test_10_01_proxy_http(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=curl.get_proxy_args(proxys=False))
+ url = f"http://localhost:{env.http_port}/data.json"
+ r = curl.http_download(
+ urls=[url],
+ alpn_proto="http/1.1",
+ with_stats=True,
+ extra_args=curl.get_proxy_args(proxys=False),
+ )
r.check_response(count=1, http_status=200)
# download via https: proxy (no tunnel)
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
def test_10_02_proxys_down(self, env: Env, httpd, proto):
- if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if proto == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proto=proto)
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
- r.check_response(count=1, http_status=200,
- protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
+ r.check_response(
+ count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
+ )
# upload via https: with proto (no tunnel)
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("fname, fcount", [
- ['data.json', 5],
- ['data-100k', 5],
- ['data-1m', 2]
- ])
- @pytest.mark.skipif(condition=not Env.have_nghttpx(),
- reason="no nghttpx available")
- def test_10_02_proxys_up(self, env: Env, httpd, nghttpx, proto,
- fname, fcount):
- if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
+ @pytest.mark.parametrize(
+ "fname, fcount", [["data.json", 5], ["data-100k", 5], ["data-1m", 2]]
+ )
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_10_02_proxys_up(self, env: Env, httpd, nghttpx, proto, fname, fcount):
+ if proto == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
count = fcount
srcfile = os.path.join(httpd.docs_dir, fname)
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/curltest/echo?id=[0-{count-1}]'
+ url = f"http://localhost:{env.http_port}/curltest/echo?id=[0-{count-1}]"
xargs = curl.get_proxy_args(proto=proto)
- r = curl.http_upload(urls=[url], data=f'@{srcfile}', alpn_proto=proto,
- extra_args=xargs)
- r.check_response(count=count, http_status=200,
- protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
+ r = curl.http_upload(
+ urls=[url], data=f"@{srcfile}", alpn_proto=proto, extra_args=xargs
+ )
+ r.check_response(
+ count=count,
+ http_status=200,
+ protocol="HTTP/2" if proto == "h2" else "HTTP/1.1",
+ )
indata = open(srcfile).readlines()
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
# download http: via http: proxytunnel
def test_10_03_proxytunnel_http(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
r.check_response(count=1, http_status=200)
# download http: via https: proxytunnel
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_10_04_proxy_https(self, env: Env, httpd, nghttpx_fwd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(tunnel=True)
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
r.check_response(count=1, http_status=200)
# download https: with proto via http: proxytunnel
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_10_05_proxytunnel_http(self, env: Env, httpd, proto):
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/data.json'
+ url = f"https://localhost:{env.https_port}/data.json"
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
- r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
- extra_args=xargs)
- r.check_response(count=1, http_status=200,
- protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
+ )
+ r.check_response(
+ count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
+ )
# download https: with proto via https: proxytunnel
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_10_06_proxytunnel_https(self, env: Env, httpd, nghttpx_fwd, proto, tunnel):
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/data.json?[0-0]'
+ url = f"https://localhost:{env.https_port}/data.json?[0-0]"
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
- extra_args=xargs)
- r.check_response(count=1, http_status=200,
- protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
- assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
- srcfile = os.path.join(httpd.docs_dir, 'data.json')
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
+ )
+ r.check_response(
+ count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
+ )
+ assert self.get_tunnel_proto_used(r) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
+ srcfile = os.path.join(httpd.docs_dir, "data.json")
dfile = curl.download_file(0)
assert filecmp.cmp(srcfile, dfile, shallow=False)
# download many https: with proto via https: proxytunnel
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("fname, fcount", [
- ['data.json', 100],
- ['data-100k', 20],
- ['data-1m', 5]
- ])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
+ @pytest.mark.parametrize(
+ "fname, fcount", [["data.json", 100], ["data-100k", 20], ["data-1m", 5]]
+ )
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
- def test_10_07_pts_down_small(self, env: Env, httpd, nghttpx_fwd, proto,
- tunnel, fname, fcount):
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ def test_10_07_pts_down_small(
+ self, env: Env, httpd, nghttpx_fwd, proto, tunnel, fname, fcount
+ ):
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
count = fcount
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/{fname}?[0-{count-1}]'
+ url = f"https://localhost:{env.https_port}/{fname}?[0-{count-1}]"
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
- extra_args=xargs)
- r.check_response(count=count, http_status=200,
- protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
- assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
+ )
+ r.check_response(
+ count=count,
+ http_status=200,
+ protocol="HTTP/2" if proto == "h2" else "HTTP/1.1",
+ )
+ assert self.get_tunnel_proto_used(r) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
srcfile = os.path.join(httpd.docs_dir, fname)
for i in range(count):
dfile = curl.download_file(i)
# upload many https: with proto via https: proxytunnel
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("fname, fcount", [
- ['data.json', 50],
- ['data-100k', 20],
- ['data-1m', 5]
- ])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
+ @pytest.mark.parametrize(
+ "fname, fcount", [["data.json", 50], ["data-100k", 20], ["data-1m", 5]]
+ )
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
- def test_10_08_upload_seq_large(self, env: Env, httpd, nghttpx, proto,
- tunnel, fname, fcount):
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ def test_10_08_upload_seq_large(
+ self, env: Env, httpd, nghttpx, proto, tunnel, fname, fcount
+ ):
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
count = fcount
srcfile = os.path.join(httpd.docs_dir, fname)
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]'
+ url = f"https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]"
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r = curl.http_upload(urls=[url], data=f'@{srcfile}', alpn_proto=proto,
- extra_args=xargs)
- assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ r = curl.http_upload(
+ urls=[url], data=f"@{srcfile}", alpn_proto=proto, extra_args=xargs
+ )
+ assert self.get_tunnel_proto_used(r) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
r.check_response(count=count, http_status=200)
indata = open(srcfile).readlines()
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
- assert respdata == indata, f'resonse {i} differs'
+ assert respdata == indata, f"response {i} differs"
assert r.total_connects == 1, r.dump_logs()
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_10_09_reuse_ser(self, env: Env, httpd, nghttpx_fwd, tunnel):
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url1 = f'https://localhost:{env.https_port}/data.json'
- url2 = f'http://localhost:{env.http_port}/data.json'
+ url1 = f"https://localhost:{env.https_port}/data.json"
+ url2 = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r = curl.http_download(urls=[url1, url2], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ r = curl.http_download(
+ urls=[url1, url2], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
r.check_response(count=2, http_status=200)
- assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
- if tunnel == 'h2':
+ assert self.get_tunnel_proto_used(r) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
+ if tunnel == "h2":
# TODO: we would like to reuse the first connection for the
# second URL, but this is currently not possible
# assert r.total_connects == 1
assert r.total_connects == 2
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_10_10_reuse_proxy(self, env: Env, httpd, nghttpx_fwd, tunnel):
# url twice via https: proxy separated with '--next', will reuse
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/data.json'
+ url = f"https://localhost:{env.https_port}/data.json"
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=proxy_args)
+ r1 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
+ )
r1.check_response(count=1, http_status=200)
- assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ assert self.get_tunnel_proto_used(r1) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
- x2_args.append('--next')
+ x2_args.append("--next")
x2_args.extend(proxy_args)
- r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=x2_args)
+ r2 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
+ )
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 1
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
- @pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
+ @pytest.mark.skipif(
+ condition=not Env.curl_uses_lib("openssl"), reason="tls13-ciphers not supported"
+ )
def test_10_11_noreuse_proxy_https(self, env: Env, httpd, nghttpx_fwd, tunnel):
# different --proxy-tls13-ciphers, no reuse of connection for https:
curl = CurlClient(env=env)
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
- url = f'https://localhost:{env.https_port}/data.json'
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
+ url = f"https://localhost:{env.https_port}/data.json"
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=proxy_args)
+ r1 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
+ )
r1.check_response(count=1, http_status=200)
- assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ assert self.get_tunnel_proto_used(r1) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
- x2_args.append('--next')
+ x2_args.append("--next")
x2_args.extend(proxy_args)
- x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
- r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=x2_args)
+ x2_args.extend(["--proxy-tls13-ciphers", "TLS_AES_256_GCM_SHA384"])
+ r2 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
+ )
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 2
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
- @pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
+ @pytest.mark.skipif(
+ condition=not Env.curl_uses_lib("openssl"), reason="tls13-ciphers not supported"
+ )
def test_10_12_noreuse_proxy_http(self, env: Env, httpd, nghttpx_fwd, tunnel):
# different --proxy-tls13-ciphers, no reuse of connection for http:
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=proxy_args)
+ r1 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
+ )
r1.check_response(count=1, http_status=200)
- assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ assert self.get_tunnel_proto_used(r1) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
- x2_args.append('--next')
+ x2_args.append("--next")
x2_args.extend(proxy_args)
- x2_args.extend(['--proxy-tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
- r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=x2_args)
+ x2_args.extend(["--proxy-tls13-ciphers", "TLS_AES_256_GCM_SHA384"])
+ r2 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
+ )
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 2
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
- @pytest.mark.skipif(condition=not Env.curl_uses_lib('openssl'), reason="tls13-ciphers not supported")
+ @pytest.mark.skipif(
+ condition=not Env.curl_uses_lib("openssl"), reason="tls13-ciphers not supported"
+ )
def test_10_13_noreuse_https(self, env: Env, httpd, nghttpx_fwd, tunnel):
# different --tls13-ciphers on https: same proxy config
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/data.json'
+ url = f"https://localhost:{env.https_port}/data.json"
proxy_args = curl.get_proxy_args(tunnel=True, proto=tunnel)
- r1 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=proxy_args)
+ r1 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=proxy_args
+ )
r1.check_response(count=1, http_status=200)
- assert self.get_tunnel_proto_used(r1) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ assert self.get_tunnel_proto_used(r1) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
# get the args, duplicate separated with '--next'
x2_args = r1.args[1:]
- x2_args.append('--next')
+ x2_args.append("--next")
x2_args.extend(proxy_args)
- x2_args.extend(['--tls13-ciphers', 'TLS_AES_256_GCM_SHA384'])
- r2 = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=x2_args)
+ x2_args.extend(["--tls13-ciphers", "TLS_AES_256_GCM_SHA384"])
+ r2 = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=x2_args
+ )
r2.check_response(count=2, http_status=200)
assert r2.total_connects == 2
# download via https: proxy (no tunnel) using IP address
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
- @pytest.mark.skipif(condition=Env.curl_uses_lib('bearssl'), reason="ip address cert verification not supported")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
+ @pytest.mark.skipif(
+ condition=Env.curl_uses_lib("bearssl"),
+ reason="ip address cert verification not supported",
+ )
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
def test_10_14_proxys_ip_addr(self, env: Env, httpd, proto):
- if proto == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if proto == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proto=proto, use_ip=True)
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
- if env.curl_uses_lib('mbedtls') and \
- not env.curl_lib_version_at_least('mbedtls', '3.5.0'):
- r.check_exit_code(60) # CURLE_PEER_FAILED_VERIFICATION
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
+ if env.curl_uses_lib("mbedtls") and not env.curl_lib_version_at_least(
+ "mbedtls", "3.5.0"
+ ):
+ r.check_exit_code(60) # CURLE_PEER_FAILED_VERIFICATION
else:
- r.check_response(count=1, http_status=200,
- protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
+ r.check_response(
+ count=1,
+ http_status=200,
+ protocol="HTTP/2" if proto == "h2" else "HTTP/1.1",
+ )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
log = logging.getLogger(__name__)
-class UDSFaker:
+class UDSFaker:
def __init__(self, path):
self._uds_path = path
self._done = False
c, client_address = self._socket.accept()
try:
c.recv(16)
- c.sendall("""HTTP/1.1 200 Ok
+ c.sendall(
+ """HTTP/1.1 200 Ok
Server: UdsFaker
Content-Type: application/json
Content-Length: 19
-{ "host": "faked" }""".encode())
+{ "host": "faked" }""".encode()
+ )
finally:
c.close()
class TestUnix:
-
@pytest.fixture(scope="class")
def uds_faker(self, env: Env) -> Generator[UDSFaker, None, None]:
- uds_path = os.path.join(env.gen_dir, 'uds_11.sock')
+ uds_path = os.path.join(env.gen_dir, "uds_11.sock")
faker = UDSFaker(path=uds_path)
faker.start()
yield faker
# download http: via Unix socket
def test_11_01_unix_connect_http(self, env: Env, httpd, uds_faker):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_download(urls=[url], with_stats=True,
- extra_args=[
- '--unix-socket', uds_faker.path,
- ])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_download(
+ urls=[url],
+ with_stats=True,
+ extra_args=[
+ "--unix-socket",
+ uds_faker.path,
+ ],
+ )
r.check_response(count=1, http_status=200)
# download https: via Unix socket
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
def test_11_02_unix_connect_http(self, env: Env, httpd, uds_faker):
curl = CurlClient(env=env)
- url = f'https://{env.domain1}:{env.https_port}/data.json'
- r = curl.http_download(urls=[url], with_stats=True,
- extra_args=[
- '--unix-socket', uds_faker.path,
- ])
+ url = f"https://{env.domain1}:{env.https_port}/data.json"
+ r = curl.http_download(
+ urls=[url],
+ with_stats=True,
+ extra_args=[
+ "--unix-socket",
+ uds_faker.path,
+ ],
+ )
r.check_response(exitcode=35, http_status=None)
# download HTTP/3 via Unix socket
- @pytest.mark.skipif(condition=not Env.have_h3(), reason='h3 not supported')
+ @pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_11_03_unix_connect_quic(self, env: Env, httpd, uds_faker):
curl = CurlClient(env=env)
- url = f'https://{env.domain1}:{env.https_port}/data.json'
- r = curl.http_download(urls=[url], with_stats=True,
- alpn_proto='h3',
- extra_args=[
- '--unix-socket', uds_faker.path,
- ])
+ url = f"https://{env.domain1}:{env.https_port}/data.json"
+ r = curl.http_download(
+ urls=[url],
+ with_stats=True,
+ alpn_proto="h3",
+ extra_args=[
+ "--unix-socket",
+ uds_faker.path,
+ ],
+ )
r.check_response(exitcode=96, http_status=None)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason="curl without SSL")
class TestReuse:
-
# check if HTTP/1.1 handles 'Connection: close' correctly
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_12_01_h1_conn_close(self, env: Env, httpd, nghttpx, proto):
httpd.clear_extra_configs()
- httpd.set_extra_config('base', [
- 'MaxKeepAliveRequests 1',
- ])
+ httpd.set_extra_config(
+ "base",
+ [
+ "MaxKeepAliveRequests 1",
+ ],
+ )
httpd.reload()
count = 100
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
+ )
r = curl.http_download(urls=[urln], alpn_proto=proto)
r.check_response(count=count, http_status=200)
# Server sends `Connection: close` on every 2nd request, requiring
# a new connection
delta = 5
- assert (count/2 - delta) < r.total_connects < (count/2 + delta)
+ assert (count / 2 - delta) < r.total_connects < (count / 2 + delta)
- @pytest.mark.skipif(condition=Env.httpd_is_at_least('2.5.0'),
- reason="httpd 2.5+ handles KeepAlives different")
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.skipif(
+ condition=Env.httpd_is_at_least("2.5.0"),
+ reason="httpd 2.5+ handles KeepAlives different",
+ )
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_12_02_h1_conn_timeout(self, env: Env, httpd, nghttpx, proto):
httpd.clear_extra_configs()
- httpd.set_extra_config('base', [
- 'KeepAliveTimeout 1',
- ])
+ httpd.set_extra_config(
+ "base",
+ [
+ "KeepAliveTimeout 1",
+ ],
+ )
httpd.reload()
count = 5
curl = CurlClient(env=env)
- urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
- '--rate', '30/m',
- ])
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
+ )
+ r = curl.http_download(
+ urls=[urln],
+ alpn_proto=proto,
+ extra_args=[
+ "--rate",
+ "30/m",
+ ],
+ )
r.check_response(count=count, http_status=200)
# Connections time out on server before we send another request,
assert r.total_connects == count
httpd.reload()
count = 2
# write a alt-svc file the advises h3 instead of h2
- asfile = os.path.join(env.gen_dir, 'alt-svc-12_03.txt')
+ asfile = os.path.join(env.gen_dir, "alt-svc-12_03.txt")
ts = datetime.now() + timedelta(hours=24)
- expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
- with open(asfile, 'w') as fd:
- fd.write(f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.https_port} "{expires}" 0 0')
- log.info(f'altscv: {open(asfile).readlines()}')
+ expires = f"{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}"
+ with open(asfile, "w") as fd:
+ fd.write(
+ f'h2 {env.domain1} {env.https_port} h3 {env.domain1} {env.https_port} "{expires}" 0 0'
+ )
+ log.info(f"altsvc: {open(asfile).readlines()}")
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
- '--alt-svc', f'{asfile}',
- ])
+ r = curl.http_download(
+ urls=[urln],
+ with_stats=True,
+ extra_args=[
+ "--alt-svc",
+ f"{asfile}",
+ ],
+ )
r.check_response(count=count, http_status=200)
# We expect the connection to be reused
assert r.total_connects == 1
for s in r.stats:
- assert s['http_version'] == '3', f'{s}'
+ assert s["http_version"] == "3", f"{s}"
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_04_alt_svc_h3h2(self, env: Env, httpd, nghttpx):
httpd.reload()
count = 2
# write a alt-svc file the advises h2 instead of h3
- asfile = os.path.join(env.gen_dir, 'alt-svc-12_04.txt')
+ asfile = os.path.join(env.gen_dir, "alt-svc-12_04.txt")
ts = datetime.now() + timedelta(hours=24)
- expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
- with open(asfile, 'w') as fd:
- fd.write(f'h3 {env.domain1} {env.https_port} h2 {env.domain1} {env.https_port} "{expires}" 0 0')
- log.info(f'altscv: {open(asfile).readlines()}')
+ expires = f"{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}"
+ with open(asfile, "w") as fd:
+ fd.write(
+ f'h3 {env.domain1} {env.https_port} h2 {env.domain1} {env.https_port} "{expires}" 0 0'
+ )
+ log.info(f"altsvc: {open(asfile).readlines()}")
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
- '--alt-svc', f'{asfile}',
- ])
+ r = curl.http_download(
+ urls=[urln],
+ with_stats=True,
+ extra_args=[
+ "--alt-svc",
+ f"{asfile}",
+ ],
+ )
r.check_response(count=count, http_status=200)
# We expect the connection to be reused and use HTTP/2
assert r.total_connects == 1
for s in r.stats:
- assert s['http_version'] == '2', f'{s}'
+ assert s["http_version"] == "2", f"{s}"
@pytest.mark.skipif(condition=not Env.have_h3(), reason="h3 not supported")
def test_12_05_alt_svc_h3h1(self, env: Env, httpd, nghttpx):
httpd.reload()
count = 2
# write a alt-svc file the advises h1 instead of h3
- asfile = os.path.join(env.gen_dir, 'alt-svc-12_05.txt')
+ asfile = os.path.join(env.gen_dir, "alt-svc-12_05.txt")
ts = datetime.now() + timedelta(hours=24)
- expires = f'{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}'
- with open(asfile, 'w') as fd:
- fd.write(f'h3 {env.domain1} {env.https_port} http/1.1 {env.domain1} {env.https_port} "{expires}" 0 0')
- log.info(f'altscv: {open(asfile).readlines()}')
+ expires = f"{ts.year:04}{ts.month:02}{ts.day:02} {ts.hour:02}:{ts.minute:02}:{ts.second:02}"
+ with open(asfile, "w") as fd:
+ fd.write(
+ f'h3 {env.domain1} {env.https_port} http/1.1 {env.domain1} {env.https_port} "{expires}" 0 0'
+ )
+ log.info(f"altsvc: {open(asfile).readlines()}")
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json?[0-{count-1}]'
- r = curl.http_download(urls=[urln], with_stats=True, extra_args=[
- '--alt-svc', f'{asfile}',
- ])
+ r = curl.http_download(
+ urls=[urln],
+ with_stats=True,
+ extra_args=[
+ "--alt-svc",
+ f"{asfile}",
+ ],
+ )
r.check_response(count=count, http_status=200)
# We expect the connection to be reused and use HTTP/1.1
assert r.total_connects == 1
for s in r.stats:
- assert s['http_version'] == '1.1', f'{s}'
+ assert s["http_version"] == "1.1", f"{s}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
+@pytest.mark.skipif(
+ condition=Env.setup_incomplete(), reason=f"missing: {Env.incomplete_reason()}"
+)
class TestProxyAuth:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx_fwd):
if env.have_nghttpx():
nghttpx_fwd.start_if_needed()
def get_tunnel_proto_used(self, r: ExecResult):
for line in r.trace_lines:
- m = re.match(r'.* CONNECT tunnel: (\S+) negotiated$', line)
+ m = re.match(r".* CONNECT tunnel: (\S+) negotiated$", line)
if m:
return m.group(1)
assert False, f'tunnel protocol not found in:\n{"".join(r.trace_lines)}'
# download via http: proxy (no tunnel), no auth
def test_13_01_proxy_no_auth(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=curl.get_proxy_args(proxys=False))
+ url = f"http://localhost:{env.http_port}/data.json"
+ r = curl.http_download(
+ urls=[url],
+ alpn_proto="http/1.1",
+ with_stats=True,
+ extra_args=curl.get_proxy_args(proxys=False),
+ )
r.check_response(count=1, http_status=407)
# download via http: proxy (no tunnel), auth
def test_13_02_proxy_auth(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proxys=False)
- xargs.extend(['--proxy-user', 'proxy:proxy'])
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ xargs.extend(["--proxy-user", "proxy:proxy"])
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
r.check_response(count=1, http_status=200)
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_13_03_proxys_no_auth(self, env: Env, httpd, nghttpx_fwd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proxys=True)
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
r.check_response(count=1, http_status=407)
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
def test_13_04_proxys_auth(self, env: Env, httpd, nghttpx_fwd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proxys=True)
- xargs.extend(['--proxy-user', 'proxy:proxy'])
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ xargs.extend(["--proxy-user", "proxy:proxy"])
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
r.check_response(count=1, http_status=200)
def test_13_05_tunnel_http_no_auth(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
# expect "COULD_NOT_CONNECT"
r.check_response(exitcode=56, http_status=None)
def test_13_06_tunnel_http_auth(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.http_port}/data.json'
+ url = f"http://localhost:{env.http_port}/data.json"
xargs = curl.get_proxy_args(proxys=False, tunnel=True)
- xargs.extend(['--proxy-user', 'proxy:proxy'])
- r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
- extra_args=xargs)
+ xargs.extend(["--proxy-user", "proxy:proxy"])
+ r = curl.http_download(
+ urls=[url], alpn_proto="http/1.1", with_stats=True, extra_args=xargs
+ )
r.check_response(count=1, http_status=200)
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
def test_13_07_tunnels_no_auth(self, env: Env, httpd, proto, tunnel):
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/data.json'
+ url = f"https://localhost:{env.https_port}/data.json"
xargs = curl.get_proxy_args(proxys=True, tunnel=True, proto=tunnel)
- r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
- extra_args=xargs)
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
+ )
# expect "COULD_NOT_CONNECT"
r.check_response(exitcode=56, http_status=None)
- assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ assert self.get_tunnel_proto_used(r) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
@pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
- @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
- reason='curl lacks HTTPS-proxy support')
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
- @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("HTTPS-proxy"),
+ reason="curl lacks HTTPS-proxy support",
+ )
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
+ @pytest.mark.parametrize("tunnel", ["http/1.1", "h2"])
def test_13_08_tunnels_auth(self, env: Env, httpd, proto, tunnel):
- if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
- pytest.skip('only supported with nghttp2')
+ if tunnel == "h2" and not env.curl_uses_lib("nghttp2"):
+ pytest.skip("only supported with nghttp2")
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/data.json'
+ url = f"https://localhost:{env.https_port}/data.json"
xargs = curl.get_proxy_args(proxys=True, tunnel=True, proto=tunnel)
- xargs.extend(['--proxy-user', 'proxy:proxy'])
- r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
- extra_args=xargs)
- r.check_response(count=1, http_status=200,
- protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
- assert self.get_tunnel_proto_used(r) == 'HTTP/2' \
- if tunnel == 'h2' else 'HTTP/1.1'
+ xargs.extend(["--proxy-user", "proxy:proxy"])
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_stats=True, extra_args=xargs
+ )
+ r.check_response(
+ count=1, http_status=200, protocol="HTTP/2" if proto == "h2" else "HTTP/1.1"
+ )
+ assert self.get_tunnel_proto_used(r) == (
+ "HTTP/2" if tunnel == "h2" else "HTTP/1.1"
+ )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestAuth:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
- env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10 * 1024 * 1024)
httpd.clear_extra_configs()
httpd.reload()
# download 1 file, not authenticated
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_14_01_digest_get_noauth(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
+ url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=401)
# download 1 file, authenticated
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_14_02_digest_get_auth(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
- r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
- '--digest', '--user', 'test:test'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, extra_args=["--digest", "--user", "test:test"]
+ )
r.check_response(http_status=200)
# PUT data, authenticated
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_14_03_digest_put_auth(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- data='0123456789'
+ data = "0123456789"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
- r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
- '--digest', '--user', 'test:test'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
+ r = curl.http_upload(
+ urls=[url],
+ data=data,
+ alpn_proto=proto,
+ extra_args=["--digest", "--user", "test:test"],
+ )
r.check_response(http_status=200)
# PUT data, digest auth large pw
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_14_04_digest_large_pw(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- data='0123456789'
- password = 'x' * 65535
+ data = "0123456789"
+ password = "x" * 65535
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
- r = curl.http_upload(urls=[url], data=data, alpn_proto=proto, extra_args=[
- '--digest', '--user', f'test:{password}',
- '--trace-config', 'http/2,http/3'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
+ r = curl.http_upload(
+ urls=[url],
+ data=data,
+ alpn_proto=proto,
+ extra_args=[
+ "--digest",
+ "--user",
+ f"test:{password}",
+ "--trace-config",
+ "http/2,http/3",
+ ],
+ )
# digest does not submit the password, but a hash of it, so all
# works and, since the pw is not correct, we get a 401
r.check_response(http_status=401)
# PUT data, basic auth large pw
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_14_05_basic_large_pw(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and not env.curl_uses_lib('ngtcp2'):
+ if proto == "h3" and not env.curl_uses_lib("ngtcp2"):
# See <https://github.com/cloudflare/quiche/issues/1573>
pytest.skip("quiche/openssl-quic have problems with large requests")
# just large enough that nghttp2 will submit
- password = 'x' * (47 * 1024)
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ password = "x" * (47 * 1024)
+ fdata = os.path.join(env.gen_dir, "data-10m")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
- '--basic', '--user', f'test:{password}',
- '--trace-config', 'http/2,http/3'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
+ r = curl.http_upload(
+ urls=[url],
+ data=f"@{fdata}",
+ alpn_proto=proto,
+ extra_args=[
+ "--basic",
+ "--user",
+ f"test:{password}",
+ "--trace-config",
+ "http/2,http/3",
+ ],
+ )
# but apache denies on length limit
r.check_response(http_status=431)
# PUT data, basic auth with very large pw
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_14_06_basic_very_large_pw(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if proto == 'h3' and env.curl_uses_lib('quiche'):
+ if proto == "h3" and env.curl_uses_lib("quiche"):
# See <https://github.com/cloudflare/quiche/issues/1573>
pytest.skip("quiche has problems with large requests")
- password = 'x' * (64 * 1024)
- fdata = os.path.join(env.gen_dir, 'data-10m')
+ password = "x" * (64 * 1024)
+ fdata = os.path.join(env.gen_dir, "data-10m")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto, extra_args=[
- '--basic', '--user', f'test:{password}'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/restricted/digest/data.json"
+ r = curl.http_upload(
+ urls=[url],
+ data=f"@{fdata}",
+ alpn_proto=proto,
+ extra_args=["--basic", "--user", f"test:{password}"],
+ )
# Depending on protocol, we might have an error sending or
# the server might shutdown the connection and we see the error
# on receiving
- assert r.exit_code in [55, 56], f'{r.dump_logs()}'
+ assert r.exit_code in [55, 56], f"{r.dump_logs()}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestTracing:
-
# default verbose output
def test_15_01_trace_defaults(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_get(url=url, def_tracing=False, extra_args=[
- '-v'
- ])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_get(url=url, def_tracing=False, extra_args=["-v"])
r.check_response(http_status=200)
trace = r.trace_lines
assert len(trace) > 0
# trace ids
def test_15_02_trace_ids(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_get(url=url, def_tracing=False, extra_args=[
- '-v', '--trace-config', 'ids'
- ])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_get(
+ url=url, def_tracing=False, extra_args=["-v", "--trace-config", "ids"]
+ )
r.check_response(http_status=200)
- for line in r.trace_lines:
- m = re.match(r'^\[0-[0x]] .+', line)
+ for line in r.trace_lines:
+ m = re.match(r"^\[0-[0x]] .+", line)
if m is None:
- assert False, f'no match: {line}'
+ assert False, f"no match: {line}"
# trace ids+time
def test_15_03_trace_ids_time(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_get(url=url, def_tracing=False, extra_args=[
- '-v', '--trace-config', 'ids,time'
- ])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_get(
+ url=url, def_tracing=False, extra_args=["-v", "--trace-config", "ids,time"]
+ )
r.check_response(http_status=200)
- for line in r.trace_lines:
- m = re.match(r'^([0-9:.]+) \[0-[0x]] .+', line)
+ for line in r.trace_lines:
+ m = re.match(r"^([0-9:.]+) \[0-[0x]] .+", line)
if m is None:
- assert False, f'no match: {line}'
+ assert False, f"no match: {line}"
# trace all
def test_15_04_trace_all(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_get(url=url, def_tracing=False, extra_args=[
- '-v', '--trace-config', 'all'
- ])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_get(
+ url=url, def_tracing=False, extra_args=["-v", "--trace-config", "all"]
+ )
r.check_response(http_status=200)
found_tcp = False
- for line in r.trace_lines:
- m = re.match(r'^([0-9:.]+) \[0-[0x]] .+', line)
+ for line in r.trace_lines:
+ m = re.match(r"^([0-9:.]+) \[0-[0x]] .+", line)
if m is None:
- assert False, f'no match: {line}'
- m = re.match(r'^([0-9:.]+) \[0-[0x]] . \[TCP].+', line)
+ assert False, f"no match: {line}"
+ m = re.match(r"^([0-9:.]+) \[0-[0x]] . \[TCP].+", line)
if m is not None:
found_tcp = True
if not found_tcp:
# trace all, no TCP, no time
def test_15_05_trace_all(self, env: Env, httpd):
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json'
- r = curl.http_get(url=url, def_tracing=False, extra_args=[
- '-v', '--trace-config', 'all,-tcp,-time'
- ])
+ url = f"http://{env.domain1}:{env.http_port}/data.json"
+ r = curl.http_get(
+ url=url,
+ def_tracing=False,
+ extra_args=["-v", "--trace-config", "all,-tcp,-time"],
+ )
r.check_response(http_status=200)
found_tcp = False
- for line in r.trace_lines:
- m = re.match(r'^\[0-[0x]] .+', line)
+ for line in r.trace_lines:
+ m = re.match(r"^\[0-[0x]] .+", line)
if m is None:
- assert False, f'no match: {line}'
- m = re.match(r'^\[0-[0x]] . \[TCP].+', line)
+ assert False, f"no match: {line}"
+ m = re.match(r"^\[0-[0x]] . \[TCP].+", line)
if m is not None:
found_tcp = True
if found_tcp:
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestInfo:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload()
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
- env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
- env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
- env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
+ env.make_data_file(indir=indir, fname="data-10k", fsize=10 * 1024)
+ env.make_data_file(indir=indir, fname="data-100k", fsize=100 * 1024)
+ env.make_data_file(indir=indir, fname="data-1m", fsize=1024 * 1024)
# download plain file
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_16_01_info_download(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]"
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
- r.check_stats(count=count, http_status=200, exitcode=0,
- remote_port=env.port_for(alpn_proto=proto),
- remote_ip='127.0.0.1')
+ r.check_stats(
+ count=count,
+ http_status=200,
+ exitcode=0,
+ remote_port=env.port_for(alpn_proto=proto),
+ remote_ip="127.0.0.1",
+ )
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)
# download plain file with a 302 redirect
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_16_02_info_302_download(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json.302?[0-{count-1}]'
- r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True, extra_args=[
- '--location'
- ])
- r.check_stats(count=count, http_status=200, exitcode=0,
- remote_port=env.port_for(alpn_proto=proto),
- remote_ip='127.0.0.1')
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json.302?[0-{count-1}]"
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_stats=True, extra_args=["--location"]
+ )
+ r.check_stats(
+ count=count,
+ http_status=200,
+ exitcode=0,
+ remote_port=env.port_for(alpn_proto=proto),
+ remote_ip="127.0.0.1",
+ )
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_16_03_info_upload(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 2
- fdata = os.path.join(env.gen_dir, 'data-100k')
+ fdata = os.path.join(env.gen_dir, "data-100k")
fsize = 100 * 1024
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
- r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
- with_headers=True, extra_args=[
- '--trace-config', 'http/2,http/3'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]"
+ r = curl.http_upload(
+ urls=[url],
+ data=f"@{fdata}",
+ alpn_proto=proto,
+ with_headers=True,
+ extra_args=["--trace-config", "http/2,http/3"],
+ )
r.check_response(count=count, http_status=200)
- r.check_stats(count=count, http_status=200, exitcode=0,
- remote_port=env.port_for(alpn_proto=proto),
- remote_ip='127.0.0.1')
+ r.check_stats(
+ count=count,
+ http_status=200,
+ exitcode=0,
+ remote_port=env.port_for(alpn_proto=proto),
+ remote_ip="127.0.0.1",
+ )
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=fsize, ul_size=fsize)
# download plain file via http: ('time_appconnect' is 0)
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_16_04_info_http_download(self, env: Env, httpd, nghttpx, proto):
count = 2
curl = CurlClient(env=env)
- url = f'http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]'
+ url = f"http://{env.domain1}:{env.http_port}/data.json?[0-{count-1}]"
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True)
- r.check_stats(count=count, http_status=200, exitcode=0,
- remote_port=env.http_port, remote_ip='127.0.0.1')
+ r.check_stats(
+ count=count,
+ http_status=200,
+ exitcode=0,
+ remote_port=env.http_port,
+ remote_ip="127.0.0.1",
+ )
for idx, s in enumerate(r.stats):
self.check_stat(idx, s, r, dl_size=30, ul_size=0)
def check_stat(self, idx, s, r, dl_size=None, ul_size=None):
self.check_stat_times(s, idx)
# we always send something
- self.check_stat_positive(s, idx, 'size_request')
+ self.check_stat_positive(s, idx, "size_request")
# we always receive response headers
- self.check_stat_positive(s, idx, 'size_header')
+ self.check_stat_positive(s, idx, "size_header")
if ul_size is not None:
- assert s['size_upload'] == ul_size, f'stat #{idx}\n{r.dump_logs()}' # the file we sent
- assert s['size_request'] >= s['size_upload'], \
- f'stat #{idx}, "size_request" smaller than "size_upload", {s}\n{r.dump_logs()}'
+ assert (
+ s["size_upload"] == ul_size
+ ), f"stat #{idx}\n{r.dump_logs()}" # the file we sent
+ assert (
+ s["size_request"] >= s["size_upload"]
+ ), f'stat #{idx}, "size_request" smaller than "size_upload", {s}\n{r.dump_logs()}'
if dl_size is not None:
- assert s['size_download'] == dl_size, f'stat #{idx}\n{r.dump_logs()}' # the file we received
+ assert (
+ s["size_download"] == dl_size
+ ), f"stat #{idx}\n{r.dump_logs()}" # the file we received
def check_stat_positive(self, s, idx, key):
assert key in s, f'stat #{idx} "{key}" missing: {s}'
def check_stat_times(self, s, idx):
# check timings reported on a transfer for consistency
- url = s['url_effective']
+ url = s["url_effective"]
# all stat keys which reporting timings
all_keys = {
- 'time_appconnect', 'time_connect', 'time_redirect',
- 'time_pretransfer', 'time_starttransfer', 'time_total'
+ "time_appconnect",
+ "time_connect",
+ "time_redirect",
+ "time_pretransfer",
+ "time_starttransfer",
+ "time_total",
}
# stat keys where we expect a positive value
- pos_keys = {'time_pretransfer', 'time_starttransfer', 'time_total', 'time_queue'}
- if s['num_connects'] > 0:
- pos_keys.add('time_connect')
- if url.startswith('https:'):
- pos_keys.add('time_appconnect')
- if s['num_redirects'] > 0:
- pos_keys.add('time_redirect')
+ pos_keys = {
+ "time_pretransfer",
+ "time_starttransfer",
+ "time_total",
+ "time_queue",
+ }
+ if s["num_connects"] > 0:
+ pos_keys.add("time_connect")
+ if url.startswith("https:"):
+ pos_keys.add("time_appconnect")
+ if s["num_redirects"] > 0:
+ pos_keys.add("time_redirect")
zero_keys = all_keys - pos_keys
# assert all zeros are zeros and the others are positive
for key in zero_keys:
for key in pos_keys:
self.check_stat_positive(s, idx, key)
# assert that all timers before "time_pretransfer" are less or equal
- for key in ['time_appconnect', 'time_connect', 'time_namelookup']:
- assert s[key] < s['time_pretransfer'], f'time "{key}" larger than' \
- f'"time_pretransfer": {s}'
+ for key in ["time_appconnect", "time_connect", "time_namelookup"]:
+ assert s[key] < s["time_pretransfer"], (
+ f'time "{key}" larger than ' f'"time_pretransfer": {s}'
+ )
# assert transfer start is after pretransfer
- assert s['time_pretransfer'] <= s['time_starttransfer'], f'"time_pretransfer" '\
- f'greater than "time_starttransfer", {s}'
+ assert s["time_pretransfer"] <= s["time_starttransfer"], (
+ f'"time_pretransfer" ' f'greater than "time_starttransfer", {s}'
+ )
# assert that transfer start is before total
- assert s['time_starttransfer'] <= s['time_total'], f'"time_starttransfer" '\
- f'greater than "time_total", {s}'
- if s['num_redirects'] > 0:
- assert s['time_queue'] < s['time_starttransfer'], f'"time_queue" '\
- f'greater/equal than "time_starttransfer", {s}'
+ assert s["time_starttransfer"] <= s["time_total"], (
+ f'"time_starttransfer" ' f'greater than "time_total", {s}'
+ )
+ if s["num_redirects"] > 0:
+ assert s["time_queue"] < s["time_starttransfer"], (
+ f'"time_queue" ' f'greater/equal than "time_starttransfer", {s}'
+ )
else:
- assert s['time_queue'] <= s['time_starttransfer'], f'"time_queue" '\
- f'greater than "time_starttransfer", {s}'
+ assert s["time_queue"] <= s["time_starttransfer"], (
+ f'"time_queue" ' f'greater than "time_starttransfer", {s}'
+ )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestSSLUse:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
- env.make_data_file(indir=httpd.docs_dir, fname="data-10k", fsize=10*1024)
+ env.make_data_file(indir=httpd.docs_dir, fname="data-10k", fsize=10 * 1024)
if env.have_h3():
nghttpx.start_if_needed()
- @pytest.fixture(autouse=True, scope='function')
+ @pytest.fixture(autouse=True, scope="function")
def _function_scope(self, request, env, httpd):
httpd.clear_extra_configs()
- if 'httpd' not in request.node._fixtureinfo.argnames:
+ if "httpd" not in request.node._fixtureinfo.argnames:
httpd.reload_if_config_changed()
def test_17_01_sslinfo_plain(self, env: Env, nghttpx):
- proto = 'http/1.1'
+ proto = "http/1.1"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
- assert r.json['HTTPS'] == 'on', f'{r.json}'
- assert 'SSL_SESSION_ID' in r.json, f'{r.json}'
- assert 'SSL_SESSION_RESUMED' in r.json, f'{r.json}'
- assert r.json['SSL_SESSION_RESUMED'] == 'Initial', f'{r.json}'
+ assert r.json["HTTPS"] == "on", f"{r.json}"
+ assert "SSL_SESSION_ID" in r.json, f"{r.json}"
+ assert "SSL_SESSION_RESUMED" in r.json, f"{r.json}"
+ assert r.json["SSL_SESSION_RESUMED"] == "Initial", f"{r.json}"
- @pytest.mark.parametrize("tls_max", ['1.2', '1.3'])
+ @pytest.mark.parametrize("tls_max", ["1.2", "1.3"])
def test_17_02_sslinfo_reconnect(self, env: Env, tls_max):
- proto = 'http/1.1'
+ proto = "http/1.1"
count = 3
- exp_resumed = 'Resumed'
- xargs = ['--sessionid', '--tls-max', tls_max, f'--tlsv{tls_max}']
- if env.curl_uses_lib('libressl'):
- if tls_max == '1.3':
- exp_resumed = 'Initial' # 1.2 works in LibreSSL, but 1.3 does not, TODO
- if env.curl_uses_lib('rustls-ffi'):
- exp_resumed = 'Initial' # Rustls does not support sessions, TODO
- if env.curl_uses_lib('bearssl') and tls_max == '1.3':
- pytest.skip('BearSSL does not support TLSv1.3')
- if env.curl_uses_lib('mbedtls') and tls_max == '1.3' and \
- not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
- pytest.skip('mbedtls TLSv1.3 session resume not working in 3.6.0')
+ exp_resumed = "Resumed"
+ xargs = ["--sessionid", "--tls-max", tls_max, f"--tlsv{tls_max}"]
+ if env.curl_uses_lib("libressl"):
+ if tls_max == "1.3":
+ exp_resumed = "Initial" # 1.2 works in LibreSSL, but 1.3 does not, TODO
+ if env.curl_uses_lib("rustls-ffi"):
+ exp_resumed = "Initial" # Rustls does not support sessions, TODO
+ if env.curl_uses_lib("bearssl") and tls_max == "1.3":
+ pytest.skip("BearSSL does not support TLSv1.3")
+ if (
+ env.curl_uses_lib("mbedtls")
+ and tls_max == "1.3"
+ and not env.curl_lib_version_at_least("mbedtls", "3.6.0")
+ ):
+ pytest.skip("mbedtls TLSv1.3 session resume not working before 3.6.0")
run_env = os.environ.copy()
- run_env['CURL_DEBUG'] = 'ssl'
+ run_env["CURL_DEBUG"] = "ssl"
curl = CurlClient(env=env, run_env=run_env)
# tell the server to close the connection after each request
- urln = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo?'\
- f'id=[0-{count-1}]&close'
- r = curl.http_download(urls=[urln], alpn_proto=proto, with_stats=True,
- extra_args=xargs)
+ urln = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo?"
+ f"id=[0-{count-1}]&close"
+ )
+ r = curl.http_download(
+ urls=[urln], alpn_proto=proto, with_stats=True, extra_args=xargs
+ )
r.check_response(count=count, http_status=200)
# should have used one connection for each request, sessions after
# first should have been resumed
assert os.path.exists(dfile)
with open(dfile) as f:
djson = json.load(f)
- assert djson['HTTPS'] == 'on', f'{i}: {djson}'
+ assert djson["HTTPS"] == "on", f"{i}: {djson}"
if i == 0:
- assert djson['SSL_SESSION_RESUMED'] == 'Initial', f'{i}: {djson}\n{r.dump_logs()}'
+ assert (
+ djson["SSL_SESSION_RESUMED"] == "Initial"
+ ), f"{i}: {djson}\n{r.dump_logs()}"
else:
- assert djson['SSL_SESSION_RESUMED'] == exp_resumed, f'{i}: {djson}\n{r.dump_logs()}'
+ assert (
+ djson["SSL_SESSION_RESUMED"] == exp_resumed
+ ), f"{i}: {djson}\n{r.dump_logs()}"
# use host name with trailing dot, verify handshake
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_03_trailing_dot(self, env: Env, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- domain = f'{env.domain1}.'
- url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
+ domain = f"{env.domain1}."
+ url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
- assert r.json, f'{r}'
- if proto != 'h3': # we proxy h3
+ assert r.exit_code == 0, f"{r}"
+ assert r.json, f"{r}"
+ if proto != "h3": # we proxy h3
# the SNI the server received is without trailing dot
- assert r.json['SSL_TLS_SNI'] == env.domain1, f'{r.json}'
+ assert r.json["SSL_TLS_SNI"] == env.domain1, f"{r.json}"
# use host name with double trailing dot, verify handshake
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_04_double_dot(self, env: Env, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- domain = f'{env.domain1}..'
- url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
- r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
- '-H', f'Host: {env.domain1}',
- ])
+ domain = f"{env.domain1}.."
+ url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
+ r = curl.http_get(
+ url=url,
+ alpn_proto=proto,
+ extra_args=[
+ "-H",
+ f"Host: {env.domain1}",
+ ],
+ )
if r.exit_code == 0:
- assert r.json, f'{r.stdout}'
+ assert r.json, f"{r.stdout}"
# the SNI the server received is without trailing dot
- if proto != 'h3': # we proxy h3
- assert r.json['SSL_TLS_SNI'] == env.domain1, f'{r.json}'
- assert False, f'should not have succeeded: {r.json}'
+ if proto != "h3": # we proxy h3
+ assert r.json["SSL_TLS_SNI"] == env.domain1, f"{r.json}"
+ assert False, f"should not have succeeded: {r.json}"
# 7 - Rustls rejects a servername with .. during setup
# 35 - LibreSSL rejects setting an SNI name with trailing dot
# 60 - peer name matching failed against certificate
- assert r.exit_code in [7, 35, 60], f'{r}'
+ assert r.exit_code in [7, 35, 60], f"{r}"
# use ip address for connect
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_05_ip_addr(self, env: Env, proto):
- if env.curl_uses_lib('bearssl'):
+ if env.curl_uses_lib("bearssl"):
pytest.skip("BearSSL does not support cert verification with IP addresses")
- if env.curl_uses_lib('mbedtls'):
+ if env.curl_uses_lib("mbedtls"):
pytest.skip("mbedTLS does use IP addresses in SNI")
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- domain = '127.0.0.1'
- url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
+ domain = "127.0.0.1"
+ url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
- assert r.json, f'{r}'
- if proto != 'h3': # we proxy h3
+ assert r.exit_code == 0, f"{r}"
+ assert r.json, f"{r}"
+ if proto != "h3": # we proxy h3
# the SNI should not have been used
- assert 'SSL_TLS_SNI' not in r.json, f'{r.json}'
+ assert "SSL_TLS_SNI" not in r.json, f"{r.json}"
# use localhost for connect
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_06_localhost(self, env: Env, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- domain = 'localhost'
- url = f'https://{env.authority_for(domain, proto)}/curltest/sslinfo'
+ domain = "localhost"
+ url = f"https://{env.authority_for(domain, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
- assert r.json, f'{r}'
- if proto != 'h3': # we proxy h3
- assert r.json['SSL_TLS_SNI'] == domain, f'{r.json}'
+ assert r.exit_code == 0, f"{r}"
+ assert r.json, f"{r}"
+ if proto != "h3": # we proxy h3
+ assert r.json["SSL_TLS_SNI"] == domain, f"{r.json}"
@staticmethod
def gen_test_17_07_list():
tls13_tests = [
[None, True],
- [['TLS_AES_128_GCM_SHA256'], True],
- [['TLS_AES_256_GCM_SHA384'], False],
- [['TLS_CHACHA20_POLY1305_SHA256'], True],
- [['TLS_AES_256_GCM_SHA384',
- 'TLS_CHACHA20_POLY1305_SHA256'], True],
+ [["TLS_AES_128_GCM_SHA256"], True],
+ [["TLS_AES_256_GCM_SHA384"], False],
+ [["TLS_CHACHA20_POLY1305_SHA256"], True],
+ [["TLS_AES_256_GCM_SHA384", "TLS_CHACHA20_POLY1305_SHA256"], True],
]
tls12_tests = [
[None, True],
- [['ECDHE-ECDSA-AES128-GCM-SHA256', 'ECDHE-RSA-AES128-GCM-SHA256'], True],
- [['ECDHE-ECDSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384'], False],
- [['ECDHE-ECDSA-CHACHA20-POLY1305', 'ECDHE-RSA-CHACHA20-POLY1305'], True],
- [['ECDHE-ECDSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384',
- 'ECDHE-ECDSA-CHACHA20-POLY1305', 'ECDHE-RSA-CHACHA20-POLY1305'], True],
+ [["ECDHE-ECDSA-AES128-GCM-SHA256", "ECDHE-RSA-AES128-GCM-SHA256"], True],
+ [["ECDHE-ECDSA-AES256-GCM-SHA384", "ECDHE-RSA-AES256-GCM-SHA384"], False],
+ [["ECDHE-ECDSA-CHACHA20-POLY1305", "ECDHE-RSA-CHACHA20-POLY1305"], True],
+ [
+ [
+ "ECDHE-ECDSA-AES256-GCM-SHA384",
+ "ECDHE-RSA-AES256-GCM-SHA384",
+ "ECDHE-ECDSA-CHACHA20-POLY1305",
+ "ECDHE-RSA-CHACHA20-POLY1305",
+ ],
+ True,
+ ],
]
ret = []
- for tls_proto in ['TLSv1.3 +TLSv1.2', 'TLSv1.3', 'TLSv1.2']:
+ for tls_proto in ["TLSv1.3 +TLSv1.2", "TLSv1.3", "TLSv1.2"]:
for [ciphers13, succeed13] in tls13_tests:
for [ciphers12, succeed12] in tls12_tests:
ret.append([tls_proto, ciphers13, ciphers12, succeed13, succeed12])
return ret
- @pytest.mark.parametrize("tls_proto, ciphers13, ciphers12, succeed13, succeed12", gen_test_17_07_list())
- def test_17_07_ssl_ciphers(self, env: Env, httpd, tls_proto, ciphers13, ciphers12, succeed13, succeed12):
+ @pytest.mark.parametrize(
+ "tls_proto, ciphers13, ciphers12, succeed13, succeed12", gen_test_17_07_list()
+ )
+ def test_17_07_ssl_ciphers(
+ self, env: Env, httpd, tls_proto, ciphers13, ciphers12, succeed13, succeed12
+ ):
# to test setting cipher suites, the AES 256 ciphers are disabled in the test server
- httpd.set_extra_config('base', [
- 'SSLCipherSuite SSL'
- ' ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256'
- ':ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305',
- 'SSLCipherSuite TLSv1.3'
- ' TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256',
- f'SSLProtocol {tls_proto}'
- ])
+ httpd.set_extra_config(
+ "base",
+ [
+ "SSLCipherSuite SSL"
+ " ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256"
+ ":ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305",
+ "SSLCipherSuite TLSv1.3"
+ " TLS_AES_128_GCM_SHA256:TLS_CHACHA20_POLY1305_SHA256",
+ f"SSLProtocol {tls_proto}",
+ ],
+ )
httpd.reload_if_config_changed()
- proto = 'http/1.1'
+ proto = "http/1.1"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
# SSL backend specifics
- if env.curl_uses_lib('gnutls'):
- pytest.skip('GnuTLS does not support setting ciphers')
- elif env.curl_uses_lib('boringssl'):
+ if env.curl_uses_lib("gnutls"):
+ pytest.skip("GnuTLS does not support setting ciphers")
+ elif env.curl_uses_lib("boringssl"):
if ciphers13 is not None:
- pytest.skip('BoringSSL does not support setting TLSv1.3 ciphers')
- elif env.curl_uses_lib('schannel'): # not in CI, so untested
+ pytest.skip("BoringSSL does not support setting TLSv1.3 ciphers")
+ elif env.curl_uses_lib("schannel"): # not in CI, so untested
if ciphers12 is not None:
- pytest.skip('Schannel does not support setting TLSv1.2 ciphers by name')
- elif env.curl_uses_lib('bearssl'):
- if tls_proto == 'TLSv1.3':
- pytest.skip('BearSSL does not support TLSv1.3')
- tls_proto = 'TLSv1.2'
- elif env.curl_uses_lib('mbedtls') and not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
- if tls_proto == 'TLSv1.3':
- pytest.skip('mbedTLS < 3.6.0 does not support TLSv1.3')
- elif env.curl_uses_lib('sectransp'): # not in CI, so untested
- if tls_proto == 'TLSv1.3':
- pytest.skip('Secure Transport does not support TLSv1.3')
- tls_proto = 'TLSv1.2'
+ pytest.skip("Schannel does not support setting TLSv1.2 ciphers by name")
+ elif env.curl_uses_lib("bearssl"):
+ if tls_proto == "TLSv1.3":
+ pytest.skip("BearSSL does not support TLSv1.3")
+ tls_proto = "TLSv1.2"
+ elif env.curl_uses_lib("mbedtls") and not env.curl_lib_version_at_least(
+ "mbedtls", "3.6.0"
+ ):
+ if tls_proto == "TLSv1.3":
+ pytest.skip("mbedTLS < 3.6.0 does not support TLSv1.3")
+ elif env.curl_uses_lib("sectransp"): # not in CI, so untested
+ if tls_proto == "TLSv1.3":
+ pytest.skip("Secure Transport does not support TLSv1.3")
+ tls_proto = "TLSv1.2"
# test
- extra_args = ['--tls13-ciphers', ':'.join(ciphers13)] if ciphers13 else []
- extra_args += ['--ciphers', ':'.join(ciphers12)] if ciphers12 else []
+ extra_args = ["--tls13-ciphers", ":".join(ciphers13)] if ciphers13 else []
+ extra_args += ["--ciphers", ":".join(ciphers12)] if ciphers12 else []
r = curl.http_get(url=url, alpn_proto=proto, extra_args=extra_args)
- if tls_proto != 'TLSv1.2' and succeed13:
+ if tls_proto != "TLSv1.2" and succeed13:
assert r.exit_code == 0, r.dump_logs()
- assert r.json['HTTPS'] == 'on', r.dump_logs()
- assert r.json['SSL_PROTOCOL'] == 'TLSv1.3', r.dump_logs()
- assert ciphers13 is None or r.json['SSL_CIPHER'] in ciphers13, r.dump_logs()
- elif tls_proto == 'TLSv1.2' and succeed12:
+ assert r.json["HTTPS"] == "on", r.dump_logs()
+ assert r.json["SSL_PROTOCOL"] == "TLSv1.3", r.dump_logs()
+ assert ciphers13 is None or r.json["SSL_CIPHER"] in ciphers13, r.dump_logs()
+ elif tls_proto == "TLSv1.2" and succeed12:
assert r.exit_code == 0, r.dump_logs()
- assert r.json['HTTPS'] == 'on', r.dump_logs()
- assert r.json['SSL_PROTOCOL'] == 'TLSv1.2', r.dump_logs()
- assert ciphers12 is None or r.json['SSL_CIPHER'] in ciphers12, r.dump_logs()
+ assert r.json["HTTPS"] == "on", r.dump_logs()
+ assert r.json["SSL_PROTOCOL"] == "TLSv1.2", r.dump_logs()
+ assert ciphers12 is None or r.json["SSL_CIPHER"] in ciphers12, r.dump_logs()
else:
assert r.exit_code != 0, r.dump_logs()
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_08_cert_status(self, env: Env, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
- if not env.curl_uses_lib('openssl') and \
- not env.curl_uses_lib('gnutls') and \
- not env.curl_uses_lib('quictls'):
+ if (
+ not env.curl_uses_lib("openssl")
+ and not env.curl_uses_lib("gnutls")
+ and not env.curl_uses_lib("quictls")
+ ):
pytest.skip("TLS library does not support --cert-status")
curl = CurlClient(env=env)
- domain = 'localhost'
- url = f'https://{env.authority_for(domain, proto)}/'
- r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
- '--cert-status'
- ])
+ domain = "localhost"
+ url = f"https://{env.authority_for(domain, proto)}/"
+ r = curl.http_get(url=url, alpn_proto=proto, extra_args=["--cert-status"])
# CURLE_SSL_INVALIDCERTSTATUS, our certs have no OCSP info
- assert r.exit_code == 91, f'{r}'
+ assert r.exit_code == 91, f"{r}"
@staticmethod
def gen_test_17_09_list():
- return [[tls_proto, max_ver, min_ver]
- for tls_proto in ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3']
- for max_ver in range(5)
- for min_ver in range(-2, 4)]
+ return [
+ [tls_proto, max_ver, min_ver]
+ for tls_proto in ["TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"]
+ for max_ver in range(5)
+ for min_ver in range(-2, 4)
+ ]
@pytest.mark.parametrize("tls_proto, max_ver, min_ver", gen_test_17_09_list())
def test_17_09_ssl_min_max(self, env: Env, httpd, tls_proto, max_ver, min_ver):
- httpd.set_extra_config('base', [
- f'SSLProtocol {tls_proto}',
- 'SSLCipherSuite ALL:@SECLEVEL=0',
- ])
+ httpd.set_extra_config(
+ "base",
+ [
+ f"SSLProtocol {tls_proto}",
+ "SSLCipherSuite ALL:@SECLEVEL=0",
+ ],
+ )
httpd.reload_if_config_changed()
- proto = 'http/1.1'
+ proto = "http/1.1"
run_env = os.environ.copy()
- if env.curl_uses_lib('gnutls'):
+ if env.curl_uses_lib("gnutls"):
# we need to override any default system configuration since
# we want to test all protocol versions. Ubuntu (or the GH image)
# disable TSL1.0 and TLS1.1 system wide. We do not want.
- our_config = os.path.join(env.gen_dir, 'gnutls_config')
+ our_config = os.path.join(env.gen_dir, "gnutls_config")
if not os.path.exists(our_config):
- with open(our_config, 'w') as fd:
- fd.write('# empty\n')
- run_env['GNUTLS_SYSTEM_PRIORITY_FILE'] = our_config
+ with open(our_config, "w") as fd:
+ fd.write("# empty\n")
+ run_env["GNUTLS_SYSTEM_PRIORITY_FILE"] = our_config
curl = CurlClient(env=env, run_env=run_env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
# SSL backend specifics
- if env.curl_uses_lib('bearssl'):
- supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', None]
- elif env.curl_uses_lib('sectransp'): # not in CI, so untested
- supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', None]
- elif env.curl_uses_lib('gnutls'):
- supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3']
- elif env.curl_uses_lib('quiche'):
- supported = ['TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3']
+ if env.curl_uses_lib("bearssl"):
+ supported = ["TLSv1", "TLSv1.1", "TLSv1.2", None]
+ elif env.curl_uses_lib("sectransp"): # not in CI, so untested
+ supported = ["TLSv1", "TLSv1.1", "TLSv1.2", None]
+ elif env.curl_uses_lib("gnutls"):
+ supported = ["TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"]
+ elif env.curl_uses_lib("quiche"):
+ supported = ["TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"]
else: # most SSL backends dropped support for TLSv1.0, TLSv1.1
- supported = [None, None, 'TLSv1.2', 'TLSv1.3']
+ supported = [None, None, "TLSv1.2", "TLSv1.3"]
# test
- extra_args = [[], ['--tlsv1'], ['--tlsv1.0'], ['--tlsv1.1'], ['--tlsv1.2'], ['--tlsv1.3']][min_ver+2] + \
- [['--tls-max', '1.0'], ['--tls-max', '1.1'], ['--tls-max', '1.2'], ['--tls-max', '1.3'], []][max_ver]
- extra_args.extend(['--trace-config', 'ssl'])
+ extra_args = [
+ [],
+ ["--tlsv1"],
+ ["--tlsv1.0"],
+ ["--tlsv1.1"],
+ ["--tlsv1.2"],
+ ["--tlsv1.3"],
+ ][min_ver + 2] + [
+ ["--tls-max", "1.0"],
+ ["--tls-max", "1.1"],
+ ["--tls-max", "1.2"],
+ ["--tls-max", "1.3"],
+ [],
+ ][
+ max_ver
+ ]
+ extra_args.extend(["--trace-config", "ssl"])
r = curl.http_get(url=url, alpn_proto=proto, extra_args=extra_args)
- if max_ver >= min_ver and tls_proto in supported[max(0, min_ver):min(max_ver, 3)+1]:
- assert r.exit_code == 0, f'extra_args={extra_args}\n{r.dump_logs()}'
- assert r.json['HTTPS'] == 'on', r.dump_logs()
- assert r.json['SSL_PROTOCOL'] == tls_proto, r.dump_logs()
+ if (
+ max_ver >= min_ver
+ and tls_proto in supported[max(0, min_ver) : min(max_ver, 3) + 1]
+ ):
+ assert r.exit_code == 0, f"extra_args={extra_args}\n{r.dump_logs()}"
+ assert r.json["HTTPS"] == "on", r.dump_logs()
+ assert r.json["SSL_PROTOCOL"] == tls_proto, r.dump_logs()
else:
- assert r.exit_code != 0, f'extra_args={extra_args}\n{r.dump_logs()}'
+ assert r.exit_code != 0, f"extra_args={extra_args}\n{r.dump_logs()}"
def test_17_10_h3_session_reuse(self, env: Env, httpd, nghttpx):
if not env.have_h3():
pytest.skip("h3 not supported")
- if not env.curl_uses_lib('quictls') and \
- not env.curl_uses_lib('gnutls') and \
- not env.curl_uses_lib('wolfssl'):
+ if (
+ not env.curl_uses_lib("quictls")
+ and not env.curl_uses_lib("gnutls")
+ and not env.curl_uses_lib("wolfssl")
+ ):
pytest.skip("QUIC session reuse not implemented")
count = 2
- docname = 'data-10k'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env)
+ docname = "data-10k"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(name="hx-download", env=env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}',
- '-f', # forbid reuse of connections
- '-r', f'{env.domain1}:{env.port_for("h3")}:127.0.0.1',
- '-V', 'h3', url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(
+ args=[
+ "-n",
+ f"{count}",
+ "-f", # forbid reuse of connections
+ "-r",
+ f'{env.domain1}:{env.port_for("h3")}:127.0.0.1',
+ "-V",
+ "h3",
+ url,
+ ]
+ )
r.check_exit_code(0)
# check that TLS session was reused as expected
reused_session = False
for line in r.trace_lines:
- m = re.match(r'\[1-1] \* SSL reusing session.*', line)
+ m = re.match(r"\[1-1] \* SSL reusing session.*", line)
if m:
reused_session = True
- assert reused_session, f'{r}\n{r.dump_logs()}'
+ assert reused_session, f"{r}\n{r.dump_logs()}"
# use host name server has no certificate for
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_11_wrong_host(self, env: Env, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- domain = f'insecure.{env.tld}'
- url = f'https://{domain}:{env.port_for(proto)}/curltest/sslinfo'
+ domain = f"insecure.{env.tld}"
+ url = f"https://{domain}:{env.port_for(proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto)
- assert r.exit_code == 60, f'{r}'
+ assert r.exit_code == 60, f"{r}"
# use host name server has no cert for with --insecure
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_17_12_insecure(self, env: Env, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- domain = f'insecure.{env.tld}'
- url = f'https://{domain}:{env.port_for(proto)}/curltest/sslinfo'
- r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
- '--insecure'
- ])
- assert r.exit_code == 0, f'{r}'
- assert r.json, f'{r}'
+ domain = f"insecure.{env.tld}"
+ url = f"https://{domain}:{env.port_for(proto)}/curltest/sslinfo"
+ r = curl.http_get(url=url, alpn_proto=proto, extra_args=["--insecure"])
+ assert r.exit_code == 0, f"{r}"
+ assert r.json, f"{r}"
# connect to an expired certificate
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
def test_17_14_expired_cert(self, env: Env, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.expired_domain}:{env.port_for(proto)}/'
+ url = f"https://{env.expired_domain}:{env.port_for(proto)}/"
r = curl.http_get(url=url, alpn_proto=proto)
- assert r.exit_code == 60, f'{r}' # peer failed verification
+ assert r.exit_code == 60, f"{r}" # peer failed verification
exp_trace = None
match_trace = None
- if env.curl_uses_lib('openssl') or env.curl_uses_lib('quictls'):
- exp_trace = r'.*SSL certificate problem: certificate has expired$'
- elif env.curl_uses_lib('gnutls'):
- exp_trace = r'.*server verification failed: certificate has expired\..*'
- elif env.curl_uses_lib('wolfssl'):
- exp_trace = r'.*server verification failed: certificate has expired\.$'
+ if env.curl_uses_lib("openssl") or env.curl_uses_lib("quictls"):
+ exp_trace = r".*SSL certificate problem: certificate has expired$"
+ elif env.curl_uses_lib("gnutls"):
+ exp_trace = r".*server verification failed: certificate has expired\..*"
+ elif env.curl_uses_lib("wolfssl"):
+ exp_trace = r".*server verification failed: certificate has expired\.$"
if exp_trace is not None:
for line in r.trace_lines:
if re.match(exp_trace, line):
break
assert match_trace, f'Did not find "{exp_trace}" in trace\n{r.dump_logs()}'
- @pytest.mark.skipif(condition=not Env.curl_has_feature('SSLS-EXPORT'),
- reason='curl lacks SSL session export support')
+ @pytest.mark.skipif(
+ condition=not Env.curl_has_feature("SSLS-EXPORT"),
+ reason="curl lacks SSL session export support",
+ )
def test_17_15_session_export(self, env: Env, httpd):
- proto = 'http/1.1'
- if env.curl_uses_lib('libressl'):
- pytest.skip('Libressl resumption does not work inTLSv1.3')
- if env.curl_uses_lib('rustls-ffi'):
- pytest.skip('rustsls does not expose sessions')
- if env.curl_uses_lib('bearssl'):
- pytest.skip('BearSSL does not support TLSv1.3')
- if env.curl_uses_lib('mbedtls') and \
- not env.curl_lib_version_at_least('mbedtls', '3.6.0'):
- pytest.skip('mbedtls TLSv1.3 session resume not working before 3.6.0')
+ proto = "http/1.1"
+ if env.curl_uses_lib("libressl"):
+ pytest.skip("Libressl resumption does not work inTLSv1.3")
+ if env.curl_uses_lib("rustls-ffi"):
+ pytest.skip("rustsls does not expose sessions")
+ if env.curl_uses_lib("bearssl"):
+ pytest.skip("BearSSL does not support TLSv1.3")
+ if env.curl_uses_lib("mbedtls") and not env.curl_lib_version_at_least(
+ "mbedtls", "3.6.0"
+ ):
+ pytest.skip("mbedtls TLSv1.3 session resume not working before 3.6.0")
run_env = os.environ.copy()
- run_env['CURL_DEBUG'] = 'ssl,scache'
+ run_env["CURL_DEBUG"] = "ssl,scache"
# clean session file first, then reuse
- session_file = os.path.join(env.gen_dir, 'test_17_15.sessions')
+ session_file = os.path.join(env.gen_dir, "test_17_15.sessions")
if os.path.exists(session_file):
return os.remove(session_file)
- xargs = ['--tls-max', '1.3', '--tlsv1.3', '--ssl-sessions', session_file]
+ xargs = ["--tls-max", "1.3", "--tlsv1.3", "--ssl-sessions", session_file]
curl = CurlClient(env=env, run_env=run_env)
# tell the server to close the connection after each request
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
r = curl.http_get(url=url, alpn_proto=proto, extra_args=xargs)
- assert r.exit_code == 0, f'{r}'
- assert r.json['HTTPS'] == 'on', f'{r.json}'
- assert r.json['SSL_SESSION_RESUMED'] == 'Initial', f'{r.json}\n{r.dump_logs()}'
+ assert r.exit_code == 0, f"{r}"
+ assert r.json["HTTPS"] == "on", f"{r.json}"
+ assert r.json["SSL_SESSION_RESUMED"] == "Initial", f"{r.json}\n{r.dump_logs()}"
# ok, run again, sessions should be imported
- run_dir2 = os.path.join(env.gen_dir, 'curl2')
+ run_dir2 = os.path.join(env.gen_dir, "curl2")
curl = CurlClient(env=env, run_env=run_env, run_dir=run_dir2)
r = curl.http_get(url=url, alpn_proto=proto, extra_args=xargs)
- assert r.exit_code == 0, f'{r}'
- assert r.json['SSL_SESSION_RESUMED'] == 'Resumed', f'{r.json}\n{r.dump_logs()}'
+ assert r.exit_code == 0, f"{r}"
+ assert r.json["SSL_SESSION_RESUMED"] == "Resumed", f"{r.json}\n{r.dump_logs()}"
# verify the ciphers are ignored when talking TLSv1.3 only
# see issue #16232
def test_17_16_h3_ignore_ciphers12(self, env: Env):
- proto = 'h3'
- if proto == 'h3' and not env.have_h3():
+ proto = "h3"
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
- r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
- '--ciphers', 'NONSENSE'
- ])
- assert r.exit_code == 0, f'{r}'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
+ r = curl.http_get(
+ url=url, alpn_proto=proto, extra_args=["--ciphers", "NONSENSE"]
+ )
+ assert r.exit_code == 0, f"{r}"
def test_17_17_h1_ignore_ciphers13(self, env: Env):
- proto = 'http/1.1'
+ proto = "http/1.1"
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo'
- r = curl.http_get(url=url, alpn_proto=proto, extra_args=[
- '--tls13-ciphers', 'NONSENSE', '--tls-max', '1.2'
- ])
- assert r.exit_code == 0, f'{r}'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/sslinfo"
+ r = curl.http_get(
+ url=url,
+ alpn_proto=proto,
+ extra_args=["--tls13-ciphers", "NONSENSE", "--tls-max", "1.2"],
+ )
+ assert r.exit_code == 0, f"{r}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestMethods:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload_if_config_changed()
indir = httpd.docs_dir
- env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
- env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
- env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
+ env.make_data_file(indir=indir, fname="data-10k", fsize=10 * 1024)
+ env.make_data_file(indir=indir, fname="data-100k", fsize=100 * 1024)
+ env.make_data_file(indir=indir, fname="data-1m", fsize=1024 * 1024)
# download 1 file
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2", "h3"])
def test_18_01_delete(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
count = 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]'
+ url = f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]"
r = curl.http_delete(urls=[url], alpn_proto=proto)
r.check_stats(count=count, http_status=204, exitcode=0)
# - 10ms later DATA frame length=0 and eos=1
# should be accepted
def test_18_02_delete_h2_special(self, env: Env, httpd, nghttpx):
- proto = 'h2'
+ proto = "h2"
count = 1
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]'\
- '&chunks=1&chunk_size=0&chunk_delay=10ms'
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak?id=[0-{count-1}]"
+ "&chunks=1&chunk_size=0&chunk_delay=10ms"
+ )
r = curl.http_delete(urls=[url], alpn_proto=proto)
r.check_stats(count=count, http_status=204, exitcode=0)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class TestShutdown:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd, nghttpx):
if env.have_h3():
nghttpx.start_if_needed()
httpd.clear_extra_configs()
httpd.reload()
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, httpd):
indir = httpd.docs_dir
- env.make_data_file(indir=indir, fname="data-10k", fsize=10*1024)
- env.make_data_file(indir=indir, fname="data-100k", fsize=100*1024)
- env.make_data_file(indir=indir, fname="data-1m", fsize=1024*1024)
+ env.make_data_file(indir=indir, fname="data-10k", fsize=10 * 1024)
+ env.make_data_file(indir=indir, fname="data-100k", fsize=100 * 1024)
+ env.make_data_file(indir=indir, fname="data-1m", fsize=1024 * 1024)
# check with `tcpdump` that we see curl TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_01_check_tcp_rst(self, env: Env, httpd, proto):
if env.ci_run:
pytest.skip("seems not to work in CI")
curl = CurlClient(env=env)
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
- r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
- '--parallel'
- ])
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]"
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=["--parallel"]
+ )
r.check_response(http_status=200, count=2)
assert r.tcpdump
- assert len(r.tcpdump.stats) != 0, f'Expected TCP RSTs packets: {r.tcpdump.stderr}'
+ assert (
+ len(r.tcpdump.stats) != 0
+ ), f"Expected TCP RSTs packets: {r.tcpdump.stderr}"
# check with `tcpdump` that we do NOT see TCP RST when CURL_GRACEFUL_SHUTDOWN set
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
- @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("proto", ["http/1.1", "h2"])
def test_19_02_check_shutdown(self, env: Env, httpd, proto):
if not env.curl_is_debug():
- pytest.skip('only works for curl debug builds')
- curl = CurlClient(env=env, run_env={
- 'CURL_GRACEFUL_SHUTDOWN': '2000',
- 'CURL_DEBUG': 'ssl,tcp'
- })
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
- r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
- '--parallel'
- ])
+ pytest.skip("only works for curl debug builds")
+ curl = CurlClient(
+ env=env, run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "ssl,tcp"}
+ )
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]"
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=["--parallel"]
+ )
r.check_response(http_status=200, count=2)
assert r.tcpdump
- assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets'
+ assert len(r.tcpdump.stats) == 0, "Unexpected TCP RSTs packets"
# run downloads where the server closes the connection after each request
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_03_shutdown_by_server(self, env: Env, httpd, proto):
if not env.curl_is_debug():
- pytest.skip('only works for curl debug builds')
+ pytest.skip("only works for curl debug builds")
count = 10
- curl = CurlClient(env=env, run_env={
- 'CURL_GRACEFUL_SHUTDOWN': '2000',
- 'CURL_DEBUG': 'ssl'
- })
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?'\
- f'id=[0-{count-1}]&with_cl&close'
+ curl = CurlClient(
+ env=env, run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "ssl"}
+ )
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?"
+ f"id=[0-{count-1}]&with_cl&close"
+ )
r = curl.http_download(urls=[url], alpn_proto=proto)
r.check_response(http_status=200, count=count)
- shutdowns = [line for line in r.trace_lines
- if re.match(r'.*CCACHE\] shutdown #\d+, done=1', line)]
- assert len(shutdowns) == count, f'{shutdowns}'
+ shutdowns = [
+ line
+ for line in r.trace_lines
+ if re.match(r".*CCACHE\] shutdown #\d+, done=1", line)
+ ]
+ assert len(shutdowns) == count, f"{shutdowns}"
# run downloads with CURLOPT_FORBID_REUSE set, meaning *we* close
# the connection after each request
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_04_shutdown_by_curl(self, env: Env, httpd, proto):
if not env.curl_is_debug():
- pytest.skip('only works for curl debug builds')
+ pytest.skip("only works for curl debug builds")
count = 10
- docname = 'data.json'
- url = f'https://localhost:{env.https_port}/{docname}'
- client = LocalClient(name='hx-download', env=env, run_env={
- 'CURL_GRACEFUL_SHUTDOWN': '2000',
- 'CURL_DEBUG': 'ssl'
- })
+ docname = "data.json"
+ url = f"https://localhost:{env.https_port}/{docname}"
+ client = LocalClient(
+ name="hx-download",
+ env=env,
+ run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "ssl"},
+ )
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- r = client.run(args=[
- '-n', f'{count}', '-f', '-V', proto, url
- ])
+ pytest.skip(f"example client not built: {client.name}")
+ r = client.run(args=["-n", f"{count}", "-f", "-V", proto, url])
r.check_exit_code(0)
- shutdowns = [line for line in r.trace_lines
- if re.match(r'.*CCACHE\] shutdown #\d+, done=1', line)]
- assert len(shutdowns) == count, f'{shutdowns}'
+ shutdowns = [
+ line
+ for line in r.trace_lines
+ if re.match(r".*CCACHE\] shutdown #\d+, done=1", line)
+ ]
+ assert len(shutdowns) == count, f"{shutdowns}"
# run event-based downloads with CURLOPT_FORBID_REUSE set, meaning *we* close
# the connection after each request
- @pytest.mark.parametrize("proto", ['http/1.1'])
+ @pytest.mark.parametrize("proto", ["http/1.1"])
def test_19_05_event_shutdown_by_server(self, env: Env, httpd, proto):
if not env.curl_is_debug():
- pytest.skip('only works for curl debug builds')
+ pytest.skip("only works for curl debug builds")
count = 10
- curl = CurlClient(env=env, run_env={
- # forbid connection reuse to trigger shutdowns after transfer
- 'CURL_FORBID_REUSE': '1',
- # make socket receives block 50% of the time to delay shutdown
- 'CURL_DBG_SOCK_RBLOCK': '50',
- 'CURL_DEBUG': 'ssl'
- })
- url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?'\
- f'id=[0-{count-1}]&with_cl&'
- r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
- '--test-event'
- ])
+ curl = CurlClient(
+ env=env,
+ run_env={
+ # forbid connection reuse to trigger shutdowns after transfer
+ "CURL_FORBID_REUSE": "1",
+ # make socket receives block 50% of the time to delay shutdown
+ "CURL_DBG_SOCK_RBLOCK": "50",
+ "CURL_DEBUG": "ssl",
+ },
+ )
+ url = (
+ f"https://{env.authority_for(env.domain1, proto)}/curltest/tweak/?"
+ f"id=[0-{count-1}]&with_cl&"
+ )
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, extra_args=["--test-event"]
+ )
r.check_response(http_status=200, count=count)
# check that we closed all connections
- closings = [line for line in r.trace_lines
- if re.match(r'.*CCACHE\] closing #\d+', line)]
- assert len(closings) == count, f'{closings}'
+ closings = [
+ line for line in r.trace_lines if re.match(r".*CCACHE\] closing #\d+", line)
+ ]
+ assert len(closings) == count, f"{closings}"
# check that all connection sockets were removed from event
- removes = [line for line in r.trace_lines
- if re.match(r'.*socket cb: socket \d+ REMOVED', line)]
- assert len(removes) == count, f'{removes}'
+ removes = [
+ line
+ for line in r.trace_lines
+ if re.match(r".*socket cb: socket \d+ REMOVED", line)
+ ]
+ assert len(removes) == count, f"{removes}"
# check graceful shutdown on multiplexed http
- @pytest.mark.parametrize("proto", ['h2', 'h3'])
+ @pytest.mark.parametrize("proto", ["h2", "h3"])
def test_19_06_check_shutdown(self, env: Env, httpd, nghttpx, proto):
- if proto == 'h3' and not env.have_h3():
+ if proto == "h3" and not env.have_h3():
pytest.skip("h3 not supported")
if not env.curl_is_debug():
- pytest.skip('only works for curl debug builds')
- curl = CurlClient(env=env, run_env={
- 'CURL_GRACEFUL_SHUTDOWN': '2000',
- 'CURL_DEBUG': 'all'
- })
- url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
- r = curl.http_download(urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=[
- '--parallel'
- ])
+ pytest.skip("only works for curl debug builds")
+ curl = CurlClient(
+ env=env, run_env={"CURL_GRACEFUL_SHUTDOWN": "2000", "CURL_DEBUG": "all"}
+ )
+ url = f"https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]"
+ r = curl.http_download(
+ urls=[url], alpn_proto=proto, with_tcpdump=True, extra_args=["--parallel"]
+ )
r.check_response(http_status=200, count=2)
# check connection cache closings
- shutdowns = [line for line in r.trace_lines
- if re.match(r'.*CCACHE\] shutdown #\d+, done=1', line)]
- assert len(shutdowns) == 1, f'{shutdowns}'
+ shutdowns = [
+ line
+ for line in r.trace_lines
+ if re.match(r".*CCACHE\] shutdown #\d+, done=1", line)
+ ]
+ assert len(shutdowns) == 1, f"{shutdowns}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=not Env.curl_has_protocol('ws'),
- reason='curl lacks ws protocol support')
+@pytest.mark.skipif(
+ condition=not Env.curl_has_protocol("ws"), reason="curl lacks ws protocol support"
+)
class TestWebsockets:
-
def check_alive(self, env, timeout=5):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.ws_port}/'
+ url = f"http://localhost:{env.ws_port}/"
end = datetime.now() + timedelta(seconds=timeout)
while datetime.now() < end:
r = curl.http_download(urls=[url])
if r.exit_code == 0:
return True
- time.sleep(.1)
+ time.sleep(0.1)
return False
def _mkpath(self, path):
if os.path.exists(path):
return shutil.rmtree(path)
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def ws_echo(self, env):
- run_dir = os.path.join(env.gen_dir, 'ws-echo-server')
- err_file = os.path.join(run_dir, 'stderr')
+ run_dir = os.path.join(env.gen_dir, "ws-echo-server")
+ err_file = os.path.join(run_dir, "stderr")
self._rmrf(run_dir)
self._mkpath(run_dir)
- with open(err_file, 'w') as cerr:
- cmd = os.path.join(env.project_dir,
- 'tests/http/testenv/ws_echo_server.py')
- args = [cmd, '--port', str(env.ws_port)]
- p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr,
- stdout=cerr)
+ with open(err_file, "w") as cerr:
+ cmd = os.path.join(env.project_dir, "tests/http/testenv/ws_echo_server.py")
+ args = [cmd, "--port", str(env.ws_port)]
+ p = subprocess.Popen(args=args, cwd=run_dir, stderr=cerr, stdout=cerr)
assert self.check_alive(env)
yield
p.terminate()
def test_20_01_basic(self, env: Env, ws_echo):
curl = CurlClient(env=env)
- url = f'http://localhost:{env.ws_port}/'
+ url = f"http://localhost:{env.ws_port}/"
r = curl.http_download(urls=[url])
r.check_response(http_status=426)
def test_20_02_pingpong_small(self, env: Env, ws_echo):
payload = 125 * "x"
- client = LocalClient(env=env, name='ws-pingpong')
+ client = LocalClient(env=env, name="ws-pingpong")
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'ws://localhost:{env.ws_port}/'
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=[url, payload])
r.check_exit_code(0)
# the python websocket server does not like 'large' control frames
def test_20_03_pingpong_too_large(self, env: Env, ws_echo):
payload = 127 * "x"
- client = LocalClient(env=env, name='ws-pingpong')
+ client = LocalClient(env=env, name="ws-pingpong")
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'ws://localhost:{env.ws_port}/'
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"ws://localhost:{env.ws_port}/"
r = client.run(args=[url, payload])
r.check_exit_code(56)
def test_20_04_data_small(self, env: Env, ws_echo):
- client = LocalClient(env=env, name='ws-data')
+ client = LocalClient(env=env, name="ws-data")
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'ws://localhost:{env.ws_port}/'
- r = client.run(args=['-m', str(0), '-M', str(10), url])
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"ws://localhost:{env.ws_port}/"
+ r = client.run(args=["-m", str(0), "-M", str(10), url])
r.check_exit_code(0)
def test_20_05_data_med(self, env: Env, ws_echo):
- client = LocalClient(env=env, name='ws-data')
+ client = LocalClient(env=env, name="ws-data")
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'ws://localhost:{env.ws_port}/'
- r = client.run(args=['-m', str(120), '-M', str(130), url])
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"ws://localhost:{env.ws_port}/"
+ r = client.run(args=["-m", str(120), "-M", str(130), url])
r.check_exit_code(0)
def test_20_06_data_large(self, env: Env, ws_echo):
- client = LocalClient(env=env, name='ws-data')
+ client = LocalClient(env=env, name="ws-data")
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'ws://localhost:{env.ws_port}/'
- r = client.run(args=['-m', str(65535 - 5), '-M', str(65535 + 5), url])
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"ws://localhost:{env.ws_port}/"
+ r = client.run(args=["-m", str(65535 - 5), "-M", str(65535 + 5), url])
r.check_exit_code(0)
def test_20_07_data_large_small_recv(self, env: Env, ws_echo):
run_env = os.environ.copy()
- run_env['CURL_WS_CHUNK_SIZE'] = '1024'
- client = LocalClient(env=env, name='ws-data', run_env=run_env)
+ run_env["CURL_WS_CHUNK_SIZE"] = "1024"
+ client = LocalClient(env=env, name="ws-data", run_env=run_env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'ws://localhost:{env.ws_port}/'
- r = client.run(args=['-m', str(65535 - 5), '-M', str(65535 + 5), url])
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"ws://localhost:{env.ws_port}/"
+ r = client.run(args=["-m", str(65535 - 5), "-M", str(65535 + 5), url])
r.check_exit_code(0)
# Send large frames and simulate send blocking on 8192 bytes chunks
# Simlates error reported in #15865
def test_20_08_data_very_large(self, env: Env, ws_echo):
run_env = os.environ.copy()
- run_env['CURL_WS_CHUNK_EAGAIN'] = '8192'
- client = LocalClient(env=env, name='ws-data', run_env=run_env)
+ run_env["CURL_WS_CHUNK_EAGAIN"] = "8192"
+ client = LocalClient(env=env, name="ws-data", run_env=run_env)
if not client.exists():
- pytest.skip(f'example client not built: {client.name}')
- url = f'ws://localhost:{env.ws_port}/'
+ pytest.skip(f"example client not built: {client.name}")
+ url = f"ws://localhost:{env.ws_port}/"
count = 10
large = 512 * 1024
large = 20000
- r = client.run(args=['-c', str(count), '-m', str(large), url])
+ r = client.run(args=["-c", str(count), "-m", str(large), url])
r.check_exit_code(0)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@pytest.mark.skipif(condition=not Env.has_vsftpd(), reason="missing vsftpd")
class TestVsFTPD:
-
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def vsftpd(self, env):
vsftpd = VsFTPD(env=env)
assert vsftpd.start()
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
- data1k = 1024*'x'
+ data1k = 1024 * "x"
flen = 0
- with open(fpath, 'w') as fd:
+ with open(fpath, "w") as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, vsftpd):
if os.path.exists(vsftpd.docs_dir):
shutil.rmtree(vsftpd.docs_dir)
if not os.path.exists(vsftpd.docs_dir):
os.makedirs(vsftpd.docs_dir)
- self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-1k', fsize=1024)
- self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-10k', fsize=10*1024)
- self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-1m', fsize=1024*1024)
- self._make_docs_file(docs_dir=vsftpd.docs_dir, fname='data-10m', fsize=10*1024*1024)
+ self._make_docs_file(docs_dir=vsftpd.docs_dir, fname="data-1k", fsize=1024)
+ self._make_docs_file(
+ docs_dir=vsftpd.docs_dir, fname="data-10k", fsize=10 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=vsftpd.docs_dir, fname="data-1m", fsize=1024 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=vsftpd.docs_dir, fname="data-10m", fsize=10 * 1024 * 1024
+ )
env.make_data_file(indir=env.gen_dir, fname="upload-1k", fsize=1024)
- env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100*1024)
- env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024*1024)
+ env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100 * 1024)
+ env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024 * 1024)
def test_30_01_list_dir(self, env: Env, vsftpd: VsFTPD):
curl = CurlClient(env=env)
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=1, http_status=226)
- lines = open(os.path.join(curl.run_dir, 'download_#1.data')).readlines()
- assert len(lines) == 4, f'list: {lines}'
+ lines = open(os.path.join(curl.run_dir, "download_#1.data")).readlines()
+ assert len(lines) == 4, f"list: {lines}"
# download 1 file, no SSL
- @pytest.mark.parametrize("docname", [
- 'data-1k', 'data-1m', 'data-10m'
- ])
+ @pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_30_02_download_1(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
- @pytest.mark.parametrize("docname", [
- 'data-1k', 'data-1m', 'data-10m'
- ])
+ @pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_30_03_download_10_serial(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 10
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
- @pytest.mark.parametrize("docname", [
- 'data-1k', 'data-1m', 'data-10m'
- ])
+ @pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_30_04_download_10_parallel(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 10
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
- r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
- '--parallel'
- ])
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
+ r = curl.ftp_get(urls=[url], with_stats=True, extra_args=["--parallel"])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
- @pytest.mark.parametrize("docname", [
- 'upload-1k', 'upload-100k', 'upload-1m'
- ])
+ @pytest.mark.parametrize("docname", ["upload-1k", "upload-100k", "upload-1m"])
def test_30_05_upload_1(self, env: Env, vsftpd: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
- r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True)
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
+ r = curl.ftp_upload(urls=[url], fupload=f"{srcfile}", with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpd, docname=docname)
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_30_06_shutdownh_download(self, env: Env, vsftpd: VsFTPD):
- docname = 'data-1k'
+ docname = "data-1k"
curl = CurlClient(env=env)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
r = curl.ftp_get(urls=[url], with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
assert r.tcpdump
- assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets'
+ assert len(r.tcpdump.stats) == 0, "Unexpected TCP RSTs packets"
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_30_07_shutdownh_upload(self, env: Env, vsftpd: VsFTPD):
- docname = 'upload-1k'
+ docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
- r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True)
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
+ r = curl.ftp_upload(
+ urls=[url], fupload=f"{srcfile}", with_stats=True, with_tcpdump=True
+ )
r.check_stats(count=count, http_status=226)
assert r.tcpdump
- assert len(r.tcpdump.stats) == 0, 'Unexpected TCP RSTs packets'
+ assert len(r.tcpdump.stats) == 0, "Unexpected TCP RSTs packets"
def test_30_08_active_download(self, env: Env, vsftpd: VsFTPD):
- docname = 'data-10k'
+ docname = "data-10k"
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpd.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpd.docs_dir, f"{docname}")
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]'
- r = curl.ftp_get(urls=[url], with_stats=True, extra_args=[
- '--ftp-port', '127.0.0.1'
- ])
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/{docname}?[0-{count-1}]"
+ r = curl.ftp_get(
+ urls=[url], with_stats=True, extra_args=["--ftp-port", "127.0.0.1"]
+ )
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
def test_30_09_active_upload(self, env: Env, vsftpd: VsFTPD):
- docname = 'upload-1k'
+ docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpd.docs_dir, docname)
self._rmf(dstfile)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpd.port}/'
- r = curl.ftp_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
- '--ftp-port', '127.0.0.1'
- ])
+ url = f"ftp://{env.ftp_domain}:{vsftpd.port}/"
+ r = curl.ftp_upload(
+ urls=[url],
+ fupload=f"{srcfile}",
+ with_stats=True,
+ extra_args=["--ftp-port", "127.0.0.1"],
+ )
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpd, docname=docname)
- def check_downloads(self, client, srcfile: str, count: int,
- complete: bool = True):
+ def check_downloads(self, client, srcfile: str, count: int, complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
- diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
- b=open(dfile).readlines(),
- fromfile=srcfile,
- tofile=dfile,
- n=1))
- assert False, f'download {dfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=open(srcfile).readlines(),
+ b=open(dfile).readlines(),
+ fromfile=srcfile,
+ tofile=dfile,
+ n=1,
+ )
+ )
+ assert False, f"download {dfile} differs:\n{diff}"
def check_upload(self, env, vsftpd: VsFTPD, docname):
srcfile = os.path.join(env.gen_dir, docname)
assert os.path.exists(srcfile)
assert os.path.exists(dstfile)
if not filecmp.cmp(srcfile, dstfile, shallow=False):
- diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
- b=open(dstfile).readlines(),
- fromfile=srcfile,
- tofile=dstfile,
- n=1))
- assert False, f'upload {dstfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=open(srcfile).readlines(),
+ b=open(dstfile).readlines(),
+ fromfile=srcfile,
+ tofile=dstfile,
+ n=1,
+ )
+ )
+ assert False, f"upload {dstfile} differs:\n{diff}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
@pytest.mark.skipif(condition=not Env.has_vsftpd(), reason="missing vsftpd")
class TestVsFTPD:
-
SUPPORTS_SSL = True
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def vsftpds(self, env):
if not TestVsFTPD.SUPPORTS_SSL:
- pytest.skip('vsftpd does not seem to support SSL')
+ pytest.skip("vsftpd does not seem to support SSL")
vsftpds = VsFTPD(env=env, with_ssl=True)
if not vsftpds.start():
vsftpds.stop()
TestVsFTPD.SUPPORTS_SSL = False
- pytest.skip('vsftpd does not seem to support SSL')
+ pytest.skip("vsftpd does not seem to support SSL")
yield vsftpds
vsftpds.stop()
def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
fpath = os.path.join(docs_dir, fname)
- data1k = 1024*'x'
+ data1k = 1024 * "x"
flen = 0
- with open(fpath, 'w') as fd:
+ with open(fpath, "w") as fd:
while flen < fsize:
fd.write(data1k)
flen += len(data1k)
return flen
- @pytest.fixture(autouse=True, scope='class')
+ @pytest.fixture(autouse=True, scope="class")
def _class_scope(self, env, vsftpds):
if os.path.exists(vsftpds.docs_dir):
shutil.rmtree(vsftpds.docs_dir)
if not os.path.exists(vsftpds.docs_dir):
os.makedirs(vsftpds.docs_dir)
- self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1k', fsize=1024)
- self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10k', fsize=10*1024)
- self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-1m', fsize=1024*1024)
- self._make_docs_file(docs_dir=vsftpds.docs_dir, fname='data-10m', fsize=10*1024*1024)
+ self._make_docs_file(docs_dir=vsftpds.docs_dir, fname="data-1k", fsize=1024)
+ self._make_docs_file(
+ docs_dir=vsftpds.docs_dir, fname="data-10k", fsize=10 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=vsftpds.docs_dir, fname="data-1m", fsize=1024 * 1024
+ )
+ self._make_docs_file(
+ docs_dir=vsftpds.docs_dir, fname="data-10m", fsize=10 * 1024 * 1024
+ )
env.make_data_file(indir=env.gen_dir, fname="upload-1k", fsize=1024)
- env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100*1024)
- env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024*1024)
+ env.make_data_file(indir=env.gen_dir, fname="upload-100k", fsize=100 * 1024)
+ env.make_data_file(indir=env.gen_dir, fname="upload-1m", fsize=1024 * 1024)
def test_31_01_list_dir(self, env: Env, vsftpds: VsFTPD):
curl = CurlClient(env=env)
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=1, http_status=226)
- lines = open(os.path.join(curl.run_dir, 'download_#1.data')).readlines()
- assert len(lines) == 4, f'list: {lines}'
+ lines = open(os.path.join(curl.run_dir, "download_#1.data")).readlines()
+ assert len(lines) == 4, f"list: {lines}"
# download 1 file, no SSL
- @pytest.mark.parametrize("docname", [
- 'data-1k', 'data-1m', 'data-10m'
- ])
+ @pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_31_02_download_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
- @pytest.mark.parametrize("docname", [
- 'data-1k', 'data-1m', 'data-10m'
- ])
+ @pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_31_03_download_10_serial(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 10
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(urls=[url], with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
- @pytest.mark.parametrize("docname", [
- 'data-1k', 'data-1m', 'data-10m'
- ])
+ @pytest.mark.parametrize("docname", ["data-1k", "data-1m", "data-10m"])
def test_31_04_download_10_parallel(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 10
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
- r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=[
- '--parallel'
- ])
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
+ r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=["--parallel"])
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
- @pytest.mark.parametrize("docname", [
- 'upload-1k', 'upload-100k', 'upload-1m'
- ])
+ @pytest.mark.parametrize("docname", ["upload-1k", "upload-100k", "upload-1m"])
def test_31_05_upload_1(self, env: Env, vsftpds: VsFTPD, docname):
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
- r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True)
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
+ r = curl.ftp_ssl_upload(urls=[url], fupload=f"{srcfile}", with_stats=True)
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_31_06_shutdownh_download(self, env: Env, vsftpds: VsFTPD):
- docname = 'data-1k'
+ docname = "data-1k"
curl = CurlClient(env=env)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
r = curl.ftp_ssl_get(urls=[url], with_stats=True, with_tcpdump=True)
r.check_stats(count=count, http_status=226)
# vsftp closes control connection without niceties,
# disregard RST packets it sent from its port to curl
- assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets'
+ assert (
+ len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0
+ ), "Unexpected TCP RSTs packets"
# check with `tcpdump` if curl causes any TCP RST packets
@pytest.mark.skipif(condition=not Env.tcpdump(), reason="tcpdump not available")
def test_31_07_shutdownh_upload(self, env: Env, vsftpds: VsFTPD):
- docname = 'upload-1k'
+ docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
- r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, with_tcpdump=True)
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
+ r = curl.ftp_ssl_upload(
+ urls=[url], fupload=f"{srcfile}", with_stats=True, with_tcpdump=True
+ )
r.check_stats(count=count, http_status=226)
# vsftp closes control connection without niceties,
# disregard RST packets it sent from its port to curl
- assert len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0, 'Unexpected TCP RSTs packets'
+ assert (
+ len(r.tcpdump.stats_excluding(src_port=env.ftps_port)) == 0
+ ), "Unexpected TCP RSTs packets"
def test_31_08_upload_ascii(self, env: Env, vsftpds: VsFTPD):
- docname = 'upload-ascii'
+ docname = "upload-ascii"
line_length = 21
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
- env.make_data_file(indir=env.gen_dir, fname=docname, fsize=100*1024,
- line_length=line_length)
+ env.make_data_file(
+ indir=env.gen_dir, fname=docname, fsize=100 * 1024, line_length=line_length
+ )
srcsize = os.path.getsize(srcfile)
self._rmf(dstfile)
count = 1
curl = CurlClient(env=env)
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
- r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True,
- extra_args=['--use-ascii'])
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
+ r = curl.ftp_ssl_upload(
+ urls=[url],
+ fupload=f"{srcfile}",
+ with_stats=True,
+ extra_args=["--use-ascii"],
+ )
r.check_stats(count=count, http_status=226)
# expect the uploaded file to be number of converted newlines larger
dstsize = os.path.getsize(dstfile)
newlines = len(open(srcfile).readlines())
- assert (srcsize + newlines) == dstsize, \
- f'expected source with {newlines} lines to be that much larger,'\
- f'instead srcsize={srcsize}, upload size={dstsize}, diff={dstsize-srcsize}'
+ assert (srcsize + newlines) == dstsize, (
+ f"expected source with {newlines} lines to be that much larger,"
+ f"instead srcsize={srcsize}, upload size={dstsize}, diff={dstsize-srcsize}"
+ )
def test_31_08_active_download(self, env: Env, vsftpds: VsFTPD):
- docname = 'data-10k'
+ docname = "data-10k"
curl = CurlClient(env=env)
- srcfile = os.path.join(vsftpds.docs_dir, f'{docname}')
+ srcfile = os.path.join(vsftpds.docs_dir, f"{docname}")
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]'
- r = curl.ftp_ssl_get(urls=[url], with_stats=True, extra_args=[
- '--ftp-port', '127.0.0.1'
- ])
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}?[0-{count-1}]"
+ r = curl.ftp_ssl_get(
+ urls=[url], with_stats=True, extra_args=["--ftp-port", "127.0.0.1"]
+ )
r.check_stats(count=count, http_status=226)
self.check_downloads(curl, srcfile, count)
def test_31_09_active_upload(self, env: Env, vsftpds: VsFTPD):
- docname = 'upload-1k'
+ docname = "upload-1k"
curl = CurlClient(env=env)
srcfile = os.path.join(env.gen_dir, docname)
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/'
- r = curl.ftp_ssl_upload(urls=[url], fupload=f'{srcfile}', with_stats=True, extra_args=[
- '--ftp-port', '127.0.0.1'
- ])
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/"
+ r = curl.ftp_ssl_upload(
+ urls=[url],
+ fupload=f"{srcfile}",
+ with_stats=True,
+ extra_args=["--ftp-port", "127.0.0.1"],
+ )
r.check_stats(count=count, http_status=226)
self.check_upload(env, vsftpds, docname=docname)
- @pytest.mark.parametrize("indata", [
- '1234567890', ''
- ])
+ @pytest.mark.parametrize("indata", ["1234567890", ""])
def test_31_10_upload_stdin(self, env: Env, vsftpds: VsFTPD, indata):
curl = CurlClient(env=env)
docname = "upload_31_10"
dstfile = os.path.join(vsftpds.docs_dir, docname)
self._rmf(dstfile)
count = 1
- url = f'ftp://{env.ftp_domain}:{vsftpds.port}/{docname}'
+ url = f"ftp://{env.ftp_domain}:{vsftpds.port}/{docname}"
r = curl.ftp_ssl_upload(urls=[url], updata=indata, with_stats=True)
r.check_stats(count=count, http_status=226)
assert os.path.exists(dstfile)
destdata = open(dstfile).readlines()
expdata = [indata] if len(indata) else []
- assert expdata == destdata, f'exected: {expdata}, got: {destdata}'
+        assert expdata == destdata, f"expected: {expdata}, got: {destdata}"
- def check_downloads(self, client, srcfile: str, count: int,
- complete: bool = True):
+ def check_downloads(self, client, srcfile: str, count: int, complete: bool = True):
for i in range(count):
dfile = client.download_file(i)
assert os.path.exists(dfile)
if complete and not filecmp.cmp(srcfile, dfile, shallow=False):
- diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
- b=open(dfile).readlines(),
- fromfile=srcfile,
- tofile=dfile,
- n=1))
- assert False, f'download {dfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=open(srcfile).readlines(),
+ b=open(dfile).readlines(),
+ fromfile=srcfile,
+ tofile=dfile,
+ n=1,
+ )
+ )
+ assert False, f"download {dfile} differs:\n{diff}"
def check_upload(self, env, vsftpd: VsFTPD, docname):
srcfile = os.path.join(env.gen_dir, docname)
assert os.path.exists(srcfile)
assert os.path.exists(dstfile)
if not filecmp.cmp(srcfile, dstfile, shallow=False):
- diff = "".join(difflib.unified_diff(a=open(srcfile).readlines(),
- b=open(dstfile).readlines(),
- fromfile=srcfile,
- tofile=dstfile,
- n=1))
- assert False, f'upload {dstfile} differs:\n{diff}'
+ diff = "".join(
+ difflib.unified_diff(
+ a=open(srcfile).readlines(),
+ b=open(dstfile).readlines(),
+ fromfile=srcfile,
+ tofile=dstfile,
+ n=1,
+ )
+ )
+ assert False, f"upload {dstfile} differs:\n{diff}"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
###########################################################################
# ruff: noqa: F401, E402
import pytest
-pytest.register_assert_rewrite("testenv.env", "testenv.curl", "testenv.caddy",
- "testenv.httpd", "testenv.nghttpx")
+
+pytest.register_assert_rewrite(
+ "testenv.env", "testenv.curl", "testenv.caddy", "testenv.httpd", "testenv.nghttpx"
+)
from .env import Env
from .certs import TestCA, Credentials
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class Caddy:
-
def __init__(self, env: Env):
self.env = env
- self._caddy = os.environ['CADDY'] if 'CADDY' in os.environ else env.caddy
- self._caddy_dir = os.path.join(env.gen_dir, 'caddy')
- self._docs_dir = os.path.join(self._caddy_dir, 'docs')
- self._conf_file = os.path.join(self._caddy_dir, 'Caddyfile')
- self._error_log = os.path.join(self._caddy_dir, 'caddy.log')
- self._tmp_dir = os.path.join(self._caddy_dir, 'tmp')
+ self._caddy = os.environ["CADDY"] if "CADDY" in os.environ else env.caddy
+ self._caddy_dir = os.path.join(env.gen_dir, "caddy")
+ self._docs_dir = os.path.join(self._caddy_dir, "docs")
+ self._conf_file = os.path.join(self._caddy_dir, "Caddyfile")
+ self._error_log = os.path.join(self._caddy_dir, "caddy.log")
+ self._tmp_dir = os.path.join(self._caddy_dir, "tmp")
self._process = None
self._rmf(self._error_log)
if self._process:
self.stop()
self._write_config()
- args = [
- self._caddy, 'run'
- ]
- caddyerr = open(self._error_log, 'a')
- self._process = subprocess.Popen(args=args, cwd=self._caddy_dir, stderr=caddyerr)
+ args = [self._caddy, "run"]
+ caddyerr = open(self._error_log, "a")
+ self._process = subprocess.Popen(
+ args=args, cwd=self._caddy_dir, stderr=caddyerr
+ )
if self._process.returncode is not None:
return False
return not wait_live or self.wait_live(timeout=timedelta(seconds=5))
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'https://{self.env.domain1}:{self.port}/'
+ check_url = f"https://{self.env.domain1}:{self.port}/"
r = curl.http_get(url=check_url)
if r.exit_code != 0:
return True
- log.debug(f'waiting for caddy to stop responding: {r}')
- time.sleep(.1)
+ log.debug(f"waiting for caddy to stop responding: {r}")
+ time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'https://{self.env.domain1}:{self.port}/'
+ check_url = f"https://{self.env.domain1}:{self.port}/"
r = curl.http_get(url=check_url)
if r.exit_code == 0:
return True
- time.sleep(.1)
+ time.sleep(0.1)
log.error(f"Caddy still not responding after {timeout}")
return False
assert creds2 # convince pytype this isn't None
self._mkpath(self._docs_dir)
self._mkpath(self._tmp_dir)
- with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
+ with open(os.path.join(self._docs_dir, "data.json"), "w") as fd:
data = {
- 'server': f'{domain1}',
+ "server": f"{domain1}",
}
fd.write(JSONEncoder().encode(data))
- with open(self._conf_file, 'w') as fd:
- conf = [ # base server config
- '{',
- f' http_port {self.env.caddy_http_port}',
- f' https_port {self.env.caddy_https_port}',
- f' servers :{self.env.caddy_https_port} {{',
- ' protocols h3 h2 h1',
- ' }',
- '}',
- f'{domain1}:{self.env.caddy_https_port} {{',
- ' file_server * {',
- f' root {self._docs_dir}',
- ' }',
- f' tls {creds1.cert_file} {creds1.pkey_file}',
- '}',
- f'{domain2} {{',
- f' reverse_proxy /* http://localhost:{self.env.http_port} {{',
- ' }',
- f' tls {creds2.cert_file} {creds2.pkey_file}',
- '}',
+ with open(self._conf_file, "w") as fd:
+ conf = [ # base server config
+ "{",
+ f" http_port {self.env.caddy_http_port}",
+ f" https_port {self.env.caddy_https_port}",
+ f" servers :{self.env.caddy_https_port} {{",
+ " protocols h3 h2 h1",
+ " }",
+ "}",
+ f"{domain1}:{self.env.caddy_https_port} {{",
+ " file_server * {",
+ f" root {self._docs_dir}",
+ " }",
+ f" tls {creds1.cert_file} {creds1.pkey_file}",
+ "}",
+ f"{domain2} {{",
+ f" reverse_proxy /* http://localhost:{self.env.http_port} {{",
+ " }",
+ f" tls {creds2.cert_file} {creds2.pkey_file}",
+ "}",
]
fd.write("\n".join(conf))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
-from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption, load_pem_private_key
+from cryptography.hazmat.primitives.serialization import (
+ Encoding,
+ PrivateFormat,
+ NoEncryption,
+ load_pem_private_key,
+)
from cryptography.x509 import ExtendedKeyUsageOID, NameOID
EC_SUPPORTED = {}
-EC_SUPPORTED.update([(curve.name.upper(), curve) for curve in [
- ec.SECP192R1,
- ec.SECP224R1,
- ec.SECP256R1,
- ec.SECP384R1,
-]])
+EC_SUPPORTED.update(
+ [
+ (curve.name.upper(), curve)
+ for curve in [
+ ec.SECP192R1,
+ ec.SECP224R1,
+ ec.SECP256R1,
+ ec.SECP384R1,
+ ]
+ ]
+)
def _private_key(key_type):
if isinstance(key_type, str):
key_type = key_type.upper()
- m = re.match(r'^(RSA)?(\d+)$', key_type)
+ m = re.match(r"^(RSA)?(\d+)$", key_type)
if m:
key_type = int(m.group(2))
if isinstance(key_type, int):
return rsa.generate_private_key(
- public_exponent=65537,
- key_size=key_type,
- backend=default_backend()
+ public_exponent=65537, key_size=key_type, backend=default_backend()
)
if not isinstance(key_type, ec.EllipticCurve) and key_type in EC_SUPPORTED:
key_type = EC_SUPPORTED[key_type]
- return ec.generate_private_key(
- curve=key_type,
- backend=default_backend()
- )
+ return ec.generate_private_key(curve=key_type, backend=default_backend())
class CertificateSpec:
-
- def __init__(self, name: Optional[str] = None,
- domains: Optional[List[str]] = None,
- email: Optional[str] = None,
- key_type: Optional[str] = None,
- single_file: bool = False,
- valid_from: timedelta = timedelta(days=-1),
- valid_to: timedelta = timedelta(days=89),
- client: bool = False,
- check_valid: bool = True,
- sub_specs: Optional[List['CertificateSpec']] = None):
+ def __init__(
+ self,
+ name: Optional[str] = None,
+ domains: Optional[List[str]] = None,
+ email: Optional[str] = None,
+ key_type: Optional[str] = None,
+ single_file: bool = False,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ client: bool = False,
+ check_valid: bool = True,
+ sub_specs: Optional[List["CertificateSpec"]] = None,
+ ):
self._name = name
self.domains = domains
self.client = client
class Credentials:
-
- def __init__(self,
- name: str,
- cert: Any,
- pkey: Any,
- issuer: Optional['Credentials'] = None):
+ def __init__(
+ self, name: str, cert: Any, pkey: Any, issuer: Optional["Credentials"] = None
+ ):
self._name = name
self._cert = cert
self._pkey = pkey
def pkey_pem(self) -> bytes:
return self._pkey.private_bytes(
Encoding.PEM,
- PrivateFormat.TraditionalOpenSSL if self.key_type.startswith('rsa') else PrivateFormat.PKCS8,
- NoEncryption())
+ PrivateFormat.TraditionalOpenSSL
+ if self.key_type.startswith("rsa")
+ else PrivateFormat.PKCS8,
+ NoEncryption(),
+ )
@property
- def issuer(self) -> Optional['Credentials']:
+ def issuer(self) -> Optional["Credentials"]:
return self._issuer
- def set_store(self, store: 'CertStore'):
+ def set_store(self, store: "CertStore"):
self._store = store
- def set_files(self, cert_file: str, pkey_file: Optional[str] = None,
- combined_file: Optional[str] = None):
+ def set_files(
+ self,
+ cert_file: str,
+ pkey_file: Optional[str] = None,
+ combined_file: Optional[str] = None,
+ ):
self._cert_file = cert_file
self._pkey_file = pkey_file
self._combined_file = combined_file
def combined_file(self) -> Optional[str]:
return self._combined_file
- def get_first(self, name) -> Optional['Credentials']:
+ def get_first(self, name) -> Optional["Credentials"]:
creds = self._store.get_credentials_for_name(name) if self._store else []
return creds[0] if len(creds) else None
- def get_credentials_for_name(self, name) -> List['Credentials']:
+ def get_credentials_for_name(self, name) -> List["Credentials"]:
return self._store.get_credentials_for_name(name) if self._store else []
- def issue_certs(self, specs: List[CertificateSpec],
- chain: Optional[List['Credentials']] = None) -> List['Credentials']:
+ def issue_certs(
+ self, specs: List[CertificateSpec], chain: Optional[List["Credentials"]] = None
+ ) -> List["Credentials"]:
return [self.issue_cert(spec=spec, chain=chain) for spec in specs]
- def issue_cert(self, spec: CertificateSpec,
- chain: Optional[List['Credentials']] = None) -> 'Credentials':
+ def issue_cert(
+ self, spec: CertificateSpec, chain: Optional[List["Credentials"]] = None
+ ) -> "Credentials":
key_type = spec.key_type if spec.key_type else self.key_type
creds = None
if self._store:
creds = self._store.load_credentials(
- name=spec.name, key_type=key_type, single_file=spec.single_file,
- issuer=self, check_valid=spec.check_valid)
+ name=spec.name,
+ key_type=key_type,
+ single_file=spec.single_file,
+ issuer=self,
+ check_valid=spec.check_valid,
+ )
if creds is None:
- creds = TestCA.create_credentials(spec=spec, issuer=self, key_type=key_type,
- valid_from=spec.valid_from, valid_to=spec.valid_to)
+ creds = TestCA.create_credentials(
+ spec=spec,
+ issuer=self,
+ key_type=key_type,
+ valid_from=spec.valid_from,
+ valid_to=spec.valid_to,
+ )
if self._store:
self._store.save(creds, single_file=spec.single_file)
if spec.type == "ca":
class CertStore:
-
def __init__(self, fpath: str):
self._store_dir = fpath
if not os.path.exists(self._store_dir):
def path(self) -> str:
return self._store_dir
- def save(self, creds: Credentials, name: Optional[str] = None,
- chain: Optional[List[Credentials]] = None,
- single_file: bool = False) -> None:
+ def save(
+ self,
+ creds: Credentials,
+ name: Optional[str] = None,
+ chain: Optional[List[Credentials]] = None,
+ single_file: bool = False,
+ ) -> None:
name = name if name is not None else creds.name
cert_file = self.get_cert_file(name=name, key_type=creds.key_type)
pkey_file = self.get_pkey_file(name=name, key_type=creds.key_type)
chain.append(creds)
if not with_root and len(chain) > 1:
chain = chain[:-1]
- chain_file = os.path.join(self._store_dir, f'{name}-{infix}.pem')
+ chain_file = os.path.join(self._store_dir, f"{name}-{infix}.pem")
with open(chain_file, "wb") as fd:
for c in chain:
fd.write(c.cert_pem)
def get_cert_file(self, name: str, key_type=None) -> str:
key_infix = ".{0}".format(key_type) if key_type is not None else ""
- return os.path.join(self._store_dir, f'{name}{key_infix}.cert.pem')
+ return os.path.join(self._store_dir, f"{name}{key_infix}.cert.pem")
def get_pkey_file(self, name: str, key_type=None) -> str:
key_infix = ".{0}".format(key_type) if key_type is not None else ""
- return os.path.join(self._store_dir, f'{name}{key_infix}.pkey.pem')
+ return os.path.join(self._store_dir, f"{name}{key_infix}.pkey.pem")
def get_combined_file(self, name: str, key_type=None) -> str:
- return os.path.join(self._store_dir, f'{name}.pem')
+ return os.path.join(self._store_dir, f"{name}.pem")
def load_pem_cert(self, fpath: str) -> x509.Certificate:
with open(fpath) as fd:
with open(fpath) as fd:
return load_pem_private_key("".join(fd.readlines()).encode(), password=None)
- def load_credentials(self, name: str, key_type=None,
- single_file: bool = False,
- issuer: Optional[Credentials] = None,
- check_valid: bool = False):
+ def load_credentials(
+ self,
+ name: str,
+ key_type=None,
+ single_file: bool = False,
+ issuer: Optional[Credentials] = None,
+ check_valid: bool = False,
+ ):
cert_file = self.get_cert_file(name=name, key_type=key_type)
- pkey_file = cert_file if single_file else self.get_pkey_file(name=name, key_type=key_type)
+ pkey_file = (
+ cert_file
+ if single_file
+ else self.get_pkey_file(name=name, key_type=key_type)
+ )
comb_file = self.get_combined_file(name=name, key_type=key_type)
if os.path.isfile(cert_file) and os.path.isfile(pkey_file):
cert = self.load_pem_cert(cert_file)
pkey = self.load_pem_pkey(pkey_file)
try:
now = datetime.now(tz=timezone.utc)
- if check_valid and \
- ((cert.not_valid_after_utc < now) or
- (cert.not_valid_before_utc > now)):
+ if check_valid and (
+ (cert.not_valid_after_utc < now)
+ or (cert.not_valid_before_utc > now)
+ ):
return None
except AttributeError: # older python
now = datetime.now()
- if check_valid and \
- ((cert.not_valid_after < now) or
- (cert.not_valid_before > now)):
+ if check_valid and (
+ (cert.not_valid_after < now) or (cert.not_valid_before > now)
+ ):
return None
creds = Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
creds.set_store(self)
class TestCA:
-
@classmethod
- def create_root(cls, name: str, store_dir: str, key_type: str = "rsa2048") -> Credentials:
+ def create_root(
+ cls, name: str, store_dir: str, key_type: str = "rsa2048"
+ ) -> Credentials:
store = CertStore(fpath=store_dir)
creds = store.load_credentials(name="ca", key_type=key_type, issuer=None)
if creds is None:
return creds
@staticmethod
- def create_credentials(spec: CertificateSpec, issuer: Credentials, key_type: Any,
- valid_from: timedelta = timedelta(days=-1),
- valid_to: timedelta = timedelta(days=89),
- ) -> Credentials:
+ def create_credentials(
+ spec: CertificateSpec,
+ issuer: Credentials,
+ key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
"""
Create a certificate signed by this CA for the given domains.
:returns: the certificate and private key PEM file paths
"""
if spec.domains and len(spec.domains):
- creds = TestCA._make_server_credentials(name=spec.name, domains=spec.domains,
- issuer=issuer, valid_from=valid_from,
- valid_to=valid_to, key_type=key_type)
+ creds = TestCA._make_server_credentials(
+ name=spec.name,
+ domains=spec.domains,
+ issuer=issuer,
+ valid_from=valid_from,
+ valid_to=valid_to,
+ key_type=key_type,
+ )
elif spec.client:
- creds = TestCA._make_client_credentials(name=spec.name, issuer=issuer,
- email=spec.email, valid_from=valid_from,
- valid_to=valid_to, key_type=key_type)
+ creds = TestCA._make_client_credentials(
+ name=spec.name,
+ issuer=issuer,
+ email=spec.email,
+ valid_from=valid_from,
+ valid_to=valid_to,
+ key_type=key_type,
+ )
elif spec.name:
- creds = TestCA._make_ca_credentials(name=spec.name, issuer=issuer,
- valid_from=valid_from, valid_to=valid_to,
- key_type=key_type)
+ creds = TestCA._make_ca_credentials(
+ name=spec.name,
+ issuer=issuer,
+ valid_from=valid_from,
+ valid_to=valid_to,
+ key_type=key_type,
+ )
else:
raise Exception(f"unrecognized certificate specification: {spec}")
return creds
@staticmethod
- def _make_x509_name(org_name: Optional[str] = None, common_name: Optional[str] = None, parent: x509.Name = None) -> x509.Name:
+ def _make_x509_name(
+ org_name: Optional[str] = None,
+ common_name: Optional[str] = None,
+ parent: x509.Name = None,
+ ) -> x509.Name:
name_pieces = []
if org_name:
- oid = NameOID.ORGANIZATIONAL_UNIT_NAME if parent else NameOID.ORGANIZATION_NAME
+ oid = (
+ NameOID.ORGANIZATIONAL_UNIT_NAME
+ if parent
+ else NameOID.ORGANIZATION_NAME
+ )
name_pieces.append(x509.NameAttribute(oid, org_name))
elif common_name:
name_pieces.append(x509.NameAttribute(NameOID.COMMON_NAME, common_name))
@staticmethod
def _make_csr(
- subject: x509.Name,
- pkey: Any,
- issuer_subject: Optional[Credentials],
- valid_from_delta: Optional[timedelta] = None,
- valid_until_delta: Optional[timedelta] = None
+ subject: x509.Name,
+ pkey: Any,
+ issuer_subject: Optional[Credentials],
+ valid_from_delta: Optional[timedelta] = None,
+ valid_until_delta: Optional[timedelta] = None,
):
pubkey = pkey.public_key()
issuer_subject = issuer_subject if issuer_subject is not None else subject
@staticmethod
def _add_ca_usages(csr: Any) -> Any:
- return csr.add_extension(
- x509.BasicConstraints(ca=True, path_length=9),
- critical=True,
- ).add_extension(
- x509.KeyUsage(
- digital_signature=True,
- content_commitment=False,
- key_encipherment=False,
- data_encipherment=False,
- key_agreement=False,
- key_cert_sign=True,
- crl_sign=True,
- encipher_only=False,
- decipher_only=False),
- critical=True
- ).add_extension(
- x509.ExtendedKeyUsage([
- ExtendedKeyUsageOID.CLIENT_AUTH,
- ExtendedKeyUsageOID.SERVER_AUTH,
- ExtendedKeyUsageOID.CODE_SIGNING,
- ]),
- critical=True
+ return (
+ csr.add_extension(
+ x509.BasicConstraints(ca=True, path_length=9),
+ critical=True,
+ )
+ .add_extension(
+ x509.KeyUsage(
+ digital_signature=True,
+ content_commitment=False,
+ key_encipherment=False,
+ data_encipherment=False,
+ key_agreement=False,
+ key_cert_sign=True,
+ crl_sign=True,
+ encipher_only=False,
+ decipher_only=False,
+ ),
+ critical=True,
+ )
+ .add_extension(
+ x509.ExtendedKeyUsage(
+ [
+ ExtendedKeyUsageOID.CLIENT_AUTH,
+ ExtendedKeyUsageOID.SERVER_AUTH,
+ ExtendedKeyUsageOID.CODE_SIGNING,
+ ]
+ ),
+ critical=True,
+ )
)
@staticmethod
except: # noqa: E722
names.append(x509.DNSName(name))
- return csr.add_extension(
- x509.BasicConstraints(ca=False, path_length=None),
- critical=True,
- ).add_extension(
- x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
- issuer.certificate.extensions.get_extension_for_class(
- x509.SubjectKeyIdentifier).value),
- critical=False
- ).add_extension(
- x509.SubjectAlternativeName(names), critical=True,
- ).add_extension(
- x509.ExtendedKeyUsage([
- ExtendedKeyUsageOID.SERVER_AUTH,
- ]),
- critical=False
+ return (
+ csr.add_extension(
+ x509.BasicConstraints(ca=False, path_length=None),
+ critical=True,
+ )
+ .add_extension(
+ x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
+ issuer.certificate.extensions.get_extension_for_class(
+ x509.SubjectKeyIdentifier
+ ).value
+ ),
+ critical=False,
+ )
+ .add_extension(
+ x509.SubjectAlternativeName(names),
+ critical=True,
+ )
+ .add_extension(
+ x509.ExtendedKeyUsage(
+ [
+ ExtendedKeyUsageOID.SERVER_AUTH,
+ ]
+ ),
+ critical=False,
+ )
)
@staticmethod
- def _add_client_usages(csr: Any, issuer: Credentials, rfc82name: Optional[str] = None) -> Any:
+ def _add_client_usages(
+ csr: Any, issuer: Credentials, rfc82name: Optional[str] = None
+ ) -> Any:
cert = csr.add_extension(
x509.BasicConstraints(ca=False, path_length=None),
critical=True,
).add_extension(
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
issuer.certificate.extensions.get_extension_for_class(
- x509.SubjectKeyIdentifier).value),
- critical=False
+ x509.SubjectKeyIdentifier
+ ).value
+ ),
+ critical=False,
)
if rfc82name:
cert.add_extension(
critical=True,
)
cert.add_extension(
- x509.ExtendedKeyUsage([
- ExtendedKeyUsageOID.CLIENT_AUTH,
- ]),
- critical=True
+ x509.ExtendedKeyUsage(
+ [
+ ExtendedKeyUsageOID.CLIENT_AUTH,
+ ]
+ ),
+ critical=True,
)
return cert
@staticmethod
- def _make_ca_credentials(name, key_type: Any,
- issuer: Optional[Credentials] = None,
- valid_from: timedelta = timedelta(days=-1),
- valid_to: timedelta = timedelta(days=89),
- ) -> Credentials:
+ def _make_ca_credentials(
+ name,
+ key_type: Any,
+ issuer: Optional[Credentials] = None,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
pkey = _private_key(key_type=key_type)
if issuer is not None:
issuer_subject = issuer.certificate.subject
else:
issuer_subject = None
issuer_key = pkey
- subject = TestCA._make_x509_name(org_name=name, parent=issuer.subject if issuer else None)
- csr = TestCA._make_csr(subject=subject,
- issuer_subject=issuer_subject, pkey=pkey,
- valid_from_delta=valid_from, valid_until_delta=valid_to)
+ subject = TestCA._make_x509_name(
+ org_name=name, parent=issuer.subject if issuer else None
+ )
+ csr = TestCA._make_csr(
+ subject=subject,
+ issuer_subject=issuer_subject,
+ pkey=pkey,
+ valid_from_delta=valid_from,
+ valid_until_delta=valid_to,
+ )
csr = TestCA._add_ca_usages(csr)
- cert = csr.sign(private_key=issuer_key,
- algorithm=hashes.SHA256(),
- backend=default_backend())
+ cert = csr.sign(
+ private_key=issuer_key, algorithm=hashes.SHA256(), backend=default_backend()
+ )
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
@staticmethod
- def _make_server_credentials(name: str, domains: List[str], issuer: Credentials,
- key_type: Any,
- valid_from: timedelta = timedelta(days=-1),
- valid_to: timedelta = timedelta(days=89),
- ) -> Credentials:
+ def _make_server_credentials(
+ name: str,
+ domains: List[str],
+ issuer: Credentials,
+ key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
pkey = _private_key(key_type=key_type)
subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject)
- csr = TestCA._make_csr(subject=subject,
- issuer_subject=issuer.certificate.subject, pkey=pkey,
- valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = TestCA._make_csr(
+ subject=subject,
+ issuer_subject=issuer.certificate.subject,
+ pkey=pkey,
+ valid_from_delta=valid_from,
+ valid_until_delta=valid_to,
+ )
csr = TestCA._add_leaf_usages(csr, domains=domains, issuer=issuer)
- cert = csr.sign(private_key=issuer.private_key,
- algorithm=hashes.SHA256(),
- backend=default_backend())
+ cert = csr.sign(
+ private_key=issuer.private_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend(),
+ )
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
@staticmethod
- def _make_client_credentials(name: str,
- issuer: Credentials, email: Optional[str],
- key_type: Any,
- valid_from: timedelta = timedelta(days=-1),
- valid_to: timedelta = timedelta(days=89),
- ) -> Credentials:
+ def _make_client_credentials(
+ name: str,
+ issuer: Credentials,
+ email: Optional[str],
+ key_type: Any,
+ valid_from: timedelta = timedelta(days=-1),
+ valid_to: timedelta = timedelta(days=89),
+ ) -> Credentials:
pkey = _private_key(key_type=key_type)
subject = TestCA._make_x509_name(common_name=name, parent=issuer.subject)
- csr = TestCA._make_csr(subject=subject,
- issuer_subject=issuer.certificate.subject, pkey=pkey,
- valid_from_delta=valid_from, valid_until_delta=valid_to)
+ csr = TestCA._make_csr(
+ subject=subject,
+ issuer_subject=issuer.certificate.subject,
+ pkey=pkey,
+ valid_from_delta=valid_from,
+ valid_until_delta=valid_to,
+ )
csr = TestCA._add_client_usages(csr, issuer=issuer, rfc82name=email)
- cert = csr.sign(private_key=issuer.private_key,
- algorithm=hashes.SHA256(),
- backend=default_backend())
+ cert = csr.sign(
+ private_key=issuer.private_key,
+ algorithm=hashes.SHA256(),
+ backend=default_backend(),
+ )
return Credentials(name=name, cert=cert, pkey=pkey, issuer=issuer)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class LocalClient:
-
- def __init__(self, name: str, env: Env, run_dir: Optional[str] = None,
- timeout: Optional[float] = None,
- run_env: Optional[Dict[str,str]] = None):
+ def __init__(
+ self,
+ name: str,
+ env: Env,
+ run_dir: Optional[str] = None,
+ timeout: Optional[float] = None,
+ run_env: Optional[Dict[str, str]] = None,
+ ):
self.name = name
- self.path = os.path.join(env.build_dir, f'tests/http/clients/{name}')
+ self.path = os.path.join(env.build_dir, f"tests/http/clients/{name}")
self.env = env
self._run_env = run_env
self._timeout = timeout if timeout else env.test_timeout
- self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl
+ self._curl = os.environ["CURL"] if "CURL" in os.environ else env.curl
self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, name)
- self._stdoutfile = f'{self._run_dir}/stdout'
- self._stderrfile = f'{self._run_dir}/stderr'
+ self._stdoutfile = f"{self._run_dir}/stdout"
+ self._stderrfile = f"{self._run_dir}/stderr"
self._rmrf(self._run_dir)
self._mkpath(self._run_dir)
return os.path.exists(self.path)
def download_file(self, i: int) -> str:
- return os.path.join(self._run_dir, f'download_{i}.data')
+ return os.path.join(self._run_dir, f"download_{i}.data")
def _rmf(self, path):
if os.path.exists(path):
run_env = None
if self._run_env:
run_env = self._run_env.copy()
- for key in ['CURL_DEBUG']:
+ for key in ["CURL_DEBUG"]:
if key in os.environ and key not in run_env:
run_env[key] = os.environ[key]
try:
- with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
- p = subprocess.run(myargs, stderr=cerr, stdout=cout,
- cwd=self._run_dir, shell=False,
- input=None, env=run_env,
- timeout=self._timeout)
+ with open(self._stdoutfile, "w") as cout, open(
+ self._stderrfile, "w"
+ ) as cerr:
+ p = subprocess.run(
+ myargs,
+ stderr=cerr,
+ stdout=cout,
+ cwd=self._run_dir,
+ shell=False,
+ input=None,
+ env=run_env,
+ timeout=self._timeout,
+ )
exitcode = p.returncode
except subprocess.TimeoutExpired:
- log.warning(f'Timeout after {self._timeout}s: {args}')
+ log.warning(f"Timeout after {self._timeout}s: {args}")
exitcode = -1
- exception = 'TimeoutExpired'
+ exception = "TimeoutExpired"
coutput = open(self._stdoutfile).readlines()
cerrput = open(self._stderrfile).readlines()
- return ExecResult(args=myargs, exit_code=exitcode, exception=exception,
- stdout=coutput, stderr=cerrput,
- duration=datetime.now() - start)
+ return ExecResult(
+ args=myargs,
+ exit_code=exitcode,
+ exception=exception,
+ stdout=coutput,
+ stderr=cerrput,
+ duration=datetime.now() - start,
+ )
def dump_logs(self):
lines = []
- lines.append('>>--stdout ----------------------------------------------\n')
+ lines.append(">>--stdout ----------------------------------------------\n")
lines.extend(open(self._stdoutfile).readlines())
- lines.append('>>--stderr ----------------------------------------------\n')
+ lines.append(">>--stderr ----------------------------------------------\n")
lines.extend(open(self._stderrfile).readlines())
- lines.append('<<-------------------------------------------------------\n')
- return ''.join(lines)
+ lines.append("<<-------------------------------------------------------\n")
+ return "".join(lines)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class RunProfile:
-
- STAT_KEYS = ['cpu', 'rss', 'vsz']
+ STAT_KEYS = ["cpu", "rss", "vsz"]
@classmethod
- def AverageStats(cls, profiles: List['RunProfile']):
+ def AverageStats(cls, profiles: List["RunProfile"]):
avg = {}
stats = [p.stats for p in profiles]
for key in cls.STAT_KEYS:
return self._duration
@property
- def stats(self) -> Optional[Dict[str,Any]]:
+ def stats(self) -> Optional[Dict[str, Any]]:
return self._stats
def sample(self):
if self._psu is None:
self._psu = psutil.Process(pid=self._pid)
mem = self._psu.memory_info()
- self._samples.append({
- 'time': elapsed,
- 'cpu': self._psu.cpu_percent(),
- 'vsz': mem.vms,
- 'rss': mem.rss,
- })
+ self._samples.append(
+ {
+ "time": elapsed,
+ "cpu": self._psu.cpu_percent(),
+ "vsz": mem.vms,
+ "rss": mem.rss,
+ }
+ )
except psutil.NoSuchProcess:
pass
def finish(self):
self._duration = datetime.now() - self._started_at
if len(self._samples) > 0:
- weights = [s['time'].total_seconds() for s in self._samples]
+ weights = [s["time"].total_seconds() for s in self._samples]
self._stats = {}
for key in self.STAT_KEYS:
self._stats[key] = fmean([s[key] for s in self._samples], weights)
self._psu = None
def __repr__(self):
- return f'RunProfile[pid={self._pid}, '\
- f'duration={self.duration.total_seconds():.3f}s, '\
- f'stats={self.stats}]'
+ return (
+ f"RunProfile[pid={self._pid}, "
+ f"duration={self.duration.total_seconds():.3f}s, "
+ f"stats={self.stats}]"
+ )
class RunTcpDump:
-
def __init__(self, env, run_dir):
self._env = env
self._run_dir = run_dir
self._proc = None
- self._stdoutfile = os.path.join(self._run_dir, 'tcpdump.out')
- self._stderrfile = os.path.join(self._run_dir, 'tcpdump.err')
+ self._stdoutfile = os.path.join(self._run_dir, "tcpdump.out")
+ self._stderrfile = os.path.join(self._run_dir, "tcpdump.err")
@property
def stats(self) -> Optional[List[str]]:
if self._proc:
- raise Exception('tcpdump still running')
- return [line
- for line in open(self._stdoutfile)
- if re.match(r'.* IP 127\.0\.0\.1\.\d+ [<>] 127\.0\.0\.1\.\d+:.*', line)]
+ raise Exception("tcpdump still running")
+ return [
+ line
+ for line in open(self._stdoutfile)
+ if re.match(r".* IP 127\.0\.0\.1\.\d+ [<>] 127\.0\.0\.1\.\d+:.*", line)
+ ]
def stats_excluding(self, src_port) -> Optional[List[str]]:
if self._proc:
- raise Exception('tcpdump still running')
- return [line
- for line in self.stats
- if not re.match(r'.* IP 127\.0\.0\.1\.' + str(src_port) + ' >.*', line)]
+ raise Exception("tcpdump still running")
+ return [
+ line
+ for line in self.stats
+ if not re.match(r".* IP 127\.0\.0\.1\." + str(src_port) + " >.*", line)
+ ]
@property
def stderr(self) -> List[str]:
if self._proc:
- raise Exception('tcpdump still running')
+ raise Exception("tcpdump still running")
return open(self._stderrfile).readlines()
def sample(self):
# not sure how to make that detection reliable for all platforms
- local_if = 'lo0' if sys.platform.startswith('darwin') else 'lo'
+ local_if = "lo0" if sys.platform.startswith("darwin") else "lo"
try:
tcpdump = self._env.tcpdump()
if tcpdump is None:
- raise Exception('tcpdump not available')
+ raise Exception("tcpdump not available")
# look with tcpdump for TCP RST packets which indicate
# we did not shut down connections cleanly
args = []
# at least on Linux, we need root permissions to run tcpdump
- if sys.platform.startswith('linux'):
- args.append('sudo')
- args.extend([
- tcpdump, '-i', local_if, '-n', 'tcp[tcpflags] & (tcp-rst)!=0'
- ])
- with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
- self._proc = subprocess.Popen(args, stdout=cout, stderr=cerr,
- text=True, cwd=self._run_dir,
- shell=False)
+ if sys.platform.startswith("linux"):
+ args.append("sudo")
+ args.extend([tcpdump, "-i", local_if, "-n", "tcp[tcpflags] & (tcp-rst)!=0"])
+ with open(self._stdoutfile, "w") as cout, open(
+ self._stderrfile, "w"
+ ) as cerr:
+ self._proc = subprocess.Popen(
+ args,
+ stdout=cout,
+ stderr=cerr,
+ text=True,
+ cwd=self._run_dir,
+ shell=False,
+ )
assert self._proc
assert self._proc.returncode is None
while self._proc:
except subprocess.TimeoutExpired:
pass
except Exception:
- log.exception('Tcpdump')
+ log.exception("Tcpdump")
def start(self):
def do_sample():
self.sample()
+
t = Thread(target=do_sample)
t.start()
class ExecResult:
-
- def __init__(self, args: List[str], exit_code: int,
- stdout: List[str], stderr: List[str],
- duration: Optional[timedelta] = None,
- with_stats: bool = False,
- exception: Optional[str] = None,
- profile: Optional[RunProfile] = None,
- tcpdump: Optional[RunTcpDump] = None):
+ def __init__(
+ self,
+ args: List[str],
+ exit_code: int,
+ stdout: List[str],
+ stderr: List[str],
+ duration: Optional[timedelta] = None,
+ with_stats: bool = False,
+ exception: Optional[str] = None,
+ profile: Optional[RunProfile] = None,
+ tcpdump: Optional[RunTcpDump] = None,
+ ):
self._args = args
self._exit_code = exit_code
self._exception = exception
else:
# noinspection PyBroadException
try:
- out = ''.join(self._stdout)
+ out = "".join(self._stdout)
self._json_out = json.loads(out)
except: # noqa: E722
pass
def __repr__(self):
- return f"ExecResult[code={self.exit_code}, exception={self._exception}, "\
- f"args={self._args}, stdout={self._stdout}, stderr={self._stderr}]"
+ return (
+ f"ExecResult[code={self.exit_code}, exception={self._exception}, "
+ f"args={self._args}, stdout={self._stdout}, stderr={self._stderr}]"
+ )
def _parse_stats(self):
self._stats = []
self._stats.append(json.loads(line))
# TODO: specify specific exceptions here
except: # noqa: E722
- log.exception(f'not a JSON stat: {line}')
+ log.exception(f"not a JSON stat: {line}")
break
@property
@property
def outraw(self) -> bytes:
- return ''.join(self._stdout).encode()
+ return "".join(self._stdout).encode()
@property
def stdout(self) -> str:
- return ''.join(self._stdout)
+ return "".join(self._stdout)
@property
def json(self) -> Optional[Dict]:
@property
def stderr(self) -> str:
- return ''.join(self._stderr)
+ return "".join(self._stderr)
@property
def trace_lines(self) -> List[str]:
if len(self.stats):
n = 0
for stat in self.stats:
- n += stat['num_connects']
+ n += stat["num_connects"]
return n
return None
def add_results(self, results: Dict):
self._results.update(results)
- if 'response' in results:
- self.add_response(results['response'])
+ if "response" in results:
+ self.add_response(results["response"])
def add_assets(self, assets: List):
self._assets.extend(assets)
def check_exit_code(self, code: Union[int, bool]):
if code is True:
- assert self.exit_code == 0, f'expected exit code {code}, '\
- f'got {self.exit_code}\n{self.dump_logs()}'
+ assert self.exit_code == 0, (
+ f"expected exit code {code}, "
+ f"got {self.exit_code}\n{self.dump_logs()}"
+ )
elif code is False:
- assert self.exit_code != 0, f'expected exit code {code}, '\
- f'got {self.exit_code}\n{self.dump_logs()}'
+ assert self.exit_code != 0, (
+ f"expected exit code {code}, "
+ f"got {self.exit_code}\n{self.dump_logs()}"
+ )
else:
- assert self.exit_code == code, f'expected exit code {code}, '\
- f'got {self.exit_code}\n{self.dump_logs()}'
-
- def check_response(self, http_status: Optional[int] = 200,
- count: Optional[int] = 1,
- protocol: Optional[str] = None,
- exitcode: Optional[int] = 0,
- connect_count: Optional[int] = None):
+ assert self.exit_code == code, (
+ f"expected exit code {code}, "
+ f"got {self.exit_code}\n{self.dump_logs()}"
+ )
+
+ def check_response(
+ self,
+ http_status: Optional[int] = 200,
+ count: Optional[int] = 1,
+ protocol: Optional[str] = None,
+ exitcode: Optional[int] = 0,
+ connect_count: Optional[int] = None,
+ ):
if exitcode:
self.check_exit_code(exitcode)
if self.with_stats and isinstance(exitcode, int):
for idx, x in enumerate(self.stats):
- if 'exitcode' in x:
- assert int(x['exitcode']) == exitcode, \
- f'response #{idx} exitcode: expected {exitcode}, '\
+ if "exitcode" in x:
+ assert int(x["exitcode"]) == exitcode, (
+ f"response #{idx} exitcode: expected {exitcode}, "
f'got {x["exitcode"]}\n{self.dump_logs()}'
+ )
if self.with_stats:
- assert len(self.stats) == count, \
- f'response count: expected {count}, ' \
- f'got {len(self.stats)}\n{self.dump_logs()}'
+ assert len(self.stats) == count, (
+ f"response count: expected {count}, "
+ f"got {len(self.stats)}\n{self.dump_logs()}"
+ )
else:
- assert len(self.responses) == count, \
- f'response count: expected {count}, ' \
- f'got {len(self.responses)}\n{self.dump_logs()}'
+ assert len(self.responses) == count, (
+ f"response count: expected {count}, "
+ f"got {len(self.responses)}\n{self.dump_logs()}"
+ )
if http_status is not None:
if self.with_stats:
for idx, x in enumerate(self.stats):
- assert 'http_code' in x, \
- f'response #{idx} reports no http_code\n{self.dump_stat(x)}'
- assert x['http_code'] == http_status, \
- f'response #{idx} http_code: expected {http_status}, '\
+ assert (
+ "http_code" in x
+ ), f"response #{idx} reports no http_code\n{self.dump_stat(x)}"
+ assert x["http_code"] == http_status, (
+ f"response #{idx} http_code: expected {http_status}, "
f'got {x["http_code"]}\n{self.dump_stat(x)}'
+ )
else:
for idx, x in enumerate(self.responses):
- assert x['status'] == http_status, \
- f'response #{idx} status: expected {http_status},'\
+ assert x["status"] == http_status, (
+ f"response #{idx} status: expected {http_status},"
f'got {x["status"]}\n{self.dump_stat(x)}'
+ )
if protocol is not None:
if self.with_stats:
http_version = None
- if protocol == 'HTTP/1.1':
- http_version = '1.1'
- elif protocol == 'HTTP/2':
- http_version = '2'
- elif protocol == 'HTTP/3':
- http_version = '3'
+ if protocol == "HTTP/1.1":
+ http_version = "1.1"
+ elif protocol == "HTTP/2":
+ http_version = "2"
+ elif protocol == "HTTP/3":
+ http_version = "3"
if http_version is not None:
for idx, x in enumerate(self.stats):
- assert x['http_version'] == http_version, \
- f'response #{idx} protocol: expected http/{http_version},' \
+ assert x["http_version"] == http_version, (
+ f"response #{idx} protocol: expected http/{http_version},"
f'got version {x["http_version"]}\n{self.dump_stat(x)}'
+ )
else:
for idx, x in enumerate(self.responses):
- assert x['protocol'] == protocol, \
- f'response #{idx} protocol: expected {protocol},'\
+ assert x["protocol"] == protocol, (
+ f"response #{idx} protocol: expected {protocol},"
f'got {x["protocol"]}\n{self.dump_logs()}'
+ )
if connect_count is not None:
- assert self.total_connects == connect_count, \
- f'expected {connect_count}, but {self.total_connects} '\
- f'were made\n{self.dump_logs()}'
-
- def check_stats(self, count: int, http_status: Optional[int] = None,
- exitcode: Optional[int] = None,
- remote_port: Optional[int] = None,
- remote_ip: Optional[str] = None):
+ assert self.total_connects == connect_count, (
+ f"expected {connect_count}, but {self.total_connects} "
+ f"were made\n{self.dump_logs()}"
+ )
+
+ def check_stats(
+ self,
+ count: int,
+ http_status: Optional[int] = None,
+ exitcode: Optional[int] = None,
+ remote_port: Optional[int] = None,
+ remote_ip: Optional[str] = None,
+ ):
if exitcode is None:
self.check_exit_code(0)
- assert len(self.stats) == count, \
- f'stats count: expected {count}, got {len(self.stats)}\n{self.dump_logs()}'
+ assert (
+ len(self.stats) == count
+ ), f"stats count: expected {count}, got {len(self.stats)}\n{self.dump_logs()}"
if http_status is not None:
for idx, x in enumerate(self.stats):
- assert 'http_code' in x, \
- f'status #{idx} reports no http_code\n{self.dump_stat(x)}'
- assert x['http_code'] == http_status, \
- f'status #{idx} http_code: expected {http_status}, '\
+ assert (
+ "http_code" in x
+ ), f"status #{idx} reports no http_code\n{self.dump_stat(x)}"
+ assert x["http_code"] == http_status, (
+ f"status #{idx} http_code: expected {http_status}, "
f'got {x["http_code"]}\n{self.dump_stat(x)}'
+ )
if exitcode is not None:
for idx, x in enumerate(self.stats):
- if 'exitcode' in x:
- assert x['exitcode'] == exitcode, \
- f'status #{idx} exitcode: expected {exitcode}, '\
+ if "exitcode" in x:
+ assert x["exitcode"] == exitcode, (
+ f"status #{idx} exitcode: expected {exitcode}, "
f'got {x["exitcode"]}\n{self.dump_stat(x)}'
+ )
if remote_port is not None:
for idx, x in enumerate(self.stats):
- assert 'remote_port' in x, f'remote_port missing\n{self.dump_stat(x)}'
- assert x['remote_port'] == remote_port, \
- f'status #{idx} remote_port: expected {remote_port}, '\
- f'got {x["remote_port"]}\n{self.dump_stat(x)}'
+ assert "remote_port" in x, f"remote_port missing\n{self.dump_stat(x)}"
+ assert x["remote_port"] == remote_port, (
+ f"status #{idx} remote_port: expected {remote_port}, "
+ f'got {x["remote_port"]}\n{self.dump_stat(x)}'
+ )
if remote_ip is not None:
for idx, x in enumerate(self.stats):
- assert 'remote_ip' in x, f'remote_ip missing\n{self.dump_stat(x)}'
- assert x['remote_ip'] == remote_ip, \
- f'status #{idx} remote_ip: expected {remote_ip}, '\
- f'got {x["remote_ip"]}\n{self.dump_stat(x)}'
+ assert "remote_ip" in x, f"remote_ip missing\n{self.dump_stat(x)}"
+ assert x["remote_ip"] == remote_ip, (
+ f"status #{idx} remote_ip: expected {remote_ip}, "
+ f'got {x["remote_ip"]}\n{self.dump_stat(x)}'
+ )
def dump_logs(self):
- lines = ['>>--stdout ----------------------------------------------\n']
+ lines = [">>--stdout ----------------------------------------------\n"]
lines.extend(self._stdout)
- lines.append('>>--stderr ----------------------------------------------\n')
+ lines.append(">>--stderr ----------------------------------------------\n")
lines.extend(self._stderr)
- lines.append('<<-------------------------------------------------------\n')
- return ''.join(lines)
+ lines.append("<<-------------------------------------------------------\n")
+ return "".join(lines)
def dump_stat(self, x):
lines = [
- 'json stat from curl:',
+ "json stat from curl:",
json.JSONEncoder(indent=2).encode(x),
]
- if 'xfer_id' in x:
- xfer_id = x['xfer_id']
- lines.append(f'>>--xfer {xfer_id} trace:\n')
+ if "xfer_id" in x:
+ xfer_id = x["xfer_id"]
+ lines.append(f">>--xfer {xfer_id} trace:\n")
lines.extend(self.xfer_trace_for(xfer_id))
else:
- lines.append('>>--full trace-------------------------------------------\n')
+ lines.append(">>--full trace-------------------------------------------\n")
lines.extend(self._stderr)
- lines.append('<<-------------------------------------------------------\n')
- return ''.join(lines)
+ lines.append("<<-------------------------------------------------------\n")
+ return "".join(lines)
def xfer_trace_for(self, xfer_id) -> List[str]:
- pat = re.compile(f'^[^[]* \\[{xfer_id}-.*$')
- return [line for line in self._stderr if pat.match(line)]
+ pat = re.compile(f"^[^[]* \\[{xfer_id}-.*$")
+ return [line for line in self._stderr if pat.match(line)]
class CurlClient:
-
ALPN_ARG = {
- 'http/0.9': '--http0.9',
- 'http/1.0': '--http1.0',
- 'http/1.1': '--http1.1',
- 'h2': '--http2',
- 'h2c': '--http2',
- 'h3': '--http3-only',
+ "http/0.9": "--http0.9",
+ "http/1.0": "--http1.0",
+ "http/1.1": "--http1.1",
+ "h2": "--http2",
+ "h2c": "--http2",
+ "h3": "--http3-only",
}
- def __init__(self, env: Env,
- run_dir: Optional[str] = None,
- timeout: Optional[float] = None,
- silent: bool = False,
- run_env: Optional[Dict[str, str]] = None,
- server_addr: Optional[str] = None):
+ def __init__(
+ self,
+ env: Env,
+ run_dir: Optional[str] = None,
+ timeout: Optional[float] = None,
+ silent: bool = False,
+ run_env: Optional[Dict[str, str]] = None,
+ server_addr: Optional[str] = None,
+ ):
self.env = env
self._timeout = timeout if timeout else env.test_timeout
- self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl
- self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, 'curl')
- self._stdoutfile = f'{self._run_dir}/curl.stdout'
- self._stderrfile = f'{self._run_dir}/curl.stderr'
- self._headerfile = f'{self._run_dir}/curl.headers'
- self._log_path = f'{self._run_dir}/curl.log'
+ self._curl = os.environ["CURL"] if "CURL" in os.environ else env.curl
+ self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, "curl")
+ self._stdoutfile = f"{self._run_dir}/curl.stdout"
+ self._stderrfile = f"{self._run_dir}/curl.stderr"
+ self._headerfile = f"{self._run_dir}/curl.headers"
+ self._log_path = f"{self._run_dir}/curl.log"
self._silent = silent
self._run_env = run_env
- self._server_addr = server_addr if server_addr else '127.0.0.1'
+ self._server_addr = server_addr if server_addr else "127.0.0.1"
self._rmrf(self._run_dir)
self._mkpath(self._run_dir)
return self._run_dir
def download_file(self, i: int) -> str:
- return os.path.join(self.run_dir, f'download_{i}.data')
+ return os.path.join(self.run_dir, f"download_{i}.data")
def _rmf(self, path):
if os.path.exists(path):
if not os.path.exists(path):
return os.makedirs(path)
- def get_proxy_args(self, proto: str = 'http/1.1',
- proxys: bool = True, tunnel: bool = False,
- use_ip: bool = False):
+ def get_proxy_args(
+ self,
+ proto: str = "http/1.1",
+ proxys: bool = True,
+ tunnel: bool = False,
+ use_ip: bool = False,
+ ):
proxy_name = self._server_addr if use_ip else self.env.proxy_domain
if proxys:
pport = self.env.pts_port(proto) if tunnel else self.env.proxys_port
xargs = [
- '--proxy', f'https://{proxy_name}:{pport}/',
- '--resolve', f'{proxy_name}:{pport}:{self._server_addr}',
- '--proxy-cacert', self.env.ca.cert_file,
+ "--proxy",
+ f"https://{proxy_name}:{pport}/",
+ "--resolve",
+ f"{proxy_name}:{pport}:{self._server_addr}",
+ "--proxy-cacert",
+ self.env.ca.cert_file,
]
- if proto == 'h2':
- xargs.append('--proxy-http2')
+ if proto == "h2":
+ xargs.append("--proxy-http2")
else:
xargs = [
- '--proxy', f'http://{proxy_name}:{self.env.proxy_port}/',
- '--resolve', f'{proxy_name}:{self.env.proxy_port}:{self._server_addr}',
+ "--proxy",
+ f"http://{proxy_name}:{self.env.proxy_port}/",
+ "--resolve",
+ f"{proxy_name}:{self.env.proxy_port}:{self._server_addr}",
]
if tunnel:
- xargs.append('--proxytunnel')
+ xargs.append("--proxytunnel")
return xargs
- def http_get(self, url: str, extra_args: Optional[List[str]] = None,
- alpn_proto: Optional[str] = None,
- def_tracing: bool = True,
- with_stats: bool = False,
- with_profile: bool = False,
- with_tcpdump: bool = False):
- return self._raw(url, options=extra_args,
- with_stats=with_stats,
- alpn_proto=alpn_proto,
- def_tracing=def_tracing,
- with_profile=with_profile,
- with_tcpdump=with_tcpdump)
-
- def http_download(self, urls: List[str],
- alpn_proto: Optional[str] = None,
- with_stats: bool = True,
- with_headers: bool = False,
- with_profile: bool = False,
- with_tcpdump: bool = False,
- no_save: bool = False,
- extra_args: Optional[List[str]] = None):
+ def http_get(
+ self,
+ url: str,
+ extra_args: Optional[List[str]] = None,
+ alpn_proto: Optional[str] = None,
+ def_tracing: bool = True,
+ with_stats: bool = False,
+ with_profile: bool = False,
+ with_tcpdump: bool = False,
+ ):
+ return self._raw(
+ url,
+ options=extra_args,
+ with_stats=with_stats,
+ alpn_proto=alpn_proto,
+ def_tracing=def_tracing,
+ with_profile=with_profile,
+ with_tcpdump=with_tcpdump,
+ )
+
+ def http_download(
+ self,
+ urls: List[str],
+ alpn_proto: Optional[str] = None,
+ with_stats: bool = True,
+ with_headers: bool = False,
+ with_profile: bool = False,
+ with_tcpdump: bool = False,
+ no_save: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
if no_save:
- extra_args.extend([
- '-o', '/dev/null',
- ])
+ extra_args.extend(
+ [
+ "-o",
+ "/dev/null",
+ ]
+ )
else:
- extra_args.extend([
- '-o', 'download_#1.data',
- ])
+ extra_args.extend(
+ [
+ "-o",
+ "download_#1.data",
+ ]
+ )
# remove any existing ones
for i in range(100):
self._rmf(self.download_file(i))
if with_stats:
- extra_args.extend([
- '-w', '%{json}\\n'
- ])
- return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
- with_stats=with_stats,
- with_headers=with_headers,
- with_profile=with_profile,
- with_tcpdump=with_tcpdump)
-
- def http_upload(self, urls: List[str], data: str,
- alpn_proto: Optional[str] = None,
- with_stats: bool = True,
- with_headers: bool = False,
- with_profile: bool = False,
- with_tcpdump: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(["-w", "%{json}\\n"])
+ return self._raw(
+ urls,
+ alpn_proto=alpn_proto,
+ options=extra_args,
+ with_stats=with_stats,
+ with_headers=with_headers,
+ with_profile=with_profile,
+ with_tcpdump=with_tcpdump,
+ )
+
+ def http_upload(
+ self,
+ urls: List[str],
+ data: str,
+ alpn_proto: Optional[str] = None,
+ with_stats: bool = True,
+ with_headers: bool = False,
+ with_profile: bool = False,
+ with_tcpdump: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
- extra_args.extend([
- '--data-binary', data, '-o', 'download_#1.data',
- ])
+ extra_args.extend(
+ [
+ "--data-binary",
+ data,
+ "-o",
+ "download_#1.data",
+ ]
+ )
if with_stats:
- extra_args.extend([
- '-w', '%{json}\\n'
- ])
- return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
- with_stats=with_stats,
- with_headers=with_headers,
- with_profile=with_profile,
- with_tcpdump=with_tcpdump)
-
- def http_delete(self, urls: List[str],
- alpn_proto: Optional[str] = None,
- with_stats: bool = True,
- with_profile: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(["-w", "%{json}\\n"])
+ return self._raw(
+ urls,
+ alpn_proto=alpn_proto,
+ options=extra_args,
+ with_stats=with_stats,
+ with_headers=with_headers,
+ with_profile=with_profile,
+ with_tcpdump=with_tcpdump,
+ )
+
+ def http_delete(
+ self,
+ urls: List[str],
+ alpn_proto: Optional[str] = None,
+ with_stats: bool = True,
+ with_profile: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
- extra_args.extend([
- '-X', 'DELETE', '-o', '/dev/null',
- ])
+ extra_args.extend(
+ [
+ "-X",
+ "DELETE",
+ "-o",
+ "/dev/null",
+ ]
+ )
if with_stats:
- extra_args.extend([
- '-w', '%{json}\\n'
- ])
- return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
- with_stats=with_stats,
- with_headers=False,
- with_profile=with_profile)
-
- def http_put(self, urls: List[str], data=None, fdata=None,
- alpn_proto: Optional[str] = None,
- with_stats: bool = True,
- with_headers: bool = False,
- with_profile: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(["-w", "%{json}\\n"])
+ return self._raw(
+ urls,
+ alpn_proto=alpn_proto,
+ options=extra_args,
+ with_stats=with_stats,
+ with_headers=False,
+ with_profile=with_profile,
+ )
+
+ def http_put(
+ self,
+ urls: List[str],
+ data=None,
+ fdata=None,
+ alpn_proto: Optional[str] = None,
+ with_stats: bool = True,
+ with_headers: bool = False,
+ with_profile: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
if fdata is not None:
- extra_args.extend(['-T', fdata])
+ extra_args.extend(["-T", fdata])
elif data is not None:
- extra_args.extend(['-T', '-'])
- extra_args.extend([
- '-o', 'download_#1.data',
- ])
+ extra_args.extend(["-T", "-"])
+ extra_args.extend(
+ [
+ "-o",
+ "download_#1.data",
+ ]
+ )
if with_stats:
- extra_args.extend([
- '-w', '%{json}\\n'
- ])
- return self._raw(urls, intext=data,
- alpn_proto=alpn_proto, options=extra_args,
- with_stats=with_stats,
- with_headers=with_headers,
- with_profile=with_profile)
-
- def http_form(self, urls: List[str], form: Dict[str, str],
- alpn_proto: Optional[str] = None,
- with_stats: bool = True,
- with_headers: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(["-w", "%{json}\\n"])
+ return self._raw(
+ urls,
+ intext=data,
+ alpn_proto=alpn_proto,
+ options=extra_args,
+ with_stats=with_stats,
+ with_headers=with_headers,
+ with_profile=with_profile,
+ )
+
+ def http_form(
+ self,
+ urls: List[str],
+ form: Dict[str, str],
+ alpn_proto: Optional[str] = None,
+ with_stats: bool = True,
+ with_headers: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
for key, val in form.items():
- extra_args.extend(['-F', f'{key}={val}'])
- extra_args.extend([
- '-o', 'download_#1.data',
- ])
+ extra_args.extend(["-F", f"{key}={val}"])
+ extra_args.extend(
+ [
+ "-o",
+ "download_#1.data",
+ ]
+ )
if with_stats:
- extra_args.extend([
- '-w', '%{json}\\n'
- ])
- return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
- with_stats=with_stats,
- with_headers=with_headers)
-
- def ftp_get(self, urls: List[str],
- with_stats: bool = True,
- with_profile: bool = False,
- with_tcpdump: bool = False,
- no_save: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(["-w", "%{json}\\n"])
+ return self._raw(
+ urls,
+ alpn_proto=alpn_proto,
+ options=extra_args,
+ with_stats=with_stats,
+ with_headers=with_headers,
+ )
+
+ def ftp_get(
+ self,
+ urls: List[str],
+ with_stats: bool = True,
+ with_profile: bool = False,
+ with_tcpdump: bool = False,
+ no_save: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
if no_save:
- extra_args.extend([
- '-o', '/dev/null',
- ])
+ extra_args.extend(
+ [
+ "-o",
+ "/dev/null",
+ ]
+ )
else:
- extra_args.extend([
- '-o', 'download_#1.data',
- ])
+ extra_args.extend(
+ [
+ "-o",
+ "download_#1.data",
+ ]
+ )
# remove any existing ones
for i in range(100):
self._rmf(self.download_file(i))
if with_stats:
- extra_args.extend([
- '-w', '%{json}\\n'
- ])
- return self._raw(urls, options=extra_args,
- with_stats=with_stats,
- with_headers=False,
- with_profile=with_profile,
- with_tcpdump=with_tcpdump)
-
- def ftp_ssl_get(self, urls: List[str],
- with_stats: bool = True,
- with_profile: bool = False,
- with_tcpdump: bool = False,
- no_save: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(["-w", "%{json}\\n"])
+ return self._raw(
+ urls,
+ options=extra_args,
+ with_stats=with_stats,
+ with_headers=False,
+ with_profile=with_profile,
+ with_tcpdump=with_tcpdump,
+ )
+
+ def ftp_ssl_get(
+ self,
+ urls: List[str],
+ with_stats: bool = True,
+ with_profile: bool = False,
+ with_tcpdump: bool = False,
+ no_save: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
- extra_args.extend([
- '--ssl-reqd',
- ])
- return self.ftp_get(urls=urls, with_stats=with_stats,
- with_profile=with_profile, no_save=no_save,
- with_tcpdump=with_tcpdump,
- extra_args=extra_args)
-
- def ftp_upload(self, urls: List[str],
- fupload: Optional[Any] = None,
- updata: Optional[str] = None,
- with_stats: bool = True,
- with_profile: bool = False,
- with_tcpdump: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(
+ [
+ "--ssl-reqd",
+ ]
+ )
+ return self.ftp_get(
+ urls=urls,
+ with_stats=with_stats,
+ with_profile=with_profile,
+ no_save=no_save,
+ with_tcpdump=with_tcpdump,
+ extra_args=extra_args,
+ )
+
+ def ftp_upload(
+ self,
+ urls: List[str],
+ fupload: Optional[Any] = None,
+ updata: Optional[str] = None,
+ with_stats: bool = True,
+ with_profile: bool = False,
+ with_tcpdump: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
if fupload is not None:
- extra_args.extend([
- '--upload-file', fupload
- ])
+ extra_args.extend(["--upload-file", fupload])
elif updata is not None:
- extra_args.extend([
- '--upload-file', '-'
- ])
+ extra_args.extend(["--upload-file", "-"])
else:
- raise Exception('need either file or data to upload')
+ raise Exception("need either file or data to upload")
if with_stats:
- extra_args.extend([
- '-w', '%{json}\\n'
- ])
- return self._raw(urls, options=extra_args,
- intext=updata,
- with_stats=with_stats,
- with_headers=False,
- with_profile=with_profile,
- with_tcpdump=with_tcpdump)
-
- def ftp_ssl_upload(self, urls: List[str],
- fupload: Optional[Any] = None,
- updata: Optional[str] = None,
- with_stats: bool = True,
- with_profile: bool = False,
- with_tcpdump: bool = False,
- extra_args: Optional[List[str]] = None):
+ extra_args.extend(["-w", "%{json}\\n"])
+ return self._raw(
+ urls,
+ options=extra_args,
+ intext=updata,
+ with_stats=with_stats,
+ with_headers=False,
+ with_profile=with_profile,
+ with_tcpdump=with_tcpdump,
+ )
+
+ def ftp_ssl_upload(
+ self,
+ urls: List[str],
+ fupload: Optional[Any] = None,
+ updata: Optional[str] = None,
+ with_stats: bool = True,
+ with_profile: bool = False,
+ with_tcpdump: bool = False,
+ extra_args: Optional[List[str]] = None,
+ ):
if extra_args is None:
extra_args = []
- extra_args.extend([
- '--ssl-reqd',
- ])
- return self.ftp_upload(urls=urls, fupload=fupload, updata=updata,
- with_stats=with_stats, with_profile=with_profile,
- with_tcpdump=with_tcpdump,
- extra_args=extra_args)
+ extra_args.extend(
+ [
+ "--ssl-reqd",
+ ]
+ )
+ return self.ftp_upload(
+ urls=urls,
+ fupload=fupload,
+ updata=updata,
+ with_stats=with_stats,
+ with_profile=with_profile,
+ with_tcpdump=with_tcpdump,
+ extra_args=extra_args,
+ )
def response_file(self, idx: int):
- return os.path.join(self._run_dir, f'download_{idx}.data')
+ return os.path.join(self._run_dir, f"download_{idx}.data")
def run_direct(self, args, with_stats: bool = False, with_profile: bool = False):
my_args = [self._curl]
if with_stats:
- my_args.extend([
- '-w', '%{json}\\n'
- ])
- my_args.extend([
- '-o', 'download.data',
- ])
+ my_args.extend(["-w", "%{json}\\n"])
+ my_args.extend(
+ [
+ "-o",
+ "download.data",
+ ]
+ )
my_args.extend(args)
return self._run(args=my_args, with_stats=with_stats, with_profile=with_profile)
- def _run(self, args, intext='', with_stats: bool = False,
- with_profile: bool = True, with_tcpdump: bool = False):
+ def _run(
+ self,
+ args,
+ intext="",
+ with_stats: bool = False,
+ with_profile: bool = True,
+ with_tcpdump: bool = False,
+ ):
self._rmf(self._stdoutfile)
self._rmf(self._stderrfile)
self._rmf(self._headerfile)
tcpdump = RunTcpDump(self.env, self._run_dir)
tcpdump.start()
try:
- with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
+ with open(self._stdoutfile, "w") as cout, open(
+ self._stderrfile, "w"
+ ) as cerr:
if with_profile:
- end_at = started_at + timedelta(seconds=self._timeout) \
- if self._timeout else None
- log.info(f'starting: {args}')
- p = subprocess.Popen(args, stderr=cerr, stdout=cout,
- cwd=self._run_dir, shell=False,
- env=self._run_env)
+ end_at = (
+ started_at + timedelta(seconds=self._timeout)
+ if self._timeout
+ else None
+ )
+ log.info(f"starting: {args}")
+ p = subprocess.Popen(
+ args,
+ stderr=cerr,
+ stdout=cout,
+ cwd=self._run_dir,
+ shell=False,
+ env=self._run_env,
+ )
profile = RunProfile(p.pid, started_at, self._run_dir)
if intext is not None and False:
p.communicate(input=intext.encode(), timeout=1)
except subprocess.TimeoutExpired:
if end_at and datetime.now() >= end_at:
p.kill()
- raise subprocess.TimeoutExpired(cmd=args, timeout=self._timeout)
+ raise subprocess.TimeoutExpired(
+ cmd=args, timeout=self._timeout
+ )
profile.sample()
ptimeout = 0.01
exitcode = p.returncode
profile.finish()
- log.info(f'done: exit={exitcode}, profile={profile}')
+ log.info(f"done: exit={exitcode}, profile={profile}")
else:
- p = subprocess.run(args, stderr=cerr, stdout=cout,
- cwd=self._run_dir, shell=False,
- input=intext.encode() if intext else None,
- timeout=self._timeout,
- env=self._run_env)
+ p = subprocess.run(
+ args,
+ stderr=cerr,
+ stdout=cout,
+ cwd=self._run_dir,
+ shell=False,
+ input=intext.encode() if intext else None,
+ timeout=self._timeout,
+ env=self._run_env,
+ )
exitcode = p.returncode
except subprocess.TimeoutExpired:
now = datetime.now()
duration = now - started_at
- log.warning(f'Timeout at {now} after {duration.total_seconds()}s '
- f'(configured {self._timeout}s): {args}')
+ log.warning(
+ f"Timeout at {now} after {duration.total_seconds()}s "
+ f"(configured {self._timeout}s): {args}"
+ )
exitcode = -1
- exception = 'TimeoutExpired'
+ exception = "TimeoutExpired"
if tcpdump:
tcpdump.finish()
coutput = open(self._stdoutfile).readlines()
cerrput = open(self._stderrfile).readlines()
- return ExecResult(args=args, exit_code=exitcode, exception=exception,
- stdout=coutput, stderr=cerrput,
- duration=datetime.now() - started_at,
- with_stats=with_stats,
- profile=profile, tcpdump=tcpdump)
-
- def _raw(self, urls, intext='', timeout=None, options=None, insecure=False,
- alpn_proto: Optional[str] = None,
- force_resolve=True,
- with_stats=False,
- with_headers=True,
- def_tracing=True,
- with_profile=False,
- with_tcpdump=False):
+ return ExecResult(
+ args=args,
+ exit_code=exitcode,
+ exception=exception,
+ stdout=coutput,
+ stderr=cerrput,
+ duration=datetime.now() - started_at,
+ with_stats=with_stats,
+ profile=profile,
+ tcpdump=tcpdump,
+ )
+
+ def _raw(
+ self,
+ urls,
+ intext="",
+ timeout=None,
+ options=None,
+ insecure=False,
+ alpn_proto: Optional[str] = None,
+ force_resolve=True,
+ with_stats=False,
+ with_headers=True,
+ def_tracing=True,
+ with_profile=False,
+ with_tcpdump=False,
+ ):
args = self._complete_args(
- urls=urls, timeout=timeout, options=options, insecure=insecure,
- alpn_proto=alpn_proto, force_resolve=force_resolve,
- with_headers=with_headers, def_tracing=def_tracing)
- r = self._run(args, intext=intext, with_stats=with_stats,
- with_profile=with_profile, with_tcpdump=with_tcpdump)
+ urls=urls,
+ timeout=timeout,
+ options=options,
+ insecure=insecure,
+ alpn_proto=alpn_proto,
+ force_resolve=force_resolve,
+ with_headers=with_headers,
+ def_tracing=def_tracing,
+ )
+ r = self._run(
+ args,
+ intext=intext,
+ with_stats=with_stats,
+ with_profile=with_profile,
+ with_tcpdump=with_tcpdump,
+ )
if r.exit_code == 0 and with_headers:
self._parse_headerfile(self._headerfile, r=r)
return r
- def _complete_args(self, urls, timeout=None, options=None,
- insecure=False, force_resolve=True,
- alpn_proto: Optional[str] = None,
- with_headers: bool = True,
- def_tracing: bool = True):
+ def _complete_args(
+ self,
+ urls,
+ timeout=None,
+ options=None,
+ insecure=False,
+ force_resolve=True,
+ alpn_proto: Optional[str] = None,
+ with_headers: bool = True,
+ def_tracing: bool = True,
+ ):
if not isinstance(urls, list):
urls = [urls]
args = [self._curl, "-s", "--path-as-is"]
- if 'CURL_TEST_EVENT' in os.environ:
- args.append('--test-event')
+ if "CURL_TEST_EVENT" in os.environ:
+ args.append("--test-event")
if with_headers:
args.extend(["-D", self._headerfile])
if def_tracing is not False and not self._silent:
- args.extend(['-v', '--trace-ids', '--trace-time'])
+ args.extend(["-v", "--trace-ids", "--trace-time"])
if self.env.verbose > 1:
- args.extend(['--trace-config', 'http/2,http/3,h2-proxy,h1-proxy'])
+ args.extend(["--trace-config", "http/2,http/3,h2-proxy,h1-proxy"])
active_options = options
- if options is not None and '--next' in options:
- active_options = options[options.index('--next') + 1:]
+ if options is not None and "--next" in options:
+ active_options = options[options.index("--next") + 1 :]
for url in urls:
u = urlparse(urls[0])
raise Exception(f'unknown ALPN protocol: "{alpn_proto}"')
args.append(self.ALPN_ARG[alpn_proto])
- if u.scheme == 'http':
+ if u.scheme == "http":
pass
elif insecure:
- args.append('--insecure')
+ args.append("--insecure")
elif active_options and "--cacert" in active_options:
pass
elif u.hostname:
args.extend(["--cacert", self.env.ca.cert_file])
- if force_resolve and u.hostname and u.hostname != 'localhost' \
- and not re.match(r'^(\d+|\[|:).*', u.hostname):
+ if (
+ force_resolve
+ and u.hostname
+ and u.hostname != "localhost"
+ and not re.match(r"^(\d+|\[|:).*", u.hostname)
+ ):
port = u.port if u.port else 443
- args.extend([
- '--resolve', f'{u.hostname}:{port}:{self._server_addr}',
- ])
+ args.extend(
+ [
+ "--resolve",
+ f"{u.hostname}:{port}:{self._server_addr}",
+ ]
+ )
if timeout is not None and int(timeout) > 0:
args.extend(["--connect-timeout", str(int(timeout))])
args.append(url)
return args
- def _parse_headerfile(self, headerfile: str, r: Optional[ExecResult] = None) -> ExecResult:
+ def _parse_headerfile(
+ self, headerfile: str, r: Optional[ExecResult] = None
+ ) -> ExecResult:
lines = open(headerfile).readlines()
if r is None:
r = ExecResult(args=[], exit_code=0, stdout=[], stderr=[])
if resp:
r.add_response(resp)
- expected = ['status']
+ expected = ["status"]
for line in lines:
line = line.strip()
- if re.match(r'^$', line):
- if 'trailer' in expected:
+ if re.match(r"^$", line):
+ if "trailer" in expected:
# end of trailers
fin_response(response)
response = None
- expected = ['status']
- elif 'header' in expected:
+ expected = ["status"]
+ elif "header" in expected:
# end of header, another status or trailers might follow
- expected = ['status', 'trailer']
+ expected = ["status", "trailer"]
else:
assert False, f"unexpected line: '{line}'"
continue
- if 'status' in expected:
+ if "status" in expected:
# log.debug("reading 1st response line: %s", line)
- m = re.match(r'^(\S+) (\d+)( .*)?$', line)
+ m = re.match(r"^(\S+) (\d+)( .*)?$", line)
if m:
fin_response(response)
response = {
"description": m.group(3),
"header": {},
"trailer": {},
- "body": r.outraw
+ "body": r.outraw,
}
- expected = ['header']
+ expected = ["header"]
continue
- if 'trailer' in expected:
- m = re.match(r'^([^:]+):\s*(.*)$', line)
+ if "trailer" in expected:
+ m = re.match(r"^([^:]+):\s*(.*)$", line)
if m:
- response['trailer'][m.group(1).lower()] = m.group(2)
+ response["trailer"][m.group(1).lower()] = m.group(2)
continue
- if 'header' in expected:
- m = re.match(r'^([^:]+):\s*(.*)$', line)
+ if "header" in expected:
+ m = re.match(r"^([^:]+):\s*(.*)$", line)
if m:
- response['header'][m.group(1).lower()] = m.group(2)
+ response["header"][m.group(1).lower()] = m.group(2)
continue
assert False, f"unexpected line: '{line}, expected: {expected}'"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
TESTS_HTTPD_PATH = os.path.dirname(os.path.dirname(__file__))
TOP_PATH = os.path.join(os.getcwd(), os.path.pardir)
-DEF_CONFIG = init_config_from(os.path.join(TOP_PATH, 'tests', 'http', 'config.ini'))
-CURL = os.path.join(TOP_PATH, 'src', 'curl')
+DEF_CONFIG = init_config_from(os.path.join(TOP_PATH, "tests", "http", "config.ini"))
+CURL = os.path.join(TOP_PATH, "src", "curl")
class EnvConfig:
-
def __init__(self):
self.tests_dir = TESTS_HTTPD_PATH
- self.gen_dir = os.path.join(self.tests_dir, 'gen')
+ self.gen_dir = os.path.join(self.tests_dir, "gen")
self.project_dir = os.path.dirname(os.path.dirname(self.tests_dir))
self.build_dir = TOP_PATH
self.config = DEF_CONFIG
# check cur and its features
self.curl = CURL
- if 'CURL' in os.environ:
- self.curl = os.environ['CURL']
+ if "CURL" in os.environ:
+ self.curl = os.environ["CURL"]
self.curl_props = {
- 'version_string': '',
- 'version': '',
- 'os': '',
- 'fullname': '',
- 'features_string': '',
- 'features': set(),
- 'protocols_string': '',
- 'protocols': set(),
- 'libs': set(),
- 'lib_versions': set(),
+ "version_string": "",
+ "version": "",
+ "os": "",
+ "fullname": "",
+ "features_string": "",
+ "features": set(),
+ "protocols_string": "",
+ "protocols": set(),
+ "libs": set(),
+ "lib_versions": set(),
}
self.curl_is_debug = False
self.curl_protos = []
- p = subprocess.run(args=[self.curl, '-V'],
- capture_output=True, text=True)
+ p = subprocess.run(args=[self.curl, "-V"], capture_output=True, text=True)
if p.returncode != 0:
- raise RuntimeError(f'{self.curl} -V failed with exit code: {p.returncode}')
- if p.stderr.startswith('WARNING:'):
+ raise RuntimeError(f"{self.curl} -V failed with exit code: {p.returncode}")
+ if p.stderr.startswith("WARNING:"):
self.curl_is_debug = True
for line in p.stdout.splitlines(keepends=False):
- if line.startswith('curl '):
- self.curl_props['version_string'] = line
- m = re.match(r'^curl (?P<version>\S+) (?P<os>\S+) (?P<libs>.*)$', line)
+ if line.startswith("curl "):
+ self.curl_props["version_string"] = line
+ m = re.match(r"^curl (?P<version>\S+) (?P<os>\S+) (?P<libs>.*)$", line)
if m:
- self.curl_props['fullname'] = m.group(0)
- self.curl_props['version'] = m.group('version')
- self.curl_props['os'] = m.group('os')
- self.curl_props['lib_versions'] = {
- lib.lower() for lib in m.group('libs').split(' ')
+ self.curl_props["fullname"] = m.group(0)
+ self.curl_props["version"] = m.group("version")
+ self.curl_props["os"] = m.group("os")
+ self.curl_props["lib_versions"] = {
+ lib.lower() for lib in m.group("libs").split(" ")
}
- self.curl_props['libs'] = {
- re.sub(r'/[a-z0-9.-]*', '', lib) for lib in self.curl_props['lib_versions']
+ self.curl_props["libs"] = {
+ re.sub(r"/[a-z0-9.-]*", "", lib)
+ for lib in self.curl_props["lib_versions"]
}
- if line.startswith('Features: '):
- self.curl_props['features_string'] = line[10:]
- self.curl_props['features'] = {
- feat.lower() for feat in line[10:].split(' ')
+ if line.startswith("Features: "):
+ self.curl_props["features_string"] = line[10:]
+ self.curl_props["features"] = {
+ feat.lower() for feat in line[10:].split(" ")
}
- if line.startswith('Protocols: '):
- self.curl_props['protocols_string'] = line[11:]
- self.curl_props['protocols'] = {
- prot.lower() for prot in line[11:].split(' ')
+ if line.startswith("Protocols: "):
+ self.curl_props["protocols_string"] = line[11:]
+ self.curl_props["protocols"] = {
+ prot.lower() for prot in line[11:].split(" ")
}
- self.ports = alloc_ports(port_specs={
- 'ftp': socket.SOCK_STREAM,
- 'ftps': socket.SOCK_STREAM,
- 'http': socket.SOCK_STREAM,
- 'https': socket.SOCK_STREAM,
- 'nghttpx_https': socket.SOCK_STREAM,
- 'proxy': socket.SOCK_STREAM,
- 'proxys': socket.SOCK_STREAM,
- 'h2proxys': socket.SOCK_STREAM,
- 'caddy': socket.SOCK_STREAM,
- 'caddys': socket.SOCK_STREAM,
- 'ws': socket.SOCK_STREAM,
- })
- self.httpd = self.config['httpd']['httpd']
- self.apxs = self.config['httpd']['apxs']
+ self.ports = alloc_ports(
+ port_specs={
+ "ftp": socket.SOCK_STREAM,
+ "ftps": socket.SOCK_STREAM,
+ "http": socket.SOCK_STREAM,
+ "https": socket.SOCK_STREAM,
+ "nghttpx_https": socket.SOCK_STREAM,
+ "proxy": socket.SOCK_STREAM,
+ "proxys": socket.SOCK_STREAM,
+ "h2proxys": socket.SOCK_STREAM,
+ "caddy": socket.SOCK_STREAM,
+ "caddys": socket.SOCK_STREAM,
+ "ws": socket.SOCK_STREAM,
+ }
+ )
+ self.httpd = self.config["httpd"]["httpd"]
+ self.apxs = self.config["httpd"]["apxs"]
if len(self.apxs) == 0:
self.apxs = None
self._httpd_version = None
self.examples_pem = {
- 'key': 'xxx',
- 'cert': 'xxx',
+ "key": "xxx",
+ "cert": "xxx",
}
- self.htdocs_dir = os.path.join(self.gen_dir, 'htdocs')
- self.tld = 'http.curl.se'
+ self.htdocs_dir = os.path.join(self.gen_dir, "htdocs")
+ self.tld = "http.curl.se"
self.domain1 = f"one.{self.tld}"
self.domain1brotli = f"brotli.one.{self.tld}"
self.domain2 = f"two.{self.tld}"
self.proxy_domain = f"proxy.{self.tld}"
self.expired_domain = f"expired.{self.tld}"
self.cert_specs = [
- CertificateSpec(domains=[self.domain1, self.domain1brotli, 'localhost', '127.0.0.1'], key_type='rsa2048'),
- CertificateSpec(domains=[self.domain2], key_type='rsa2048'),
- CertificateSpec(domains=[self.ftp_domain], key_type='rsa2048'),
- CertificateSpec(domains=[self.proxy_domain, '127.0.0.1'], key_type='rsa2048'),
- CertificateSpec(domains=[self.expired_domain], key_type='rsa2048',
- valid_from=timedelta(days=-100), valid_to=timedelta(days=-10)),
- CertificateSpec(name="clientsX", sub_specs=[
- CertificateSpec(name="user1", client=True),
- ]),
+ CertificateSpec(
+ domains=[self.domain1, self.domain1brotli, "localhost", "127.0.0.1"],
+ key_type="rsa2048",
+ ),
+ CertificateSpec(domains=[self.domain2], key_type="rsa2048"),
+ CertificateSpec(domains=[self.ftp_domain], key_type="rsa2048"),
+ CertificateSpec(
+ domains=[self.proxy_domain, "127.0.0.1"], key_type="rsa2048"
+ ),
+ CertificateSpec(
+ domains=[self.expired_domain],
+ key_type="rsa2048",
+ valid_from=timedelta(days=-100),
+ valid_to=timedelta(days=-10),
+ ),
+ CertificateSpec(
+ name="clientsX",
+ sub_specs=[
+ CertificateSpec(name="user1", client=True),
+ ],
+ ),
]
- self.nghttpx = self.config['nghttpx']['nghttpx']
+ self.nghttpx = self.config["nghttpx"]["nghttpx"]
if len(self.nghttpx.strip()) == 0:
self.nghttpx = None
self._nghttpx_version = None
self.nghttpx_with_h3 = False
if self.nghttpx is not None:
- p = subprocess.run(args=[self.nghttpx, '-v'],
- capture_output=True, text=True)
+ p = subprocess.run(
+ args=[self.nghttpx, "-v"], capture_output=True, text=True
+ )
if p.returncode != 0:
# not a working nghttpx
self.nghttpx = None
else:
- self._nghttpx_version = re.sub(r'^nghttpx\s*', '', p.stdout.strip())
- self.nghttpx_with_h3 = re.match(r'.* nghttp3/.*', p.stdout.strip()) is not None
- log.debug(f'nghttpx -v: {p.stdout}')
+ self._nghttpx_version = re.sub(r"^nghttpx\s*", "", p.stdout.strip())
+ self.nghttpx_with_h3 = (
+ re.match(r".* nghttp3/.*", p.stdout.strip()) is not None
+ )
+ log.debug(f"nghttpx -v: {p.stdout}")
- self.caddy = self.config['caddy']['caddy']
+ self.caddy = self.config["caddy"]["caddy"]
self._caddy_version = None
if len(self.caddy.strip()) == 0:
self.caddy = None
if self.caddy is not None:
try:
- p = subprocess.run(args=[self.caddy, 'version'],
- capture_output=True, text=True)
+ p = subprocess.run(
+ args=[self.caddy, "version"], capture_output=True, text=True
+ )
if p.returncode != 0:
# not a working caddy
self.caddy = None
- m = re.match(r'v?(\d+\.\d+\.\d+).*', p.stdout)
+ m = re.match(r"v?(\d+\.\d+\.\d+).*", p.stdout)
if m:
self._caddy_version = m.group(1)
else:
- raise RuntimeError(f'Unable to determine cadd version from: {p.stdout}')
+ raise RuntimeError(
+ f"Unable to determine caddy version from: {p.stdout}"
+ )
# TODO: specify specific exceptions here
except: # noqa: E722
self.caddy = None
- self.vsftpd = self.config['vsftpd']['vsftpd']
+ self.vsftpd = self.config["vsftpd"]["vsftpd"]
self._vsftpd_version = None
if self.vsftpd is not None:
try:
- with tempfile.TemporaryFile('w+') as tmp:
- p = subprocess.run(args=[self.vsftpd, '-v'],
- capture_output=True, text=True, stdin=tmp)
+ with tempfile.TemporaryFile("w+") as tmp:
+ p = subprocess.run(
+ args=[self.vsftpd, "-v"],
+ capture_output=True,
+ text=True,
+ stdin=tmp,
+ )
if p.returncode != 0:
# not a working vsftpd
self.vsftpd = None
# any data there instead.
tmp.seek(0)
ver_text = tmp.read()
- m = re.match(r'vsftpd: version (\d+\.\d+\.\d+)', ver_text)
+ m = re.match(r"vsftpd: version (\d+\.\d+\.\d+)", ver_text)
if m:
self._vsftpd_version = m.group(1)
elif len(p.stderr) == 0:
# vsftp does not use stdout or stderr for printing its version... -.-
- self._vsftpd_version = 'unknown'
+ self._vsftpd_version = "unknown"
else:
- raise Exception(f'Unable to determine VsFTPD version from: {p.stderr}')
+ raise Exception(
+ f"Unable to determine VsFTPD version from: {p.stderr}"
+ )
except Exception:
self.vsftpd = None
- self._tcpdump = shutil.which('tcpdump')
+ self._tcpdump = shutil.which("tcpdump")
@property
def httpd_version(self):
if self._httpd_version is None and self.apxs is not None:
try:
- p = subprocess.run(args=[self.apxs, '-q', 'HTTPD_VERSION'],
- capture_output=True, text=True)
+ p = subprocess.run(
+ args=[self.apxs, "-q", "HTTPD_VERSION"],
+ capture_output=True,
+ text=True,
+ )
if p.returncode != 0:
- log.error(f'{self.apxs} failed to query HTTPD_VERSION: {p}')
+ log.error(f"{self.apxs} failed to query HTTPD_VERSION: {p}")
else:
self._httpd_version = p.stdout.strip()
except Exception:
- log.exception(f'{self.apxs} failed to run')
+ log.exception(f"{self.apxs} failed to run")
return self._httpd_version
def versiontuple(self, v):
- v = re.sub(r'(\d+\.\d+(\.\d+)?)(-\S+)?', r'\1', v)
- return tuple(map(int, v.split('.')))
+ v = re.sub(r"(\d+\.\d+(\.\d+)?)(-\S+)?", r"\1", v)
+ return tuple(map(int, v.split(".")))
def httpd_is_at_least(self, minv):
if self.httpd_version is None:
return hv >= self.versiontuple(minv)
def is_complete(self) -> bool:
- return os.path.isfile(self.httpd) and \
- self.apxs is not None and \
- os.path.isfile(self.apxs)
+ return (
+ os.path.isfile(self.httpd)
+ and self.apxs is not None
+ and os.path.isfile(self.apxs)
+ )
def get_incomplete_reason(self) -> Optional[str]:
if self.httpd is None or len(self.httpd.strip()) == 0:
- return 'httpd not configured, see `--with-test-httpd=<path>`'
+ return "httpd not configured, see `--with-test-httpd=<path>`"
if not os.path.isfile(self.httpd):
- return f'httpd ({self.httpd}) not found'
+ return f"httpd ({self.httpd}) not found"
if self.apxs is None:
return "command apxs not found (commonly provided in apache2-dev)"
if not os.path.isfile(self.apxs):
class Env:
-
CONFIG = EnvConfig()
@staticmethod
@staticmethod
def have_ssl_curl() -> bool:
- return Env.curl_has_feature('ssl') or Env.curl_has_feature('multissl')
+ return Env.curl_has_feature("ssl") or Env.curl_has_feature("multissl")
@staticmethod
def have_h2_curl() -> bool:
- return 'http2' in Env.CONFIG.curl_props['features']
+ return "http2" in Env.CONFIG.curl_props["features"]
@staticmethod
def have_h3_curl() -> bool:
- return 'http3' in Env.CONFIG.curl_props['features']
+ return "http3" in Env.CONFIG.curl_props["features"]
@staticmethod
def curl_uses_lib(libname: str) -> bool:
- return libname.lower() in Env.CONFIG.curl_props['libs']
+ return libname.lower() in Env.CONFIG.curl_props["libs"]
@staticmethod
def curl_uses_ossl_quic() -> bool:
if Env.have_h3_curl():
- return not Env.curl_uses_lib('ngtcp2') and Env.curl_uses_lib('nghttp3')
+ return not Env.curl_uses_lib("ngtcp2") and Env.curl_uses_lib("nghttp3")
return False
@staticmethod
def curl_version_string() -> str:
- return Env.CONFIG.curl_props['version_string']
+ return Env.CONFIG.curl_props["version_string"]
@staticmethod
def curl_features_string() -> str:
- return Env.CONFIG.curl_props['features_string']
+ return Env.CONFIG.curl_props["features_string"]
@staticmethod
def curl_has_feature(feature: str) -> bool:
- return feature.lower() in Env.CONFIG.curl_props['features']
+ return feature.lower() in Env.CONFIG.curl_props["features"]
@staticmethod
def curl_protocols_string() -> str:
- return Env.CONFIG.curl_props['protocols_string']
+ return Env.CONFIG.curl_props["protocols_string"]
@staticmethod
def curl_has_protocol(protocol: str) -> bool:
- return protocol.lower() in Env.CONFIG.curl_props['protocols']
+ return protocol.lower() in Env.CONFIG.curl_props["protocols"]
@staticmethod
def curl_lib_version(libname: str) -> str:
- prefix = f'{libname.lower()}/'
- for lversion in Env.CONFIG.curl_props['lib_versions']:
+ prefix = f"{libname.lower()}/"
+ for lversion in Env.CONFIG.curl_props["lib_versions"]:
if lversion.startswith(prefix):
- return lversion[len(prefix):]
- return 'unknown'
+ return lversion[len(prefix) :]
+ return "unknown"
@staticmethod
def curl_lib_version_at_least(libname: str, min_version) -> bool:
lversion = Env.curl_lib_version(libname)
- if lversion != 'unknown':
- return Env.CONFIG.versiontuple(min_version) <= \
- Env.CONFIG.versiontuple(lversion)
+ if lversion != "unknown":
+ return Env.CONFIG.versiontuple(min_version) <= Env.CONFIG.versiontuple(
+ lversion
+ )
return False
@staticmethod
def curl_os() -> str:
- return Env.CONFIG.curl_props['os']
+ return Env.CONFIG.curl_props["os"]
@staticmethod
def curl_fullname() -> str:
- return Env.CONFIG.curl_props['fullname']
+ return Env.CONFIG.curl_props["fullname"]
@staticmethod
def curl_version() -> str:
- return Env.CONFIG.curl_props['version']
+ return Env.CONFIG.curl_props["version"]
@staticmethod
def curl_is_debug() -> bool:
return Env.CONFIG.tcpdmp
def __init__(self, pytestconfig=None):
- self._verbose = pytestconfig.option.verbose \
- if pytestconfig is not None else 0
+ self._verbose = pytestconfig.option.verbose if pytestconfig is not None else 0
self._ca = None
self._test_timeout = 300.0 if self._verbose > 1 else 60.0 # seconds
def issue_certs(self):
if self._ca is None:
- ca_dir = os.path.join(self.CONFIG.gen_dir, 'ca')
- self._ca = TestCA.create_root(name=self.CONFIG.tld,
- store_dir=ca_dir,
- key_type="rsa2048")
+ ca_dir = os.path.join(self.CONFIG.gen_dir, "ca")
+ self._ca = TestCA.create_root(
+ name=self.CONFIG.tld, store_dir=ca_dir, key_type="rsa2048"
+ )
self._ca.issue_certs(self.CONFIG.cert_specs)
def setup(self):
@property
def http_port(self) -> int:
- return self.CONFIG.ports['http']
+ return self.CONFIG.ports["http"]
@property
def https_port(self) -> int:
- return self.CONFIG.ports['https']
+ return self.CONFIG.ports["https"]
@property
def nghttpx_https_port(self) -> int:
- return self.CONFIG.ports['nghttpx_https']
+ return self.CONFIG.ports["nghttpx_https"]
@property
def h3_port(self) -> int:
@property
def proxy_port(self) -> int:
- return self.CONFIG.ports['proxy']
+ return self.CONFIG.ports["proxy"]
@property
def proxys_port(self) -> int:
- return self.CONFIG.ports['proxys']
+ return self.CONFIG.ports["proxys"]
@property
def ftp_port(self) -> int:
- return self.CONFIG.ports['ftp']
+ return self.CONFIG.ports["ftp"]
@property
def ftps_port(self) -> int:
- return self.CONFIG.ports['ftps']
+ return self.CONFIG.ports["ftps"]
@property
def h2proxys_port(self) -> int:
- return self.CONFIG.ports['h2proxys']
+ return self.CONFIG.ports["h2proxys"]
- def pts_port(self, proto: str = 'http/1.1') -> int:
+ def pts_port(self, proto: str = "http/1.1") -> int:
# proxy tunnel port
- return self.CONFIG.ports['h2proxys' if proto == 'h2' else 'proxys']
+ return self.CONFIG.ports["h2proxys" if proto == "h2" else "proxys"]
@property
def caddy(self) -> str:
@property
def caddy_https_port(self) -> int:
- return self.CONFIG.ports['caddys']
+ return self.CONFIG.ports["caddys"]
@property
def caddy_http_port(self) -> int:
- return self.CONFIG.ports['caddy']
+ return self.CONFIG.ports["caddy"]
@property
def vsftpd(self) -> str:
@property
def ws_port(self) -> int:
- return self.CONFIG.ports['ws']
+ return self.CONFIG.ports["ws"]
@property
def curl(self) -> str:
@property
def slow_network(self) -> bool:
- return "CURL_DBG_SOCK_WBLOCK" in os.environ or \
- "CURL_DBG_SOCK_WPARTIAL" in os.environ
+ return (
+ "CURL_DBG_SOCK_WBLOCK" in os.environ
+ or "CURL_DBG_SOCK_WPARTIAL" in os.environ
+ )
@property
def ci_run(self) -> bool:
return "CURL_CI" in os.environ
def port_for(self, alpn_proto: Optional[str] = None):
- if alpn_proto is None or \
- alpn_proto in ['h2', 'http/1.1', 'http/1.0', 'http/0.9']:
+ if alpn_proto is None or alpn_proto in [
+ "h2",
+ "http/1.1",
+ "http/1.0",
+ "http/0.9",
+ ]:
return self.https_port
- if alpn_proto in ['h3']:
+ if alpn_proto in ["h3"]:
return self.h3_port
return self.http_port
def authority_for(self, domain: str, alpn_proto: Optional[str] = None):
- return f'{domain}:{self.port_for(alpn_proto=alpn_proto)}'
+ return f"{domain}:{self.port_for(alpn_proto=alpn_proto)}"
- def make_data_file(self, indir: str, fname: str, fsize: int,
- line_length: int = 1024) -> str:
+ def make_data_file(
+ self, indir: str, fname: str, fsize: int, line_length: int = 1024
+ ) -> str:
if line_length < 11:
- raise RuntimeError('line_length less than 11 not supported')
+ raise RuntimeError("line_length less than 11 not supported")
fpath = os.path.join(indir, fname)
s10 = "0123456789"
s = round((line_length / 10) + 1) * s10
- s = s[0:line_length-11]
- with open(fpath, 'w') as fd:
+ s = s[0 : line_length - 11]
+ with open(fpath, "w") as fd:
for i in range(int(fsize / line_length)):
fd.write(f"{i:09d}-{s}\n")
remain = int(fsize % line_length)
if remain != 0:
i = int(fsize / line_length) + 1
- fd.write(f"{i:09d}-{s}"[0:remain-1] + "\n")
+ fd.write(f"{i:09d}-{s}"[0 : remain - 1] + "\n")
return fpath
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class Httpd:
-
MODULES = [
- 'log_config', 'logio', 'unixd', 'version', 'watchdog',
- 'authn_core', 'authn_file',
- 'authz_user', 'authz_core', 'authz_host',
- 'auth_basic', 'auth_digest',
- 'alias', 'env', 'filter', 'headers', 'mime', 'setenvif',
- 'socache_shmcb',
- 'rewrite', 'http2', 'ssl', 'proxy', 'proxy_http', 'proxy_connect',
- 'brotli',
- 'mpm_event',
+ "log_config",
+ "logio",
+ "unixd",
+ "version",
+ "watchdog",
+ "authn_core",
+ "authn_file",
+ "authz_user",
+ "authz_core",
+ "authz_host",
+ "auth_basic",
+ "auth_digest",
+ "alias",
+ "env",
+ "filter",
+ "headers",
+ "mime",
+ "setenvif",
+ "socache_shmcb",
+ "rewrite",
+ "http2",
+ "ssl",
+ "proxy",
+ "proxy_http",
+ "proxy_connect",
+ "brotli",
+ "mpm_event",
]
COMMON_MODULES_DIRS = [
- '/usr/lib/apache2/modules', # debian
- '/usr/libexec/apache2/', # macos
+ "/usr/lib/apache2/modules", # debian
+ "/usr/libexec/apache2/", # macos
]
MOD_CURLTEST = None
def __init__(self, env: Env, proxy_auth: bool = False):
self.env = env
- self._apache_dir = os.path.join(env.gen_dir, 'apache')
- self._run_dir = os.path.join(self._apache_dir, 'run')
- self._lock_dir = os.path.join(self._apache_dir, 'locks')
- self._docs_dir = os.path.join(self._apache_dir, 'docs')
- self._conf_dir = os.path.join(self._apache_dir, 'conf')
- self._conf_file = os.path.join(self._conf_dir, 'test.conf')
- self._logs_dir = os.path.join(self._apache_dir, 'logs')
- self._error_log = os.path.join(self._logs_dir, 'error_log')
- self._tmp_dir = os.path.join(self._apache_dir, 'tmp')
- self._basic_passwords = os.path.join(self._conf_dir, 'basic.passwords')
- self._digest_passwords = os.path.join(self._conf_dir, 'digest.passwords')
+ self._apache_dir = os.path.join(env.gen_dir, "apache")
+ self._run_dir = os.path.join(self._apache_dir, "run")
+ self._lock_dir = os.path.join(self._apache_dir, "locks")
+ self._docs_dir = os.path.join(self._apache_dir, "docs")
+ self._conf_dir = os.path.join(self._apache_dir, "conf")
+ self._conf_file = os.path.join(self._conf_dir, "test.conf")
+ self._logs_dir = os.path.join(self._apache_dir, "logs")
+ self._error_log = os.path.join(self._logs_dir, "error_log")
+ self._tmp_dir = os.path.join(self._apache_dir, "tmp")
+ self._basic_passwords = os.path.join(self._conf_dir, "basic.passwords")
+ self._digest_passwords = os.path.join(self._conf_dir, "digest.passwords")
self._mods_dir = None
self._auth_digest = True
self._proxy_auth_basic = proxy_auth
self._extra_configs = {}
self._loaded_extra_configs = None
assert env.apxs
- p = subprocess.run(args=[env.apxs, '-q', 'libexecdir'],
- capture_output=True, text=True)
+ p = subprocess.run(
+ args=[env.apxs, "-q", "libexecdir"], capture_output=True, text=True
+ )
if p.returncode != 0:
- raise Exception(f'{env.apxs} failed to query libexecdir: {p}')
+ raise Exception(f"{env.apxs} failed to query libexecdir: {p}")
self._mods_dir = p.stdout.strip()
if self._mods_dir is None:
- raise Exception('apache modules dir cannot be found')
+ raise Exception("apache modules dir cannot be found")
if not os.path.exists(self._mods_dir):
- raise Exception(f'apache modules dir does not exist: {self._mods_dir}')
+ raise Exception(f"apache modules dir does not exist: {self._mods_dir}")
self._process = None
self._rmf(self._error_log)
self._init_curltest()
def set_proxy_auth(self, active: bool):
self._proxy_auth_basic = active
- def _run(self, args, intext=''):
+ def _run(self, args, intext=""):
env = os.environ.copy()
- env['APACHE_RUN_DIR'] = self._run_dir
- env['APACHE_RUN_USER'] = os.environ['USER']
- env['APACHE_LOCK_DIR'] = self._lock_dir
- env['APACHE_CONFDIR'] = self._apache_dir
- p = subprocess.run(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
- cwd=self.env.gen_dir,
- input=intext.encode() if intext else None,
- env=env)
+ env["APACHE_RUN_DIR"] = self._run_dir
+ env["APACHE_RUN_USER"] = os.environ["USER"]
+ env["APACHE_LOCK_DIR"] = self._lock_dir
+ env["APACHE_CONFDIR"] = self._apache_dir
+ p = subprocess.run(
+ args,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ cwd=self.env.gen_dir,
+ input=intext.encode() if intext else None,
+ env=env,
+ )
start = datetime.now()
- return ExecResult(args=args, exit_code=p.returncode,
- stdout=p.stdout.decode().splitlines(),
- stderr=p.stderr.decode().splitlines(),
- duration=datetime.now() - start)
+ return ExecResult(
+ args=args,
+ exit_code=p.returncode,
+ stdout=p.stdout.decode().splitlines(),
+ stderr=p.stderr.decode().splitlines(),
+ duration=datetime.now() - start,
+ )
def _cmd_httpd(self, cmd: str):
- args = [self.env.httpd,
- "-d", self._apache_dir,
- "-f", self._conf_file,
- "-k", cmd]
+ args = [
+ self.env.httpd,
+ "-d",
+ self._apache_dir,
+ "-f",
+ self._conf_file,
+ "-k",
+ cmd,
+ ]
return self._run(args=args)
def start(self):
if self._process:
self.stop()
self._write_config()
- with open(self._error_log, 'a') as fd:
- fd.write('start of server\n')
- with open(os.path.join(self._apache_dir, 'xxx'), 'a') as fd:
- fd.write('start of server\n')
- r = self._cmd_httpd('start')
+ with open(self._error_log, "a") as fd:
+ fd.write("start of server\n")
+ with open(os.path.join(self._apache_dir, "xxx"), "a") as fd:
+ fd.write("start of server\n")
+ r = self._cmd_httpd("start")
if r.exit_code != 0:
- log.error(f'failed to start httpd: {r}')
+ log.error(f"failed to start httpd: {r}")
return False
self._loaded_extra_configs = copy.deepcopy(self._extra_configs)
return self.wait_live(timeout=timedelta(seconds=5))
def stop(self):
- r = self._cmd_httpd('stop')
+ r = self._cmd_httpd("stop")
self._loaded_extra_configs = None
if r.exit_code == 0:
return self.wait_dead(timeout=timedelta(seconds=5))
- log.fatal(f'stopping httpd failed: {r}')
+ log.fatal(f"stopping httpd failed: {r}")
return r.exit_code == 0
def restart(self):
r = self._cmd_httpd("graceful")
self._loaded_extra_configs = None
if r.exit_code != 0:
- log.error(f'failed to reload httpd: {r}')
+ log.error(f"failed to reload httpd: {r}")
self._loaded_extra_configs = copy.deepcopy(self._extra_configs)
return self.wait_live(timeout=timedelta(seconds=5))
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/')
+ r = curl.http_get(url=f"http://{self.env.domain1}:{self.env.http_port}/")
if r.exit_code != 0:
return True
- time.sleep(.1)
+ time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False
def wait_live(self, timeout: timedelta):
- curl = CurlClient(env=self.env, run_dir=self._tmp_dir,
- timeout=timeout.total_seconds())
+ curl = CurlClient(
+ env=self.env, run_dir=self._tmp_dir, timeout=timeout.total_seconds()
+ )
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- r = curl.http_get(url=f'http://{self.env.domain1}:{self.env.http_port}/')
+ r = curl.http_get(url=f"http://{self.env.domain1}:{self.env.http_port}/")
if r.exit_code == 0:
return True
- time.sleep(.1)
+ time.sleep(0.1)
log.debug(f"Server still not responding after {timeout}")
return False
self._mkpath(self._conf_dir)
self._mkpath(self._logs_dir)
self._mkpath(self._tmp_dir)
- self._mkpath(os.path.join(self._docs_dir, 'two'))
- with open(os.path.join(self._docs_dir, 'data.json'), 'w') as fd:
+ self._mkpath(os.path.join(self._docs_dir, "two"))
+ with open(os.path.join(self._docs_dir, "data.json"), "w") as fd:
data = {
- 'server': f'{domain1}',
+ "server": f"{domain1}",
}
fd.write(JSONEncoder().encode(data))
- with open(os.path.join(self._docs_dir, 'two/data.json'), 'w') as fd:
+ with open(os.path.join(self._docs_dir, "two/data.json"), "w") as fd:
data = {
- 'server': f'{domain2}',
+ "server": f"{domain2}",
}
fd.write(JSONEncoder().encode(data))
if self._proxy_auth_basic:
- with open(self._basic_passwords, 'w') as fd:
- fd.write('proxy:$apr1$FQfeInbs$WQZbODJlVg60j0ogEIlTW/\n')
+ with open(self._basic_passwords, "w") as fd:
+ fd.write("proxy:$apr1$FQfeInbs$WQZbODJlVg60j0ogEIlTW/\n")
if self._auth_digest:
- with open(self._digest_passwords, 'w') as fd:
- fd.write('test:restricted area:57123e269fd73d71ae0656594e938e2f\n')
- self._mkpath(os.path.join(self.docs_dir, 'restricted/digest'))
- with open(os.path.join(self.docs_dir, 'restricted/digest/data.json'), 'w') as fd:
+ with open(self._digest_passwords, "w") as fd:
+ fd.write("test:restricted area:57123e269fd73d71ae0656594e938e2f\n")
+ self._mkpath(os.path.join(self.docs_dir, "restricted/digest"))
+ with open(
+ os.path.join(self.docs_dir, "restricted/digest/data.json"), "w"
+ ) as fd:
fd.write('{"area":"digest"}\n')
- with open(self._conf_file, 'w') as fd:
+ with open(self._conf_file, "w") as fd:
for m in self.MODULES:
- if os.path.exists(os.path.join(self._mods_dir, f'mod_{m}.so')):
+ if os.path.exists(os.path.join(self._mods_dir, f"mod_{m}.so")):
fd.write(f'LoadModule {m}_module "{self._mods_dir}/mod_{m}.so"\n')
if Httpd.MOD_CURLTEST is not None:
fd.write(f'LoadModule curltest_module "{Httpd.MOD_CURLTEST}"\n')
- conf = [ # base server config
+ conf = [ # base server config
f'ServerRoot "{self._apache_dir}"',
- 'DefaultRuntimeDir logs',
- 'PidFile httpd.pid',
- f'ErrorLog {self._error_log}',
- f'LogLevel {self._get_log_level()}',
- 'StartServers 4',
- 'ReadBufferSize 16000',
- 'H2MinWorkers 16',
- 'H2MaxWorkers 256',
- f'Listen {self.env.http_port}',
- f'Listen {self.env.https_port}',
- f'Listen {self.env.proxy_port}',
- f'Listen {self.env.proxys_port}',
+ "DefaultRuntimeDir logs",
+ "PidFile httpd.pid",
+ f"ErrorLog {self._error_log}",
+ f"LogLevel {self._get_log_level()}",
+ "StartServers 4",
+ "ReadBufferSize 16000",
+ "H2MinWorkers 16",
+ "H2MaxWorkers 256",
+ f"Listen {self.env.http_port}",
+ f"Listen {self.env.https_port}",
+ f"Listen {self.env.proxy_port}",
+ f"Listen {self.env.proxys_port}",
f'TypesConfig "{self._conf_dir}/mime.types',
'SSLSessionCache "shmcb:ssl_gcache_data(32000)"',
]
- if 'base' in self._extra_configs:
- conf.extend(self._extra_configs['base'])
- conf.extend([ # plain http host for domain1
- f'<VirtualHost *:{self.env.http_port}>',
- f' ServerName {domain1}',
- ' ServerAlias localhost',
- f' DocumentRoot "{self._docs_dir}"',
- ' Protocols h2c http/1.1',
- ' H2Direct on',
- ])
+ if "base" in self._extra_configs:
+ conf.extend(self._extra_configs["base"])
+ conf.extend(
+ [ # plain http host for domain1
+ f"<VirtualHost *:{self.env.http_port}>",
+ f" ServerName {domain1}",
+ " ServerAlias localhost",
+ f' DocumentRoot "{self._docs_dir}"',
+ " Protocols h2c http/1.1",
+ " H2Direct on",
+ ]
+ )
conf.extend(self._curltest_conf(domain1))
- conf.extend([
- '</VirtualHost>',
- '',
- ])
- conf.extend([ # https host for domain1, h1 + h2
- f'<VirtualHost *:{self.env.https_port}>',
- f' ServerName {domain1}',
- ' ServerAlias localhost',
- ' Protocols h2 http/1.1',
- ' SSLEngine on',
- f' SSLCertificateFile {creds1.cert_file}',
- f' SSLCertificateKeyFile {creds1.pkey_file}',
- f' DocumentRoot "{self._docs_dir}"',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
+ conf.extend(
+ [ # https host for domain1, h1 + h2
+ f"<VirtualHost *:{self.env.https_port}>",
+ f" ServerName {domain1}",
+ " ServerAlias localhost",
+ " Protocols h2 http/1.1",
+ " SSLEngine on",
+ f" SSLCertificateFile {creds1.cert_file}",
+ f" SSLCertificateKeyFile {creds1.pkey_file}",
+ f' DocumentRoot "{self._docs_dir}"',
+ ]
+ )
conf.extend(self._curltest_conf(domain1))
if domain1 in self._extra_configs:
conf.extend(self._extra_configs[domain1])
- conf.extend([
- '</VirtualHost>',
- '',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
# Alternate to domain1 with BROTLI compression
- conf.extend([ # https host for domain1, h1 + h2
- f'<VirtualHost *:{self.env.https_port}>',
- f' ServerName {domain1brotli}',
- ' Protocols h2 http/1.1',
- ' SSLEngine on',
- f' SSLCertificateFile {creds1.cert_file}',
- f' SSLCertificateKeyFile {creds1.pkey_file}',
- f' DocumentRoot "{self._docs_dir}"',
- ' SetOutputFilter BROTLI_COMPRESS',
- ])
+ conf.extend(
+ [ # https host for domain1, h1 + h2
+ f"<VirtualHost *:{self.env.https_port}>",
+ f" ServerName {domain1brotli}",
+ " Protocols h2 http/1.1",
+ " SSLEngine on",
+ f" SSLCertificateFile {creds1.cert_file}",
+ f" SSLCertificateKeyFile {creds1.pkey_file}",
+ f' DocumentRoot "{self._docs_dir}"',
+ " SetOutputFilter BROTLI_COMPRESS",
+ ]
+ )
conf.extend(self._curltest_conf(domain1))
if domain1 in self._extra_configs:
conf.extend(self._extra_configs[domain1])
- conf.extend([
- '</VirtualHost>',
- '',
- ])
- conf.extend([ # plain http host for domain2
- f'<VirtualHost *:{self.env.http_port}>',
- f' ServerName {domain2}',
- ' ServerAlias localhost',
- f' DocumentRoot "{self._docs_dir}"',
- ' Protocols h2c http/1.1',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
+ conf.extend(
+ [ # plain http host for domain2
+ f"<VirtualHost *:{self.env.http_port}>",
+ f" ServerName {domain2}",
+ " ServerAlias localhost",
+ f' DocumentRoot "{self._docs_dir}"',
+ " Protocols h2c http/1.1",
+ ]
+ )
conf.extend(self._curltest_conf(domain2))
- conf.extend([
- '</VirtualHost>',
- '',
- ])
- conf.extend([ # https host for domain2, no h2
- f'<VirtualHost *:{self.env.https_port}>',
- f' ServerName {domain2}',
- ' Protocols http/1.1',
- ' SSLEngine on',
- f' SSLCertificateFile {creds2.cert_file}',
- f' SSLCertificateKeyFile {creds2.pkey_file}',
- f' DocumentRoot "{self._docs_dir}/two"',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
+ conf.extend(
+ [ # https host for domain2, no h2
+ f"<VirtualHost *:{self.env.https_port}>",
+ f" ServerName {domain2}",
+ " Protocols http/1.1",
+ " SSLEngine on",
+ f" SSLCertificateFile {creds2.cert_file}",
+ f" SSLCertificateKeyFile {creds2.pkey_file}",
+ f' DocumentRoot "{self._docs_dir}/two"',
+ ]
+ )
conf.extend(self._curltest_conf(domain2))
if domain2 in self._extra_configs:
conf.extend(self._extra_configs[domain2])
- conf.extend([
- '</VirtualHost>',
- '',
- ])
- conf.extend([ # https host for expired domain
- f'<VirtualHost *:{self.env.https_port}>',
- f' ServerName {exp_domain}',
- ' Protocols h2 http/1.1',
- ' SSLEngine on',
- f' SSLCertificateFile {exp_creds.cert_file}',
- f' SSLCertificateKeyFile {exp_creds.pkey_file}',
- f' DocumentRoot "{self._docs_dir}/expired"',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
+ conf.extend(
+ [ # https host for expired domain
+ f"<VirtualHost *:{self.env.https_port}>",
+ f" ServerName {exp_domain}",
+ " Protocols h2 http/1.1",
+ " SSLEngine on",
+ f" SSLCertificateFile {exp_creds.cert_file}",
+ f" SSLCertificateKeyFile {exp_creds.pkey_file}",
+ f' DocumentRoot "{self._docs_dir}/expired"',
+ ]
+ )
conf.extend(self._curltest_conf(exp_domain))
if exp_domain in self._extra_configs:
conf.extend(self._extra_configs[exp_domain])
- conf.extend([
- '</VirtualHost>',
- '',
- ])
- conf.extend([ # http forward proxy
- f'<VirtualHost *:{self.env.proxy_port}>',
- f' ServerName {proxy_domain}',
- ' Protocols h2c http/1.1',
- ' ProxyRequests On',
- ' H2ProxyRequests On',
- ' ProxyVia On',
- f' AllowCONNECT {self.env.http_port} {self.env.https_port}',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
+ conf.extend(
+ [ # http forward proxy
+ f"<VirtualHost *:{self.env.proxy_port}>",
+ f" ServerName {proxy_domain}",
+ " Protocols h2c http/1.1",
+ " ProxyRequests On",
+ " H2ProxyRequests On",
+ " ProxyVia On",
+ f" AllowCONNECT {self.env.http_port} {self.env.https_port}",
+ ]
+ )
conf.extend(self._get_proxy_conf())
- conf.extend([
- '</VirtualHost>',
- '',
- ])
- conf.extend([ # https forward proxy
- f'<VirtualHost *:{self.env.proxys_port}>',
- f' ServerName {proxy_domain}',
- ' Protocols h2 http/1.1',
- ' SSLEngine on',
- f' SSLCertificateFile {proxy_creds.cert_file}',
- f' SSLCertificateKeyFile {proxy_creds.pkey_file}',
- ' ProxyRequests On',
- ' H2ProxyRequests On',
- ' ProxyVia On',
- f' AllowCONNECT {self.env.http_port} {self.env.https_port}',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
+ conf.extend(
+ [ # https forward proxy
+ f"<VirtualHost *:{self.env.proxys_port}>",
+ f" ServerName {proxy_domain}",
+ " Protocols h2 http/1.1",
+ " SSLEngine on",
+ f" SSLCertificateFile {proxy_creds.cert_file}",
+ f" SSLCertificateKeyFile {proxy_creds.pkey_file}",
+ " ProxyRequests On",
+ " H2ProxyRequests On",
+ " ProxyVia On",
+ f" AllowCONNECT {self.env.http_port} {self.env.https_port}",
+ ]
+ )
conf.extend(self._get_proxy_conf())
- conf.extend([
- '</VirtualHost>',
- '',
- ])
+ conf.extend(
+ [
+ "</VirtualHost>",
+ "",
+ ]
+ )
fd.write("\n".join(conf))
- with open(os.path.join(self._conf_dir, 'mime.types'), 'w') as fd:
- fd.write("\n".join([
- 'text/html html',
- 'application/json json',
- ''
- ]))
+ with open(os.path.join(self._conf_dir, "mime.types"), "w") as fd:
+ fd.write(
+ "\n".join(
+ ["text/html html", "application/json json", ""]
+ )
+ )
def _get_proxy_conf(self):
if self._proxy_auth_basic:
return [
' <Proxy "*">',
- ' AuthType Basic',
+ " AuthType Basic",
' AuthName "Restricted Proxy"',
- ' AuthBasicProvider file',
+ " AuthBasicProvider file",
f' AuthUserFile "{self._basic_passwords}"',
- ' Require user proxy',
- ' </Proxy>',
+ " Require user proxy",
+ " </Proxy>",
]
else:
return [
' <Proxy "*">',
- ' Require ip 127.0.0.1',
- ' </Proxy>',
+ " Require ip 127.0.0.1",
+ " </Proxy>",
]
def _get_log_level(self):
if self.env.verbose > 3:
- return 'trace2'
+ return "trace2"
if self.env.verbose > 2:
- return 'trace1'
+ return "trace1"
if self.env.verbose > 1:
- return 'debug'
- return 'info'
+ return "debug"
+ return "info"
def _curltest_conf(self, servername) -> List[str]:
lines = []
if Httpd.MOD_CURLTEST is not None:
- lines.extend([
- ' Redirect 302 /data.json.302 /data.json',
- ' Redirect 301 /curltest/echo301 /curltest/echo',
- ' Redirect 302 /curltest/echo302 /curltest/echo',
- ' Redirect 303 /curltest/echo303 /curltest/echo',
- ' Redirect 307 /curltest/echo307 /curltest/echo',
- ' <Location /curltest/sslinfo>',
- ' SSLOptions StdEnvVars',
- ' SetHandler curltest-sslinfo',
- ' </Location>',
- ' <Location /curltest/echo>',
- ' SetHandler curltest-echo',
- ' </Location>',
- ' <Location /curltest/put>',
- ' SetHandler curltest-put',
- ' </Location>',
- ' <Location /curltest/tweak>',
- ' SetHandler curltest-tweak',
- ' </Location>',
- ' Redirect 302 /tweak /curltest/tweak',
- ' <Location /curltest/1_1>',
- ' SetHandler curltest-1_1-required',
- ' </Location>',
- ' <Location /curltest/shutdown_unclean>',
- ' SetHandler curltest-tweak',
- ' SetEnv force-response-1.0 1',
- ' </Location>',
- ' SetEnvIf Request_URI "/shutdown_unclean" ssl-unclean=1',
- ])
+ lines.extend(
+ [
+ " Redirect 302 /data.json.302 /data.json",
+ " Redirect 301 /curltest/echo301 /curltest/echo",
+ " Redirect 302 /curltest/echo302 /curltest/echo",
+ " Redirect 303 /curltest/echo303 /curltest/echo",
+ " Redirect 307 /curltest/echo307 /curltest/echo",
+ " <Location /curltest/sslinfo>",
+ " SSLOptions StdEnvVars",
+ " SetHandler curltest-sslinfo",
+ " </Location>",
+ " <Location /curltest/echo>",
+ " SetHandler curltest-echo",
+ " </Location>",
+ " <Location /curltest/put>",
+ " SetHandler curltest-put",
+ " </Location>",
+ " <Location /curltest/tweak>",
+ " SetHandler curltest-tweak",
+ " </Location>",
+ " Redirect 302 /tweak /curltest/tweak",
+ " <Location /curltest/1_1>",
+ " SetHandler curltest-1_1-required",
+ " </Location>",
+ " <Location /curltest/shutdown_unclean>",
+ " SetHandler curltest-tweak",
+ " SetEnv force-response-1.0 1",
+ " </Location>",
+ ' SetEnvIf Request_URI "/shutdown_unclean" ssl-unclean=1',
+ ]
+ )
if self._auth_digest:
- lines.extend([
- f' <Directory {self.docs_dir}/restricted/digest>',
- ' AuthType Digest',
- ' AuthName "restricted area"',
- f' AuthDigestDomain "https://{servername}"',
- ' AuthBasicProvider file',
- f' AuthUserFile "{self._digest_passwords}"',
- ' Require valid-user',
- ' </Directory>',
-
- ])
+ lines.extend(
+ [
+ f" <Directory {self.docs_dir}/restricted/digest>",
+ " AuthType Digest",
+ ' AuthName "restricted area"',
+ f' AuthDigestDomain "https://{servername}"',
+ " AuthBasicProvider file",
+ f' AuthUserFile "{self._digest_passwords}"',
+ " Require valid-user",
+ " </Directory>",
+ ]
+ )
return lines
def _init_curltest(self):
if Httpd.MOD_CURLTEST is not None:
return
local_dir = os.path.dirname(inspect.getfile(Httpd))
- p = subprocess.run([self.env.apxs, '-c', 'mod_curltest.c'],
- capture_output=True,
- cwd=os.path.join(local_dir, 'mod_curltest'))
+ p = subprocess.run(
+ [self.env.apxs, "-c", "mod_curltest.c"],
+ capture_output=True,
+ cwd=os.path.join(local_dir, "mod_curltest"),
+ )
rv = p.returncode
if rv != 0:
log.error(f"compiling mod_curltest failed: {p.stderr}")
raise Exception(f"compiling mod_curltest failed: {p.stderr}")
Httpd.MOD_CURLTEST = os.path.join(
- local_dir, 'mod_curltest/.libs/mod_curltest.so')
+ local_dir, "mod_curltest/.libs/mod_curltest.so"
+ )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class Nghttpx:
-
def __init__(self, env: Env, port: int, https_port: int, name: str):
self.env = env
self._name = name
self._https_port = https_port
self._cmd = env.nghttpx
self._run_dir = os.path.join(env.gen_dir, name)
- self._pid_file = os.path.join(self._run_dir, 'nghttpx.pid')
- self._conf_file = os.path.join(self._run_dir, 'nghttpx.conf')
- self._error_log = os.path.join(self._run_dir, 'nghttpx.log')
- self._stderr = os.path.join(self._run_dir, 'nghttpx.stderr')
- self._tmp_dir = os.path.join(self._run_dir, 'tmp')
+ self._pid_file = os.path.join(self._run_dir, "nghttpx.pid")
+ self._conf_file = os.path.join(self._run_dir, "nghttpx.conf")
+ self._error_log = os.path.join(self._run_dir, "nghttpx.log")
+ self._stderr = os.path.join(self._run_dir, "nghttpx.stderr")
+ self._tmp_dir = os.path.join(self._run_dir, "tmp")
self._process: Optional[subprocess.Popen] = None
self._rmf(self._pid_file)
self._rmf(self._error_log)
return False
while datetime.now() < end_wait:
try:
- log.debug(f'waiting for nghttpx({running.pid}) to exit.')
+ log.debug(f"waiting for nghttpx({running.pid}) to exit.")
running.wait(2)
- log.debug(f'nghttpx({running.pid}) terminated -> {running.returncode}')
+ log.debug(
+ f"nghttpx({running.pid}) terminated -> {running.returncode}"
+ )
break
except subprocess.TimeoutExpired:
- log.warning(f'nghttpx({running.pid}), not shut down yet.')
+ log.warning(f"nghttpx({running.pid}), not shut down yet.")
os.kill(running.pid, signal.SIGQUIT)
if datetime.now() >= end_wait:
- log.error(f'nghttpx({running.pid}), terminate forcefully.')
+ log.error(f"nghttpx({running.pid}), terminate forcefully.")
os.kill(running.pid, signal.SIGKILL)
running.terminate()
running.wait(1)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
if self._https_port > 0:
- check_url = f'https://{self.env.domain1}:{self._https_port}/'
- r = curl.http_get(url=check_url, extra_args=[
- '--trace', 'curl.trace', '--trace-time',
- '--connect-timeout', '1'
- ])
+ check_url = f"https://{self.env.domain1}:{self._https_port}/"
+ r = curl.http_get(
+ url=check_url,
+ extra_args=[
+ "--trace",
+ "curl.trace",
+ "--trace-time",
+ "--connect-timeout",
+ "1",
+ ],
+ )
else:
- check_url = f'https://{self.env.domain1}:{self._port}/'
- r = curl.http_get(url=check_url, extra_args=[
- '--trace', 'curl.trace', '--trace-time',
- '--http3-only', '--connect-timeout', '1'
- ])
+ check_url = f"https://{self.env.domain1}:{self._port}/"
+ r = curl.http_get(
+ url=check_url,
+ extra_args=[
+ "--trace",
+ "curl.trace",
+ "--trace-time",
+ "--http3-only",
+ "--connect-timeout",
+ "1",
+ ],
+ )
if r.exit_code != 0:
return True
- log.debug(f'waiting for nghttpx to stop responding: {r}')
- time.sleep(.1)
+ log.debug(f"waiting for nghttpx to stop responding: {r}")
+ time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False
try_until = datetime.now() + timeout
while datetime.now() < try_until:
if self._https_port > 0:
- check_url = f'https://{self.env.domain1}:{self._https_port}/'
- r = curl.http_get(url=check_url, extra_args=[
- '--trace', 'curl.trace', '--trace-time',
- '--connect-timeout', '1'
- ])
+ check_url = f"https://{self.env.domain1}:{self._https_port}/"
+ r = curl.http_get(
+ url=check_url,
+ extra_args=[
+ "--trace",
+ "curl.trace",
+ "--trace-time",
+ "--connect-timeout",
+ "1",
+ ],
+ )
else:
- check_url = f'https://{self.env.domain1}:{self._port}/'
- r = curl.http_get(url=check_url, extra_args=[
- '--http3-only', '--trace', 'curl.trace', '--trace-time',
- '--connect-timeout', '1'
- ])
+ check_url = f"https://{self.env.domain1}:{self._port}/"
+ r = curl.http_get(
+ url=check_url,
+ extra_args=[
+ "--http3-only",
+ "--trace",
+ "curl.trace",
+ "--trace-time",
+ "--connect-timeout",
+ "1",
+ ],
+ )
if r.exit_code == 0:
return True
- log.debug(f'waiting for nghttpx to become responsive: {r}')
- time.sleep(.1)
+ log.debug(f"waiting for nghttpx to become responsive: {r}")
+ time.sleep(0.1)
log.error(f"Server still not responding after {timeout}")
return False
return os.makedirs(path)
def _write_config(self):
- with open(self._conf_file, 'w') as fd:
- fd.write('# nghttpx test config')
- fd.write("\n".join([
- '# do we need something here?'
- ]))
+ with open(self._conf_file, "w") as fd:
+ fd.write("# nghttpx test config")
+ fd.write("\n".join(["# do we need something here?"]))
class NghttpxQuic(Nghttpx):
-
def __init__(self, env: Env):
- super().__init__(env=env, name='nghttpx-quic', port=env.h3_port,
- https_port=env.nghttpx_https_port)
+ super().__init__(
+ env=env,
+ name="nghttpx-quic",
+ port=env.h3_port,
+ https_port=env.nghttpx_https_port,
+ )
def start(self, wait_live=True):
self._mkpath(self._tmp_dir)
assert creds # convince pytype this isn't None
args = [
self._cmd,
- f'--frontend=*,{self.env.h3_port};quic',
- '--frontend-quic-early-data',
- f'--frontend=*,{self.env.nghttpx_https_port};tls',
- f'--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls',
- f'--backend=127.0.0.1,{self.env.http_port}',
- '--log-level=INFO',
- f'--pid-file={self._pid_file}',
- f'--errorlog-file={self._error_log}',
- f'--conf={self._conf_file}',
- f'--cacert={self.env.ca.cert_file}',
+ f"--frontend=*,{self.env.h3_port};quic",
+ "--frontend-quic-early-data",
+ f"--frontend=*,{self.env.nghttpx_https_port};tls",
+ f"--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls",
+ f"--backend=127.0.0.1,{self.env.http_port}",
+ "--log-level=INFO",
+ f"--pid-file={self._pid_file}",
+ f"--errorlog-file={self._error_log}",
+ f"--conf={self._conf_file}",
+ f"--cacert={self.env.ca.cert_file}",
creds.pkey_file,
creds.cert_file,
- '--frontend-http3-window-size=1M',
- '--frontend-http3-max-window-size=10M',
- '--frontend-http3-connection-window-size=10M',
- '--frontend-http3-max-connection-window-size=100M',
+ "--frontend-http3-window-size=1M",
+ "--frontend-http3-max-window-size=10M",
+ "--frontend-http3-connection-window-size=10M",
+ "--frontend-http3-max-connection-window-size=100M",
# f'--frontend-quic-debug-log',
]
- ngerr = open(self._stderr, 'a')
+ ngerr = open(self._stderr, "a")
self._process = subprocess.Popen(args=args, stderr=ngerr)
if self._process.returncode is not None:
return False
class NghttpxFwd(Nghttpx):
-
def __init__(self, env: Env):
- super().__init__(env=env, name='nghttpx-fwd', port=env.h2proxys_port,
- https_port=0)
+ super().__init__(
+ env=env, name="nghttpx-fwd", port=env.h2proxys_port, https_port=0
+ )
def start(self, wait_live=True):
self._mkpath(self._tmp_dir)
assert creds # convince pytype this isn't None
args = [
self._cmd,
- '--http2-proxy',
- f'--frontend=*,{self.env.h2proxys_port}',
- f'--backend=127.0.0.1,{self.env.proxy_port}',
- '--log-level=INFO',
- f'--pid-file={self._pid_file}',
- f'--errorlog-file={self._error_log}',
- f'--conf={self._conf_file}',
- f'--cacert={self.env.ca.cert_file}',
+ "--http2-proxy",
+ f"--frontend=*,{self.env.h2proxys_port}",
+ f"--backend=127.0.0.1,{self.env.proxy_port}",
+ "--log-level=INFO",
+ f"--pid-file={self._pid_file}",
+ f"--errorlog-file={self._error_log}",
+ f"--conf={self._conf_file}",
+ f"--cacert={self.env.ca.cert_file}",
creds.pkey_file,
creds.cert_file,
]
- ngerr = open(self._stderr, 'a')
+ ngerr = open(self._stderr, "a")
self._process = subprocess.Popen(args=args, stderr=ngerr)
if self._process.returncode is not None:
return False
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/'
+ check_url = f"https://{self.env.proxy_domain}:{self.env.h2proxys_port}/"
r = curl.http_get(url=check_url)
if r.exit_code != 0:
return True
- log.debug(f'waiting for nghttpx-fwd to stop responding: {r}')
- time.sleep(.1)
+ log.debug(f"waiting for nghttpx-fwd to stop responding: {r}")
+ time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/'
- r = curl.http_get(url=check_url, extra_args=[
- '--trace', 'curl.trace', '--trace-time'
- ])
+ check_url = f"https://{self.env.proxy_domain}:{self.env.h2proxys_port}/"
+ r = curl.http_get(
+ url=check_url, extra_args=["--trace", "curl.trace", "--trace-time"]
+ )
if r.exit_code == 0:
return True
- log.debug(f'waiting for nghttpx-fwd to become responsive: {r}')
- time.sleep(.1)
+ log.debug(f"waiting for nghttpx-fwd to become responsive: {r}")
+ time.sleep(0.1)
log.error(f"Server still not responding after {timeout}")
return False
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
for name, ptype in port_specs.items():
try:
s = socket.socket(type=ptype)
- s.bind(('', 0))
+ s.bind(("", 0))
ports[name] = s.getsockname()[1]
socks.append(s)
except Exception as e:
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
class VsFTPD:
-
def __init__(self, env: Env, with_ssl=False):
self.env = env
self._cmd = env.vsftpd
- self._scheme = 'ftp'
+ self._scheme = "ftp"
self._with_ssl = with_ssl
if self._with_ssl:
self._port = self.env.ftps_port
- name = 'vsftpds'
+ name = "vsftpds"
else:
self._port = self.env.ftp_port
- name = 'vsftpd'
+ name = "vsftpd"
self._vsftpd_dir = os.path.join(env.gen_dir, name)
- self._run_dir = os.path.join(self._vsftpd_dir, 'run')
- self._docs_dir = os.path.join(self._vsftpd_dir, 'docs')
- self._tmp_dir = os.path.join(self._vsftpd_dir, 'tmp')
- self._conf_file = os.path.join(self._vsftpd_dir, 'test.conf')
- self._pid_file = os.path.join(self._vsftpd_dir, 'vsftpd.pid')
- self._error_log = os.path.join(self._vsftpd_dir, 'vsftpd.log')
+ self._run_dir = os.path.join(self._vsftpd_dir, "run")
+ self._docs_dir = os.path.join(self._vsftpd_dir, "docs")
+ self._tmp_dir = os.path.join(self._vsftpd_dir, "tmp")
+ self._conf_file = os.path.join(self._vsftpd_dir, "test.conf")
+ self._pid_file = os.path.join(self._vsftpd_dir, "vsftpd.pid")
+ self._error_log = os.path.join(self._vsftpd_dir, "vsftpd.log")
self._process = None
self.clear_logs()
self._write_config()
args = [
self._cmd,
- f'{self._conf_file}',
+ f"{self._conf_file}",
]
- procerr = open(self._error_log, 'a')
+ procerr = open(self._error_log, "a")
self._process = subprocess.Popen(args=args, stderr=procerr)
if self._process.returncode is not None:
return False
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'{self._scheme}://{self.domain}:{self.port}/'
- r = curl.ftp_get(urls=[check_url], extra_args=['-v'])
+ check_url = f"{self._scheme}://{self.domain}:{self.port}/"
+ r = curl.ftp_get(urls=[check_url], extra_args=["-v"])
if r.exit_code != 0:
return True
- log.debug(f'waiting for vsftpd to stop responding: {r}')
- time.sleep(.1)
+ log.debug(f"waiting for vsftpd to stop responding: {r}")
+ time.sleep(0.1)
log.debug(f"Server still responding after {timeout}")
return False
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'{self._scheme}://{self.domain}:{self.port}/'
- r = curl.ftp_get(urls=[check_url], extra_args=[
- '--trace', 'curl-start.trace', '--trace-time'
- ])
+ check_url = f"{self._scheme}://{self.domain}:{self.port}/"
+ r = curl.ftp_get(
+ urls=[check_url],
+ extra_args=["--trace", "curl-start.trace", "--trace-time"],
+ )
if r.exit_code == 0:
return True
- log.debug(f'waiting for vsftpd to become responsive: {r}')
- time.sleep(.1)
+ log.debug(f"waiting for vsftpd to become responsive: {r}")
+ time.sleep(0.1)
log.error(f"Server still not responding after {timeout}")
return False
self._mkpath(self._docs_dir)
self._mkpath(self._tmp_dir)
conf = [ # base server config
- 'listen=YES',
- 'run_as_launching_user=YES',
- '#listen_address=127.0.0.1',
- f'listen_port={self.port}',
- 'local_enable=NO',
- 'anonymous_enable=YES',
- f'anon_root={self._docs_dir}',
- 'dirmessage_enable=YES',
- 'write_enable=YES',
- 'anon_upload_enable=YES',
- 'log_ftp_protocol=YES',
- 'xferlog_enable=YES',
- 'xferlog_std_format=NO',
- f'vsftpd_log_file={self._error_log}',
- '\n',
+ "listen=YES",
+ "run_as_launching_user=YES",
+ "#listen_address=127.0.0.1",
+ f"listen_port={self.port}",
+ "local_enable=NO",
+ "anonymous_enable=YES",
+ f"anon_root={self._docs_dir}",
+ "dirmessage_enable=YES",
+ "write_enable=YES",
+ "anon_upload_enable=YES",
+ "log_ftp_protocol=YES",
+ "xferlog_enable=YES",
+ "xferlog_std_format=NO",
+ f"vsftpd_log_file={self._error_log}",
+ "\n",
]
if self._with_ssl:
creds = self.env.get_credentials(self.domain)
assert creds # convince pytype this isn't None
- conf.extend([
- 'ssl_enable=YES',
- 'debug_ssl=YES',
- 'allow_anon_ssl=YES',
- f'rsa_cert_file={creds.cert_file}',
- f'rsa_private_key_file={creds.pkey_file}',
- # require_ssl_reuse=YES means ctrl and data connection need to use the same session
- 'require_ssl_reuse=NO',
- ])
-
- with open(self._conf_file, 'w') as fd:
+ conf.extend(
+ [
+ "ssl_enable=YES",
+ "debug_ssl=YES",
+ "allow_anon_ssl=YES",
+ f"rsa_cert_file={creds.cert_file}",
+ f"rsa_private_key_file={creds.pkey_file}",
+ # require_ssl_reuse=YES means ctrl and data connection need to use the same session
+ "require_ssl_reuse=NO",
+ ]
+ )
+
+ with open(self._conf_file, "w") as fd:
fd.write("\n".join(conf))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-#***************************************************************************
+# ***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
def main():
- parser = argparse.ArgumentParser(prog='scorecard', description="""
+ parser = argparse.ArgumentParser(
+ prog="scorecard",
+ description="""
Run a websocket echo server.
- """)
- parser.add_argument("--port", type=int,
- default=9876, help="port to listen on")
+ """,
+ )
+ parser.add_argument("--port", type=int, default=9876, help="port to listen on")
args = parser.parse_args()
logging.basicConfig(