env = Env(pytestconfig=pytestconfig)
level = logging.DEBUG if env.verbose > 0 else logging.INFO
logging.getLogger('').setLevel(level=level)
+ if not env.curl_has_protocol('http'):
+ pytest.skip("curl built without HTTP support")
+ if not env.curl_has_protocol('https'):
+ pytest.skip("curl built without HTTPS support")
+ if env.setup_incomplete():
+ pytest.skip(env.incomplete_reason())
+
env.setup()
return env
errors = []
for i in range(sample_size):
self.info('.')
- curl = CurlClient(env=self.env)
+ curl = CurlClient(env=self.env, silent=True)
url = f'https://{authority}/'
r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True)
if r.exit_code == 0 and len(r.stats) == 1:
errors = []
for i in range(sample_size):
self.info('.')
- curl = CurlClient(env=self.env)
+ curl = CurlClient(env=self.env, silent=True)
args = [
'--http3-only' if proto == 'h3' else '--http2',
f'--{ipv}', f'https://{authority}/'
errors = []
self.info(f'{sample_size}x single')
for i in range(sample_size):
- curl = CurlClient(env=self.env)
+ curl = CurlClient(env=self.env, silent=True)
r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True)
err = self._check_downloads(r, count)
if err:
url = f'{url}?[0-{count - 1}]'
self.info(f'{sample_size}x{count} serial')
for i in range(sample_size):
- curl = CurlClient(env=self.env)
+ curl = CurlClient(env=self.env, silent=True)
r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True)
self.info(f'.')
err = self._check_downloads(r, count)
url = f'{url}?[0-{count - 1}]'
self.info(f'{sample_size}x{count} parallel')
for i in range(sample_size):
- curl = CurlClient(env=self.env)
+ curl = CurlClient(env=self.env, silent=True)
start = datetime.now()
r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
extra_args=['--parallel'])
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestBasic:
@pytest.fixture(autouse=True, scope='class')
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url)
- assert r.exit_code == 0
+ r.check_exit_code(0)
assert r.response['status'] == 200
assert r.json['server'] == env.domain1
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_get(url=url)
- assert r.exit_code == 0
+ r.check_exit_code(0)
assert r.response['status'] == 200
assert r.json['server'] == env.domain1
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2'])
- assert r.exit_code == 0
+ r.check_exit_code(0)
assert r.response['status'] == 200
assert r.response['protocol'] == 'HTTP/2'
assert r.json['server'] == env.domain1
curl = CurlClient(env=env)
url = f'https://{env.domain2}:{env.https_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2'])
- assert r.exit_code == 0
+ r.check_exit_code(0)
assert r.response['status'] == 200
assert r.response['protocol'] == 'HTTP/1.1'
assert r.json['server'] == env.domain2
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.h3_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http3'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
assert r.response['status'] == 200
assert r.response['protocol'] == 'HTTP/3'
assert r.json['server'] == env.domain1
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestDownload:
@pytest.fixture(autouse=True, scope='class')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download 2 files
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=2, exp_status=200)
# download 100 files sequentially
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-99]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=100, exp_status=200)
# http/1.1 sequential transfers will open 1 connection
assert r.total_connects == 1
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel', '--parallel-max', f'{max_parallel}'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=100, exp_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-499]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=500, exp_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel', '--parallel-max', f'{max_parallel}'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# http2 parallel transfers will use one connection (common limit is 100)
assert r.total_connects == 1
with_stats=True, extra_args=[
'--parallel', '--parallel-max', '200'
])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# should have used 2 connections only (test servers allow 100 req/conn)
assert r.total_connects == 2, "h2 should use fewer connections here"
with_stats=True, extra_args=[
'--parallel'
])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# http/1.1 should have used count connections
assert r.total_connects == count, "http/1.1 should use this many connections"
urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
@pytest.mark.parametrize("proto", ['h2', 'h3'])
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--head'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
@pytest.mark.parametrize("proto", ['h2'])
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--head', '--http2-prior-knowledge', '--fail-early'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
def test_02_20_h2_small_frames(self, env: Env, httpd, repeat):
r = curl.http_download(urls=[urln], alpn_proto="h2", extra_args=[
'--parallel', '--parallel-max', '2'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
srcfile = os.path.join(httpd.docs_dir, 'data-1m')
for i in range(count):
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestGoAway:
@pytest.fixture(autouse=True, scope='class')
assert httpd.reload()
t.join()
r: ExecResult = self.r
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
assert nghttpx.reload(timeout=timedelta(seconds=2))
t.join()
r: ExecResult = self.r
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
assert r.total_connects == 2
assert httpd.reload()
t.join()
r: ExecResult = self.r
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestStuttered:
@pytest.fixture(autouse=True, scope='class')
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=100&chunk_size=100&chunk_delay=10ms'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download 50 files in 100 chunks a 100 bytes with 10ms delay between
'&chunks=100&chunk_size=100&chunk_delay=10ms'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=warmups+count, exp_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
'&chunks=1000&chunk_size=10&chunk_delay=100us'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=warmups+count, exp_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
'&chunks=10000&chunk_size=1&chunk_delay=50us'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=warmups+count, exp_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
@pytest.mark.skipif(condition=not Env.httpd_is_at_least('2.4.55'),
reason=f"httpd version too old for this: {Env.httpd_version()}")
class TestErrors:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--retry', '0'
])
- assert r.exit_code != 0, f'{r}'
+ r.check_exit_code_not(0)
invalid_stats = []
for idx, s in enumerate(r.stats):
if 'exitcode' not in s or s['exitcode'] not in [18, 56, 92]:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--retry', '0', '--parallel',
])
- assert r.exit_code != 0, f'{r}'
+ r.check_exit_code_not(0)
assert len(r.stats) == count, f'did not get all stats: {r}'
invalid_stats = []
for idx, s in enumerate(r.stats):
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestEyeballs:
@pytest.fixture(autouse=True, scope='class')
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
assert r.stats[0]['http_version'] == '3'
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
- assert r.exit_code == 7, f'{r}' # could not connect
+ r.check_exit_code(7)
# download using HTTP/3 on missing server with fallback on h2
@pytest.mark.skipif(condition=not Env.have_h3(), reason=f"missing HTTP/3 support")
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
assert r.stats[0]['http_version'] == '2'
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain2, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
assert r.stats[0]['http_version'] == '1.1'
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_download(urls=[urln])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
assert r.stats[0]['time_connect'] > 0.0
assert r.stats[0]['time_appconnect'] > 0.0
r = curl.http_download(urls=[urln], extra_args=[
'--resolve', f'not-valid.com:{env.https_port}:127.0.0.1'
])
- assert r.exit_code != 0, f'{r}'
+ r.check_exit_code_not(0)
r.check_stats(count=1, exp_status=0)
assert r.stats[0]['time_connect'] > 0.0 # was tcp connected
assert r.stats[0]['time_appconnect'] == 0 # but not SSL verified
r = curl.http_download(urls=[urln], extra_args=[
'--resolve', f'not-valid.com:{1}:127.0.0.1'
])
- assert r.exit_code != 0, f'{r}'
+ r.check_exit_code_not(0)
r.check_stats(count=1, exp_status=0)
assert r.stats[0]['time_connect'] == 0 # no one should have listened
assert r.stats[0]['time_appconnect'] == 0 # did not happen either
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestUpload:
@pytest.fixture(autouse=True, scope='class')
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [data]
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
indata = open(fdata).readlines()
r.check_stats(count=count, exp_status=200)
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
indata = open(fdata).readlines()
r.check_stats(count=count, exp_status=200)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
indata = open(fdata).readlines()
r.check_stats(count=count, exp_status=200)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
exp_data = [f'{os.path.getsize(fdata)}']
r.check_stats(count=count, exp_status=200)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=10ms'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
extra_args=['--parallel'])
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
exp_data = [f'{os.path.getsize(fdata)}']
r.check_stats(count=count, exp_status=200)
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{caddy.port}/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download 1MB files sequentially
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# sequential transfers will open 1 connection
assert r.total_connects == 1
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# sequential transfers will open 1 connection
assert r.total_connects == 1
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestPush:
@pytest.fixture(autouse=True, scope='class')
url = f'https://{env.domain1}:{env.https_port}/push/data1'
r = curl.http_download(urls=[url], alpn_proto='h2', with_stats=False,
with_headers=True)
- assert r.exit_code == 0, f'{r}'
+ r.check_exit_code(0)
assert len(r.responses) == 2, f'{r.responses}'
assert r.responses[0]['status'] == 103, f'{r.responses}'
assert 'link' in r.responses[0]['header'], f'{r.responses[0]}'
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestProxy:
@pytest.fixture(autouse=True, scope='class')
'--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
'--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download via https: proxy (no tunnel)
'--resolve', f'{env.proxy_domain}:{env.proxys_port}:127.0.0.1',
'--proxy-cacert', env.ca.cert_file,
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download http: via http: proxytunnel
'--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
'--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download http: via https: proxytunnel
'--resolve', f'{env.proxy_domain}:{env.proxys_port}:127.0.0.1',
'--proxy-cacert', env.ca.cert_file,
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download https: with proto via http: proxytunnel
'--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
'--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
exp_proto = 'HTTP/2' if proto == 'h2' else 'HTTP/1.1'
assert r.response['protocol'] == exp_proto
'--resolve', f'{env.proxy_domain}:{env.proxys_port}:127.0.0.1',
'--proxy-cacert', env.ca.cert_file,
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
exp_proto = 'HTTP/2' if proto == 'h2' else 'HTTP/1.1'
assert r.response['protocol'] == exp_proto
self._done = True
-
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
class TestUnix:
@pytest.fixture(scope="class")
extra_args=[
'--unix-socket', uds_faker.path,
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=1, exp_status=200)
# download https: via unix socket
extra_args=[
'--unix-socket', uds_faker.path,
])
- assert r.exit_code == 35 # CONNECT_ERROR (as faker is not TLS)
+ r.check_exit_code(35)
# download HTTP/3 via unix socket
@pytest.mark.skipif(condition=not Env.have_h3(), reason='h3 not supported')
extra_args=[
'--unix-socket', uds_faker.path,
])
- assert r.exit_code == 96 # QUIC CONNECT ERROR
+ r.check_exit_code(96)
log = logging.getLogger(__name__)
-@pytest.mark.skipif(condition=Env.setup_incomplete(),
- reason=f"missing: {Env.incomplete_reason()}")
@pytest.mark.skipif(condition=Env.curl_uses_lib('bearssl'), reason='BearSSL too slow')
class TestReuse:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# Server sends `Connection: close` on every 2nd request, requiring
# a new connection
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--rate', '30/m',
])
- assert r.exit_code == 0
+ r.check_exit_code(0)
r.check_stats(count=count, exp_status=200)
# Connections time out on server before we send another request,
assert r.total_connects == count
def __init__(self, args: List[str], exit_code: int,
stdout: List[str], stderr: List[str],
+ trace: Optional[List[str]] = None,
duration: Optional[timedelta] = None,
with_stats: bool = False,
exception: Optional[str] = None):
self._exception = exception
self._stdout = stdout
self._stderr = stderr
+ self._trace = trace
self._duration = duration if duration is not None else timedelta()
self._response = None
self._responses = []
def add_assets(self, assets: List):
self._assets.extend(assets)
+    def check_exit_code(self, code: int):
+        # Assert that curl terminated with exactly exit code `code`; on failure
+        # include the captured output so the pytest report shows what happened.
+        # NOTE: both continuation strings must be f-strings, otherwise
+        # '{self.exit_code}' and '{self._dump_logs()}' appear literally.
+        assert self.exit_code == code, \
+            f'expected exit code {code}, ' \
+            f'got {self.exit_code}\n{self._dump_logs()}'
+
+    def check_exit_code_not(self, code: int):
+        # Inverse of check_exit_code(): fail only when curl exited with `code`.
+        # The log dump stays inside the assert message so it is built lazily.
+        assert code != self.exit_code, \
+            f'expected exit code other than {code}\n{self._dump_logs()}'
+
def check_responses(self, count: int, exp_status: Optional[int] = None,
exp_exitcode: Optional[int] = None):
assert len(self.responses) == count, \
- f'response count: expected {count}, got {len(self.responses)}'
+ f'response count: expected {count}, ' \
+ f'got {len(self.responses)}\n{self._dump_logs()}'
if exp_status is not None:
for idx, x in enumerate(self.responses):
assert x['status'] == exp_status, \
- f'response #{idx} unexpectedstatus: {x["status"]}'
+ f'response #{idx} status: expected {exp_status},'\
+ f'got {x["status"]}\n{self._dump_logs()}'
if exp_exitcode is not None:
for idx, x in enumerate(self.responses):
if 'exitcode' in x:
- assert x['exitcode'] == 0, f'response #{idx} exitcode: {x["exitcode"]}'
+ assert x['exitcode'] == 0, \
+ f'response #{idx} exitcode: expected {exp_exitcode}, '\
+ f'got {x["exitcode"]}\n{self._dump_logs()}'
if self.with_stats:
- assert len(self.stats) == count, f'{self}'
+ self.check_stats(count)
def check_stats(self, count: int, exp_status: Optional[int] = None,
- exp_exitcode: Optional[int] = None):
+ exp_exitcode: Optional[int] = None):
assert len(self.stats) == count, \
- f'stats count: expected {count}, got {len(self.stats)}'
+ f'stats count: expected {count}, got {len(self.stats)}\n{self._dump_logs()}'
if exp_status is not None:
for idx, x in enumerate(self.stats):
assert 'http_code' in x, \
- f'status #{idx} reports no http_code'
+ f'status #{idx} reports no http_code\n{self._dump_logs()}'
assert x['http_code'] == exp_status, \
- f'status #{idx} unexpected http_code: {x["http_code"]}'
+ f'status #{idx} http_code: expected {exp_status}, '\
+ f'got {x["http_code"]}\n{self._dump_logs()}'
if exp_exitcode is not None:
for idx, x in enumerate(self.stats):
if 'exitcode' in x:
- assert x['exitcode'] == 0, f'status #{idx} exitcode: {x["exitcode"]}'
+ assert x['exitcode'] == 0, \
+ f'status #{idx} exitcode: expected {exp_exitcode}, '\
+ f'got {x["exitcode"]}\n{self._dump_logs()}'
+
+    def _dump_logs(self):
+        # Build the diagnostic blob for failed assertions: captured stdout
+        # first, then the curl trace when one was recorded, otherwise stderr.
+        out = ['>>--stdout ----------------------------------------------\n']
+        out.extend(self._stdout)
+        if self._trace:
+            out.append('>>--trace ----------------------------------------------\n')
+            out.extend(self._trace)
+        else:
+            out.append('>>--stderr ----------------------------------------------\n')
+            out.extend(self._stderr)
+        out.append('<<-------------------------------------------------------\n')
+        return ''.join(out)
class CurlClient:
}
def __init__(self, env: Env, run_dir: Optional[str] = None,
- timeout: Optional[float] = None):
+ timeout: Optional[float] = None, silent: bool = False):
self.env = env
self._timeout = timeout if timeout else env.test_timeout
self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl
self._headerfile = f'{self._run_dir}/curl.headers'
self._tracefile = f'{self._run_dir}/curl.trace'
self._log_path = f'{self._run_dir}/curl.log'
+ self._silent = silent
self._rmrf(self._run_dir)
self._mkpath(self._run_dir)
input=intext.encode() if intext else None,
timeout=self._timeout)
exitcode = p.returncode
- except subprocess.TimeoutExpired as e:
+ except subprocess.TimeoutExpired:
log.warning(f'Timeout after {self._timeout}s: {args}')
exitcode = -1
exception = 'TimeoutExpired'
coutput = open(self._stdoutfile).readlines()
cerrput = open(self._stderrfile).readlines()
+ ctrace = None
+ if os.path.exists(self._tracefile):
+ ctrace = open(self._tracefile).readlines()
return ExecResult(args=args, exit_code=exitcode, exception=exception,
- stdout=coutput, stderr=cerrput,
+ stdout=coutput, stderr=cerrput, trace=ctrace,
duration=datetime.now() - start,
with_stats=with_stats)
args = [self._curl, "-s", "--path-as-is"]
if with_headers:
args.extend(["-D", self._headerfile])
- if self.env.verbose > 1:
- args.extend(['--trace', self._tracefile])
if self.env.verbose > 2:
args.extend(['--trace', self._tracefile, '--trace-time'])
+ elif self.env.verbose > 1:
+ args.extend(['--trace', self._tracefile])
+ elif not self._silent:
+ args.extend(['-v'])
for url in urls:
u = urlparse(urls[0])
fin_response(response)
return r
-
def curl_has_feature(feature: str) -> bool:
return feature.lower() in Env.CONFIG.curl_props['features']
+    @staticmethod
+    def curl_has_protocol(protocol: str) -> bool:
+        # True when the curl under test lists `protocol` (case-insensitive)
+        # among its built-in protocols, mirroring curl_has_feature() above.
+        return protocol.lower() in Env.CONFIG.curl_props['protocols']
+
@staticmethod
def curl_lib_version(libname: str) -> str:
prefix = f'{libname.lower()}/'