Diffstat (limited to 'tests/http')
-rw-r--r--  tests/http/conftest.py            |  14
-rw-r--r--  tests/http/scorecard.py           |   1
-rw-r--r--  tests/http/test_01_basic.py       |  21
-rw-r--r--  tests/http/test_02_download.py    |  57
-rw-r--r--  tests/http/test_03_goaway.py      |  15
-rw-r--r--  tests/http/test_04_stuttered.py   |  12
-rw-r--r--  tests/http/test_05_errors.py      |   4
-rw-r--r--  tests/http/test_06_eyeballs.py    |  20
-rw-r--r--  tests/http/test_07_upload.py      |  37
-rw-r--r--  tests/http/test_08_caddy.py       |  33
-rw-r--r--  tests/http/test_10_proxy.py       | 174
-rw-r--r--  tests/http/test_11_unix.py        |   8
-rw-r--r--  tests/http/test_12_reuse.py       |   7
-rw-r--r--  tests/http/test_13_proxy_auth.py  | 193
-rw-r--r--  tests/http/testenv/__init__.py    |   5
-rw-r--r--  tests/http/testenv/curl.py        | 157
-rw-r--r--  tests/http/testenv/env.py         |  21
-rw-r--r--  tests/http/testenv/httpd.py       |  67
-rw-r--r--  tests/http/testenv/nghttpx.py     | 123
19 files changed, 707 insertions(+), 262 deletions(-)
diff --git a/tests/http/conftest.py b/tests/http/conftest.py
index 22386b94d..bd97baa8b 100644
--- a/tests/http/conftest.py
+++ b/tests/http/conftest.py
@@ -31,8 +31,7 @@ import pytest
sys.path.append(os.path.join(os.path.dirname(__file__), '.'))
-from testenv import Env, Nghttpx, Httpd
-
+from testenv import Env, Nghttpx, Httpd, NghttpxQuic, NghttpxFwd
@pytest.fixture(scope="package")
def env(pytestconfig) -> Env:
@@ -68,7 +67,16 @@ def httpd(env) -> Httpd:
@pytest.fixture(scope='package')
def nghttpx(env, httpd) -> Optional[Nghttpx]:
- nghttpx = Nghttpx(env=env)
+ nghttpx = NghttpxQuic(env=env)
+ if env.have_h3():
+ nghttpx.clear_logs()
+ assert nghttpx.start()
+ yield nghttpx
+ nghttpx.stop()
+
+@pytest.fixture(scope='package')
+def nghttpx_fwd(env, httpd) -> Optional[Nghttpx]:
+ nghttpx = NghttpxFwd(env=env)
if env.have_h3():
nghttpx.clear_logs()
assert nghttpx.start()
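
For reference, a minimal sketch of a test consuming the new package-scoped nghttpx_fwd fixture. The test name and URL are illustrative only; env.have_nghttpx(), start_if_needed(), pts_port() and check_response() are the helpers introduced in the hunks below.

import pytest
from testenv import Env, CurlClient

def test_fwd_proxy_get(env: Env, httpd, nghttpx_fwd):
    # nghttpx_fwd is the forward-proxy instance; the package fixture only
    # starts it when HTTP/3 is available, so start it explicitly here
    if not env.have_nghttpx():
        pytest.skip('no nghttpx available')
    nghttpx_fwd.start_if_needed()
    curl = CurlClient(env=env)
    url = f'http://localhost:{env.http_port}/data.json'
    r = curl.http_download(urls=[url], alpn_proto='http/1.1', extra_args=[
        '--proxy', f'https://{env.proxy_domain}:{env.pts_port()}/',
        '--resolve', f'{env.proxy_domain}:{env.pts_port()}:127.0.0.1',
        '--proxy-cacert', env.ca.cert_file,
    ])
    r.check_response(count=1, http_status=200)
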
diff --git a/tests/http/scorecard.py b/tests/http/scorecard.py
index bc3808fb4..3c29159a0 100644
--- a/tests/http/scorecard.py
+++ b/tests/http/scorecard.py
@@ -35,7 +35,6 @@ from typing import Dict, Any, Optional, List
from testenv import Env, Httpd, Nghttpx, CurlClient, Caddy, ExecResult
-
log = logging.getLogger(__name__)
diff --git a/tests/http/test_01_basic.py b/tests/http/test_01_basic.py
index 66c9ae50e..30b87007b 100644
--- a/tests/http/test_01_basic.py
+++ b/tests/http/test_01_basic.py
@@ -46,37 +46,34 @@ class TestBasic:
curl = CurlClient(env=env)
url = f'http://{env.domain1}:{env.http_port}/data.json'
r = curl.http_get(url=url)
- r.check_exit_code(0)
- assert r.response['status'] == 200
+ r.check_response(http_status=200)
assert r.json['server'] == env.domain1
# simple https: GET, any http version
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
def test_01_02_https_get(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_get(url=url)
- r.check_exit_code(0)
- assert r.response['status'] == 200
+ r.check_response(http_status=200)
assert r.json['server'] == env.domain1
# simple https: GET, h2 wanted and got
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
def test_01_03_h2_get(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2'])
- r.check_exit_code(0)
- assert r.response['status'] == 200
- assert r.response['protocol'] == 'HTTP/2'
+ r.check_response(http_status=200, protocol='HTTP/2')
assert r.json['server'] == env.domain1
# simple https: GET, h2 unsupported, fallback to h1
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
def test_01_04_h2_unsupported(self, env: Env, httpd):
curl = CurlClient(env=env)
url = f'https://{env.domain2}:{env.https_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http2'])
- r.check_exit_code(0)
- assert r.response['status'] == 200
- assert r.response['protocol'] == 'HTTP/1.1'
+ r.check_response(http_status=200, protocol='HTTP/1.1')
assert r.json['server'] == env.domain2
# simple h3: GET, want h3 and get it
@@ -85,7 +82,5 @@ class TestBasic:
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.h3_port}/data.json'
r = curl.http_get(url=url, extra_args=['--http3'])
- r.check_exit_code(0)
- assert r.response['status'] == 200
- assert r.response['protocol'] == 'HTTP/3'
+ r.check_response(http_status=200, protocol='HTTP/3')
assert r.json['server'] == env.domain1
diff --git a/tests/http/test_02_download.py b/tests/http/test_02_download.py
index 5804adaf8..f718c8b98 100644
--- a/tests/http/test_02_download.py
+++ b/tests/http/test_02_download.py
@@ -59,8 +59,7 @@ class TestDownload:
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(http_status=200)
# download 2 files
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -70,8 +69,7 @@ class TestDownload:
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-1]'
r = curl.http_download(urls=[url], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=2, exp_status=200)
+ r.check_response(http_status=200, count=2)
# download 100 files sequentially
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -82,10 +80,7 @@ class TestDownload:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-99]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=100, exp_status=200)
- # http/1.1 sequential transfers will open 1 connection
- assert r.total_connects == 1
+ r.check_response(http_status=200, count=100, connect_count=1)
# download 100 files parallel
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -99,14 +94,13 @@ class TestDownload:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel', '--parallel-max', f'{max_parallel}'
])
- r.check_exit_code(0)
- r.check_stats(count=100, exp_status=200)
+ r.check_response(http_status=200, count=100)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
- assert r.total_connects > 1
+ assert r.total_connects > 1, r.dump_logs()
else:
# http2 parallel transfers will use one connection (common limit is 100)
- assert r.total_connects == 1
+ assert r.total_connects == 1, r.dump_logs()
# download 500 files sequential
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -119,14 +113,13 @@ class TestDownload:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-499]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=500, exp_status=200)
+ r.check_response(http_status=200, count=500)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
- assert r.total_connects > 1
+ assert r.total_connects > 1, r.dump_logs()
else:
# http2 parallel transfers will use one connection (common limit is 100)
- assert r.total_connects == 1
+ assert r.total_connects == 1, r.dump_logs()
# download 500 files parallel
@pytest.mark.parametrize("proto", ['h2', 'h3'])
@@ -141,10 +134,7 @@ class TestDownload:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel', '--parallel-max', f'{max_parallel}'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
- # http2 parallel transfers will use one connection (common limit is 100)
- assert r.total_connects == 1
+ r.check_response(http_status=200, count=count, connect_count=1)
# download files parallel, check connection reuse/multiplex
@pytest.mark.parametrize("proto", ['h2', 'h3'])
@@ -159,8 +149,7 @@ class TestDownload:
with_stats=True, extra_args=[
'--parallel', '--parallel-max', '200'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(http_status=200, count=count)
# should have used at most 2 connections only (test servers allow 100 req/conn)
# it may be just 1 on slow systems where request are answered faster than
# curl can exhaust the capacity or if curl runs with address-sanitizer speed
@@ -177,8 +166,7 @@ class TestDownload:
with_stats=True, extra_args=[
'--parallel'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
# http/1.1 should have used count connections
assert r.total_connects == count, "http/1.1 should use this many connections"
@@ -191,8 +179,7 @@ class TestDownload:
urln = f'https://{env.authority_for(env.domain1, proto)}/data-1m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_09_1MB_parallel(self, env: Env,
@@ -205,8 +192,7 @@ class TestDownload:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_10_10MB_serial(self, env: Env,
@@ -217,8 +203,7 @@ class TestDownload:
urln = f'https://{env.authority_for(env.domain1, proto)}/data-10m?[0-{count-1}]'
curl = CurlClient(env=env)
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
def test_02_11_10MB_parallel(self, env: Env,
@@ -233,8 +218,7 @@ class TestDownload:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['h2', 'h3'])
def test_02_12_head_serial_https(self, env: Env,
@@ -247,8 +231,7 @@ class TestDownload:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--head'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
@pytest.mark.parametrize("proto", ['h2'])
def test_02_13_head_serial_h2c(self, env: Env,
@@ -261,8 +244,7 @@ class TestDownload:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--head', '--http2-prior-knowledge', '--fail-early'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
def test_02_20_h2_small_frames(self, env: Env, httpd, repeat):
# Test case to reproduce content corruption as observed in
@@ -288,8 +270,7 @@ class TestDownload:
r = curl.http_download(urls=[urln], alpn_proto="h2", extra_args=[
'--parallel', '--parallel-max', '2'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
srcfile = os.path.join(httpd.docs_dir, 'data-1m')
for i in range(count):
dfile = curl.download_file(i)
diff --git a/tests/http/test_03_goaway.py b/tests/http/test_03_goaway.py
index 5da60aa83..00fbce9a4 100644
--- a/tests/http/test_03_goaway.py
+++ b/tests/http/test_03_goaway.py
@@ -66,8 +66,7 @@ class TestGoAway:
assert httpd.reload()
t.join()
r: ExecResult = self.r
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
assert r.total_connects == 2
@@ -101,16 +100,14 @@ class TestGoAway:
assert nghttpx.reload(timeout=timedelta(seconds=2))
t.join()
r: ExecResult = self.r
- r.check_exit_code(0)
+ # this should take `count` seconds to retrieve
+ assert r.duration >= timedelta(seconds=count)
+ r.check_response(count=count, http_status=200, connect_count=2)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
- assert r.total_connects == 2
for idx, s in enumerate(r.stats):
if s['num_connects'] > 0:
log.debug(f'request {idx} connected')
- # this should take `count` seconds to retrieve
- assert r.duration >= timedelta(seconds=count)
- r.check_stats(count=count, exp_status=200, exp_exitcode=0)
# download files sequentially with delay, reload server for GOAWAY
def test_03_03_h1_goaway(self, env: Env, httpd, nghttpx, repeat):
@@ -133,11 +130,9 @@ class TestGoAway:
assert httpd.reload()
t.join()
r: ExecResult = self.r
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200, connect_count=2)
# reload will shut down the connection gracefully with GOAWAY
# we expect to see a second connection opened afterwards
- assert r.total_connects == 2
for idx, s in enumerate(r.stats):
if s['num_connects'] > 0:
log.debug(f'request {idx} connected')
diff --git a/tests/http/test_04_stuttered.py b/tests/http/test_04_stuttered.py
index 0cad4c227..2a5f1e266 100644
--- a/tests/http/test_04_stuttered.py
+++ b/tests/http/test_04_stuttered.py
@@ -55,8 +55,7 @@ class TestStuttered:
f'/curltest/tweak?id=[0-{count - 1}]'\
'&chunks=100&chunk_size=100&chunk_delay=10ms'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
# download 50 files in 100 chunks a 100 bytes with 10ms delay between
# prepend 100 file requests to warm up connection processing limits
@@ -75,8 +74,7 @@ class TestStuttered:
'&chunks=100&chunk_size=100&chunk_delay=10ms'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=warmups+count, exp_status=200)
+ r.check_response(count=warmups+count, http_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
if t_max < (5 * t_min) and t_min < 2:
@@ -98,8 +96,7 @@ class TestStuttered:
'&chunks=1000&chunk_size=10&chunk_delay=100us'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=warmups+count, exp_status=200)
+ r.check_response(count=warmups+count, http_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
if t_max < (5 * t_min):
@@ -121,8 +118,7 @@ class TestStuttered:
'&chunks=10000&chunk_size=1&chunk_delay=50us'
r = curl.http_download(urls=[url1, urln], alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=warmups+count, exp_status=200)
+ r.check_response(count=warmups+count, http_status=200)
assert r.total_connects == 1
t_avg, i_min, t_min, i_max, t_max = self.stats_spread(r.stats[warmups:], 'time_total')
if t_max < (5 * t_min):
diff --git a/tests/http/test_05_errors.py b/tests/http/test_05_errors.py
index f27ba8c39..587ba33c4 100644
--- a/tests/http/test_05_errors.py
+++ b/tests/http/test_05_errors.py
@@ -62,7 +62,7 @@ class TestErrors:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--retry', '0'
])
- r.check_exit_code_not(0)
+ r.check_exit_code(False)
invalid_stats = []
for idx, s in enumerate(r.stats):
if 'exitcode' not in s or s['exitcode'] not in [18, 56, 92]:
@@ -85,7 +85,7 @@ class TestErrors:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--retry', '0', '--parallel',
])
- r.check_exit_code_not(0)
+ r.check_exit_code(False)
assert len(r.stats) == count, f'did not get all stats: {r}'
invalid_stats = []
for idx, s in enumerate(r.stats):
diff --git a/tests/http/test_06_eyeballs.py b/tests/http/test_06_eyeballs.py
index f30ecd36f..3eeb0793b 100644
--- a/tests/http/test_06_eyeballs.py
+++ b/tests/http/test_06_eyeballs.py
@@ -50,8 +50,7 @@ class TestEyeballs:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '3'
# download using only HTTP/3 on missing server
@@ -61,7 +60,7 @@ class TestEyeballs:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3-only'])
- r.check_exit_code(7)
+ r.check_response(exitcode=7, http_status=None)
# download using HTTP/3 on missing server with fallback on h2
@pytest.mark.skipif(condition=not Env.have_h3(), reason=f"missing HTTP/3 support")
@@ -70,8 +69,7 @@ class TestEyeballs:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3'])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '2'
# download using HTTP/3 on missing server with fallback on http/1.1
@@ -81,8 +79,7 @@ class TestEyeballs:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain2, "h3")}/data.json'
r = curl.http_download(urls=[urln], extra_args=['--http3'])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
assert r.stats[0]['http_version'] == '1.1'
# make a successful https: transfer and observer the timer stats
@@ -90,8 +87,7 @@ class TestEyeballs:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, "h2")}/data.json'
r = curl.http_download(urls=[urln])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
assert r.stats[0]['time_connect'] > 0.0
assert r.stats[0]['time_appconnect'] > 0.0
@@ -102,8 +98,7 @@ class TestEyeballs:
r = curl.http_download(urls=[urln], extra_args=[
'--resolve', f'not-valid.com:{env.https_port}:127.0.0.1'
])
- r.check_exit_code_not(0)
- r.check_stats(count=1, exp_status=0)
+ r.check_response(count=1, http_status=0, exitcode=False)
assert r.stats[0]['time_connect'] > 0.0 # was tcp connected
assert r.stats[0]['time_appconnect'] == 0 # but not SSL verified
@@ -114,8 +109,7 @@ class TestEyeballs:
r = curl.http_download(urls=[urln], extra_args=[
'--resolve', f'not-valid.com:{1}:127.0.0.1'
])
- r.check_exit_code_not(0)
- r.check_stats(count=1, exp_status=0)
+ r.check_response(count=1, http_status=None, exitcode=False)
assert r.stats[0]['time_connect'] == 0 # no one should have listened
assert r.stats[0]['time_appconnect'] == 0 # did not happen either
diff --git a/tests/http/test_07_upload.py b/tests/http/test_07_upload.py
index 795e5f2fe..5b4c1d8a8 100644
--- a/tests/http/test_07_upload.py
+++ b/tests/http/test_07_upload.py
@@ -58,8 +58,7 @@ class TestUpload:
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
respdata = open(curl.response_file(0)).readlines()
assert respdata == [data]
@@ -74,8 +73,7 @@ class TestUpload:
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-0]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
indata = open(fdata).readlines()
respdata = open(curl.response_file(0)).readlines()
assert respdata == indata
@@ -92,8 +90,7 @@ class TestUpload:
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
@@ -112,8 +109,7 @@ class TestUpload:
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
@@ -130,10 +126,9 @@ class TestUpload:
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
indata = open(fdata).readlines()
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == indata
@@ -150,10 +145,8 @@ class TestUpload:
curl = CurlClient(env=env)
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
indata = open(fdata).readlines()
- r.check_stats(count=count, exp_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == indata
@@ -172,8 +165,7 @@ class TestUpload:
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=data, alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == [data]
@@ -192,8 +184,7 @@ class TestUpload:
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/echo?id=[0-{count-1}]'
r = curl.http_upload(urls=[url], data=f'@{fdata}', alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
self.check_download(count, fdata, curl)
# PUT 100k
@@ -209,10 +200,9 @@ class TestUpload:
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
exp_data = [f'{os.path.getsize(fdata)}']
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
@@ -230,10 +220,9 @@ class TestUpload:
url = f'https://{env.authority_for(env.domain1, proto)}/curltest/put?id=[0-{count-1}]&chunk_delay=10ms'
r = curl.http_put(urls=[url], fdata=fdata, alpn_proto=proto,
extra_args=['--parallel'])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
exp_data = [f'{os.path.getsize(fdata)}']
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
for i in range(count):
respdata = open(curl.response_file(i)).readlines()
assert respdata == exp_data
diff --git a/tests/http/test_08_caddy.py b/tests/http/test_08_caddy.py
index 6ce34ec89..157419222 100644
--- a/tests/http/test_08_caddy.py
+++ b/tests/http/test_08_caddy.py
@@ -35,6 +35,7 @@ log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=not Env.has_caddy(), reason=f"missing caddy")
+@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
class TestCaddy:
@pytest.fixture(autouse=True, scope='class')
@@ -71,8 +72,7 @@ class TestCaddy:
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{caddy.port}/data.json'
r = curl.http_download(urls=[url], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
# download 1MB files sequentially
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -86,10 +86,7 @@ class TestCaddy:
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data1.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
- # sequential transfers will open 1 connection
- assert r.total_connects == 1
+ r.check_response(count=count, http_status=200, connect_count=1)
# download 1MB files parallel
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -105,13 +102,12 @@ class TestCaddy:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
- assert r.total_connects > 1
+ assert r.total_connects > 1, r.dump_logs()
else:
- assert r.total_connects == 1
+ assert r.total_connects == 1, r.dump_logs()
# download 5MB files sequentially
@pytest.mark.parametrize("proto", ['h2', 'h3'])
@@ -125,10 +121,7 @@ class TestCaddy:
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data5.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- assert r.exit_code == 0
- r.check_stats(count=count, exp_status=200)
- # sequential transfers will open 1 connection
- assert r.total_connects == 1
+ r.check_response(count=count, http_status=200, connect_count=1)
# download 10MB files sequentially
@pytest.mark.parametrize("proto", ['h2', 'h3'])
@@ -142,10 +135,7 @@ class TestCaddy:
curl = CurlClient(env=env)
urln = f'https://{env.domain1}:{caddy.port}/data10.data?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
- # sequential transfers will open 1 connection
- assert r.total_connects == 1
+ r.check_response(count=count, http_status=200, connect_count=1)
# download 10MB files parallel
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
@@ -161,11 +151,10 @@ class TestCaddy:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--parallel'
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
if proto == 'http/1.1':
# http/1.1 parallel transfers will open multiple connections
- assert r.total_connects > 1
+ assert r.total_connects > 1, r.dump_logs()
else:
- assert r.total_connects == 1
+ assert r.total_connects == 1, r.dump_logs()
diff --git a/tests/http/test_10_proxy.py b/tests/http/test_10_proxy.py
index b93d665b0..87e74e1bf 100644
--- a/tests/http/test_10_proxy.py
+++ b/tests/http/test_10_proxy.py
@@ -24,11 +24,13 @@
#
###########################################################################
#
+import filecmp
import logging
import os
+import re
import pytest
-from testenv import Env, CurlClient
+from testenv import Env, CurlClient, ExecResult
log = logging.getLogger(__name__)
@@ -37,13 +39,33 @@ log = logging.getLogger(__name__)
class TestProxy:
@pytest.fixture(autouse=True, scope='class')
- def _class_scope(self, env, httpd):
+ def _class_scope(self, env, httpd, nghttpx_fwd):
push_dir = os.path.join(httpd.docs_dir, 'push')
if not os.path.exists(push_dir):
os.makedirs(push_dir)
+ if env.have_nghttpx():
+ nghttpx_fwd.start_if_needed()
+ env.make_data_file(indir=env.gen_dir, fname="data-100k", fsize=100*1024)
+ env.make_data_file(indir=env.gen_dir, fname="data-10m", fsize=10*1024*1024)
httpd.clear_extra_configs()
httpd.reload()
+ def set_tunnel_proto(self, proto):
+ if proto == 'h2':
+ os.environ['CURL_PROXY_TUNNEL_H2'] = '1'
+ return 'HTTP/2'
+ else:
+ os.environ.pop('CURL_PROXY_TUNNEL_H2', None)
+ return 'HTTP/1.1'
+
+ def get_tunnel_proto_used(self, r: ExecResult):
+ for l in r.trace_lines:
+ m = re.match(r'.* CONNECT tunnel: (\S+) negotiated$', l)
+ if m:
+ return m.group(1)
+ assert False, f'tunnel protocol not found in:\n{"".join(r.trace_lines)}'
+ return None
+
# download via http: proxy (no tunnel)
def test_10_01_proxy_http(self, env: Env, httpd, repeat):
curl = CurlClient(env=env)
@@ -53,13 +75,13 @@ class TestProxy:
'--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
'--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
# download via https: proxy (no tunnel)
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
- def test_10_02_proxy_https(self, env: Env, httpd, repeat):
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_10_02_proxy_https(self, env: Env, httpd, nghttpx_fwd, repeat):
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
@@ -68,8 +90,7 @@ class TestProxy:
'--resolve', f'{env.proxy_domain}:{env.proxys_port}:127.0.0.1',
'--proxy-cacert', env.ca.cert_file,
])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
# download http: via http: proxytunnel
def test_10_03_proxytunnel_http(self, env: Env, httpd, repeat):
@@ -81,27 +102,27 @@ class TestProxy:
'--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
'--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
# download http: via https: proxytunnel
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
- def test_10_04_proxy_https(self, env: Env, httpd, repeat):
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_10_04_proxy_https(self, env: Env, httpd, nghttpx_fwd, repeat):
curl = CurlClient(env=env)
url = f'http://localhost:{env.http_port}/data.json'
r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
extra_args=[
'--proxytunnel',
- '--proxy', f'https://{env.proxy_domain}:{env.proxys_port}/',
- '--resolve', f'{env.proxy_domain}:{env.proxys_port}:127.0.0.1',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port()}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port()}:127.0.0.1',
'--proxy-cacert', env.ca.cert_file,
])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
# download https: with proto via http: proxytunnel
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
def test_10_05_proxytunnel_http(self, env: Env, httpd, proto, repeat):
curl = CurlClient(env=env)
url = f'https://localhost:{env.https_port}/data.json'
@@ -112,28 +133,129 @@ class TestProxy:
'--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
'--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
- exp_proto = 'HTTP/2' if proto == 'h2' else 'HTTP/1.1'
- assert r.response['protocol'] == exp_proto
+ r.check_response(count=1, http_status=200,
+ protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
# download https: with proto via https: proxytunnel
@pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
reason='curl lacks HTTPS-proxy support')
@pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
- def test_10_06_proxy_https(self, env: Env, httpd, proto, repeat):
+ @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_10_06_proxytunnel_https(self, env: Env, httpd, nghttpx_fwd, proto, tunnel, repeat):
+ if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
+ pytest.skip('only supported with nghttp2')
+ exp_tunnel_proto = self.set_tunnel_proto(tunnel)
curl = CurlClient(env=env)
- url = f'https://localhost:{env.https_port}/data.json'
+ url = f'https://localhost:{env.https_port}/data.json?[0-0]'
r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
with_headers=True,
extra_args=[
'--proxytunnel',
- '--proxy', f'https://{env.proxy_domain}:{env.proxys_port}/',
- '--resolve', f'{env.proxy_domain}:{env.proxys_port}:127.0.0.1',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port(tunnel)}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port(tunnel)}:127.0.0.1',
+ '--proxy-cacert', env.ca.cert_file,
+ ])
+ r.check_response(count=1, http_status=200,
+ protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
+ assert self.get_tunnel_proto_used(r) == exp_tunnel_proto
+ srcfile = os.path.join(httpd.docs_dir, 'data.json')
+ dfile = curl.download_file(0)
+ assert filecmp.cmp(srcfile, dfile, shallow=False)
+
+ # download many https: with proto via https: proxytunnel
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
+ @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("fname, fcount", [
+ ['data.json', 100],
+ ['data-100k', 20],
+ ['data-1m', 5]
+ ])
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_10_07_pts_down_small(self, env: Env, httpd, nghttpx_fwd, proto,
+ tunnel, fname, fcount, repeat):
+ if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
+ pytest.skip('only supported with nghttp2')
+ count = fcount
+ exp_tunnel_proto = self.set_tunnel_proto(tunnel)
+ curl = CurlClient(env=env)
+ url = f'https://localhost:{env.https_port}/{fname}?[0-{count-1}]'
+ r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
+ with_headers=True,
+ extra_args=[
+ '--proxytunnel',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port(tunnel)}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port(tunnel)}:127.0.0.1',
+ '--proxy-cacert', env.ca.cert_file,
+ ])
+ r.check_response(count=count, http_status=200,
+ protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
+ assert self.get_tunnel_proto_used(r) == exp_tunnel_proto
+ srcfile = os.path.join(httpd.docs_dir, fname)
+ for i in range(count):
+ dfile = curl.download_file(i)
+ assert filecmp.cmp(srcfile, dfile, shallow=False)
+
+ # upload many https: with proto via https: proxytunnel
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
+ @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("fname, fcount", [
+ ['data.json', 50],
+ ['data-100k', 20],
+ ['data-1m', 5]
+ ])
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_10_08_upload_seq_large(self, env: Env, httpd, nghttpx, proto,
+ tunnel, fname, fcount, repeat):
+ if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
+ pytest.skip('only supported with nghttp2')
+ count = fcount
+ srcfile = os.path.join(httpd.docs_dir, fname)
+ exp_tunnel_proto = self.set_tunnel_proto(tunnel)
+ curl = CurlClient(env=env)
+ url = f'https://localhost:{env.https_port}/curltest/echo?id=[0-{count-1}]'
+ r = curl.http_upload(urls=[url], data=f'@{srcfile}', alpn_proto=proto,
+ extra_args=[
+ '--proxytunnel',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port(tunnel)}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port(tunnel)}:127.0.0.1',
+ '--proxy-cacert', env.ca.cert_file,
+ ])
+ assert self.get_tunnel_proto_used(r) == exp_tunnel_proto
+ r.check_response(count=count, http_status=200)
+ indata = open(srcfile).readlines()
+ r.check_response(count=count, http_status=200)
+ for i in range(count):
+ respdata = open(curl.response_file(i)).readlines()
+ assert respdata == indata
+
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
+ @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_10_09_reuse_ser(self, env: Env, httpd, nghttpx_fwd, tunnel, repeat):
+ if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
+ pytest.skip('only supported with nghttp2')
+ exp_tunnel_proto = self.set_tunnel_proto(tunnel)
+ curl = CurlClient(env=env)
+ url1 = f'https://localhost:{env.https_port}/data.json'
+ url2 = f'http://localhost:{env.http_port}/data.json'
+ r = curl.http_download(urls=[url1, url2], alpn_proto='http/1.1', with_stats=True,
+ with_headers=True,
+ extra_args=[
+ '--proxytunnel',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port(tunnel)}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port(tunnel)}:127.0.0.1',
'--proxy-cacert', env.ca.cert_file,
])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
- exp_proto = 'HTTP/2' if proto == 'h2' else 'HTTP/1.1'
- assert r.response['protocol'] == exp_proto
+ r.check_response(count=2, http_status=200)
+ assert self.get_tunnel_proto_used(r) == exp_tunnel_proto
+ if tunnel == 'h2':
+ # TODO: we would like to reuse the first connection for the
+ # second URL, but this is currently not possible
+ # assert r.total_connects == 1
+ assert r.total_connects == 2
+ else:
+ assert r.total_connects == 2
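
The new tunnel tests above select the CONNECT protocol through an environment switch and then verify what curl actually negotiated from its trace output. A condensed sketch of that pattern; the standalone function names are illustrative stand-ins for set_tunnel_proto() and get_tunnel_proto_used() shown in the hunk.

import os
import re
from typing import List, Optional

def select_tunnel_proto(tunnel: str) -> str:
    # CURL_PROXY_TUNNEL_H2=1 asks curl to negotiate an HTTP/2 CONNECT tunnel
    if tunnel == 'h2':
        os.environ['CURL_PROXY_TUNNEL_H2'] = '1'
        return 'HTTP/2'
    os.environ.pop('CURL_PROXY_TUNNEL_H2', None)
    return 'HTTP/1.1'

def negotiated_tunnel_proto(trace_lines: List[str]) -> Optional[str]:
    # curl reports the result as "CONNECT tunnel: <proto> negotiated"
    for line in trace_lines:
        m = re.match(r'.* CONNECT tunnel: (\S+) negotiated', line)
        if m:
            return m.group(1)
    return None
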
diff --git a/tests/http/test_11_unix.py b/tests/http/test_11_unix.py
index 86ecd6f86..dc2684adb 100644
--- a/tests/http/test_11_unix.py
+++ b/tests/http/test_11_unix.py
@@ -101,10 +101,10 @@ class TestUnix:
extra_args=[
'--unix-socket', uds_faker.path,
])
- r.check_exit_code(0)
- r.check_stats(count=1, exp_status=200)
+ r.check_response(count=1, http_status=200)
# download https: via unix socket
+ @pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
def test_11_02_unix_connect_http(self, env: Env, httpd, uds_faker, repeat):
curl = CurlClient(env=env)
url = f'https://{env.domain1}:{env.https_port}/data.json'
@@ -112,7 +112,7 @@ class TestUnix:
extra_args=[
'--unix-socket', uds_faker.path,
])
- r.check_exit_code(35)
+ r.check_response(exitcode=35, http_status=None)
# download HTTP/3 via unix socket
@pytest.mark.skipif(condition=not Env.have_h3(), reason='h3 not supported')
@@ -124,4 +124,4 @@ class TestUnix:
extra_args=[
'--unix-socket', uds_faker.path,
])
- r.check_exit_code(96)
+ r.check_response(exitcode=96, http_status=None)
diff --git a/tests/http/test_12_reuse.py b/tests/http/test_12_reuse.py
index cd22af6e9..302929956 100644
--- a/tests/http/test_12_reuse.py
+++ b/tests/http/test_12_reuse.py
@@ -37,6 +37,7 @@ log = logging.getLogger(__name__)
@pytest.mark.skipif(condition=Env.curl_uses_lib('bearssl'), reason='BearSSL too slow')
+@pytest.mark.skipif(condition=not Env.have_ssl_curl(), reason=f"curl without SSL")
class TestReuse:
# check if HTTP/1.1 handles 'Connection: close' correctly
@@ -52,8 +53,7 @@ class TestReuse:
curl = CurlClient(env=env)
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
r = curl.http_download(urls=[urln], alpn_proto=proto)
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
# Server sends `Connection: close` on every 2nd request, requiring
# a new connection
assert r.total_connects == count/2
@@ -72,8 +72,7 @@ class TestReuse:
r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
'--rate', '30/m',
])
- r.check_exit_code(0)
- r.check_stats(count=count, exp_status=200)
+ r.check_response(count=count, http_status=200)
# Connections time out on server before we send another request,
assert r.total_connects == count
# we do not see how often a request was retried in the stats, so
diff --git a/tests/http/test_13_proxy_auth.py b/tests/http/test_13_proxy_auth.py
new file mode 100644
index 000000000..b20a84945
--- /dev/null
+++ b/tests/http/test_13_proxy_auth.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#***************************************************************************
+# _ _ ____ _
+# Project ___| | | | _ \| |
+# / __| | | | |_) | |
+# | (__| |_| | _ <| |___
+# \___|\___/|_| \_\_____|
+#
+# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution. The terms
+# are also available at https://curl.se/docs/copyright.html.
+#
+# You may opt to use, copy, modify, merge, publish, distribute and/or sell
+# copies of the Software, and permit persons to whom the Software is
+# furnished to do so, under the terms of the COPYING file.
+#
+# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
+# KIND, either express or implied.
+#
+# SPDX-License-Identifier: curl
+#
+###########################################################################
+#
+import filecmp
+import logging
+import os
+import re
+import time
+import pytest
+
+from testenv import Env, CurlClient
+
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.skipif(condition=Env.setup_incomplete(),
+ reason=f"missing: {Env.incomplete_reason()}")
+class TestProxyAuth:
+
+ @pytest.fixture(autouse=True, scope='class')
+ def _class_scope(self, env, httpd, nghttpx_fwd):
+ if env.have_nghttpx():
+ nghttpx_fwd.start_if_needed()
+ httpd.clear_extra_configs()
+ httpd.set_proxy_auth(True)
+ httpd.reload()
+ yield
+ httpd.set_proxy_auth(False)
+ httpd.reload()
+
+ def set_tunnel_proto(self, proto):
+ if proto == 'h2':
+ os.environ['CURL_PROXY_TUNNEL_H2'] = '1'
+ return 'HTTP/2'
+ else:
+ os.environ.pop('CURL_PROXY_TUNNEL_H2', None)
+ return 'HTTP/1.1'
+
+ def get_tunnel_proto_used(self, curl: CurlClient):
+ assert os.path.exists(curl.trace_file)
+ for l in open(curl.trace_file).readlines():
+ m = re.match(r'.* == Info: CONNECT tunnel: (\S+) negotiated', l)
+ if m:
+ return m.group(1)
+ return None
+
+ # download via http: proxy (no tunnel), no auth
+ def test_13_01_proxy_no_auth(self, env: Env, httpd, repeat):
+ curl = CurlClient(env=env)
+ url = f'http://localhost:{env.http_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
+ extra_args=[
+ '--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
+ '--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
+ ])
+ r.check_response(count=1, http_status=407)
+
+ # download via http: proxy (no tunnel), auth
+ def test_13_02_proxy_auth(self, env: Env, httpd, repeat):
+ curl = CurlClient(env=env)
+ url = f'http://localhost:{env.http_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
+ extra_args=[
+ '--proxy-user', 'proxy:proxy',
+ '--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
+ '--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
+ ])
+ r.check_response(count=1, http_status=200)
+
+ @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
+ reason='curl lacks HTTPS-proxy support')
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_13_03_proxys_no_auth(self, env: Env, httpd, nghttpx_fwd, repeat):
+ curl = CurlClient(env=env)
+ url = f'http://localhost:{env.http_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
+ extra_args=[
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port()}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port()}:127.0.0.1',
+ '--proxy-cacert', env.ca.cert_file,
+ ])
+ r.check_response(count=1, http_status=407)
+
+ @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
+ reason='curl lacks HTTPS-proxy support')
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ def test_13_04_proxys_auth(self, env: Env, httpd, nghttpx_fwd, repeat):
+ curl = CurlClient(env=env)
+ url = f'http://localhost:{env.http_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
+ extra_args=[
+ '--proxy-user', 'proxy:proxy',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port()}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port()}:127.0.0.1',
+ '--proxy-cacert', env.ca.cert_file,
+ ])
+ r.check_response(count=1, http_status=200)
+
+ def test_13_05_tunnel_http_no_auth(self, env: Env, httpd, repeat):
+ curl = CurlClient(env=env)
+ url = f'http://localhost:{env.http_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
+ extra_args=[
+ '--proxytunnel',
+ '--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
+ '--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
+ ])
+ # expect "COULD_NOT_CONNECT"
+ r.check_response(exitcode=56, http_status=None)
+
+ def test_13_06_tunnel_http_auth(self, env: Env, httpd, repeat):
+ curl = CurlClient(env=env)
+ url = f'http://localhost:{env.http_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto='http/1.1', with_stats=True,
+ extra_args=[
+ '--proxytunnel',
+ '--proxy-user', 'proxy:proxy',
+ '--proxy', f'http://{env.proxy_domain}:{env.proxy_port}/',
+ '--resolve', f'{env.proxy_domain}:{env.proxy_port}:127.0.0.1',
+ ])
+ r.check_response(count=1, http_status=200)
+
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
+ reason='curl lacks HTTPS-proxy support')
+ @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ def test_13_07_tunnels_no_auth(self, env: Env, httpd, proto, tunnel, repeat):
+ if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
+ pytest.skip('only supported with nghttp2')
+ exp_tunnel_proto = self.set_tunnel_proto(tunnel)
+ curl = CurlClient(env=env)
+ url = f'https://localhost:{env.https_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
+ with_headers=True, with_trace=True,
+ extra_args=[
+ '--proxytunnel',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port(tunnel)}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port(tunnel)}:127.0.0.1',
+ '--proxy-cacert', env.ca.cert_file,
+ ])
+ # expect "COULD_NOT_CONNECT"
+ r.check_response(exitcode=56, http_status=None)
+ assert self.get_tunnel_proto_used(curl) == exp_tunnel_proto
+
+ @pytest.mark.skipif(condition=not Env.have_nghttpx(), reason="no nghttpx available")
+ @pytest.mark.skipif(condition=not Env.curl_has_feature('HTTPS-proxy'),
+ reason='curl lacks HTTPS-proxy support')
+ @pytest.mark.parametrize("proto", ['http/1.1', 'h2'])
+ @pytest.mark.parametrize("tunnel", ['http/1.1', 'h2'])
+ def test_13_08_tunnels_auth(self, env: Env, httpd, proto, tunnel, repeat):
+ if tunnel == 'h2' and not env.curl_uses_lib('nghttp2'):
+ pytest.skip('only supported with nghttp2')
+ exp_tunnel_proto = self.set_tunnel_proto(tunnel)
+ curl = CurlClient(env=env)
+ url = f'https://localhost:{env.https_port}/data.json'
+ r = curl.http_download(urls=[url], alpn_proto=proto, with_stats=True,
+ with_headers=True, with_trace=True,
+ extra_args=[
+ '--proxytunnel',
+ '--proxy-user', 'proxy:proxy',
+ '--proxy', f'https://{env.proxy_domain}:{env.pts_port(tunnel)}/',
+ '--resolve', f'{env.proxy_domain}:{env.pts_port(tunnel)}:127.0.0.1',
+ '--proxy-cacert', env.ca.cert_file,
+ ])
+ r.check_response(count=1, http_status=200,
+ protocol='HTTP/2' if proto == 'h2' else 'HTTP/1.1')
+ assert self.get_tunnel_proto_used(curl) == exp_tunnel_proto
+
diff --git a/tests/http/testenv/__init__.py b/tests/http/testenv/__init__.py
index 8eb7632ae..3da668570 100644
--- a/tests/http/testenv/__init__.py
+++ b/tests/http/testenv/__init__.py
@@ -24,9 +24,14 @@
#
###########################################################################
#
+import pytest
+pytest.register_assert_rewrite("testenv.env", "testenv.curl", "testenv.caddy",
+ "testenv.httpd", "testenv.nghttpx")
+
from .env import Env
from .certs import TestCA, Credentials
from .caddy import Caddy
from .httpd import Httpd
from .curl import CurlClient, ExecResult
from .nghttpx import Nghttpx
+from .nghttpx import Nghttpx, NghttpxQuic, NghttpxFwd
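
The register_assert_rewrite() call is placed before the imports so that pytest rewrites the assert statements inside the helper modules (for example ExecResult.check_response()) and failures show the detailed introspection output instead of a bare AssertionError. A minimal sketch of the ordering requirement:

import pytest

# registration must happen before the module is first imported,
# otherwise its asserts keep the plain AssertionError output
pytest.register_assert_rewrite("testenv.curl")

from testenv.curl import ExecResult  # asserts in check_response() are now rewritten
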
diff --git a/tests/http/testenv/curl.py b/tests/http/testenv/curl.py
index 98c1bd4ab..a272dbf27 100644
--- a/tests/http/testenv/curl.py
+++ b/tests/http/testenv/curl.py
@@ -24,6 +24,7 @@
#
###########################################################################
#
+import pytest
import json
import logging
import os
@@ -31,7 +32,7 @@ import re
import shutil
import subprocess
from datetime import timedelta, datetime
-from typing import List, Optional, Dict
+from typing import List, Optional, Dict, Union
from urllib.parse import urlparse
from .env import Env
@@ -111,6 +112,10 @@ class ExecResult:
return ''.join(self._stderr)
@property
+ def trace_lines(self) -> List[str]:
+ return self._trace if self._trace else self._stderr
+
+ @property
def duration(self) -> timedelta:
return self._duration
@@ -159,53 +164,97 @@ class ExecResult:
def add_assets(self, assets: List):
self._assets.extend(assets)
- def check_exit_code(self, code: int):
- assert self.exit_code == code, \
- f'expected exit code {code}, '\
- f'got {self.exit_code}\n{self._dump_logs()}'
-
- def check_exit_code_not(self, code: int):
- assert self.exit_code != code, \
- f'expected exit code other than {code}\n{self._dump_logs()}'
-
- def check_responses(self, count: int, exp_status: Optional[int] = None,
- exp_exitcode: Optional[int] = None):
- assert len(self.responses) == count, \
- f'response count: expected {count}, ' \
- f'got {len(self.responses)}\n{self._dump_logs()}'
- if exp_status is not None:
- for idx, x in enumerate(self.responses):
- assert x['status'] == exp_status, \
- f'response #{idx} status: expected {exp_status},'\
- f'got {x["status"]}\n{self._dump_logs()}'
- if exp_exitcode is not None:
- for idx, x in enumerate(self.responses):
- if 'exitcode' in x:
- assert x['exitcode'] == 0, \
- f'response #{idx} exitcode: expected {exp_exitcode}, '\
- f'got {x["exitcode"]}\n{self._dump_logs()}'
- if self.with_stats:
- self.check_stats(count)
+ def check_exit_code(self, code: Union[int, bool]):
+ if code is True:
+ assert self.exit_code == 0, f'expected exit code {code}, '\
+ f'got {self.exit_code}\n{self.dump_logs()}'
+ elif code is False:
+ assert self.exit_code != 0, f'expected exit code {code}, '\
+ f'got {self.exit_code}\n{self.dump_logs()}'
+ else:
+ assert self.exit_code == code, f'expected exit code {code}, '\
+ f'got {self.exit_code}\n{self.dump_logs()}'
+
+ def check_response(self, http_status: Optional[int] = 200,
+ count: Optional[int] = 1,
+ protocol: Optional[str] = None,
+ exitcode: Optional[int] = 0,
+ connect_count: Optional[int] = None):
+ if exitcode:
+ self.check_exit_code(exitcode)
+ if self.with_stats and isinstance(exitcode, int):
+ for idx, x in enumerate(self.stats):
+ if 'exitcode' in x:
+ assert int(x['exitcode']) == exitcode, \
+ f'response #{idx} exitcode: expected {exitcode}, '\
+ f'got {x["exitcode"]}\n{self.dump_logs()}'
- def check_stats(self, count: int, exp_status: Optional[int] = None,
- exp_exitcode: Optional[int] = None):
+ if self.with_stats:
+ assert len(self.stats) == count, \
+ f'response count: expected {count}, ' \
+ f'got {len(self.stats)}\n{self.dump_logs()}'
+ else:
+ assert len(self.responses) == count, \
+ f'response count: expected {count}, ' \
+ f'got {len(self.responses)}\n{self.dump_logs()}'
+ if http_status is not None:
+ if self.with_stats:
+ for idx, x in enumerate(self.stats):
+ assert 'http_code' in x, \
+ f'response #{idx} reports no http_code\n{self.dump_logs()}'
+ assert x['http_code'] == http_status, \
+ f'response #{idx} http_code: expected {http_status}, '\
+ f'got {x["http_code"]}\n{self.dump_logs()}'
+ else:
+ for idx, x in enumerate(self.responses):
+ assert x['status'] == http_status, \
+ f'response #{idx} status: expected {http_status},'\
+ f'got {x["status"]}\n{self.dump_logs()}'
+ if protocol is not None:
+ if self.with_stats:
+ http_version = None
+ if protocol == 'HTTP/1.1':
+ http_version = '1.1'
+ elif protocol == 'HTTP/2':
+ http_version = '2'
+ elif protocol == 'HTTP/3':
+ http_version = '3'
+ if http_version is not None:
+ for idx, x in enumerate(self.stats):
+ assert x['http_version'] == http_version, \
+ f'response #{idx} protocol: expected http/{http_version},' \
+ f'got version {x["http_version"]}\n{self.dump_logs()}'
+ else:
+ for idx, x in enumerate(self.responses):
+ assert x['protocol'] == protocol, \
+ f'response #{idx} protocol: expected {protocol},'\
+ f'got {x["protocol"]}\n{self.dump_logs()}'
+ if connect_count is not None:
+ assert self.total_connects == connect_count, \
+ f'expected {connect_count}, but {self.total_connects} '\
+ f'were made\n{self.dump_logs()}'
+
+ def check_stats(self, count: int, http_status: Optional[int] = None,
+ exitcode: Optional[int] = None):
+ if exitcode is None:
+ self.check_exit_code(0)
assert len(self.stats) == count, \
- f'stats count: expected {count}, got {len(self.stats)}\n{self._dump_logs()}'
- if exp_status is not None:
+ f'stats count: expected {count}, got {len(self.stats)}\n{self.dump_logs()}'
+ if http_status is not None:
for idx, x in enumerate(self.stats):
assert 'http_code' in x, \
- f'status #{idx} reports no http_code\n{self._dump_logs()}'
- assert x['http_code'] == exp_status, \
- f'status #{idx} http_code: expected {exp_status}, '\
- f'got {x["http_code"]}\n{self._dump_logs()}'
- if exp_exitcode is not None:
+ f'status #{idx} reports no http_code\n{self.dump_logs()}'
+ assert x['http_code'] == http_status, \
+ f'status #{idx} http_code: expected {http_status}, '\
+ f'got {x["http_code"]}\n{self.dump_logs()}'
+ if exitcode is not None:
for idx, x in enumerate(self.stats):
if 'exitcode' in x:
assert x['exitcode'] == 0, \
- f'status #{idx} exitcode: expected {exp_exitcode}, '\
- f'got {x["exitcode"]}\n{self._dump_logs()}'
+ f'status #{idx} exitcode: expected {exitcode}, '\
+ f'got {x["exitcode"]}\n{self.dump_logs()}'
- def _dump_logs(self):
+ def dump_logs(self):
lines = []
lines.append('>>--stdout ----------------------------------------------\n')
lines.extend(self._stdout)
@@ -252,6 +301,10 @@ class CurlClient:
def download_file(self, i: int) -> str:
return os.path.join(self.run_dir, f'download_{i}.data')
+ @property
+ def trace_file(self) -> str:
+ return self._tracefile
+
def _rmf(self, path):
if os.path.exists(path):
return os.remove(path)
@@ -272,6 +325,7 @@ class CurlClient:
with_stats: bool = True,
with_headers: bool = False,
no_save: bool = False,
+ with_trace: bool = False,
extra_args: List[str] = None):
if extra_args is None:
extra_args = []
@@ -292,12 +346,14 @@ class CurlClient:
])
return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
with_stats=with_stats,
- with_headers=with_headers)
+ with_headers=with_headers,
+ with_trace=with_trace)
def http_upload(self, urls: List[str], data: str,
alpn_proto: Optional[str] = None,
with_stats: bool = True,
with_headers: bool = False,
+ with_trace: bool = False,
extra_args: Optional[List[str]] = None):
if extra_args is None:
extra_args = []
@@ -310,12 +366,14 @@ class CurlClient:
])
return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
with_stats=with_stats,
- with_headers=with_headers)
+ with_headers=with_headers,
+ with_trace=with_trace)
def http_put(self, urls: List[str], data=None, fdata=None,
alpn_proto: Optional[str] = None,
with_stats: bool = True,
with_headers: bool = False,
+ with_trace: bool = False,
extra_args: Optional[List[str]] = None):
if extra_args is None:
extra_args = []
@@ -333,7 +391,8 @@ class CurlClient:
return self._raw(urls, intext=data,
alpn_proto=alpn_proto, options=extra_args,
with_stats=with_stats,
- with_headers=with_headers)
+ with_headers=with_headers,
+ with_trace=with_trace)
def response_file(self, idx: int):
return os.path.join(self._run_dir, f'download_{idx}.data')
@@ -379,15 +438,16 @@ class CurlClient:
duration=datetime.now() - start,
with_stats=with_stats)
- def _raw(self, urls, intext='', timeout=10, options=None, insecure=False,
+ def _raw(self, urls, intext='', timeout=None, options=None, insecure=False,
alpn_proto: Optional[str] = None,
force_resolve=True,
with_stats=False,
- with_headers=True):
+ with_headers=True,
+ with_trace=False):
args = self._complete_args(
urls=urls, timeout=timeout, options=options, insecure=insecure,
alpn_proto=alpn_proto, force_resolve=force_resolve,
- with_headers=with_headers)
+ with_headers=with_headers, with_trace=with_trace)
r = self._run(args, intext=intext, with_stats=with_stats)
if r.exit_code == 0 and with_headers:
self._parse_headerfile(self._headerfile, r=r)
@@ -398,14 +458,15 @@ class CurlClient:
def _complete_args(self, urls, timeout=None, options=None,
insecure=False, force_resolve=True,
alpn_proto: Optional[str] = None,
- with_headers: bool = True):
+ with_headers: bool = True,
+ with_trace: bool = False):
if not isinstance(urls, list):
urls = [urls]
args = [self._curl, "-s", "--path-as-is"]
if with_headers:
args.extend(["-D", self._headerfile])
- if self.env.verbose > 2:
+ if with_trace or self.env.verbose > 2:
args.extend(['--trace', self._tracefile, '--trace-time'])
elif self.env.verbose > 1:
args.extend(['--trace', self._tracefile])
diff --git a/tests/http/testenv/env.py b/tests/http/testenv/env.py
index 6dcb4b2ea..1e175e4a9 100644
--- a/tests/http/testenv/env.py
+++ b/tests/http/testenv/env.py
@@ -106,6 +106,7 @@ class EnvConfig:
'https': socket.SOCK_STREAM,
'proxy': socket.SOCK_STREAM,
'proxys': socket.SOCK_STREAM,
+ 'h2proxys': socket.SOCK_STREAM,
'caddy': socket.SOCK_STREAM,
'caddys': socket.SOCK_STREAM,
})
@@ -230,10 +231,18 @@ class Env:
return Env.CONFIG.get_incomplete_reason()
@staticmethod
+ def have_nghttpx() -> bool:
+ return Env.CONFIG.nghttpx is not None
+
+ @staticmethod
def have_h3_server() -> bool:
return Env.CONFIG.nghttpx_with_h3
@staticmethod
+ def have_ssl_curl() -> bool:
+ return 'ssl' in Env.CONFIG.curl_props['features']
+
+ @staticmethod
def have_h2_curl() -> bool:
return 'http2' in Env.CONFIG.curl_props['features']
@@ -371,14 +380,22 @@ class Env:
return self.https_port
@property
- def proxy_port(self) -> str:
+ def proxy_port(self) -> int:
return self.CONFIG.ports['proxy']
@property
- def proxys_port(self) -> str:
+ def proxys_port(self) -> int:
return self.CONFIG.ports['proxys']
@property
+ def h2proxys_port(self) -> int:
+ return self.CONFIG.ports['h2proxys']
+
+ def pts_port(self, proto: str = 'http/1.1') -> int:
+ # proxy tunnel port
+ return self.CONFIG.ports['h2proxys' if proto == 'h2' else 'proxys']
+
+ @property
def caddy(self) -> str:
return self.CONFIG.caddy
diff --git a/tests/http/testenv/httpd.py b/tests/http/testenv/httpd.py
index 5b20d31e2..612da1006 100644
--- a/tests/http/testenv/httpd.py
+++ b/tests/http/testenv/httpd.py
@@ -44,7 +44,9 @@ class Httpd:
MODULES = [
'log_config', 'logio', 'unixd', 'version', 'watchdog',
- 'authn_core', 'authz_user', 'authz_core', 'authz_host',
+ 'authn_core', 'authn_file',
+ 'authz_user', 'authz_core', 'authz_host',
+ 'auth_basic', 'auth_digest',
'env', 'filter', 'headers', 'mime',
'rewrite', 'http2', 'ssl', 'proxy', 'proxy_http', 'proxy_connect',
'mpm_event',
@@ -56,7 +58,7 @@ class Httpd:
MOD_CURLTEST = None
- def __init__(self, env: Env):
+ def __init__(self, env: Env, proxy_auth: bool = False):
self.env = env
self._cmd = env.apachectl
self._apache_dir = os.path.join(env.gen_dir, 'apache')
@@ -68,7 +70,9 @@ class Httpd:
self._logs_dir = os.path.join(self._apache_dir, 'logs')
self._error_log = os.path.join(self._logs_dir, 'error_log')
self._tmp_dir = os.path.join(self._apache_dir, 'tmp')
+ self._passwords = os.path.join(self._conf_dir, 'passwords')
self._mods_dir = None
+ self._proxy_auth = proxy_auth
self._extra_configs = {}
assert env.apxs
p = subprocess.run(args=[env.apxs, '-q', 'libexecdir'],
@@ -103,6 +107,9 @@ class Httpd:
def clear_extra_configs(self):
self._extra_configs = {}
+ def set_proxy_auth(self, active: bool):
+ self._proxy_auth = active
+
def _run(self, args, intext=''):
env = {}
for key, val in os.environ.items():
@@ -146,6 +153,7 @@ class Httpd:
r = self._apachectl('stop')
if r.exit_code == 0:
return self.wait_dead(timeout=timedelta(seconds=5))
+ log.fatal(f'stopping httpd failed: {r}')
return r.exit_code == 0
def restart(self):
@@ -211,6 +219,9 @@ class Httpd:
'server': f'{domain2}',
}
fd.write(JSONEncoder().encode(data))
+ if self._proxy_auth:
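+            # htpasswd-style credentials for user 'proxy', consumed by _get_proxy_conf()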
+ with open(self._passwords, 'w') as fd:
+ fd.write('proxy:$apr1$FQfeInbs$WQZbODJlVg60j0ogEIlTW/\n')
with open(self._conf_file, 'w') as fd:
for m in self.MODULES:
if os.path.exists(os.path.join(self._mods_dir, f'mod_{m}.so')):
@@ -223,9 +234,6 @@ class Httpd:
f'PidFile httpd.pid',
f'ErrorLog {self._error_log}',
f'LogLevel {self._get_log_level()}',
- f'LogLevel http:trace4',
- f'LogLevel proxy:trace4',
- f'LogLevel proxy_http:trace4',
f'H2MinWorkers 16',
f'H2MaxWorkers 128',
f'H2Direct on',
@@ -284,30 +292,33 @@ class Httpd:
conf.extend([ # http forward proxy
f'<VirtualHost *:{self.env.proxy_port}>',
f' ServerName {proxy_domain}',
- f' Protocols h2c, http/1.1',
+ f' Protocols h2c http/1.1',
f' ProxyRequests On',
f' ProxyVia On',
f' AllowCONNECT {self.env.http_port} {self.env.https_port}',
- f' <Proxy "*">',
- f' Require ip 127.0.0.1',
- f' </Proxy>',
+ ])
+ conf.extend(self._get_proxy_conf())
+ conf.extend([
f'</VirtualHost>',
+ f'',
])
conf.extend([ # https forward proxy
f'<VirtualHost *:{self.env.proxys_port}>',
f' ServerName {proxy_domain}',
- f' Protocols h2, http/1.1',
+ f' Protocols h2 http/1.1',
f' SSLEngine on',
f' SSLCertificateFile {proxy_creds.cert_file}',
f' SSLCertificateKeyFile {proxy_creds.pkey_file}',
f' ProxyRequests On',
f' ProxyVia On',
f' AllowCONNECT {self.env.http_port} {self.env.https_port}',
- f' <Proxy "*">',
- f' Require ip 127.0.0.1',
- f' </Proxy>',
+ ])
+ conf.extend(self._get_proxy_conf())
+ conf.extend([
f'</VirtualHost>',
+ f'',
])
+
fd.write("\n".join(conf))
with open(os.path.join(self._conf_dir, 'mime.types'), 'w') as fd:
fd.write("\n".join([
@@ -316,13 +327,31 @@ class Httpd:
''
]))
+ def _get_proxy_conf(self):
+ if self._proxy_auth:
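+            # demand Basic auth from user 'proxy' on the forward-proxy vhosts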
+ return [
+ f' <Proxy "*">',
+ f' AuthType Basic',
+ f' AuthName "Restricted Proxy"',
+ f' AuthBasicProvider file',
+ f' AuthUserFile "{self._passwords}"',
+ f' Require user proxy',
+ f' </Proxy>',
+ ]
+ else:
+ return [
+ f' <Proxy "*">',
+ f' Require ip 127.0.0.1',
+ f' </Proxy>',
+ ]
+
def _get_log_level(self):
- #if self.env.verbose > 3:
- # return 'trace2'
- #if self.env.verbose > 2:
- # return 'trace1'
- #if self.env.verbose > 1:
- # return 'debug'
+ if self.env.verbose > 3:
+ return 'trace2'
+ if self.env.verbose > 2:
+ return 'trace1'
+ if self.env.verbose > 1:
+ return 'debug'
return 'info'
def _curltest_conf(self) -> List[str]:
diff --git a/tests/http/testenv/nghttpx.py b/tests/http/testenv/nghttpx.py
index 1a26aa84f..234b31c0e 100644
--- a/tests/http/testenv/nghttpx.py
+++ b/tests/http/testenv/nghttpx.py
@@ -41,10 +41,12 @@ log = logging.getLogger(__name__)
class Nghttpx:
- def __init__(self, env: Env):
+ def __init__(self, env: Env, port: int, name: str):
self.env = env
+ self._name = name
+ self._port = port
self._cmd = env.nghttpx
- self._run_dir = os.path.join(env.gen_dir, 'nghttpx')
+ self._run_dir = os.path.join(env.gen_dir, name)
self._pid_file = os.path.join(self._run_dir, 'nghttpx.pid')
self._conf_file = os.path.join(self._run_dir, 'nghttpx.conf')
self._error_log = os.path.join(self._run_dir, 'nghttpx.log')
@@ -76,27 +78,7 @@ class Nghttpx:
return True
def start(self, wait_live=True):
- self._mkpath(self._tmp_dir)
- if self._process:
- self.stop()
- args = [
- self._cmd,
- f'--frontend=*,{self.env.h3_port};quic',
- f'--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls',
- f'--backend=127.0.0.1,{self.env.http_port}',
- f'--log-level=INFO',
- f'--pid-file={self._pid_file}',
- f'--errorlog-file={self._error_log}',
- f'--conf={self._conf_file}',
- f'--cacert={self.env.ca.cert_file}',
- self.env.get_credentials(self.env.domain1).pkey_file,
- self.env.get_credentials(self.env.domain1).cert_file,
- ]
- ngerr = open(self._stderr, 'a')
- self._process = subprocess.Popen(args=args, stderr=ngerr)
- if self._process.returncode is not None:
- return False
- return not wait_live or self.wait_live(timeout=timedelta(seconds=5))
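+        # concrete subclasses (NghttpxQuic, NghttpxFwd) provide their own start()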
+ pass
def stop_if_running(self):
if self.is_running():
@@ -146,7 +128,7 @@ class Nghttpx:
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'https://{self.env.domain1}:{self.env.h3_port}/'
+ check_url = f'https://{self.env.domain1}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=['--http3-only'])
if r.exit_code != 0:
return True
@@ -159,7 +141,7 @@ class Nghttpx:
curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
try_until = datetime.now() + timeout
while datetime.now() < try_until:
- check_url = f'https://{self.env.domain1}:{self.env.h3_port}/'
+ check_url = f'https://{self.env.domain1}:{self._port}/'
r = curl.http_get(url=check_url, extra_args=[
'--http3-only', '--trace', 'curl.trace', '--trace-time'
])
@@ -184,3 +166,94 @@ class Nghttpx:
fd.write("\n".join([
'# do we need something here?'
]))
+
+
+class NghttpxQuic(Nghttpx):
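+    """nghttpx with a QUIC/HTTP3 frontend, forwarding to the test httpd."""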
+
+ def __init__(self, env: Env):
+ super().__init__(env=env, name='nghttpx-quic', port=env.h3_port)
+
+ def start(self, wait_live=True):
+ self._mkpath(self._tmp_dir)
+ if self._process:
+ self.stop()
+ args = [
+ self._cmd,
+ f'--frontend=*,{self.env.h3_port};quic',
+ f'--backend=127.0.0.1,{self.env.https_port};{self.env.domain1};sni={self.env.domain1};proto=h2;tls',
+ f'--backend=127.0.0.1,{self.env.http_port}',
+ f'--log-level=INFO',
+ f'--pid-file={self._pid_file}',
+ f'--errorlog-file={self._error_log}',
+ f'--conf={self._conf_file}',
+ f'--cacert={self.env.ca.cert_file}',
+ self.env.get_credentials(self.env.domain1).pkey_file,
+ self.env.get_credentials(self.env.domain1).cert_file,
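+            # enlarge HTTP/3 stream and connection flow-control windows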
+ f'--frontend-http3-window-size=1M',
+ f'--frontend-http3-max-window-size=10M',
+ f'--frontend-http3-connection-window-size=10M',
+ f'--frontend-http3-max-connection-window-size=100M',
+ ]
+ ngerr = open(self._stderr, 'a')
+ self._process = subprocess.Popen(args=args, stderr=ngerr)
+ if self._process.returncode is not None:
+ return False
+ return not wait_live or self.wait_live(timeout=timedelta(seconds=5))
+
+
+class NghttpxFwd(Nghttpx):
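+    """nghttpx running as HTTP/2 forward proxy in front of the Apache proxy port."""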
+
+ def __init__(self, env: Env):
+ super().__init__(env=env, name='nghttpx-fwd', port=env.h2proxys_port)
+
+ def start(self, wait_live=True):
+ self._mkpath(self._tmp_dir)
+ if self._process:
+ self.stop()
+ args = [
+ self._cmd,
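+            # run in HTTP/2 proxy mode, forwarding to the plain Apache forward proxy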
+ f'--http2-proxy',
+ f'--frontend=*,{self.env.h2proxys_port}',
+ f'--backend=127.0.0.1,{self.env.proxy_port}',
+ f'--log-level=INFO',
+ f'--pid-file={self._pid_file}',
+ f'--errorlog-file={self._error_log}',
+ f'--conf={self._conf_file}',
+ f'--cacert={self.env.ca.cert_file}',
+ self.env.get_credentials(self.env.proxy_domain).pkey_file,
+ self.env.get_credentials(self.env.proxy_domain).cert_file,
+ ]
+ ngerr = open(self._stderr, 'a')
+ self._process = subprocess.Popen(args=args, stderr=ngerr)
+ if self._process.returncode is not None:
+ return False
+ return not wait_live or self.wait_live(timeout=timedelta(seconds=5))
+
+ def wait_dead(self, timeout: timedelta):
+ curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
+ try_until = datetime.now() + timeout
+ while datetime.now() < try_until:
+ check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/'
+ r = curl.http_get(url=check_url)
+ if r.exit_code != 0:
+ return True
+ log.debug(f'waiting for nghttpx-fwd to stop responding: {r}')
+ time.sleep(.1)
+ log.debug(f"Server still responding after {timeout}")
+ return False
+
+ def wait_live(self, timeout: timedelta):
+ curl = CurlClient(env=self.env, run_dir=self._tmp_dir)
+ try_until = datetime.now() + timeout
+ while datetime.now() < try_until:
+ check_url = f'https://{self.env.proxy_domain}:{self.env.h2proxys_port}/'
+ r = curl.http_get(url=check_url, extra_args=[
+ '--trace', 'curl.trace', '--trace-time'
+ ])
+ if r.exit_code == 0:
+ return True
+ log.debug(f'waiting for nghttpx-fwd to become responsive: {r}')
+ time.sleep(.1)
+ log.error(f"Server still not responding after {timeout}")
+ return False
+