Skip to content

Commit 821f6e2

Browse files
icing
authored and bagder committed
CURLOPT_PIPEWAIT: allow waited reuse also for subsequent connections
As tested in test_02_07, when firing off 200 URLs with --parallel, 199 wait
for the first connection to be established. If that connection is multiuse,
URLs are added up to its capacity. The first URL over capacity opens another
connection. But subsequent URLs found the same situation and opened a
connection too. They should have waited for the second connection to actually
connect and make its capacity known.

This change fixes that by:

- setting `connkeep()` early in the HTTP setup handler, as otherwise a new
  connection is marked as "closeit" by default and not considered for
  multiuse at all
- always checking the "connected" status for a candidate and continuing to
  PIPEWAIT if no alternative is found

pytest:

- removed "skip" from test_02_07
- added test_02_07b to check that http/1.1 continues to work as before

Closes #10456
1 parent d79c3af commit 821f6e2

File tree

3 files changed

+33
-16
lines changed

3 files changed

+33
-16
lines changed

lib/http.c

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -233,6 +233,7 @@ static CURLcode http_setup_conn(struct Curl_easy *data,
233233

234234
Curl_mime_initpart(&http->form);
235235
data->req.p.http = http;
236+
connkeep(conn, "HTTP default");
236237

237238
if((data->state.httpwant == CURL_HTTP_VERSION_3)
238239
|| (data->state.httpwant == CURL_HTTP_VERSION_3ONLY)) {

lib/url.c

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1170,14 +1170,14 @@ ConnectionExists(struct Curl_easy *data,
11701170
continue;
11711171
}
11721172
}
1173+
}
11731174

1174-
if(!Curl_conn_is_connected(check, FIRSTSOCKET)) {
1175-
foundPendingCandidate = TRUE;
1176-
/* Don't pick a connection that hasn't connected yet */
1177-
infof(data, "Connection #%ld isn't open enough, can't reuse",
1178-
check->connection_id);
1179-
continue;
1180-
}
1175+
if(!Curl_conn_is_connected(check, FIRSTSOCKET)) {
1176+
foundPendingCandidate = TRUE;
1177+
/* Don't pick a connection that hasn't connected yet */
1178+
infof(data, "Connection #%ld isn't open enough, can't reuse",
1179+
check->connection_id);
1180+
continue;
11811181
}
11821182

11831183
#ifdef USE_UNIX_SOCKETS

tests/tests-httpd/test_02_download.py

Lines changed: 25 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -143,23 +143,39 @@ def test_02_06_download_500_parallel(self, env: Env,
143143
# http2 parallel transfers will use one connection (common limit is 100)
144144
assert r.total_connects == 1
145145

146-
# download 500 files parallel (max of 200), only h2
147-
@pytest.mark.skip(reason="TODO: we get 101 connections created. PIPEWAIT needs a fix")
146+
# download files parallel, check connection reuse/multiplex
148147
@pytest.mark.parametrize("proto", ['h2', 'h3'])
149-
def test_02_07_download_500_parallel(self, env: Env,
150-
httpd, nghttpx, repeat, proto):
148+
def test_02_07_download_reuse(self, env: Env,
149+
httpd, nghttpx, repeat, proto):
151150
if proto == 'h3' and not env.have_h3():
152151
pytest.skip("h3 not supported")
152+
count=200
153153
curl = CurlClient(env=env)
154-
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-499]'
154+
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
155155
r = curl.http_download(urls=[urln], alpn_proto=proto,
156-
with_stats=False, extra_args=[
156+
with_stats=True, extra_args=[
157157
'--parallel', '--parallel-max', '200'
158158
])
159159
assert r.exit_code == 0, f'{r}'
160-
r.check_stats(count=500, exp_status=200)
161-
# http2 should now use 2 connections, at most 5
162-
assert r.total_connects <= 5, "h2 should use fewer connections here"
160+
r.check_stats(count=count, exp_status=200)
161+
# should have used 2 connections only (test servers allow 100 req/conn)
162+
assert r.total_connects == 2, "h2 should use fewer connections here"
163+
164+
# download files parallel with http/1.1, check connection not reused
165+
@pytest.mark.parametrize("proto", ['http/1.1'])
166+
def test_02_07b_download_reuse(self, env: Env,
167+
httpd, nghttpx, repeat, proto):
168+
count=20
169+
curl = CurlClient(env=env)
170+
urln = f'https://{env.authority_for(env.domain1, proto)}/data.json?[0-{count-1}]'
171+
r = curl.http_download(urls=[urln], alpn_proto=proto,
172+
with_stats=True, extra_args=[
173+
'--parallel', '--parallel-max', '200'
174+
])
175+
assert r.exit_code == 0, f'{r}'
176+
r.check_stats(count=count, exp_status=200)
177+
# http/1.1 should have used count connections
178+
assert r.total_connects == count, "http/1.1 should use this many connections"
163179

164180
@pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
165181
def test_02_08_1MB_serial(self, env: Env,

0 commit comments

Comments (0)