diff --git a/src/tool_operate.c b/src/tool_operate.c
index 143e3d3d42..b6d4d0978e 100644
--- a/src/tool_operate.c
+++ b/src/tool_operate.c
@@ -1723,6 +1723,29 @@ static CURLcode parallel_event(struct parastate *s)
                                   &s->still_running);
   while(!s->mcode && (s->still_running || s->more_transfers)) {
+
+    if(!s->still_running && s->more_transfers) {
+      curl_off_t nxfers;
+      /* None are running, but there may be more ready to run. Try
+       * to add more when possible. */
+      s->result = add_parallel_transfers(s->multi, s->share,
+                                         &s->more_transfers,
+                                         &s->added_transfers);
+      if(s->result)
+        break;
+      if(s->more_transfers) {
+        /* There are still more transfers, so not all were added. If we
+         * did not add *any*, it means there is nothing to do until time
+         * passes and a transfer becomes ready to add.
+         * Avoid busy looping by taking a small break. */
+        CURLMcode mcode = curl_multi_get_offt(
+          s->multi, CURLMINFO_XFERS_CURRENT, &nxfers);
+        if(!mcode && !nxfers) {
+          /* None have been added, must be waiting on a timeout */
+          curlx_wait_ms(500);
+        }
+      }
+    }
 #if DEBUG_UV
     curl_mfprintf(tool_stderr, "parallel_event: uv_run(), "
                   "mcode=%d, %d running, %d more\n",
@@ -1919,6 +1942,13 @@ static CURLcode parallel_transfers(CURLSH *share)
         s->wrapitup_processed = TRUE;
       }
     }
+    else if(s->more_transfers) {
+      s->result = add_parallel_transfers(s->multi, s->share,
+                                         &s->more_transfers,
+                                         &s->added_transfers);
+      if(s->result)
+        break;
+    }
 
     s->mcode = curl_multi_poll(s->multi, NULL, 0, 1000, NULL);
     if(!s->mcode)
diff --git a/tests/http/test_05_errors.py b/tests/http/test_05_errors.py
index d6c11ff874..e6e1ad0705 100644
--- a/tests/http/test_05_errors.py
+++ b/tests/http/test_05_errors.py
@@ -143,3 +143,40 @@ class TestErrors:
         ])
         assert r.exit_code == 60, f'{r}'
         assert r.stats[0]['errormsg'] != 'CURL_DBG_SOCK_FAIL_IPV6: failed to open socket'
+
+    # Get, retry on 502
+    def test_05_06_retry_502(self, env: Env, httpd, nghttpx):
+        proto = 'http/1.1'
+        curl = CurlClient(env=env)
+        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?status=502'
+        r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
+            '--retry', '2', '--retry-all-errors', '--retry-delay', '1',
+        ])
+        r.check_response(http_status=502)
+        assert r.stats[0]['num_retries'] == 2, f'{r}'
+        # curious, since curl does the retries, it finds the previous
+        # connection in the cache and reports that no connects were done
+        assert r.stats[0]['num_connects'] == 0, f'{r}'
+
+    # Get, retry on 502 in parallel mode
+    def test_05_07_retry_502_parallel(self, env: Env, httpd, nghttpx):
+        proto = 'http/1.1'
+        curl = CurlClient(env=env)
+        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?status=502'
+        r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
+            '--retry', '2', '--retry-all-errors', '--retry-delay', '1', '--parallel'
+        ])
+        r.check_response(http_status=502)
+        assert r.stats[0]['num_retries'] == 2, f'{r}'
+
+    # Get, retry on 401, not happening
+    def test_05_08_retry_401(self, env: Env, httpd, nghttpx):
+        proto = 'http/1.1'
+        curl = CurlClient(env=env)
+        url = f'https://{env.authority_for(env.domain1, proto)}/curltest/tweak?status=401'
+        r = curl.http_download(urls=[url], alpn_proto=proto, extra_args=[
+            '--retry', '2', '--retry-all-errors', '--retry-delay', '1'
+        ])
+        r.check_response(http_status=401)
+        # No retries on a 401
+        assert r.stats[0]['num_retries'] == 0, f'{r}'