
curl: speed up handling of many URLs

By properly keeping track of the last entry in the list of URLs/uploads
to handle, curl now avoids many pointless traversals of the list, which
speeds up many-URL handling *MASSIVELY* (by several orders of magnitude
on 100K URLs).

Added test 1291 to verify that it doesn't take ages, although the test
suite has no way to detect that a command ran "too slow".

Reported-by: arainchik on github
Fixes #1959
Closes #2052
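
As background, here is a minimal, self-contained sketch of the general
technique this commit applies, with illustrative names rather than curl's
actual code (curl's analogue of the cached tail is config->url_last, visible
in the header diff below):

#include <stdlib.h>

/* hypothetical singly-linked node, analogous to curl's struct getout */
struct node {
  struct node *next;
};

struct list {
  struct node *head;
  struct node *tail; /* cached last entry, like config->url_last */
};

/* O(n) append: walks from the head every time, so n appends cost O(n^2) */
static struct node *append_slow(struct list *l)
{
  struct node *n = calloc(1, sizeof(*n));
  if(!n)
    return NULL;
  if(!l->head)
    l->head = n;
  else {
    struct node *last = l->head;
    while(last->next)
      last = last->next;
    last->next = n;
  }
  return n;
}

/* O(1) append: the cached tail makes n appends cost O(n) overall */
static struct node *append_fast(struct list *l)
{
  struct node *n = calloc(1, sizeof(*n));
  if(!n)
    return NULL;
  if(l->tail)
    l->tail->next = n;
  else
    l->head = n;
  l->tail = n;
  return n;
}

With n entries, append_slow costs O(n^2) in total while append_fast stays
O(n) — the "several orders of magnitude" the commit message refers to at
100K URLs.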
Author: Daniel Stenberg
Date:   2017-11-04 12:56:30 +01:00
Parent: 90abb74ff0
Commit: ee8016b3de

4 changed files with 63 additions and 11 deletions

src/tool_cfgable.h

@@ -114,6 +114,7 @@ struct OperationConfig {
   struct getout *url_last; /* point to the last/current node */
   struct getout *url_get;  /* point to the node to fill in URL */
   struct getout *url_out;  /* point to the node to fill in outfile */
+  struct getout *url_ul;   /* point to the node to fill in upload */
   char *cipher_list;
   char *proxy_cipher_list;
   char *cert;
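
The new url_ul member gives uploads their own cursor into the shared
url_list instead of reusing url_out as the old code did (visible in the
-1912 hunk below). As a hedged sketch of the cursor idea, with made-up
names rather than curl's actual code:

/* illustrative sketch only: resume the search for an unfilled node from a
   per-purpose cursor instead of restarting at the list head */
struct sketch_node {
  struct sketch_node *next;
  int flags;                       /* stands in for the GETOUT_* bits */
};

static struct sketch_node *next_unfilled(struct sketch_node **cursor,
                                         struct sketch_node *head,
                                         int filled_flag)
{
  if(!*cursor)
    *cursor = head;                /* first call: start at the head */
  while(*cursor && ((*cursor)->flags & filled_flag))
    *cursor = (*cursor)->next;     /* skip nodes already filled in */
  return *cursor;                  /* NULL: caller must append a new node */
}

A NULL return tells the caller to append a fresh node — and, after this
commit, to store that node back into the cursor so the next scan starts
there.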

src/tool_getparam.c

@@ -787,7 +787,7 @@ ParameterError getparameter(const char *flag, /* f or -long-flag */
           url = config->url_get;
         else
           /* there was no free node, create one! */
-          url = new_getout(config);
+          config->url_get = url = new_getout(config);

         if(!url)
           return PARAM_NO_MEM;
@@ -1787,7 +1787,7 @@ ParameterError getparameter(const char *flag, /* f or -long-flag */
           url = config->url_out;
         else
           /* there was no free node, create one! */
-          url = new_getout(config);
+          config->url_out = url = new_getout(config);

         if(!url)
           return PARAM_NO_MEM;
@@ -1912,23 +1912,23 @@ ParameterError getparameter(const char *flag, /* f or -long-flag */
       /* we are uploading */
       {
         struct getout *url;
-        if(!config->url_out)
-          config->url_out = config->url_list;
-        if(config->url_out) {
+        if(!config->url_ul)
+          config->url_ul = config->url_list;
+        if(config->url_ul) {
           /* there's a node here, if it already is filled-in continue to find
              an "empty" node */
-          while(config->url_out && (config->url_out->flags & GETOUT_UPLOAD))
-            config->url_out = config->url_out->next;
+          while(config->url_ul && (config->url_ul->flags & GETOUT_UPLOAD))
+            config->url_ul = config->url_ul->next;
         }
         /* now there might or might not be an available node to fill in! */
-        if(config->url_out)
+        if(config->url_ul)
           /* existing node */
-          url = config->url_out;
+          url = config->url_ul;
         else
           /* there was no free node, create one! */
-          url = new_getout(config);
+          config->url_ul = url = new_getout(config);
         if(!url)
           return PARAM_NO_MEM;
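
The one-line fix repeated in each branch above is the write-back into the
cursor: before, when new_getout() had to append a node, the cursor
(config->url_get, url_out or url_ul) stayed NULL, so the next option of the
same kind rescanned the entire list from config->url_list. For 100K
URL/upload pairs that is roughly 100000^2/2, about 5 billion node visits in
total; with the fresh tail node cached, each subsequent scan finishes in a
single step.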

tests/data/Makefile.inc

@@ -137,7 +137,7 @@ test1252 test1253 test1254 test1255 test1256 test1257 test1258 test1259 \
 test1260 test1261 test1262 \
 \
 test1280 test1281 test1282 test1283 test1284 test1285 test1286 test1287 \
-test1288 test1289 test1290 \
+test1288 test1289 test1290 test1291 \
 test1298 test1299 \
 test1300 test1301 test1302 test1303 test1304 test1305 test1306 test1307 \
 test1308 test1309 test1310 test1311 test1312 test1313 test1314 test1315 \

tests/data/test1291 (new file, 51 lines)

@@ -0,0 +1,51 @@
# This test case is primarily meant to verify that parsing and adding 100K
# files is a swift operation.
#
<testcase>
<info>
<keywords>
HTTP
HTTP PUT
</keywords>
</info>
#
# Server-side
<reply>
<data>
</data>
</reply>
# Client-side
<client>
<server>
none
</server>
<name>
Attempt to upload 100K files but fail immediately
</name>
<command>
-K log/cmd1291 --fail-early
</command>
<file name="log/upload-this">
XXXXXXXx
</file>
# generate the config file
<precheck>
perl -e 'for(1 .. 100000) { printf("upload-file=log/upload-this\nurl=htttttp://non-existing-host.haxx.se/upload/1291\n");}' > log/cmd1291;
</precheck>
</client>
# Verify data after the test has been "shot"
<verify>
<errorcode>
1
</errorcode>
# we disable valgrind here since it takes 40+ seconds even on a fairly snappy
# machine
<valgrind>
disable
</valgrind>
</verify>
</testcase>
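
For reference, the precheck's perl one-liner writes log/cmd1291 as a plain
curl config file repeating the following two-line block 100,000 times; the
misspelled htttttp:// scheme makes the very first transfer fail at once,
and --fail-early turns that into the expected exit code 1:

upload-file=log/upload-this
url=htttttp://non-existing-host.haxx.se/upload/1291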