diff --git a/lib/http.c b/lib/http.c
index b1eef71f8..3ba963519 100644
--- a/lib/http.c
+++ b/lib/http.c
@@ -2357,7 +2357,6 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
 
     /* and no fragment part */
     CURLUcode uc;
-    char *url;
     CURLU *h = curl_url_dup(data->state.uh);
     if(!h)
       return CURLE_OUT_OF_MEMORY;
@@ -2388,19 +2387,15 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
         return CURLE_OUT_OF_MEMORY;
       }
     }
-    /* now extract the new version of the URL */
-    uc = curl_url_get(h, CURLUPART_URL, &url, 0);
+    /* Extract the URL to use in the request. Store in STRING_TEMP_URL for
+       clean-up reasons if the function returns before the free() further
+       down. */
+    uc = curl_url_get(h, CURLUPART_URL, &data->set.str[STRING_TEMP_URL], 0);
     if(uc) {
       curl_url_cleanup(h);
       return CURLE_OUT_OF_MEMORY;
     }
 
-    if(data->change.url_alloc)
-      free(data->change.url);
-
-    data->change.url = url;
-    data->change.url_alloc = TRUE;
-
     curl_url_cleanup(h);
 
     if(strcasecompare("ftp", data->state.up.scheme)) {
@@ -2579,12 +2574,16 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
     query = NULL;
   }
 
+#ifndef CURL_DISABLE_PROXY
   /* url */
   if(conn->bits.httpproxy && !conn->bits.tunnel_proxy) {
-    char *url = data->change.url;
+    char *url = data->set.str[STRING_TEMP_URL];
     result = Curl_add_buffer(&req_buffer, url, strlen(url));
+    Curl_safefree(data->set.str[STRING_TEMP_URL]);
   }
-  else if(paste_ftp_userpwd)
+  else
+#endif
+  if(paste_ftp_userpwd)
     result = Curl_add_bufferf(&req_buffer, "ftp://%s:%s@%s",
                               conn->user, conn->passwd,
                               path + sizeof("ftp://") - 1);
diff --git a/lib/urldata.h b/lib/urldata.h
index 94f02ba6a..7f26a9561 100644
--- a/lib/urldata.h
+++ b/lib/urldata.h
@@ -1503,6 +1503,9 @@ enum dupstring {
   STRING_ALTSVC,                /* CURLOPT_ALTSVC */
 #endif
   STRING_SASL_AUTHZID,          /* CURLOPT_SASL_AUTHZID */
+#ifndef CURL_DISABLE_PROXY
+  STRING_TEMP_URL,              /* temp URL storage for proxy use */
+#endif
   /* -- end of zero-terminated strings -- */
 
   STRING_LASTZEROTERMINATED,
diff --git a/tests/data/Makefile.inc b/tests/data/Makefile.inc
index 1e9aef9d2..fc22c2f17 100644
--- a/tests/data/Makefile.inc
+++ b/tests/data/Makefile.inc
@@ -57,7 +57,7 @@ test298 test299 test300 test301 test302 test303 test304 test305 test306 \
 test307 test308 test309 test310 test311 test312 test313 test314 test315 \
 test316 test317 test318 test319 test320 test321 test322 test323 test324 \
 test325 test326 test327 test328 test329 test330 test331 test332 test333 \
-test334 \
+test334 test335 \
 test340 \
 \
 test350 test351 test352 test353 test354 test355 test356 \
diff --git a/tests/data/test2047 b/tests/data/test2047
index fc140486b..bc7350971 100644
--- a/tests/data/test2047
+++ b/tests/data/test2047
@@ -92,7 +92,7 @@ OK
 1
 1
 3
-http://xn--4cab6c.se/20470001
+http://åäö.se/20470001
 text/plain; charset=us-ascii
 200
diff --git a/tests/data/test335 b/tests/data/test335
new file mode 100644
index 000000000..4d54da980
--- /dev/null
+++ b/tests/data/test335
@@ -0,0 +1,102 @@
+# Mostly a duplicate of test168
+<testcase>
+<info>
+<keywords>
+HTTP
+HTTP GET
+HTTP proxy
+HTTP proxy Digest auth
+HTTP Digest auth
+HTTP auth in URL
+</keywords>
+</info>
+
+# Server-side
+<reply>
+
+# this is returned first since we get no proxy-auth
+<data>
+HTTP/1.1 407 Authorization Required to proxy me my dear swsclose
+Proxy-Authenticate: Digest realm="weirdorealm", nonce="12345"
+
+And you should ignore this data.
+</data>
+
+# then this is returned since we get no server-auth
+<data1001>
+HTTP/1.1 401 Authorization to the remote host as well swsbounce swsclose
+WWW-Authenticate: Digest realm="realmweirdo", nonce="123456"
+
+you should ignore this data too
+</data1001>
+
+<data1002>
+HTTP/1.1 200 OK swsclose
+Server: no
+Content-Length: 15
+
+Nice auth sir!
+</data1002>
+
+<datacheck>
+HTTP/1.1 407 Authorization Required to proxy me my dear swsclose
+Proxy-Authenticate: Digest realm="weirdorealm", nonce="12345"
+
+HTTP/1.1 401 Authorization to the remote host as well swsbounce swsclose
+WWW-Authenticate: Digest realm="realmweirdo", nonce="123456"
+
+HTTP/1.1 200 OK swsclose
+Server: no
+Content-Length: 15
+
+Nice auth sir!
+</datacheck>
+</reply>
+
+# Client-side
+<client>
+<server>
+http
+</server>
+<features>
+!SSPI
+crypto
+</features>
+<name>
+HTTP with proxy Digest and site Digest with creds in URLs
+</name>
+<command>
+http://digest:alot@data.from.server.requiring.digest.hohoho.com/335 --proxy http://foo:bar@%HOSTIP:%HTTPPORT --proxy-digest --digest
+</command>
+</client>
+
+# Verify data after the test has been "shot"
+<verify>
+<strip>
+^User-Agent: curl/.*
+</strip>
+<protocol>
+GET http://data.from.server.requiring.digest.hohoho.com/335 HTTP/1.1
+Host: data.from.server.requiring.digest.hohoho.com
+User-Agent: curl/7.12.0-CVS (i686-pc-linux-gnu) libcurl/7.12.0-CVS OpenSSL/0.9.6b zlib/1.1.4 c-ares/1.2.0 libidn/0.4.3
+Accept: */*
+Proxy-Connection: Keep-Alive
+
+GET http://data.from.server.requiring.digest.hohoho.com/335 HTTP/1.1
+Host: data.from.server.requiring.digest.hohoho.com
+Proxy-Authorization: Digest username="foo", realm="weirdorealm", nonce="12345", uri="/335", response="f61609cd8f5bb205ef4e169b2c5626cb"
+User-Agent: curl/7.12.0-CVS (i686-pc-linux-gnu) libcurl/7.12.0-CVS OpenSSL/0.9.6b zlib/1.1.4 c-ares/1.2.0 libidn/0.4.3
+Accept: */*
+Proxy-Connection: Keep-Alive
+
+GET http://data.from.server.requiring.digest.hohoho.com/335 HTTP/1.1
+Host: data.from.server.requiring.digest.hohoho.com
+Proxy-Authorization: Digest username="foo", realm="weirdorealm", nonce="12345", uri="/335", response="f61609cd8f5bb205ef4e169b2c5626cb"
+Authorization: Digest username="digest", realm="realmweirdo", nonce="123456", uri="/335", response="08a2e2e684047f4219a38ddc189ac00c"
+User-Agent: curl/7.12.0-CVS (i686-pc-linux-gnu) libcurl/7.12.0-CVS OpenSSL/0.9.6b ipv6 zlib/1.1.4 GSS libidn/0.4.3
+Accept: */*
+Proxy-Connection: Keep-Alive
+
+</protocol>
+</verify>
+</testcase>
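
For reference, the lib/http.c hunks above rely on the libcurl URL API (CURLU, available since 7.62.0): duplicate the parsed URL handle, drop the credentials and the fragment, then extract the resulting URL string to send to the proxy. The following standalone sketch (not part of the patch) shows that same pattern; the input URL, the file name and the variable names orig/work are made up for illustration, while curl_url(), curl_url_dup(), curl_url_set(), curl_url_get(), curl_url_cleanup() and curl_free() are the actual libcurl calls used by the patch.

/* urlapi-sketch.c - illustrative only, not part of the patch above.
 * Build (assuming libcurl 7.62.0 or later):
 *   cc urlapi-sketch.c -lcurl -o urlapi-sketch
 */
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURLU *orig = curl_url();   /* parsed original URL, like data->state.uh */
  CURLU *work;                /* per-request copy, like 'h' in Curl_http() */
  char *url = NULL;
  CURLUcode uc;
  int rc = 1;

  if(!orig)
    return 1;

  /* example input URL; made up for illustration */
  uc = curl_url_set(orig, CURLUPART_URL,
                    "http://user:secret@example.com/path#frag", 0);
  if(!uc) {
    work = curl_url_dup(orig);       /* leave the original handle untouched */
    if(work) {
      /* setting a part to NULL removes it from the URL */
      curl_url_set(work, CURLUPART_USER, NULL, 0);
      curl_url_set(work, CURLUPART_PASSWORD, NULL, 0);
      curl_url_set(work, CURLUPART_FRAGMENT, NULL, 0);

      /* extract the cleaned-up URL; this is the string the patch stores in
         data->set.str[STRING_TEMP_URL] and later frees with Curl_safefree() */
      uc = curl_url_get(work, CURLUPART_URL, &url, 0);
      if(!uc) {
        printf("%s\n", url);         /* expect: http://example.com/path */
        curl_free(url);
        rc = 0;
      }
      curl_url_cleanup(work);
    }
  }
  curl_url_cleanup(orig);
  return rc;
}

Storing the extracted string in data->set.str[STRING_TEMP_URL] rather than in a local pointer means the normal string clean-up frees it even if Curl_http() returns before the explicit Curl_safefree(), which is the clean-up concern the new code comment in the patch describes.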