1
0
mirror of https://github.com/moparisthebest/wget synced 2024-07-03 16:38:41 -04:00

improved Test-idn-robots.px

This commit is contained in:
Tim Ruehsen 2013-10-07 23:37:42 +02:00 committed by Giuseppe Scrivano
parent 1dec2028d0
commit c289349684
2 changed files with 31 additions and 1 deletions

View File

@ -1,3 +1,8 @@
2013-10-07 Tim Ruehsen <tim.ruehsen@gmx.de>
* Test-idn-robots.px: added punycoded and escaped URLs to follow;
removed the -H option from the wget command line
2013-08-22 Tim Ruehsen <tim.ruehsen@gmx.de>
* Makefile.am (EXTRA_DIST): Add Test--httpsonly-r.px.

View File

@ -9,11 +9,14 @@ use HTTPTest;
# " Kon'nichiwa <dot> Japan
my $euc_jp_hostname = "\272\243\306\374\244\317.\306\374\313\334";
my $punycoded_hostname = 'xn--v9ju72g90p.xn--wgv71a';
my $escaped_hostname = "%ba%a3%c6%fc%a4%cf.%c6%fc%cb%dc";
###############################################################################
my $starter_file = <<EOF;
<a href="http://$euc_jp_hostname/foo.txt">The link</a>
<a href="http://$punycoded_hostname/foo2.txt">The second link</a>
<a href="http://$escaped_hostname/foo3.txt">The third link</a>
EOF
my $result_file = <<EOF;
@ -38,6 +41,22 @@ my %urls = (
},
content => $result_file,
},
"http://$punycoded_hostname/foo2.txt" => {
code => "200",
msg => "Uh-huh2",
headers => {
'Content-Type' => 'text/plain',
},
content => $result_file,
},
"http://$punycoded_hostname/foo3.txt" => {
code => "200",
msg => "Uh-huh3",
headers => {
'Content-Type' => 'text/plain',
},
content => $result_file,
},
"http://$punycoded_hostname/robots.txt" => {
code => "200",
msg => "Uh-huh",
@ -48,7 +67,7 @@ my %urls = (
},
);
my $cmdline = $WgetTest::WGETPATH . " --iri -rH"
my $cmdline = $WgetTest::WGETPATH . " --iri -r"
. " -e http_proxy=localhost:{{port}} --local-encoding=EUC-JP"
. " http://$euc_jp_hostname/";
@ -61,6 +80,12 @@ my %expected_downloaded_files = (
"$punycoded_hostname/foo.txt" => {
content => $result_file,
},
"$punycoded_hostname/foo2.txt" => {
content => $result_file,
},
"$punycoded_hostname/foo3.txt" => {
content => $result_file,
},
"$punycoded_hostname/robots.txt" => {
content => '',
},