
Rewrite the --rejected-log test using the new framework.

* tests/Test--rejected-log.px: Remove old test.
* testenv/Test--rejected-log.py: Create new test.
Jookia 2015-08-07 07:58:30 +10:00 committed by Giuseppe Scrivano
parent 7e5079c40f
commit 98272d2c8d
4 changed files with 105 additions and 139 deletions
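
For orientation, tests in the Python-based testenv framework share one skeleton: server fixtures are declared as WgetFile objects, handed to an HTTPTest instance through the pre_hook, test_params and post_hook dictionaries, and the value returned by begin() becomes the script's exit status. The following minimal sketch distils that structure from the file added below; the page name and content are illustrative only, and like every testenv script it runs only under the harness (normally via make check):

#!/usr/bin/env python3
from sys import exit
from test.http_test import HTTPTest
from misc.wget_file import WgetFile

# One page served by the test HTTP server (content is illustrative).
index_html = WgetFile ("index.html", "<html><body>Hello</body></html>")

err = HTTPTest (
    name="Minimal Skeleton",                    # illustrative test name
    pre_hook={"ServerFiles" : [[index_html]]},  # files the server offers
    test_params={"WgetCommands" : "-nd",        # extra wget options
                 "Urls" : [["index.html"]]},    # URLs passed on the command line
    post_hook={"ExpectedFiles" : [index_html],  # files wget must leave behind
               "ExpectedRetcode" : 0}           # expected wget exit code
).begin ()

exit (err)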


@@ -63,6 +63,7 @@ if HAVE_PYTHON3
 Test-Post.py \
 Test-504.py \
 Test--spider-r.py \
+Test--rejected-log.py \
 Test-redirect-crash.py \
 Test-reserved-chars.py \
 Test-condget.py \

testenv/Test--rejected-log.py (new executable file, 104 lines)

@@ -0,0 +1,104 @@
#!/usr/bin/env python3
from sys import exit
from test.http_test import HTTPTest
from misc.wget_file import WgetFile
"""
This test executes Wget in recursive mode and checks the rejected-log output.
"""
TEST_NAME = "Rejected Log"
############# File Definitions ###############################################
mainpage = """
<html>
<head>
<title>Main Page</title>
</head>
<body>
<p>
Recurse to a <a href="http://127.0.0.1:{{port}}/secondpage.html">second page</a>.
</p>
</body>
</html>
"""
secondpage = """
<html>
<head>
<title>Second Page</title>
</head>
<body>
<p>
Recurse to a <a href="http://127.0.0.1:{{port}}/thirdpage.html">third page</a>.
Try the blacklisted <a href="http://127.0.0.1:{{port}}/index.html">main page</a>.
</p>
</body>
</html>
"""
thirdpage = """
<html>
<head>
<title>Third Page</title>
</head>
<body>
<p>
Try a hidden <a href="http://127.0.0.1:{{port}}/dummy.txt">dummy file</a>.
Try to leave to <a href="http://no.such.domain/">another domain</a>.
</p>
</body>
</html>
"""
robots = """
User-agent: *
Disallow: /dummy.txt
"""
log = """\
REASON U_URL U_SCHEME U_HOST U_PORT U_PATH U_PARAMS U_QUERY U_FRAGMENT P_URL P_SCHEME P_HOST P_PORT P_PATH P_PARAMS P_QUERY P_FRAGMENT
BLACKLIST http%3A//127.0.0.1%3A{{port}}/index.html SCHEME_HTTP 127.0.0.1 {{port}} index.html http%3A//127.0.0.1%3A{{port}}/secondpage.html SCHEME_HTTP 127.0.0.1 {{port}} secondpage.html
ROBOTS http%3A//127.0.0.1%3A{{port}}/dummy.txt SCHEME_HTTP 127.0.0.1 {{port}} dummy.txt http%3A//127.0.0.1%3A{{port}}/thirdpage.html SCHEME_HTTP 127.0.0.1 {{port}} thirdpage.html
SPANNEDHOST http%3A//no.such.domain/ SCHEME_HTTP no.such.domain 80 http%3A//127.0.0.1%3A{{port}}/thirdpage.html SCHEME_HTTP 127.0.0.1 {{port}} thirdpage.html
"""
dummyfile = "Don't care."
index_html = WgetFile ("index.html", mainpage)
secondpage_html = WgetFile ("secondpage.html", secondpage)
thirdpage_html = WgetFile ("thirdpage.html", thirdpage)
robots_txt = WgetFile ("robots.txt", robots)
dummy_txt = WgetFile ("dummy.txt", dummyfile)
log_csv = WgetFile ("log.csv", log)
WGET_OPTIONS = "-nd -r --rejected-log log.csv"
WGET_URLS = [["index.html"]]
Files = [[index_html, secondpage_html, thirdpage_html, robots_txt, dummy_txt]]
ExpectedReturnCode = 0
ExpectedDownloadedFiles = [index_html, secondpage_html, thirdpage_html, robots_txt, log_csv]
# TODO: fix long line
# TODO: check names
################ Pre and Post Test Hooks #####################################
pre_test = {
    "ServerFiles" : Files
}
test_options = {
    "WgetCommands" : WGET_OPTIONS,
    "Urls" : WGET_URLS
}
post_test = {
    "ExpectedFiles" : ExpectedDownloadedFiles,
    "ExpectedRetcode" : ExpectedReturnCode
}
err = HTTPTest (
    name=TEST_NAME,
    pre_hook=pre_test,
    test_params=test_options,
    post_hook=post_test
).begin ()
exit (err)
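
The log.csv fixture in this test also documents the rejected-log format wget is expected to produce: a header row naming the columns, then one record per rejected URL, with the rejection reason (BLACKLIST, ROBOTS, SPANNEDHOST) in the first column and the URLs percent-encoded. As a rough illustration of reading such a log outside the test harness, here is a hypothetical helper (not part of this commit); it assumes the fields are whitespace-separated, as they appear in the fixture above:

#!/usr/bin/env python3
# Hypothetical helper, not part of this commit: summarize a log written by
# `wget --rejected-log log.csv`, assuming whitespace-separated fields laid out
# as in the header row of the fixture above.
from urllib.parse import unquote
import sys

def summarize_rejections (path):
    with open (path) as log_file:
        header = log_file.readline ().split ()
        reason_idx = header.index ("REASON")  # why the URL was rejected
        url_idx = header.index ("U_URL")      # the rejected URL, percent-encoded
        for line in log_file:
            fields = line.split ()
            if not fields:
                continue
            # URLs in the log are percent-encoded (http%3A//...), so decode
            # them before printing.
            print (fields[reason_idx], unquote (fields[url_idx]))

if __name__ == "__main__":
    summarize_rejections (sys.argv[1] if len (sys.argv) > 1 else "log.csv")

Run against the log.csv left behind by this test, that would print one line per rejection, for example ROBOTS followed by the decoded dummy.txt URL.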


@@ -127,7 +127,6 @@ PX_TESTS = \
 Test--start-pos.px \
 Test--start-pos--continue.px \
 Test--httpsonly-r.px \
-Test--rejected-log.px \
 Test-204.px
 EXTRA_DIST = FTPServer.pm FTPTest.pm HTTPServer.pm HTTPTest.pm \

tests/Test--rejected-log.px (deleted file, 138 lines)

@@ -1,138 +0,0 @@
#!/usr/bin/env perl
use strict;
use warnings;
use HTTPTest;
###############################################################################
my $mainpage = <<EOF;
<html>
<head>
<title>Main Page</title>
</head>
<body>
<p>
Recurse to a <a href="http://localhost:{{port}}/secondpage.html">second page</a>.
</p>
</body>
</html>
EOF
my $secondpage = <<EOF;
<html>
<head>
<title>Second Page</title>
</head>
<body>
<p>
Recurse to a <a href="http://localhost:{{port}}/thirdpage.html">third page</a>.
Try the blacklisted <a href="http://localhost:{{port}}/index.html">main page</a>.
</p>
</body>
</html>
EOF
my $thirdpage = <<EOF;
<html>
<head>
<title>Third Page</title>
</head>
<body>
<p>
Try a hidden <a href="http://localhost:{{port}}/dummy.txt">dummy file</a>.
Try to leave to <a href="http://no.such.domain/">another domain</a>.
</p>
</body>
</html>
EOF
my $robots = <<EOF;
User-agent: *
Disallow: /dummy.txt
EOF
my $log = <<EOF;
REASON U_URL U_SCHEME U_HOST U_PORT U_PATH U_PARAMS U_QUERY U_FRAGMENT P_URL P_SCHEME P_HOST P_PORT P_PATH P_PARAMS P_QUERY P_FRAGMENT
BLACKLIST http%3A//localhost%3A{{port}}/index.html SCHEME_HTTP localhost {{port}} index.html http%3A//localhost%3A{{port}}/secondpage.html SCHEME_HTTP localhost {{port}} secondpage.html
ROBOTS http%3A//localhost%3A{{port}}/dummy.txt SCHEME_HTTP localhost {{port}} dummy.txt http%3A//localhost%3A{{port}}/thirdpage.html SCHEME_HTTP localhost {{port}} thirdpage.html
SPANNEDHOST http%3A//no.such.domain/ SCHEME_HTTP no.such.domain 80 http%3A//localhost%3A{{port}}/thirdpage.html SCHEME_HTTP localhost {{port}} thirdpage.html
EOF
# code, msg, headers, content
my %urls = (
    '/index.html' => {
        code => "200",
        msg => "Dontcare",
        headers => {
            "Content-type" => "text/html",
        },
        content => $mainpage,
    },
    '/secondpage.html' => {
        code => "200",
        msg => "Dontcare",
        headers => {
            "Content-type" => "text/html",
        },
        content => $secondpage,
    },
    '/thirdpage.html' => {
        code => "200",
        msg => "Dontcare",
        headers => {
            "Content-type" => "text/html",
        },
        content => $thirdpage,
    },
    '/dummy.txt' => {
        code => "200",
        msg => "Dontcare",
        headers => {
            "Content-type" => "text/plain",
        },
        content => "",
    },
    '/robots.txt' => {
        code => "200",
        msg => "Dontcare",
        headers => {
            "Content-type" => "text/plain",
        },
        content => $robots
    },
);
my $cmdline = $WgetTest::WGETPATH . " -nd -r --rejected-log log.csv http://localhost:{{port}}/index.html";
my $expected_error_code = 0;
my %expected_downloaded_files = (
    "index.html" => {
        content => $mainpage,
    },
    "secondpage.html" => {
        content => $secondpage,
    },
    "thirdpage.html" => {
        content => $thirdpage,
    },
    "robots.txt" => {
        content => $robots,
    },
    "log.csv" => {
        content => $log,
    },
);
###############################################################################
my $the_test = HTTPTest->new (input => \%urls,
                              cmdline => $cmdline,
                              errcode => $expected_error_code,
                              output => \%expected_downloaded_files);
exit $the_test->run();
# vim: et ts=4 sw=4