#!/usr/bin/env python3
from sys import exit
from test.http_test import HTTPTest
from misc.wget_file import WgetFile

"""
    This test executes Wget in Spider mode with recursive retrieval.
"""
TEST_NAME = "Recursive Spider"

############# File Definitions ###############################################
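# The {{port}} token in the page bodies below is substituted by the test
# harness with the port of the transient HTTP server, so the pages can
# carry absolute links back to that server.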
mainpage = """
<html>
<head>
  <title>Main Page</title>
</head>
<body>
  <p>
    Some text and a link to a <a href="http://localhost:{{port}}/secondpage.html">second page</a>.
    Also, a <a href="http://localhost:{{port}}/nonexistent">broken link</a>.
  </p>
</body>
</html>
"""
""" secondpage = """Some text and a link to a third page. Also, a broken link.
""" thirdpage = """Some text and a link to a text file. Also, another broken link.
""" dummyfile = "Don't care." index_html = WgetFile ("index.html", mainpage) secondpage_html = WgetFile ("secondpage.html", secondpage) thirdpage_html = WgetFile ("thirdpage.html", thirdpage) dummy_txt = WgetFile ("dummy.txt", dummyfile) Request_List = [ [ "HEAD /", "GET /", "GET /robots.txt", "HEAD /secondpage.html", "GET /secondpage.html", "HEAD /nonexistent", "HEAD /thirdpage.html", "GET /thirdpage.html", "HEAD /dummy.txt", "HEAD /againnonexistent" ] ] WGET_OPTIONS = "-d --spider -r" WGET_URLS = [[""]] Files = [[index_html, secondpage_html, thirdpage_html, dummy_txt]] ExpectedReturnCode = 8 ExpectedDownloadedFiles = [] ################ Pre and Post Test Hooks ##################################### pre_test = { "ServerFiles" : Files } test_options = { "WgetCommands" : WGET_OPTIONS, "Urls" : WGET_URLS } post_test = { "ExpectedFiles" : ExpectedDownloadedFiles, "ExpectedRetcode" : ExpectedReturnCode, "FilesCrawled" : Request_List } err = HTTPTest ( name=TEST_NAME, pre_hook=pre_test, test_params=test_options, post_hook=post_test ).begin () exit (err)