#!/usr/bin/env python3
from sys import exit
from test.http_test import HTTPTest
from misc.wget_file import WgetFile

"""
    This test executed Wget in Spider mode with recursive retrieval.
"""
TEST_NAME = "Recursive Spider"
############# File Definitions ###############################################
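# index.html links to secondpage.html, which links to thirdpage.html;
# every page also carries a link to a URL the server does not provide.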
mainpage = """
<html>
<head>
  <title>Main Page</title>
</head>
<body>
  <p>
    Some text and a link to a <a href="http://127.0.0.1:{{port}}/secondpage.html">second page</a>.
    Also, a <a href="http://127.0.0.1:{{port}}/nonexistent">broken link</a>.
  </p>
</body>
</html>
"""


secondpage = """
<html>
<head>
  <title>Second Page</title>
</head>
<body>
  <p>
    Some text and a link to a <a href="http://127.0.0.1:{{port}}/thirdpage.html">third page</a>.
    Also, a <a href="http://127.0.0.1:{{port}}/nonexistent">broken link</a>.
  </p>
</body>
</html>
"""

thirdpage = """
<html>
<head>
  <title>Third Page</title>
</head>
<body>
  <p>
    Some text and a link to a <a href="http://127.0.0.1:{{port}}/dummy.txt">text file</a>.
    Also, another <a href="http://127.0.0.1:{{port}}/againnonexistent">broken link</a>.
  </p>
</body>
</html>
"""

dummyfile = "Don't care."


index_html = WgetFile ("index.html", mainpage)
secondpage_html = WgetFile ("secondpage.html", secondpage)
thirdpage_html = WgetFile ("thirdpage.html", thirdpage)
dummy_txt = WgetFile ("dummy.txt", dummyfile)

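# Expected request sequence for the recursive spider crawl: each HTML page
# is checked with HEAD and then fetched with GET so its links can be parsed,
# /robots.txt is requested because recursive retrieval consults it by default,
# and the broken links and the non-HTML dummy.txt are probed with HEAD only.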
Request_List = [
    [
        "HEAD /",
        "GET /",
        "GET /robots.txt",
        "HEAD /secondpage.html",
        "GET /secondpage.html",
        "HEAD /nonexistent",
        "HEAD /thirdpage.html",
        "GET /thirdpage.html",
        "HEAD /dummy.txt",
        "HEAD /againnonexistent"
    ]
]

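# --spider prevents anything from being saved to disk; -r crawls recursively.
# The empty URL entry makes Wget start at the server root ("/").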
WGET_OPTIONS = "--spider -r"
WGET_URLS = [[""]]

Files = [[index_html, secondpage_html, thirdpage_html, dummy_txt]]

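# The broken links produce error responses, so Wget should exit with
# status 8 ("server issued an error response") and leave no files behind.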
ExpectedReturnCode = 8
ExpectedDownloadedFiles = []

################ Pre and Post Test Hooks #####################################
pre_test = {
    "ServerFiles"       : Files
}
test_options = {
    "WgetCommands"      : WGET_OPTIONS,
    "Urls"              : WGET_URLS
}
post_test = {
    "ExpectedFiles"     : ExpectedDownloadedFiles,
    "ExpectedRetcode"   : ExpectedReturnCode,
    "FilesCrawled"      : Request_List
}

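# Run the test: the HTTPTest harness serves the files, invokes Wget with the
# options above, and checks the downloaded files, exit code and crawled URLs.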
err = HTTPTest (
                name=TEST_NAME,
                pre_hook=pre_test,
                test_params=test_options,
                post_hook=post_test
).begin ()

exit (err)