#Recursive scan options
[RecursiveScan]
#Recursive scan: 0 = off, 1 = on
conf.recursive_scan = 0
#Recurse into paths that return one of these status codes
conf.recursive_status_code = [301,403]
#Stop scanning a URL once it exceeds this length
conf.recursive_scan_max_url_length = 60
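#Illustrative example (hypothetical target): with the settings above, a hit on
#http://example.com/admin that answers 301 or 403 is queued for recursive scanning
#as http://example.com/admin/, as long as the resulting URL stays under the length limit.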
#File extensions excluded from recursive scanning
conf.recursive_blacklist_exts = ["html",'htm','shtml','png','jpg','webp','bmp','js','css','pdf','ini','mp3','mp4']
#Subdirectories excluded from scanning
#conf.exclude_subdirs = ['/test1','/test2']
conf.exclude_subdirs = ""
#Scan mode handling
[ScanModeHandler]
#Dictionary mode: 0 = off, 1 = single dictionary, 2 = multiple dictionaries
conf.dict_mode = 1
#Single dictionary file path
conf.dict_mode_load_single_dict = "dict_mode_dict.txt"
#Path to a directory of multiple dictionary files
conf.dict_mode_load_mult_dict = "dictmult"
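#Assumed layout (hypothetical filenames): "dictmult" is a directory of plain-text
#wordlists, one payload per line, all of which are loaded in multiple-dict mode, e.g.:
#  dictmult/common_dirs.txt
#  dictmult/backup_files.txt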
#Blast (brute-force) mode: 0 = off, 1 = on. Tip: use the "conf.file_extension" option for suffixes
conf.blast_mode = 0
#Minimum length of generated payloads
conf.blast_mode_min = 3
#Maximum length of generated payloads
conf.blast_mode_max = 3
#Default character set: a-z
conf.blast_mode_az = "abcdefghijklmnopqrstuvwxyz"
#Default character set: 0-9
conf.blast_mode_num = "0123456789"
#Custom character set
conf.blast_mode_custom_charset = "abc"
#Resume blast dictionary generation from this charset position
conf.blast_mode_resume_charset = ""
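#Worked example (hypothetical values): with blast_mode_min = 3, blast_mode_max = 3 and
#the a-z charset, generation walks aaa, aab, ... zzz (26^3 = 17576 payloads); setting
#conf.blast_mode_resume_charset = "abc" would resume generation from "abc".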
#Crawl mode: 0 = off, 1 = on
conf.crawl_mode = 1
#Suffix dictionary for crawl mode's dynamic fuzzing
conf.crawl_mode_dynamic_fuzz_suffix = "crawl_mode_suffix.txt"
#Parse the robots.txt file: 0 = off, 1 = on
conf.crawl_mode_parse_robots = 0
#XPath expression the crawler uses to extract links from HTML documents
conf.crawl_mode_parse_html = "//*/@href | //*/@src | //form/@action"
#Dynamically generate payloads from crawled paths: 0 = off, 1 = on
conf.crawl_mode_dynamic_fuzz = 1
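#Illustrative behaviour (assumed, with hypothetical paths): when dynamic fuzz is on,
#a crawled file such as /upload/logo.png is recombined with suffixes from the
#crawl_mode_suffix.txt dictionary (e.g. .bak, .swp) to probe leftovers like /upload/logo.png.bak.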
#Fuzz mode: 0 = off, 1 = single dictionary, 2 = multiple dictionaries
conf.fuzz_mode = 0
#Single dictionary file path. The path is customizable; the label is only a placeholder marking where dictionary entries are inserted.
conf.fuzz_mode_load_single_dict = "fuzz_mode_dir.txt"
#Path to a directory of multiple fuzz dictionaries
conf.fuzz_mode_load_mult_dict = "fuzzmult"
#Label to substitute during fuzzing, e.g. {dir} or {ext}
#conf.fuzz_mode_label = "{dir}"
conf.fuzz_mode_label = "{dir}"
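#Usage sketch (assumption about how the label is consumed): point dirmap at a target
#that embeds the label, e.g. python3 dirmap.py -i http://example.com/{dir} -lcf, and
#each fuzz dictionary entry is substituted for {dir} in turn.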
#Processing payloads
[PayloadHandler]
#Processing requests
[RequestHandler]
#Custom request headers, e.g. test1=test1,test2=test2
conf.request_headers = ""
#Custom request user-agent
conf.request_header_ua = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"
#Custom request cookies, e.g. cookie1=cookie1; cookie2=cookie2;
conf.request_header_cookie = ""
#Custom HTTP 401 authentication credentials
conf.request_header_401_auth = ""
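#e.g. (hypothetical credentials, assuming a user:password form for HTTP Basic auth):
#conf.request_header_401_auth = "admin:admin"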
#Request method: get or head
conf.request_method = "get"
#Per-request timeout, in seconds
conf.request_timeout = 3
#Per-request delay: a random 0 to x seconds; the value must be an integer
conf.request_delay = 0
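#e.g.: conf.request_delay = 3 sleeps a random 0-3 seconds before each request.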
#Overall request concurrency limit (default: 30 coroutines)
conf.request_limit = 30
#Maximum retries per request
conf.request_max_retries = 1
#Use HTTP persistent (keep-alive) connections: 0 = off, 1 = on
conf.request_persistent_connect = 0
#Whether to follow 302 redirects
conf.redirection_302 = False
#File extension appended to each payload
conf.file_extension = ""
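#e.g. (hypothetical value, assumed behaviour): conf.file_extension = "php" appends the
#extension to every payload, so a dictionary entry "admin" is requested as "admin.php".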
#Processing responses
[ResponseHandler]
#Response status codes to record
conf.response_status_code = [200]
#Record the response Content-Type: 0 = off, 1 = on
conf.response_header_content_type = 1
#Record the response page size: 0 = off, 1 = on
conf.response_size = 1
#Automatically detect custom 404 pages
conf.auto_check_404_page = True
#Regex identifying a custom 503 page
conf.custom_503_page = "page 503"
#Custom regex matched against response content
# conf.custom_response_page = "([0-9]){3}([a-z]){3}test"
conf.custom_response_page = ""
#Skip responses of this size. Set to "None" if you don't want to skip any file. e.g. None; 0b; 1k; 1m
conf.skip_size = "None"
#Processing proxy
[ProxyHandler]
#Proxy server, e.g. {"http":"http://127.0.0.1:8080","https":"https://127.0.0.1:8080"}
#conf.proxy_server = {"http":"http://127.0.0.1:8080","https":"https://127.0.0.1:8080"}
conf.proxy_server = None
#Debug option
[DebugMode]
#Print payloads and exit the program
conf.debug = 0
#Update option
[CheckUpdate]
#Get the latest code from GitHub (not yet available)
conf.update = 0