-
Notifications
You must be signed in to change notification settings - Fork 11
/
dsuc.py
123 lines (108 loc) · 3.8 KB
/
dsuc.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
'''_____________________________________________________________________
|[] R3DXPL0IT SHELL |ROOT]|!"|
|"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""|"|
|CODED BY > R3DXPLOIT(JIMMY) | |
|EMAIL > RETURN_ROOT@PROTONMAIL.COM | |
|GITHUB > https://github.com/r3dxpl0it | |
|WEB-PAGE > https://r3dxpl0it.Github.io |_|
|_____________________________________________________________________|/|
'''
import requests
import bs4
import argparse
# Shared accumulators filled in by extractor() while pages are parsed.
external = []  # absolute links that point at other hosts
unknown = []  # hrefs that matched none of the classification rules
fuzzables = []  # unused at module level; fuzzable_extract() builds its own list
def extractor(soup, host):
    """Collect the unique same-site links found in *soup*.

    Relative hrefs are resolved against *host*.  Absolute links to other
    hosts are appended to the module-level ``external`` list; anything
    unclassifiable goes to ``unknown``.

    :param soup: a parsed page exposing ``find_all('a', href=True)``
                 (a ``bs4.BeautifulSoup`` instance in normal use).
    :param host: root URL of the crawl, used to absolutise relative links.
    :returns: list of de-duplicated in-scope URLs, in discovery order.
    """
    all_links = []
    for link in soup.find_all('a', href=True):
        href = link['href']
        if href.startswith('/'):
            # Root-relative path: absolutise BEFORE the duplicate check.
            # (BUG FIX: the original tested the raw href against a list of
            # full URLs, so the dedup check never matched.)
            full = host + href
            if full not in all_links:
                all_links.append(full)
        elif host in href:
            if href not in all_links:
                all_links.append(href)
        elif 'http://' in host and 'https://' + host.split('http://')[1] in href:
            # Same site reached over https instead of http.
            # (BUG FIX: the original nested this test under a bare
            # `elif 'http://' in host:`, which swallowed every remaining
            # href for http hosts and silently dropped relative links.)
            if href not in all_links:
                all_links.append(href)
        elif 'http' not in href and 'www' not in href and len(href) > 2 and '#' not in href:
            # Bare relative path such as "page.html".
            full = host + '/' + href
            if full not in all_links:
                all_links.append(full)
        elif len(href) > 6:
            external.append(href)
        else:
            unknown.append(href)
    return all_links
def fuzzable_extract(linklist):
    """Return the links from *linklist* that look fuzzable.

    A link is considered fuzzable when it contains an ``=`` sign,
    i.e. it carries at least one key=value query pair to tamper with.
    """
    return [url for url in linklist if '=' in url]
def xploit(link, host=None):
    """Download *link* and return the in-scope links found on the page.

    :param link: URL to fetch.
    :param host: root URL used to resolve relative links; defaults to
                 *link* itself (the usual case for the crawl entry point).
    :returns: list of unique same-site URLs, as produced by extractor().
    """
    if host is None:
        host = link
    # BUG FIX: a timeout keeps the crawler from hanging forever on an
    # unresponsive host; the original request could block indefinitely.
    res = requests.get(link, allow_redirects=True, timeout=10)
    soup = bs4.BeautifulSoup(res.text, 'lxml')
    return extractor(soup, host)
def level2(linklist, host):
    """Crawl one level deeper.

    Visits every URL in *linklist*, merges everything discovered (plus
    the visited URLs themselves) into a single de-duplicated list, and
    returns it.  Newly discovered links are echoed to stdout.
    """
    collected = []
    for url in linklist:
        for found in xploit(url, host):
            if found not in collected:
                collected.append(found)
                print("Appended", found)
        if url not in collected:
            collected.append(url)
    return collected
def main():
    """Parse the CLI arguments, crawl the target URL, print the results.

    Flags: -u/--url target, -d deep crawl, -f show fuzzable links,
    -e show external links collected during the crawl.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--url', help='root url', dest='url')
    parser.add_argument('-d', '--deepcrawl', help='crawl deaply', dest='deepcrawl', action='store_true')
    parser.add_argument('-f', '--fuzzable', help='extract fuzzable', dest='fuzzable', action='store_true')
    parser.add_argument('-e', '--external', help='extract external', dest='external', action='store_true')
    args = parser.parse_args()
    if args.url is None:
        # BUG FIX: the original called quit() silently; tell the user why.
        parser.error('a target url is required (-u/--url)')
    if 'http' not in args.url:
        args.url = 'http://' + args.url
    if args.deepcrawl:
        links = level2(xploit(args.url), args.url)
        header = '\n\nLINKS WITH DEEPCRAWL : \n\n'
    else:
        links = xploit(args.url)
        header = '\n\nLINKS : \n\n'
    # BUG FIX: the original tested len(links) > 1, which printed
    # "No Link Found" when exactly one link was discovered.  The same
    # off-by-one applied to the fuzzable and external sections below.
    if links:
        print(header)
        for link in links:
            print('>\t', link)
    else:
        print('\n\nNo Link Found\n\n')
    if args.fuzzable:
        # Compute once; the original called fuzzable_extract() twice.
        fuzz = fuzzable_extract(links)
        if fuzz:
            print('\n\nFUZZABLE LINKS : \n\n')
            for link in fuzz:
                print('>\t', link)
        else:
            print('\n\nNo Fuzzable Link Found\n\n')
    if args.external:
        if external:
            print('\n\nEXTERNAL LINKS : \n\n')
            for link in external:
                print('>\t', link)
        else:
            print('\n\nNo EXTERNAL Link Found\n\n')
if __name__ == "__main__" :
    # Show the tool banner, then hand control to the CLI entry point.
    banner = '''
_____________________________________________________________________
|CODED BY > R3DXPLOIT(JIMMY) | |
|GITHUB > https://github.com/r3dxpl0it | |
|_____________________________________________________________________|/|
'''
    print(banner)
    main()