-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdombuster.py
executable file
·172 lines (145 loc) · 4.92 KB
/
dombuster.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
#!/usr/bin/env python3
import re
import json
import argparse
import time
from workers.scrapers import *
from workers.resolver import *
from workers.pinger import *
from workers.whois import *
from appearance import *
from workers.timer import format_seconds
# Command-line interface: two positional arguments (target domain, output
# file) plus optional flags controlling sources, verbosity and output format.
parser = argparse.ArgumentParser()
parser.add_argument('domain', type=str, help="Domain name to search for subdomains")
parser.add_argument('output', type=str, help="Name of output file")
parser.add_argument('-k', type=str, help="Path to JSON file with API keys. Currently only VirusTotal and SecurityTrail.")
parser.add_argument('-v', action="store_true", help="Verbosity (caution: it will probably flood your console)")
parser.add_argument('-q', action="store_true", help="Suppress console output")
parser.add_argument('-f', action="store_true", help="Don't use slow sources and go fast and furious")
parser.add_argument("--strict", action='store_true', help="Save only hosts that are online")
parser.add_argument("--ip", action='store_true', help="Save IP address of subdomain")
parser.add_argument("--org", action='store_true', help="Save OrgName and NetRange from reverse whois lookup on subdomain (can be kinda slow)")
parser.add_argument("--csv", action='store_true', help="Output in CSV (default: plaintext)")
# Scraper subclasses that need an API key; they are only activated when a
# key file is supplied via -k (see work()).
APISources = [VirusTotal, SecurityTrails]
def validate(domain):
    """Return *domain* if it looks like a bare domain name, else exit(1).

    All scraper sources expect a bare domain (``kaspersky.com``), not a URL
    (``https://kaspersky.com``): the ``[^/]*`` part rejects anything that
    contains a slash, and the TLD must be at least two letters.

    Fix: the original wrapped this in a bare ``except: exit(1)``, which
    swallowed the ``SystemExit`` raised by the inner ``exit(1)`` (and any
    unexpected error) and re-exited without ever printing a message.
    ``re.match`` on a string cannot fail here, so no try/except is needed.
    """
    validation = re.match(r"[^/]*\.[a-zA-Z]{2,}$", domain)
    if validation:
        return validation.group(0)
    print_error("%s is not a valid domain. Please enter valid one" % domain)
    exit(1)
def work(domain):
    """Run every enabled scraper source in its own thread for *domain*.

    Returns a list of per-source result lists (one list per thread).
    API-key sources from APISources are added only when -k supplies a key
    file containing a key for them; -f drops the slow scrapers entirely.
    """
    sources = [cls for cls in Scraper.__subclasses__() if cls not in APISources]
    if args.f:
        # Fast mode: skip the sources known to be slow.
        for slow in (GoogleTransparency, Yahoo, Google):
            sources.remove(slow)
    subdomains = []
    threads = []
    for source in sources:
        bucket = []
        subdomains.append(bucket)
        threads.append(source(domain, bucket, verbose, start_time))
    if args.k:
        with open(args.k, "r") as handle:
            keys = json.load(handle)
        for source in APISources:
            if str(source.__name__) in keys:
                if verbose > 0:
                    print("%s Found %s key" % (format_seconds(time.time()-start_time), source.__name__))
                bucket = []
                subdomains.append(bucket)
                threads.append(source(domain, bucket, verbose, start_time, keys[source.__name__]))
    for worker in threads:
        worker.start()
    for worker in threads:
        worker.join()
    return subdomains
def merge(subdomains):
    """Flatten the per-source result lists into one deduplicated set."""
    unique = set()
    for source_results in subdomains:
        unique.update(source_results)
    return unique
def beautify(output, domain):
    """Clean the subdomain set in place and return it.

    Removes wildcard/partial entries (leading '.' or '*') and anything
    that does not contain the target domain; with --strict, also removes
    hosts that failed the ping check.
    """
    junk = {entry for entry in output
            if entry.startswith((".", "*")) or domain not in entry}
    output -= junk
    if args.strict:
        # Keep only hosts that answered; PingManager reports the offline set.
        offline = PingManager(list(output), verbose, start_time).start()
        output -= offline
    return output
def additionals(output):
    # Augment the plain subdomain collection with optional IP / whois data.
    # With --ip and/or --org the entries become tuples of strings; with
    # neither flag the input is returned unchanged.
    if args.ip:
        # NOTE(review): zip(output, ips) assumes ResolveManager returns one
        # result per host in the same order as list(output); output is a set,
        # so this relies on set iteration order being stable between the
        # list() call and the zip — TODO confirm against ResolveManager.
        ips = ResolveManager(list(output), verbose, start_time).start()
        output = [(a,b) for a, b in zip(output, ips)]
    if args.org:
        if not args.ip:
            # --org alone still needs IPs for the whois lookup, but they are
            # not kept: the '' placeholder below keeps each entry a tuple so
            # the final a+b concatenation works the same in both branches.
            ips = ResolveManager(list(output), verbose, start_time).start()
            tuples = [(a,b) for a, b in zip(output, ips)]
            orgs = WhoisManager(tuples, verbose, start_time).start()
            output = [(a, '') for a in output]
        else:
            orgs = WhoisManager(output, verbose, start_time).start()
        # Tuple concatenation: orgs entries are presumably (NetRange, OrgName)
        # tuples aligned with output — verify against WhoisManager.
        output = [(a+b) for a,b in zip(output, orgs)]
    return output
def createCSVheader():
    """Build the CSV header row matching the column order of the data rows.

    Fix: the original collected the column names into a ``set`` before
    joining, which made the header order non-deterministic (Python set
    iteration order) and could disagree with the fixed column order of the
    data produced by additionals()/formatize(). A list preserves the
    intended order: domain, ip, inetnum, org-name.
    """
    columns = ['domain']
    if args.ip:
        columns.append('ip')
    if args.org:
        columns.append('inetnum')
        columns.append('org-name')
    return ','.join(columns)
def formatize(output):
    """Render one result entry as a plaintext or CSV line.

    Without --ip/--org the entry is already a bare string and is returned
    as-is; otherwise it is a tuple whose items are flattened into columns.
    """
    if not args.ip and not args.org:
        return output
    fields = []
    for item in output:
        # Placeholder values (0, empty, 'dummy') become blank cells; real
        # values (strings or tuples of strings) collapse to a single string.
        if item == 0 or len(item) == 0 or item == 'dummy':
            fields.append('')
        else:
            fields.append(''.join(item))
    separator = "," if args.csv else " "
    return separator.join(fields)
def main():
    """Validate the target, run all scrapers, post-process and write results."""
    target = validate(args.domain)
    if verbose > 0:
        print_target(target)
    found = merge(work(target))
    found = beautify(found, target)
    found = additionals(found)
    with open(args.output, "w+") as outfile:
        if args.csv:
            outfile.write("%s\n" % createCSVheader())
        for entry in found:
            outfile.write("%s\n" % formatize(entry))
    if verbose > 0:
        elapsed = format_seconds(time.time()-start_time)
        if args.strict:
            print("%s Search is over. There are %d online hosts on subdomains for %s" % (elapsed, len(found), target))
        else:
            print("%s Search is over. There are %d unique subdomains for %s" % (elapsed, len(found), target))
if __name__ == "__main__":
    args = parser.parse_args()
    # -q and -v are mutually exclusive; verbosity: 0 quiet, 1 normal, 2 verbose.
    if args.q and args.v:
        print_error("Conflicting arguments: -q and -v")
        exit(1)
    verbose = 0 if args.q else (2 if args.v else 1)
    if verbose > 0:
        print_banner()
    start_time = time.time()
    main()