forked from defalt-x/WPCH-3301
-
Notifications
You must be signed in to change notification settings - Fork 0
/
hash-page-v1.py
53 lines (44 loc) · 2.12 KB
/
hash-page-v1.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
from concurrent.futures import ThreadPoolExecutor
import requests
from timer import timer
import hashlib
# Load the candidate URLs (one per line) at import time.
# NOTE(review): raises FileNotFoundError if 'domain-url.txt' is absent — confirm
# the script is always run from the directory containing it.
with open(r'domain-url.txt', 'r') as file:
    DOMAINS = file.read().splitlines()
# URL = 'https://en.wikipedia.org/wiki/Special:Random'
# Target SHA-512 digest (hex) that each fetched page body is compared against.
verify_hash = '36367763ab73783c7af284446c59466b4cd653239a311cb7116d4618dee09a8425893dc7500b464fdaf1672d7bef5e891c6e2274568926a49fb4f45132c2a8b4'
def HashPageSHA512 (response, url):
    """SHA-512 the response body and compare it against ``verify_hash``.

    Prints a YES line and calls exit() on a match, otherwise prints a NO
    line and drops the URL from the remaining work list.

    NOTE(review): exit() raises SystemExit only in the calling worker
    thread; it does not stop the other pool workers — confirm intended.
    """
    page_digest = hashlib.sha512(response.content).hexdigest()
    # hexdigest() already returns str, so no str() conversion is needed.
    if verify_hash == page_digest:
        print('SHA512: YES | ', url)
        exit()
    # Comment the below lines if you don't want any output until the HASH is found
    else:
        print('====================')
        print('SHA512: NO | ', url)
        # We remove the last URL it checked so the list will decrease.
        # Unsure if this actually decreases the time taken
        # Bug fix: `url` here is response.url, which can differ from the
        # entry stored in DOMAINS after a redirect; list.remove() would then
        # raise ValueError (previously swallowed by fetch's bare except and
        # misreported as a dead link). Ignore a missing entry explicitly.
        # NOTE(review): list.remove from multiple worker threads is not
        # synchronized, and executor.map has already materialized its
        # argument lists, so this does not actually shrink scheduled work.
        try:
            DOMAINS.remove(url)
        except ValueError:
            pass
    return
# Per-request timeout in seconds passed to session.get().
timeoutDelay = 2
def fetch (session, url):
    """Fetch ``url`` with the shared session and hash any 200-OK response.

    Request failures (timeouts, DNS/connection errors, etc.) are reported
    as dead links and otherwise ignored so the sweep keeps going.
    """
    try:
        with session.get(url, timeout=timeoutDelay) as response:
            if response.status_code == 200:
                HashPageSHA512 (response, response.url)
    except Exception:
        # Bug fix: the original bare `except:` also caught SystemExit raised
        # by exit() inside HashPageSHA512 (and KeyboardInterrupt), so a found
        # hash was printed as a "Dead Link" and the early-exit never fired.
        # `except Exception` lets SystemExit propagate while still treating
        # any request/transport error as a dead link.
        print('==================')
        print(f'Dead Link {url}')
# Once finished this will tell us how long it took to complete the whole list so you can calculate how long it would take for future attempts
# Using the second function below you can visit the random wiki page 5000 times using 10 workers.
# This only takes approx 16seconds to complete.
@timer(1, 1)
def main():
    """Sweep every URL in DOMAINS through ``fetch`` on a 10-worker pool."""
    with ThreadPoolExecutor(max_workers=10) as executor:
        with requests.Session() as session:
            # this will send each URL in the DOMAINS array to be hashed
            # (`[DOMAINS][0]` was a roundabout spelling of DOMAINS itself)
            executor.map(fetch, [session] * len(DOMAINS), DOMAINS)
            # Use this one if you want to just check a how long it would take to visit random wiki pages and hash the page contents
            # executor.map(fetch, [session] * 5000, [URL] * 5000)
            # Redundant executor.shutdown(wait=True) removed: leaving the
            # `with ThreadPoolExecutor(...)` block already performs it.