Commit 8ec85aa

Added subcommands and changed secret scanner
Prateek-Thakare committed Aug 25, 2023
1 parent 396169f commit 8ec85aa
Showing 12 changed files with 236 additions and 76 deletions.
39 changes: 33 additions & 6 deletions configs/local.yml
@@ -2,14 +2,38 @@

workflow:
  - workflowName: 'default'
    schedule: 'daily between 00:00 and 04:00'
    cmd: []
    workflowConfig:
      - moduleName: discovery
        tools: ['Subfinder', 'SSLMate', 'Amass']
        order: 1
      - moduleName: prerecon
        tools: ['FindCDN', 'Naabu']
        order: 2
      - moduleName: activehostscan
        tools: ['HTTPX_Tech', 'HTTPX']
        order: 3
      - moduleName: activerecon
        tools: ['Wafw00f']
        order: 4
      - moduleName: scan
        tools: ['DNSTwister', 'Csper', 'Nuclei', 'NucleiRecon']
        order: 5
      - moduleName: secretscanner
        tools: ['SecretScanner']
        order: 6

  # This workflow is for product security teams. It includes tools like Route53 and IPinfo. Please provide the key for IPinfo.
  - workflowName: 'productsecurity'
    schedule: 'daily between 00:00 and 04:00'
    cmd: []
    workflowConfig:
      - moduleName: Route53
        tools: ['Route53']
        order: 1
      - moduleName: discovery
        tools: ['Subfinder', 'SSLMate', 'Amass']
        tools: ['Subfinder']
        order: 1
      - moduleName: prerecon
        tools: ['FindCDN', 'Naabu', 'IPinfo']
@@ -26,6 +50,7 @@ workflow:
      - moduleName: secretscanner
        tools: ['SecretScanner']
        order: 6

  # This workflow excludes some tools, namely SSLMate, IPinfo, DNSTwister, and Csper.
  - workflowName: 'bugbounty'
    schedule: 'daily between 00:00 and 04:00'
@@ -49,6 +74,7 @@ workflow:
      - moduleName: secretscanner
        tools: ['SecretScanner']
        order: 6

  - workflowName: 'test'
    schedule: 'daily between 00:00 and 04:00'
    cmd: []
@@ -84,10 +110,10 @@ dbConfig:

logging:
  version: 1
  disable_existing_loggers: False
  disable_existing_loggers: True
  formatters:
    default:
      format: "[%(asctime)s] --> %(filename)s: %(lineno)d - %(levelname)s: %(message)s"
      format: "[%(asctime)s] --> %(levelname)s: %(message)s"
      "()": "mantis.config_parsers.logging_utils.CustomFormatter"

  handlers:
@@ -111,14 +137,15 @@ logging_debug:
  version: 1
  disable_existing_loggers: False
  formatters:
    simple:
      format: "[%(asctime)s] --> %(filename)s: %(lineno)d - %(levelname)s: %(message)s"
    default:
      format: "[%(asctime)s] --> %(levelname)s: %(message)s"
      "()": "mantis.config_parsers.logging_utils.CustomFormatter"

  handlers:
    console:
      class: logging.StreamHandler
      level: DEBUG
      formatter: simple
      formatter: default
      stream: ext://sys.stdout

  loggers:
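For reference, a minimal sketch of how a workflow entry like the ones above can be consumed: load the YAML, select a workflow by name, and walk its modules in ascending order. This loader is illustrative only, not Mantis's actual implementation; the function name and the config path are assumptions.

import yaml  # PyYAML

def load_workflow(config_path, workflow_name):
    """Return one workflow's module list, sorted by its 'order' field (illustrative helper)."""
    with open(config_path) as f:
        config = yaml.safe_load(f)
    for workflow in config["workflow"]:
        if workflow["workflowName"] == workflow_name:
            return sorted(workflow["workflowConfig"], key=lambda m: m["order"])
    raise ValueError(f"workflow '{workflow_name}' not found in {config_path}")

# e.g. iterate the 'bugbounty' workflow module by module
for module in load_workflow("configs/local.yml", "bugbounty"):
    print(module["order"], module["moduleName"], module["tools"])
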
2 changes: 1 addition & 1 deletion launch.py
@@ -28,4 +28,4 @@ def main():
start = time.perf_counter()
main()
finish = time.perf_counter()
logging.info(f"Total time taken to run the tool: {round(finish - start, 2)} seconds")
# logging.info(f"Total time taken to run the tool: {round(finish - start, 2)} seconds")
6 changes: 4 additions & 2 deletions mantis/config_parsers/logging_utils.py
@@ -9,12 +9,13 @@ class CustomFormatter(logging.Formatter):
    grey = "\x1b[38;20m"
    yellow = "\x1b[33;20m"
    red = "\x1b[31;20m"
    green = "\x1b[32;20m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"
    format = "[%(asctime)s] --> %(filename)s: %(lineno)d - %(levelname)s: %(message)s"
    format = "[%(asctime)s] --> %(levelname)s: %(message)s"

    FORMATS = {
        logging.DEBUG: grey + format + reset,
        logging.DEBUG: green + format + reset,
        logging.INFO: grey + format + reset,
        logging.WARNING: yellow + format + reset,
        logging.ERROR: red + format + reset,
@@ -34,6 +35,7 @@ def configure_logging(args: ArgsModel):
    if args.verbose:
        log_config = ConfigProvider.get_config().logging_debug
        logging.config.dictConfig(log_config)
        logging.StreamHandler().setFormatter(CustomFormatter())
        logging.info('MANTIS ASSET DISCOVERY - STARTED')
        logging.info("Debug mode enabled")
    else:
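The hunk above shows only the color table; the method that consumes the FORMATS dict sits outside the visible context. As a rough, self-contained sketch of the conventional per-level formatter pattern (class and variable names here are illustrative, not the repository's):

import logging

class ColorFormatter(logging.Formatter):
    """Choose an ANSI-colored format string based on the record's level."""
    grey, green, yellow = "\x1b[38;20m", "\x1b[32;20m", "\x1b[33;20m"
    red, bold_red, reset = "\x1b[31;20m", "\x1b[31;1m", "\x1b[0m"
    base = "[%(asctime)s] --> %(levelname)s: %(message)s"

    FORMATS = {
        logging.DEBUG: green + base + reset,      # DEBUG renders green, as in the commit
        logging.INFO: grey + base + reset,
        logging.WARNING: yellow + base + reset,
        logging.ERROR: red + base + reset,
        logging.CRITICAL: bold_red + base + reset,
    }

    def format(self, record):
        # Look up the per-level format and delegate to a stock Formatter
        return logging.Formatter(self.FORMATS[record.levelno]).format(record)

handler = logging.StreamHandler()
handler.setFormatter(ColorFormatter())
logging.basicConfig(level=logging.DEBUG, handlers=[handler])
logging.debug("debug output is colored green")
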
1 change: 1 addition & 0 deletions mantis/modules/Route53/Route53.py
@@ -64,6 +64,7 @@ async def main(self):
output_dict_list.append(domain_dict)
await CrudUtils.insert_assets(output_dict_list, source='internal')
except Exception as e:
results["failure"] = 1
results['exception'] = str(e)

return results
7 changes: 4 additions & 3 deletions mantis/modules/alerter.py
@@ -11,10 +11,10 @@
class Alerter:
@staticmethod
async def send_alerts(log_dict, args):

try:
notify_config = ConfigProvider.get_config().notify

scan_efficiency_blocks, scan_stats, module_scan_stats = Alerter.get_stats_slack_message(log_dict)
for team in notify_config:
asset_type_list, asset_tag_list = NotificationsUtils.get_assets_to_notify_list(team.teamName)
finding__type_list, finding_tag_list = NotificationsUtils.get_findings_to_notify_list(team.teamName)
@@ -25,13 +25,14 @@ async def send_alerts(log_dict, args):
if isinstance(team.channel[channel_type], list):
for webhook in team.channel[channel_type]:
if team.scanEfficiency == True:
Notifications.send_slack_notifications(Alerter.get_stats_slack_message(log_dict), webhook)
Notifications.send_slack_notifications(scan_efficiency_blocks, webhook)
for block in slack_blocks:
Notifications.send_slack_notifications([block], webhook)
else:
logging.error("Slack must provide list of webhooks, check local.yml")
except Exception as e:
logging.debug(f"Slack alerts not configured")
return scan_stats, module_scan_stats

@staticmethod
async def get_inventory_slack_message(assets, findings, asset_tag_list, finding_tag_list):
@@ -262,4 +263,4 @@ def get_stats_slack_message(logs):
# final_str += f"- {tool}\t Success %: {results_dict[module]['tools_dict'][tool]['success_percentage']}\n"
# final_str += '\n'
blocks.append(divider)
return blocks
return blocks, scan_stats, module_scan_stats_list
11 changes: 5 additions & 6 deletions mantis/modules/prerecon/Naabu.py
@@ -34,12 +34,11 @@ def parse_report(self, outfile):
naabu_output_dict = []
tool_output_dict = {}
hosts = []
try:
with open(outfile) as json_lines:
for line in json_lines:
naabu_output_dict.append(json.loads(line))
except Exception as e:
logging.error(f"Failed to read Naabu output - {e}")

with open(outfile) as json_lines:
for line in json_lines:
naabu_output_dict.append(json.loads(line))


for every_host in naabu_output_dict:
hosts.append(every_host['host'])
2 changes: 1 addition & 1 deletion mantis/modules/scan/Nuclei.py
@@ -98,7 +98,7 @@ def parse_report(self, outfile):

return nuclei_info
else:
logging.warning('Nuclei output file found, but no vulnerabilities were reported')
logging.debug('Nuclei output file found, but no vulnerabilities were reported')

async def db_operations(self, output_dict, asset=None):

32 changes: 17 additions & 15 deletions mantis/modules/secretscanner/submodules/url_downloader.py
@@ -4,6 +4,7 @@
import requests
import logging
from concurrent.futures import ThreadPoolExecutor
from mantis.utils.base_request import BaseRequestExecutor
from mantis.utils.tool_utils import get_assets_grouped_by_type
from mantis.constants import ASSET_TYPE_TLD

@@ -20,11 +21,12 @@


@staticmethod
def download_file(url, extension, base_path):
async def download_file(url, extension, base_path):
try:
logging.debug(f"Downloading {url}")
response = requests.get(url)
response.raise_for_status()
request_tuple = None, url, None, None
_, response = BaseRequestExecutor.sendRequest("GET", request_tuple)

except requests.exceptions.RequestException as e:
logging.error(f"Error downloading {url}: {e}")
else:
@@ -59,22 +61,22 @@ async def process_urls(args, path):
for domain in domains:
domain = domain.strip()
URLDownloader.file_path = f"{path}/{domain}/{domain}"
logging.debug(f"{URLDownloader.file_path} File not found!")
logging.debug(f"{URLDownloader.file_path}")

if URLDownloader.extensions:
base_path = os.path.join(path, domain)
URLDownloader.create_folders(base_path, URLDownloader.extensions)
if URLDownloader.extensions:
base_path = os.path.join(path, domain)
URLDownloader.create_folders(base_path, URLDownloader.extensions)

found_urls = URLDownloader.find_links_in_file()
found_urls = URLDownloader.find_links_in_file()

if found_urls:
with ThreadPoolExecutor(max_workers=5) as executor:
if found_urls:
for url in found_urls:
for extension in URLDownloader.extensions:
if url.endswith(extension):
executor.submit(URLDownloader.download_file, url, extension, base_path)
break
await URLDownloader.download_file(url, extension, base_path)

else:
logging.debug("No URLs found in the file.")
else:
logging.debug("No URLs found in the file.")
else:
logging.debug("No extensions found in the config file.")
logging.debug("No extensions found in the config file.")
35 changes: 29 additions & 6 deletions mantis/modules/workflow.py
@@ -6,13 +6,13 @@
import os
import sys
import time
from tqdm import tqdm
import json
import logging
import asyncio
import importlib
import concurrent.futures
from tqdm import tqdm
from datetime import timedelta
from mantis.modules.alerter import Alerter
from mantis.utils.config_utils import ConfigUtils
from mantis.utils.list_assets import ListAssets
from mantis.models.args_model import ArgsModel
@@ -23,7 +23,6 @@
from mantis.constants import ASSET_TYPE_TLD, ASSET_TYPE_SUBDOMAIN, ASSET_TYPE_IP
from mantis.scan_orchestration.threadpool_scan import ExecuteScanThreadPool
from mantis.models.tool_logs_model import ModuleLogs, ScanLogs
from mantis.modules.alerter import Alerter

logging.getLogger().setLevel(logging.INFO)
class Workflow:
@@ -126,10 +125,11 @@ async def workflow_executor(args: ArgsModel):
else:
execute_threadpool_obj = ExecuteScanThreadPool()
def done_callback(future):

pbar.update(1)
loop = asyncio.get_running_loop()
tasks = []
with tqdm(total=len(commands_list)) as pbar:
with tqdm(total=len(commands_list), colour="green") as pbar:
for tool_tuple in commands_list:
pbar.set_description(module.upper())
task = loop.create_task(execute_threadpool_obj.execute_and_store(tool_tuple))
@@ -139,9 +139,13 @@ def done_callback(future):

module_log["module_tool_logs"] = res

except FileNotFoundError as e:
logging.debug("No file generated for tool")
except Exception as e:
logging.error(f"Error calling core functions on tool classes {e}")

print()

module_log["module_end_time"] = time.perf_counter()
module_log["module_time_taken"] = str(timedelta(seconds=round(module_log["module_end_time"] - module_log["module_start_time"], 0)))
moduleLog_validated = ModuleLogs(**module_log)
@@ -169,8 +173,27 @@ def done_callback(future):
os.makedirs('logs/scan_efficiency')
logfile_name = 'logs/scan_efficiency/'+ args.org + str(time.time()).split('.')[0] + "-logs.json"

logging.info(f"Writing logs to file: {logfile_name}")
with open(logfile_name, "w", encoding="utf-8") as outfile:
outfile.write(str(logs))

await Alerter.send_alerts(log_dict=scan_stat_validated, args=args)
scan_stats, module_scan_stats = await Alerter.send_alerts(log_dict=scan_stat_validated, args=args)
module_stats = ""
# print(module_scan_stats)
for module in module_scan_stats:
module_stats += "\033[1;34m"+module["module_name"] + "\033[0m\n"
module_stats += f"Time taken: {module['module_time_taken']}\n"
module_stats += f"Efficiency: {module['module_efficiency']}\n\n"
print(f'''\u001B[32m
\033[1;32mSCAN STATS:\033[0m
\033[1;34mTOTAL\033[0m
Efficiency: {scan_stats['scan_percentage']}
Time taken: {scan_stats['scan_time_taken']}
{module_stats}
You can find the detailed stats for each tool/subdomain combination here: {logfile_name}
\033[1;32mScan Completed
\u001B[0m''')
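The task and progress-bar wiring in workflow_executor is split across hunks here. A rough, self-contained sketch of the pattern it uses (asyncio tasks whose done callbacks tick a green tqdm bar, with the results gathered afterwards) looks like this; the names and the simulated work are placeholders, not Mantis code:

import asyncio
import random
from tqdm import tqdm

async def run_tool(tool_tuple):
    """Placeholder for ExecuteScanThreadPool.execute_and_store."""
    await asyncio.sleep(random.uniform(0.1, 0.3))
    return {"tool": tool_tuple[0], "status": "success"}

async def run_module(module_name, commands_list):
    loop = asyncio.get_running_loop()
    tasks = []
    with tqdm(total=len(commands_list), colour="green") as pbar:
        pbar.set_description(module_name.upper())
        for tool_tuple in commands_list:
            task = loop.create_task(run_tool(tool_tuple))
            # Each completed task advances the bar, in whatever order tasks finish
            task.add_done_callback(lambda fut: pbar.update(1))
            tasks.append(task)
        return await asyncio.gather(*tasks)

results = asyncio.run(run_module("discovery", [("Subfinder",), ("Amass",)]))
print(results)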

6 changes: 5 additions & 1 deletion mantis/tool_base_classes/toolScanner.py
@@ -85,9 +85,13 @@ async def execute(self, tool_tuple):
results["tool_time_taken"] = CommonUtils.get_ikaros_std_timestamp()
if tool_results_dict:
await self.db_operations(tool_results_dict, asset=asset)

except FileNotFoundError as e:
logging.debug(f"No file generated for the {asset}")

except Exception as e:
results["exception"] = str(e)
logging.exception(
logging.debug(
f"Error received: {type(e).__name__}: {e} for {asset} in tool {type(self).__name__}")

return results