diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2ece4fe
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+*.pyc
+.idea/
+reports/*
+report_map.json
+.DS_Store
+build/
+dist/
+j1nuclei.egg-info/
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..b3eafb3
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include MANIFEST.in
+include LICENSE
+include README.md
+include j1nuclei/targets_discovery.json
\ No newline at end of file
diff --git a/README.md b/README.md
index f9f62be..e46fb59 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,80 @@
-# j1nuclei
\ No newline at end of file
+
+## About
+J1Nuclei is a CLI tool demonstrating how the JupiterOne platform can automate and learn from other tools.
+It automates the everyday security task of scanning endpoints for vulnerabilities. Once scans are complete, the tool returns findings to the JupiterOne knowledge graph, where results can be reviewed, prioritized, and measured using the JupiterOne console and Insights dashboards.
+
+### Quickstart
+
+The tool can be installed from PyPI or from source by cloning the repository.
+
+1. [Get Started](https://info.jupiterone.com/start-account) - If you're not already using JupiterOne, it's free (no credit card).
+2. Install nuclei by following [ProjectDiscovery's installation instructions](https://github.com/projectdiscovery/nuclei).
+3. Install j1nuclei
+ * PIP ``pip install j1nuclei``
+ * From source ``git clone git@github.com:JupiterOne/j1nuclei.git``
+4. Get a JupiterOne API token
+Follow the instructions in the [Creating User and Account API Keys](https://community.askj1.com/kb/articles/785-creating-user-and-account-api-keys) kb article.
+
+5. Export your API key to the environment variable ``J1_API_TOKEN``
+``export J1_API_TOKEN="<your-api-key>"``
+6. Launch j1nuclei from a console or terminal, as shown below.
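+
+A minimal invocation only needs your JupiterOne account ID (``-a`` is the tool's only required argument; the value below is a placeholder):
+```
+j1nuclei -a <your-account-id>
+```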
+
+
+## Exploring Findings
+Findings are mapped back into our graph using the following schema:
+
+### 1. JupiterOne Query Language (J1QL)
+More information about J1QL is available from [Introduction to JupiterOne Query Language](https://community.askj1.com/kb/articles/980-introduction-to-jupiterone-query-language-j1ql).
+J1QL and the knowledge graph can answer many questions; here are a few from the data set produced by J1Nuclei.
+
+#### How many nuclei issues do I have?
+```
+FIND nuclei_finding as f
+RETURN count(f) as value
+```
+
+#### How many of my critical assets in production are affected?
+```
+FIND *
+WITH tag.Production = true AND classification = 'critical' AS asset
+THAT HAS >> nuclei_finding
+RETURN COUNT(asset)
+```
+#### How many endpoints are affected?
+```
+FIND UNIQUE * as asset
+THAT HAS >> nuclei_finding
+RETURN count(asset) as value
+```
+
+#### Criticality of the issues?
+```
+FIND nuclei_finding as f
+WHERE f._type = 'nuclei_finding'
+RETURN f.severity as x, count(f) as y
+```
+
+#### What are my issues (graph view)?
+```
+FIND *
+THAT HAS >> nuclei_finding
+THAT IS >> nuclei_vulnerability
+RETURN TREE
+```
+
+### 2. Insight Dashboard
+You can also create dashboards using our console's Insights feature. To get started, you can use the dashboard we provide as part
+of this tool, [dashboard_nuclei_portal.json](dashboard_nuclei_portal.json). Steps to create, edit, and upload your own
+dashboard are available from [Getting started with Insights dashboards](https://community.askj1.com/kb/articles/812-getting-started-with-insights-dashboards).
+We also share many dashboards in our open-source repository at [https://github.com/JupiterOne/insights-dashboards](https://github.com/JupiterOne/insights-dashboards).
+
+
+
+## Customizing target discovery
+Because getting a comprehensive view may require several queries, j1nuclei uses a JSON file, [targets_discovery.json](j1nuclei/targets_discovery.json),
+to define all the queries to run. The file is populated with common queries by default and is extensible with any J1QL query.
+More information on J1QL is available from our [support site](https://community.askj1.com/kb/articles/980-introduction-to-jupiterone-query-language-j1ql),
+and more example queries are available from the [JupiterOne Questions library](https://ask.us.jupiterone.io/filter?tagFilter=all).
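+
+Each query must RETURN the aliases ``key``, ``target``, ``scope``, and ``source``; results missing any of these are skipped. As an illustrative sketch (the ``Application`` class and ``webLink`` property here are assumptions for the example, not shipped defaults), a custom entry in the file's ``queries`` array could look like:
+```
+{
+  "name": "Get applications",
+  "query": "FIND Application as d RETURN d._key as key, d.webLink as target, d._integrationInstanceId as scope, d._source as source"
+}
+```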
\ No newline at end of file
diff --git a/dashboard_nuclei_portal.json b/dashboard_nuclei_portal.json
new file mode 100644
index 0000000..896093f
--- /dev/null
+++ b/dashboard_nuclei_portal.json
@@ -0,0 +1,195 @@
+{
+ "widgets": [
+ {
+ "id": "19a61314-b3b7-4138-bba4-2429aaf5915b",
+ "title": "# of findings",
+ "type": "number",
+ "config": {
+ "queries": [
+ {
+ "name": "query1",
+ "query": "FIND nuclei_finding as f\nRETURN count(f) as value"
+ }
+ ]
+ }
+ },
+ {
+ "id": "6110c88c-a763-4fc5-991a-5faf27869259",
+ "title": "Issue graph",
+ "type": "graph",
+ "config": {
+ "queries": [
+ {
+ "name": "query1",
+ "query": "FIND *\nTHAT HAS >> Finding \nTHAT IS >> Vulnerability as vul\nWHERE vul._type = 'nuclei_vulnerability'\nRETURN TREE"
+ }
+ ]
+ }
+ },
+ {
+ "id": "df53564c-e179-4703-92e2-f08df4191b2c",
+ "title": "# of affected assets",
+ "type": "number",
+ "config": {
+ "queries": [
+ {
+ "name": "query1",
+ "query": "FIND UNIQUE * as asset\nTHAT HAS >> Finding\nWITH _type = 'nuclei_finding'\nRETURN count(asset) as value"
+ }
+ ]
+ }
+ },
+ {
+ "id": "90f1f350-0a0d-4dc1-95c1-9c23fed9d4e4",
+ "title": "Production Critical - Findings",
+ "type": "bar",
+ "config": {
+ "queries": [
+ {
+ "name": "Findings",
+ "query": "FIND * as target\nTHAT HAS >> Finding as f WHERE\ntarget.tag.Production = true AND\ntarget.classification = \"critical\" AND\nf._type = \"nuclei_finding\"\nreturn f.severity as x, count(f) as y"
+ }
+ ]
+ }
+ },
+ {
+ "id": "bc7fcab8-bc35-46da-8d16-a07076f4997d",
+ "title": "# of critical assets affected",
+ "type": "number",
+ "config": {
+ "queries": [
+ {
+ "name": "query1",
+ "query": "FIND *\nWITH tag.Production = true AND classification = 'critical' AS asset\nTHAT HAS >> Finding\nWITH _type = 'nuclei_finding'\nRETURN COUNT(asset) as value"
+ }
+ ]
+ }
+ },
+ {
+ "id": "6e74fab5-d955-4ea2-bd93-030a46bdc250",
+ "title": "Total Findings",
+ "type": "bar",
+ "config": {
+ "queries": [
+ {
+ "name": "Findings",
+ "query": "FIND * as target\nTHAT HAS >> Finding as f WHERE\nf._type = \"nuclei_finding\"\nreturn f.severity as x, count(f) as y"
+ }
+ ]
+ }
+ },
+ {
+ "id": "3e5fb37a-863b-450c-8c2f-87732b62eb3d",
+ "title": "# of findings for critical assets",
+ "type": "number",
+ "config": {
+ "queries": [
+ {
+ "name": "query1",
+ "query": "FIND * as target\nTHAT HAS >> Finding as f WHERE\ntarget.tag.Production = true AND\ntarget.classification = \"critical\" AND\nf._type = \"nuclei_finding\"\nreturn count(f) as value"
+ }
+ ]
+ }
+ }
+ ],
+ "layouts": {
+ "sm": [],
+ "xs": [],
+ "lg": [
+ {
+ "static": false,
+ "w": 3,
+ "moved": false,
+ "h": 1,
+ "x": 0,
+ "y": 0,
+ "i": "19a61314-b3b7-4138-bba4-2429aaf5915b"
+ },
+ {
+ "static": false,
+ "w": 12,
+ "moved": false,
+ "h": 3,
+ "x": 0,
+ "y": 2,
+ "i": "6110c88c-a763-4fc5-991a-5faf27869259"
+ },
+ {
+ "static": false,
+ "w": 3,
+ "moved": false,
+ "h": 1,
+ "x": 3,
+ "y": 0,
+ "i": "df53564c-e179-4703-92e2-f08df4191b2c"
+ },
+ {
+ "static": false,
+ "w": 4,
+ "moved": false,
+ "h": 1,
+ "x": 7,
+ "y": 1,
+ "i": "90f1f350-0a0d-4dc1-95c1-9c23fed9d4e4"
+ },
+ {
+ "static": false,
+ "w": 3,
+ "moved": false,
+ "h": 1,
+ "x": 3,
+ "y": 1,
+ "i": "bc7fcab8-bc35-46da-8d16-a07076f4997d"
+ },
+ {
+ "static": false,
+ "w": 4,
+ "moved": false,
+ "h": 1,
+ "x": 7,
+ "y": 0,
+ "i": "6e74fab5-d955-4ea2-bd93-030a46bdc250"
+ },
+ {
+ "static": false,
+ "w": 3,
+ "moved": false,
+ "h": 1,
+ "x": 0,
+ "y": 1,
+ "i": "3e5fb37a-863b-450c-8c2f-87732b62eb3d"
+ }
+ ],
+ "xl": [],
+ "md": [
+ {
+ "x": 0,
+ "h": 2,
+ "i": "19a61314-b3b7-4138-bba4-2429aaf5915b",
+ "y": 1000000000000,
+ "w": 6
+ },
+ {
+ "x": 0,
+ "h": 2,
+ "i": "6110c88c-a763-4fc5-991a-5faf27869259",
+ "y": 1000000000000,
+ "w": 6
+ },
+ {
+ "x": 0,
+ "h": 2,
+ "i": "df53564c-e179-4703-92e2-f08df4191b2c",
+ "y": 1000000000000,
+ "w": 6
+ },
+ {
+ "x": 0,
+ "h": 2,
+ "i": "90f1f350-0a0d-4dc1-95c1-9c23fed9d4e4",
+ "y": 1000000000000,
+ "w": 6
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/insight-dashboard.png b/insight-dashboard.png
new file mode 100644
index 0000000..c449a19
Binary files /dev/null and b/insight-dashboard.png differ
diff --git a/j1nuclei-cli.png b/j1nuclei-cli.png
new file mode 100644
index 0000000..766c58f
Binary files /dev/null and b/j1nuclei-cli.png differ
diff --git a/j1nuclei/__init__.py b/j1nuclei/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/j1nuclei/__main__.py b/j1nuclei/__main__.py
new file mode 100644
index 0000000..a531a07
--- /dev/null
+++ b/j1nuclei/__main__.py
@@ -0,0 +1,18 @@
+import logging
+import sys
+import j1nuclei.cli
+
+
+def setup_logger():
+ root = logging.getLogger()
+ root.setLevel(logging.INFO)
+ handler = logging.StreamHandler(sys.stdout)
+ handler.setLevel(logging.INFO)
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ handler.setFormatter(formatter)
+ root.addHandler(handler)
+
+
+if __name__ == '__main__':
+ setup_logger()
+ sys.exit(j1nuclei.cli.main())
diff --git a/j1nuclei/cli.py b/j1nuclei/cli.py
new file mode 100644
index 0000000..e193683
--- /dev/null
+++ b/j1nuclei/cli.py
@@ -0,0 +1,138 @@
+import argparse
+import os
+import sys
+from typing import List, Optional
+
+import j1nuclei.config
+import j1nuclei.runner
+
+
+def _get_api_key(env: str) -> Optional[str]:
+    """
+    :param env: Environment variable to retrieve
+    :return: value of the environment variable, or None if it is not set
+    """
+ return os.getenv(env)
+
+
+def _set_global_config(config_namespace: argparse.Namespace) -> None:
+ """
+ Set global configuration state
+ :param config_namespace: argparse namespace
+ :return: None
+ """
+ j1nuclei.config.j1_account = config_namespace.account
+ j1nuclei.config.nb_nuclei_concurrent = config_namespace.concurrent
+ j1nuclei.config.nuclei_extra_parameters = config_namespace.nuclei_params
+ j1nuclei.config.nuclei_report_path = config_namespace.nuclei_report_path
+ j1nuclei.config.query_file = config_namespace.query_file
+ j1nuclei.config.j1_api_key = _get_api_key(config_namespace.apikey_env)
+ j1nuclei.config.persister_scope = config_namespace.scope
+
+ if not j1nuclei.config.j1_api_key:
+        print(
+            f"Error retrieving API key from environment variable {config_namespace.apikey_env}. The API key must be set.")
+        raise RuntimeError(f"Unable to retrieve API key from {config_namespace.apikey_env}")
+
+
+class CLI:
+ def __init__(self, prog=None):
+ """
+ :param prog: Program name
+ """
+ self.prog = prog
+ self.parser = self._build_parser()
+
+ def _build_parser(self) -> argparse.ArgumentParser:
+ """
+ :return: argument parser.
+ """
+ parser = argparse.ArgumentParser(
+ prog=self.prog,
+ description="J1Nuclei demonstrates how you can use JupiterOne to automate running ProjectDiscovery's Nuclei security scanner.",
+ epilog="For more information on JupiterOne's API visit https://community.askj1.com/kb/articles/794-jupiterone-api"
+ )
+ parser.add_argument(
+ '-a',
+ '--account',
+ type=str,
+ required=True,
+ help=(
+            'Required - Set the JupiterOne account to collect targets and store results. '
+            'To find the ID in your JupiterOne account go to Settings > Integration > {integration name} > {configuration name} > value in the ID field.'
+ ),
+ )
+ parser.add_argument(
+ '-c',
+ "--concurrent",
+ type=int,
+ default=5,
+ help=(
+            'Optional - Number of concurrent nuclei instances to run'
+ ),
+ )
+ parser.add_argument(
+ '-n',
+ '--nuclei-params',
+ type=str,
+ default=None,
+        help=('Optional - Additional nuclei parameters to pass. '
+              'The tool makes use of the -u, -json, and -o parameters, so they cannot be overwritten.'),
+ )
+ parser.add_argument(
+ '-q',
+ '--query-file',
+ type=str,
+ default=f"{os.path.dirname(os.path.abspath(__file__))}/targets_discovery.json",
+ help='Optional - The file containing targeting queries. By default targets_discovery.json is used',
+ )
+ parser.add_argument(
+ '-apikey-env',
+ type=str,
+ default="J1_API_TOKEN",
+ help=(
+            'Optional - The environment variable used to retrieve the JupiterOne API key. By default J1_API_TOKEN is used. '
+            'To create an API key follow the instructions at '
+            'https://community.askj1.com/kb/articles/785-creating-user-and-account-api-keys#create-account-level-api-keys'
+ ),
+ )
+ parser.add_argument(
+ '-r',
+ '--nuclei-report-path',
+ type=str,
+ default=os.path.join(os.getcwd(), "reports/"),
+        help='Optional - Path where nuclei reports are stored. Default is reports/',
+ )
+ parser.add_argument(
+ '-s',
+ '--scope',
+ type=str,
+ default="j1nuclei",
+ help='Optional - J1 Persister scope. Default is j1nuclei',
+ )
+ return parser
+
+    def main(self, argv: List[str]) -> None:
+        """
+        Entrypoint for the command line interface
+        :param argv: The arguments supplied to the command line program.
+ :return: None
+ """
+ _set_global_config(self.parser.parse_args(argv))
+
+        # nuclei doesn't check whether the output folder exists and errors out
+        # if it is missing, so make sure it does
+ os.makedirs(j1nuclei.config.nuclei_report_path, exist_ok=True)
+
+ j1nuclei.runner.run()
+
+ print("Vulnerability scan completed!")
+
+
+def main(argv=None) -> None:
+ """
+ Main entry point to cli
+ :param argv: command line argument data
+ :return: None
+ """
+ argv = argv if argv is not None else sys.argv[1:]
+ sys.exit(CLI(prog='j1nuclei').main(argv))
diff --git a/j1nuclei/config.py b/j1nuclei/config.py
new file mode 100644
index 0000000..121f2f3
--- /dev/null
+++ b/j1nuclei/config.py
@@ -0,0 +1,9 @@
+j1_account: str = ""
+nb_nuclei_concurrent: int = 5
+nuclei_extra_parameters: str = ""
+query_file: str = "targets_discovery.json"
+j1_api_key: str = "J1_API_TOKEN"
+nuclei_report_path: str = "reports"
+persister_scope: str = "j1nuclei"
+
+
diff --git a/j1nuclei/j1api.py b/j1nuclei/j1api.py
new file mode 100644
index 0000000..8a1118f
--- /dev/null
+++ b/j1nuclei/j1api.py
@@ -0,0 +1,136 @@
+import logging
+import requests
+import time
+from jupiterone import JupiterOneClient
+
+import j1nuclei.config
+from j1nuclei.j1auth import get_auth_headers
+from typing import Dict
+
+
+logger = logging.getLogger(__name__)
+
+
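+# Persister synchronization lifecycle, as used below: create a job, upload
+# entities/relationships, wait for AWAITING_UPLOADS, finalize the job, then
+# wait for FINISHED while the merge completes.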
+def ingest_data_and_finalize(job_id: str, payload: Dict) -> None:
+ """
+ Ingest data and finalize persister job
+ :param job_id: persister job id
+ :param payload: data to ingest
+ :return: None
+ """
+
+ print(f"Uploading findings to JupiterOne - jobid {job_id}")
+ call_persister_upload(job_id, payload)
+ wait_for_job(job_id, "AWAITING_UPLOADS", 1)
+
+ logger.debug(f"Completed upload, finalizing job {job_id}")
+ call_persister_job_finalize(job_id)
+
+ print(f"Uploading completed merge in progress - jobid {job_id}")
+ print(f"Waiting for merge to complete - jobid {job_id}")
+ wait_for_job(job_id, "FINISHED", 1)
+
+
+def start_ingestion_job(payload: Dict) -> None:
+ """
+ Start persister ingestion job
+ :param payload: data to ingest
+ :return: None
+ """
+ job_id = create_persister_job()
+
+ if job_id is None:
+ raise Exception("Unable to create job id")
+
+ logger.debug(f"Created persister job id {job_id}")
+ logger.debug(f"Pushing payload {payload}")
+
+ call_persister_upload(job_id, payload)
+ wait_for_job(job_id, "AWAITING_UPLOADS", 1)
+
+ logger.debug(f"Completed upload, finalizing job {job_id}")
+ call_persister_job_finalize(job_id)
+ wait_for_job(job_id, "FINISHED", 1)
+
+
+def wait_for_job(job_id: str, status_to_wait_for: str, wait_time_sec: int) -> None:
+ """
+ Wait for a specific persister job state
+ :param job_id: persister job id
+ :param status_to_wait_for: state to wait for
+    :param wait_time_sec: seconds to wait between status polls
+ :return: None
+ """
+ # wait for persister job to reach expected state
+ while True:
+ r = get_job_status(job_id)
+ if r == status_to_wait_for:
+ break
+
+ time.sleep(wait_time_sec)
+
+
+def get_job_status(job_id: str) -> str:
+ """
+ Get status for a persister job
+ :param job_id: persister job id
+ :rtype: str
+ :return: state
+ """
+ api_url = f"https://api.us.jupiterone.io/persister/synchronization/jobs/{job_id}"
+ response = requests.get(api_url, headers=get_auth_headers())
+ response.raise_for_status()
+ logger.debug(f"get job status response for {job_id} - {response.json()}")
+
+ return response.json()["job"]["status"]
+
+
+def call_persister_upload(job_id: str, payload: Dict) -> None:
+ """
+ Upload data to persister
+ :param job_id: persister job id
+ :param payload: data to upload
+ :return: None
+ """
+ api_url = f"https://api.us.jupiterone.io/persister/synchronization/jobs/{job_id}/upload"
+ response = requests.post(api_url, headers=get_auth_headers(), json=payload)
+ response.raise_for_status()
+ logger.debug(f"call_persister_upload for {job_id} - {response.json()}")
+
+
+def call_persister_job_finalize(job_id: str) -> None:
+ """
+ Finalize persister job
+ :param job_id: persister job id
+ :return: None
+ """
+ api_url = f"https://api.us.jupiterone.io/persister/synchronization/jobs/{job_id}/finalize"
+ response = requests.post(api_url, headers=get_auth_headers())
+ response.raise_for_status()
+ logger.debug(f"call_job_finalize for {job_id} - {response.json()}")
+
+
+def create_persister_job() -> str:
+ """
+ Create persister job
+ :rtype: str
+ :return: id of newly created job
+ """
+ api_url = "https://api.us.jupiterone.io/persister/synchronization/jobs"
+ payload = {"source": "api", "scope": "j1nuclei", "syncMode": "DIFF"}
+ response = requests.post(api_url, headers=get_auth_headers(), json=payload)
+ response.raise_for_status()
+
+ return response.json()["job"]["id"]
+
+
+def graph_query(query: str) -> Dict:
+ """
+ Query JupiterOne
+ :param query: query
+ :return: results
+ """
+ j1_client = JupiterOneClient(j1nuclei.config.j1_account, j1nuclei.config.j1_api_key)
+ return j1_client.query_v1(query)
+
diff --git a/j1nuclei/j1auth.py b/j1nuclei/j1auth.py
new file mode 100644
index 0000000..670009a
--- /dev/null
+++ b/j1nuclei/j1auth.py
@@ -0,0 +1,15 @@
+import j1nuclei.config
+
+
+def get_auth_headers() -> dict:
+ """
+ Get JupiterOne authentication headers
+ :return: headers including account and authorization token
+ """
+ auth_header = {
+ "Content-Type": "application/json",
+ "JupiterOne-Account": j1nuclei.config.j1_account,
+ "Authorization": f"Bearer {j1nuclei.config.j1_api_key}"
+ }
+
+ return auth_header
diff --git a/j1nuclei/nucleirunner.py b/j1nuclei/nucleirunner.py
new file mode 100644
index 0000000..688c828
--- /dev/null
+++ b/j1nuclei/nucleirunner.py
@@ -0,0 +1,40 @@
+import concurrent.futures
+import logging
+import subprocess
+from typing import List, Dict
+
+import j1nuclei.config
+
+logger = logging.getLogger(__name__)
+
+
+def run_nuclei_concurrent(targets: List, max_workers: int) -> None:
+ """
+ Run nuclei concurrently
+ :param targets: hosts to scan
+    :param max_workers: maximum number of concurrent nuclei instances to run
+ :return: None
+ """
+ with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
+ for result in executor.map(run_nuclei, targets):
+ logging.info(f"Execution completed with result {result}")
+
+
+def run_nuclei(j1_target_context: Dict) -> int:
+ """
+ Run nuclei
+ :param j1_target_context: target context
+ :rtype: int
+ :return: nuclei exit code
+ """
+    # build the nuclei command line; extra parameters may be None, so default to ""
+    nuclei_cmd = "nuclei -u {} -json -o {} {}".format(j1_target_context["target"],
+                                                      j1_target_context["nuclei_report_file"],
+                                                      j1nuclei.config.nuclei_extra_parameters or "").split()
+
+    logger.debug(f"Running nuclei with arg - {nuclei_cmd}")
+
+    # subprocess.run returns a CompletedProcess; the exit code is on .returncode
+    ret_code = subprocess.run(nuclei_cmd, shell=False).returncode
+
+ logger.debug(f"{ret_code} -> ret code for {j1_target_context['target']}")
+ return ret_code
diff --git a/j1nuclei/runner.py b/j1nuclei/runner.py
new file mode 100644
index 0000000..a647e9d
--- /dev/null
+++ b/j1nuclei/runner.py
@@ -0,0 +1,289 @@
+import json
+import uuid
+import logging
+import os
+
+from j1nuclei.j1api import graph_query, create_persister_job, ingest_data_and_finalize
+from j1nuclei.nucleirunner import run_nuclei_concurrent
+from typing import List, Dict, Optional
+
+import j1nuclei.config
+
+logger = logging.getLogger(__name__)
+
+
+def run() -> None:
+ """
+ Run j1nuclei main flow
+ :return: None
+ """
+ process_targets(j1nuclei.config.query_file,
+ j1nuclei.config.nuclei_report_path,
+ j1nuclei.config.nb_nuclei_concurrent)
+
+
+def process_targets(query_file_path: str,
+ nuclei_report_folder: str,
+ nb_concurrent: int) -> None:
+ """
+    Discover targets, scan them with nuclei, and ingest the results
+ :param query_file_path: query file path
+ :param nuclei_report_folder: folder to store nuclei results
+ :param nb_concurrent: number of maximum concurrent nuclei to run
+ :return: None
+ """
+
+ with open(query_file_path, "r") as query_file:
+ queries = json.load(query_file)
+
+ targets = []
+ target_keys = dict()
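+    # aliases every targeting query must RETURN (see targets_discovery.json)
+    # so that findings can be mapped back to the originating entity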
+ expected_keys = {"key", "target", "scope", "source"}
+
+ for q in queries["queries"]:
+ logger.debug(f"Processing query {q['name']}")
+
+ data = graph_query(q["query"])
+
+ q_target_count = 0
+
+ if data:
+ # avoid duplicates
+ for r in data["data"]:
+
+                # validate that the query returns the expected attributes
+ if not expected_keys.issubset(r.keys()):
+ logger.debug(f"Expected attributes {expected_keys} not present in result for query - {q['query']}")
+ continue
+
+                # check to see if the query returned a target. Some queries may return an empty string.
+ if "target" in r.keys():
+ if r["target"] != "":
+ q_target_count = q_target_count + 1
+ if not r["key"] in target_keys:
+ r["nuclei_report_file"] = os.path.join(nuclei_report_folder, str(uuid.uuid4()) + ".json")
+
+ j1_target_context = r.copy()
+
+                            # in some cases, the target may be an array, depending on the node property;
+                            # we only scan the first host/IP
+ if isinstance(j1_target_context["target"], list):
+ j1_target_context["target"] = j1_target_context["target"][0]
+
+ targets.append(j1_target_context)
+
+ target_keys[r["key"]] = ""
+ else:
+ logger.error(f"Error retrieving results for query name {q['name']}")
+
+ print(f"Query - {q['name']} found {q_target_count} targets")
+
+ print(f"Found a total of {len(targets)} targets to scan")
+
+ # saving data map
+ logger.info(f"Saving target map file for {len(targets)} targets")
+ save_runner_mapping(targets, "report_map.json")
+
+ run_nuclei_concurrent(targets, nb_concurrent)
+
+ logger.info("Scanning completed, ingesting results")
+ process_runner_map("report_map.json")
+
+
+def save_runner_mapping(runner_map: List, filepath: str) -> None:
+    """
+    Save scan target to report mapping
+    :param runner_map: mapping between targets and nuclei report files
+    :param filepath: where to save the mapping
+    :return: None
+    """
+ logger.debug(f"Saving runner mapping to {filepath}")
+ with open(filepath, "w") as outfile:
+ json.dump(runner_map, outfile)
+
+
+def process_runner_map(filepath: str) -> None:
+ """
+ Process target reports
+ :param filepath: target to report map
+ :return: None
+ """
+ with open(filepath, "r") as runner_map_file:
+ runner_map = json.load(runner_map_file)
+
+ job_id = create_persister_job()
+
+ if job_id is None:
+ raise Exception("Unable to create job id")
+
+    job_keys = dict()  # used to keep track of all entity and relationship keys and avoid duplicate entries
+
+ job_payload = dict()
+ job_payload["entities"] = []
+ job_payload["relationships"] = []
+
+ for j1_target_context in runner_map:
+ logger.debug(
+ f"Ingesting data for target {j1_target_context['target']} from {j1_target_context['nuclei_report_file']}")
+
+ target_payload = parse_target_report(j1_target_context, job_keys)
+ if target_payload:
+ job_payload["entities"].extend(target_payload["entities"])
+ job_payload["relationships"].extend(target_payload["relationships"])
+
+ print(f"Merging findings back to JupiterOne account {j1nuclei.config.j1_account}")
+
+ ingest_data_and_finalize(job_id, job_payload)
+
+ print("Merging complete!")
+ logger.debug(f"Done processing runner map {filepath}")
+
+
+def marshal_nuclei_to_j1payload(j1_target_context: Dict, nuclei_findings: Dict, job_keys: Dict) -> Dict:
+ """
+ Convert nuclei data to JupiterOne
+ :param j1_target_context: target context
+ :param nuclei_findings: findings
+ :param job_keys: unique entity and relationships keys
+    :return: persister payload containing the entities and relationships
+ """
+ entities = []
+ relationships = []
+
+ # schema
+    # Target - HAS -> Nuclei Finding - IS -> Nuclei Vulnerability
+ for nuclei_finding in nuclei_findings:
+ # create properties
+ # define entity for Weakness
+
+ matcher_name = nuclei_finding.get("matcher-name") or ''
+
+        # TODO: improve - cheap workaround
+        # nuclei is not consistent about where it puts additional data,
+        # which makes building unique keys non-trivial and case by case.
+        # For example, some data is in matcher-name, some in extracted-results.
+        # The cheap workaround for now: if we hit a duplicate key, we skip the entity.
+ finding_entity_key = "nuclei_{}_{}_{}".format(j1_target_context["key"],
+ nuclei_finding["template-id"],
+ matcher_name)
+
+ # if we already have the entity in the batch upload we can skip
+ # this happens when entity has multiple findings
+ # we only add 1 instance, but we add all relationships/findings to it
+ if finding_entity_key not in job_keys:
+ job_keys[finding_entity_key] = ""
+
+ finding_entity = dict()
+
+ finding_entity["_key"] = finding_entity_key
+ finding_entity["_type"] = "nuclei_finding"
+ finding_entity["_class"] = "Finding"
+ finding_entity["owner"] = "nuclei"
+ finding_entity["matcher-name"] = matcher_name
+ finding_entity["displayName"] = nuclei_finding["info"]["name"]
+ finding_entity["severity"] = nuclei_finding["info"]["severity"]
+ finding_entity["nuclei_type"] = nuclei_finding["type"]
+ entities.append(finding_entity)
+
+ # Vulnerability entity
+ vul_entity_key = f"nuclei_id_{nuclei_finding['template-id']}"
+
+ # if we already created it we skip
+ if vul_entity_key not in job_keys:
+ job_keys[vul_entity_key] = ""
+
+ vul_entity = dict()
+ vul_entity["_key"] = vul_entity_key
+ vul_entity["_type"] = "nuclei_vulnerability"
+ vul_entity["_class"] = "Vulnerability"
+ vul_entity["owner"] = "nuclei"
+ vul_entity["name"] = nuclei_finding["info"].get("name")
+ vul_entity["displayName"] = vul_entity["name"]
+ vul_entity["author"] = nuclei_finding["info"].get("author")
+ vul_entity["description"] = nuclei_finding["info"].get("description")
+ vul_entity["severity"] = nuclei_finding["info"]["severity"]
+ vul_entity["nuclei_type"] = nuclei_finding["type"]
+ vul_entity["template"] = nuclei_finding["template"]
+ vul_entity["template-id"] = nuclei_finding["template-id"]
+ vul_entity["template-url"] = nuclei_finding["template-url"]
+
+ entities.append(vul_entity)
+
+ # https://community.askj1.com/kb/articles/1157-creating-relationships-between-assets-you-own-and-assets-you-do-not
+ # relationship
+
+ # Target - HAS - Finding
+ has_relationship_key = f"{j1_target_context['key']}_{finding_entity_key}"
+
+ if has_relationship_key not in job_keys:
+ job_keys[has_relationship_key] = ""
+
+ has_relationship = dict()
+ has_relationship["_key"] = has_relationship_key
+ has_relationship["_type"] = "nuclei_has"
+ has_relationship["_class"] = "HAS"
+ has_relationship["displayName"] = has_relationship["_class"]
+
+ # https://community.askj1.com/kb/articles/1157-creating-relationships-between-assets-you-own-and-assets-you-do-not
+ # to create relationships to entities we didn't create, we need to provide its source and scope.
+ has_relationship["_fromEntitySource"] = j1_target_context["source"]
+ has_relationship["_fromEntityScope"] = j1_target_context["scope"]
+ has_relationship["_fromEntityKey"] = j1_target_context["key"]
+ has_relationship["_toEntitySource"] = "api"
+ has_relationship["_toEntityScope"] = j1nuclei.config.persister_scope
+ has_relationship["_toEntityKey"] = finding_entity_key
+ relationships.append(has_relationship)
+
+ # Finding - IS - Vulnerability
+ is_relationship_key = f"{finding_entity_key}_{vul_entity_key}"
+
+ if is_relationship_key not in job_keys:
+ job_keys[is_relationship_key] = ""
+
+ is_relationship = dict()
+ is_relationship["_key"] = is_relationship_key
+ is_relationship["_type"] = "nuclei_is"
+ is_relationship["_class"] = "IS"
+ is_relationship["displayName"] = is_relationship["_class"]
+
+ is_relationship["_fromEntitySource"] = "api"
+ is_relationship["_fromEntityScope"] = j1nuclei.config.persister_scope
+ is_relationship["_fromEntityKey"] = finding_entity_key
+ is_relationship["_toEntitySource"] = "api"
+ is_relationship["_toEntityScope"] = j1nuclei.config.persister_scope
+ is_relationship["_toEntityKey"] = vul_entity_key
+
+ relationships.append(is_relationship)
+
+ payload = dict()
+ payload["entities"] = entities
+ payload["relationships"] = relationships
+
+ return payload
+
+
+def parse_target_report(j1_target_context: Dict, job_keys: Dict) -> Optional[Dict]:
+    """
+    Parse nuclei report
+    :param j1_target_context: target context
+    :param job_keys: job keys
+    :return: JupiterOne-formatted report data, or None if the report has no findings
+    """
+ findings = []
+
+ nuclei_report_filename = j1_target_context["nuclei_report_file"]
+
+ logger.debug(f"Processing {nuclei_report_filename}")
+
+ if os.path.exists(nuclei_report_filename):
+ with open(nuclei_report_filename, "r") as nuclei_report:
+            # nuclei writes one JSON object per line (JSON Lines) rather than a
+            # JSON array, so json.load(file) fails. Read line by line and parse
+            # each record with json.loads() instead.
+ for line in nuclei_report.readlines():
+ findings.append(json.loads(line))
+
+ if len(findings) > 0:
+ print(f"Target key {j1_target_context['key']} has {len(findings)} issues")
+ return marshal_nuclei_to_j1payload(j1_target_context, findings, job_keys)
+ else:
+ return None
diff --git a/j1nuclei/targets_discovery.json b/j1nuclei/targets_discovery.json
new file mode 100644
index 0000000..c6bacee
--- /dev/null
+++ b/j1nuclei/targets_discovery.json
@@ -0,0 +1,24 @@
+{
+ "queries": [
+ {
+ "name": "Exposed to everyone",
+ "query": "Find Everyone that ALLOWS * as d RETURN d._key as key, coalesce(d.baseUrl, d.url) as target, d._integrationInstanceId as scope, d._source as source"
+ },
+ {
+ "name": "Get domains",
+ "query": "FIND Domain as d RETURN d._key as key, d.displayName as target, d._integrationInstanceId as scope, d._source as source"
+ },
+ {
+ "name": "Get domain cname,a,aaa records",
+ "query": "FIND DomainRecord as d WHERE d.type = 'CNAME' OR d.type = 'A' or d.type = 'AAA' RETURN d._key as key, d.name as target, d._integrationInstanceId as scope, d._source as source"
+ },
+ {
+ "name": "Get data stores",
+ "query": "FIND DataStore as d RETURN d._key as key, d.baseUrl as target, d._integrationInstanceId as scope, d._source as source"
+ },
+ {
+ "name":"Get all public ip address",
+ "query": "FIND unique (Host|NetworkInterface) WITH publicIpAddress != undefined as ip RETURN ip._key as key, ip.publicIpAddress as target, ip._integrationDefinitionId as scope, ip._source as source"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/jupiterone_nuclei.png b/jupiterone_nuclei.png
new file mode 100644
index 0000000..02bdbf1
Binary files /dev/null and b/jupiterone_nuclei.png differ
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..c9908e9
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+jupiterone==0.2.0
+requests==2.28.1
+setuptools==63.2.0
diff --git a/schema.png b/schema.png
new file mode 100644
index 0000000..8d22518
Binary files /dev/null and b/schema.png differ
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..4f3845f
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,31 @@
+from setuptools import setup, find_packages
+
+setup(
+ name='j1nuclei',
+ version='1.0.3',
+ maintainer='JupiterOne',
+ packages=find_packages(),
+ package_data={"j1nuclei": ['targets_discovery.json']},
+ install_requires=['jupiterone', 'requests'],
+ url='https://github.com/jupiterOne/j1nuclei',
+ license='MIT License',
+ author='JupiterOne',
+ author_email='sacha.faut@jupiterone.com',
+    description='J1Nuclei is a CLI tool demonstrating how the JupiterOne platform can automate and learn from other tools. It automates the everyday security task of scanning endpoints for vulnerabilities.',
+ entry_points={
+ 'console_scripts': [
+ 'j1nuclei = j1nuclei.cli:main'
+ ],
+ },
+ classifiers=[
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: Information Technology',
+ 'Intended Audience :: System Administrators',
+ 'License :: OSI Approved :: MIT License',
+ 'Natural Language :: English',
+ 'Operating System :: POSIX :: Linux',
+ 'Programming Language :: Python',
+ 'Topic :: Security',
+ ],
+)