From 4c83fc2703aefe2fddcc6eaffd80133f51a0ccdd Mon Sep 17 00:00:00 2001
From: taylor_socfortress <111797488+taylorwalton@users.noreply.github.com>
Date: Wed, 28 Feb 2024 06:41:02 -0600
Subject: [PATCH] Custom alerting (#166)

* Add custom alert provisioning functionality
* Refactor custom field name validation and provision custom monitoring alerts
* Update alert titles in provision.py
* Add custom monitoring alert creation endpoint
* Add validation for CUSTOMER_CODE in custom monitoring alert provision model
* Add session dependency and customer meta retrieval in provision_custom_monitoring_alert_route()
* Update streams field to be optional and provide default value
* grafana reporting start and custom alert creation
* create custom alert iris creation and precommit fixes
* get grafana orgs and dashboards
* Add endpoints to get Grafana dashboard details and panels
* Add Grafana dashboard iframe link generation endpoint
* huntress integration
* precommit fixes
* provision huntress code
* Update example value for alert_priority field
* added CustomAlertForm
* updated CustomAlertForm
* added CustomAlertForm validations
* precommit fixes

---------

Co-authored-by: Davide Di Modica
---
 .../grafana/dashboards/Huntress/summary.json  | 1215 +++++++++++++++++
 .../grafana/dashboards/Mimecast/summary.json  |  116 +-
 .../connectors/grafana/routes/reporting.py    |  208 +++
 .../connectors/grafana/schema/dashboards.py   |   11 +-
 .../connectors/grafana/schema/reporting.py    |  191 +++
 .../connectors/grafana/services/dashboards.py |    8 +-
 .../connectors/grafana/services/reporting.py  |   73 +
 backend/app/db/db_populate.py                 |    3 +
 .../integrations/huntress/routes/huntress.py  |   50 +
 .../integrations/huntress/routes/provision.py |   51 +
 .../integrations/huntress/schema/huntress.py  |  139 ++
 .../integrations/huntress/schema/provision.py |   87 ++
 .../integrations/huntress/services/collect.py |   86 ++
 .../huntress/services/provision.py            |  394 ++++++
 backend/app/integrations/markdown/huntress.md |   23 +
 .../routes/monitoring_alert.py                |   51 +
 .../monitoring_alert/routes/provision.py      |   67 +
 .../schema/monitoring_alert.py                |  189 ++-
 .../monitoring_alert/schema/provision.py      |   85 ++
 .../monitoring_alert/services/custom.py       |  215 +++
 .../monitoring_alert/services/provision.py    |  105 +-
 backend/app/routers/grafana.py                |    2 +
 backend/app/routers/huntress.py               |   23 +
 backend/app/schedulers/scheduler.py           |    2 +
 .../schedulers/services/invoke_huntress.py    |   54 +
 backend/copilot.py                            |    2 +
 frontend/package.json                         |   12 +-
 frontend/src/api/monitoringAlerts.ts          |   23 +
 .../ActiveResponseInvokeForm.vue              |    9 +-
 .../components/customers/CustomerMetaForm.vue |    9 +-
 .../CustomerDefaultSettingsButton.vue         |    4 +-
 ...rm.vue => CustomerDefaultSettingsForm.vue} |    8 +-
 .../MonitoringAlerts/CustomAlertButton.vue    |   39 +
 .../MonitoringAlerts/CustomAlertForm.vue      |  437 ++++++
 .../graylog/MonitoringAlerts/List.vue         |    2 +
 .../soc/SocAlerts/SocAlertAssetsItem.vue      |    6 +-
 .../soc/SocCases/SocCaseAssetsItem.vue        |    6 +-
 .../src/layouts/common/Toolbar/Breadcrumb.vue |   13 +-
 frontend/src/utils/index.ts                   |    2 +-
 39 files changed, 3935 insertions(+), 85 deletions(-)
 create mode 100644 backend/app/connectors/grafana/dashboards/Huntress/summary.json
 create mode 100644 backend/app/connectors/grafana/routes/reporting.py
 create mode 100644 backend/app/connectors/grafana/schema/reporting.py
 create mode 100644 backend/app/connectors/grafana/services/reporting.py
 create mode 100644 backend/app/integrations/huntress/routes/huntress.py
 create mode 100644 backend/app/integrations/huntress/routes/provision.py
create mode 100644 backend/app/integrations/huntress/schema/huntress.py create mode 100644 backend/app/integrations/huntress/schema/provision.py create mode 100644 backend/app/integrations/huntress/services/collect.py create mode 100644 backend/app/integrations/huntress/services/provision.py create mode 100644 backend/app/integrations/markdown/huntress.md create mode 100644 backend/app/integrations/monitoring_alert/services/custom.py create mode 100644 backend/app/routers/huntress.py create mode 100644 backend/app/schedulers/services/invoke_huntress.py rename frontend/src/components/customers/provision/{CustomerDefaultSettingForm.vue => CustomerDefaultSettingsForm.vue} (96%) create mode 100644 frontend/src/components/graylog/MonitoringAlerts/CustomAlertButton.vue create mode 100644 frontend/src/components/graylog/MonitoringAlerts/CustomAlertForm.vue diff --git a/backend/app/connectors/grafana/dashboards/Huntress/summary.json b/backend/app/connectors/grafana/dashboards/Huntress/summary.json new file mode 100644 index 00000000..1e360316 --- /dev/null +++ b/backend/app/connectors/grafana/dashboards/Huntress/summary.json @@ -0,0 +1,1215 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 0, + "y": 0 + }, + "id": 5, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "severity:critical", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS - CRITICAL", + "type": "stat" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "source" + }, + "properties": [ + { + "id": "custom.cellOptions", + "value": { + "type": "color-background" + } + } + ] + } + ] + }, + "gridPos": { + "h": 7, + "w": 6, + "x": 4, + "y": 0 + }, + "id": 6, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": ["sum"], + "show": false + }, + 
"showHeader": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "source", + "id": "3", + "settings": { + "min_doc_count": "1", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "severity:critical", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS - CRITICAL (SOURCE)", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": {}, + "renameByName": { + "source": "SOURCE" + } + } + } + ], + "transparent": true, + "type": "table" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "subject" + }, + "properties": [ + { + "id": "custom.cellOptions", + "value": { + "type": "color-background" + } + } + ] + } + ] + }, + "gridPos": { + "h": 7, + "w": 14, + "x": 10, + "y": 0 + }, + "id": 7, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": ["sum"], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "subject", + "id": "3", + "settings": { + "min_doc_count": "1", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "severity:critical", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS - CRITICAL (SUBJECT)", + "type": "table" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "orange", + "value": null + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 0, + "y": 7 + }, + "id": 3, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS", + "type": "stat" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + 
"defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 14, + "w": 20, + "x": 4, + "y": 7 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "source", + "id": "3", + "settings": { + "min_doc_count": "0", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "10m" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS BY AGENT - HISTOGRAM", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": {}, + "renameByName": { + "Count": "Agent" + } + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [] + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 0, + "y": 14 + }, + "id": 4, + "options": { + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "pieType": "donut", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "platform", + "id": "3", + "settings": { + "min_doc_count": "0", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS BY PLATRFORM", + "transformations": [ + { + "id": "organize", + "options": {} + } + ], + "type": "piechart" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + 
"custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [] + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "high" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "super-light-red", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "critical" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "red", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 0, + "y": 21 + }, + "id": 8, + "options": { + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "pieType": "donut", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "severity", + "id": "3", + "settings": { + "min_doc_count": "0", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS BY SEVERITY", + "transformations": [ + { + "id": "organize", + "options": {} + } + ], + "type": "piechart" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "high" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "super-light-red", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "critical" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "red", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 14, + "w": 20, + "x": 4, + "y": 21 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "severity", + "id": "3", + "settings": { + "min_doc_count": "0", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "10m" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": 
"grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS BY SEVERITY - HISTOGRAM", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": {}, + "renameByName": { + "Count": "Agent" + } + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [] + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "high" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "super-light-red", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "critical" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "red", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 0, + "y": 28 + }, + "id": 9, + "options": { + "legend": { + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "pieType": "donut", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "indicator_types", + "id": "3", + "settings": { + "min_doc_count": "0", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS BY INDICATOR TYPES", + "transformations": [ + { + "id": "organize", + "options": {} + } + ], + "type": "piechart" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "filterable": true, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "EVENT ID" + }, + "properties": [ + { + "id": "links", + "value": [ + { + "targetBlank": true, + "title": "EVENT DETAILS", + "url": "https://grafana.company.local/explore?left=%7B%22datasource%22:%22HUNTRESS%22,%22queries%22:%5B%7B%22refId%22:%22A%22,%22query%22:%22_id:${__value.text}%22,%22alias%22:%22%22,%22metrics%22:%5B%7B%22id%22:%221%22,%22type%22:%22logs%22,%22settings%22:%7B%22limit%22:%22500%22%7D%7D%5D,%22bucketAggs%22:%5B%5D,%22timeField%22:%22timestamp%22%7D%5D,%22range%22:%7B%22from%22:%22now-6h%22,%22to%22:%22now%22%7D%7D" + } + ] + } + ] + } + ] + }, + "gridPos": { + "h": 15, + "w": 24, + "x": 0, + "y": 35 + }, + "id": 10, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "enablePagination": true, + 
"fields": "", + "reducer": ["sum"], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "alias": "", + "bucketAggs": [], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "format": "table", + "metrics": [ + { + "id": "1", + "settings": { + "order": "desc", + "size": "500", + "useTimeRange": true + }, + "type": "raw_data" + } + ], + "query": "", + "queryType": "lucene", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS", + "transformations": [ + { + "id": "filterFieldsByName", + "options": { + "include": { + "names": [ + "timestamp", + "_id", + "body", + "footholds", + "indicator_types", + "platform", + "severity", + "source", + "status", + "subject", + "summary", + "agent_id" + ] + } + } + }, + { + "id": "organize", + "options": { + "excludeByName": {}, + "includeByName": {}, + "indexByName": { + "_id": 1, + "agent_id": 3, + "body": 8, + "footholds": 9, + "indicator_types": 5, + "platform": 4, + "severity": 10, + "source": 2, + "status": 11, + "subject": 6, + "summary": 7, + "timestamp": 0 + }, + "renameByName": { + "_id": "EVENT ID", + "agent_id": "AGENT ID", + "body": "BODY", + "footholds": "FOOTHOLDS", + "indicator_types": "INDICATOR TYPES", + "platform": "PLATFORM", + "severity": "SEVERITY", + "source": "SOURCE", + "status": "STATUS", + "subject": "SUBJECT", + "summary": "SUMMARY", + "timestamp": "DATE/TIME" + } + } + } + ], + "transparent": true, + "type": "table" + } + ], + "refresh": "", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-24h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "HUNTRESS - _SUMMARY", + "version": 4, + "weekStart": "" +} diff --git a/backend/app/connectors/grafana/dashboards/Mimecast/summary.json b/backend/app/connectors/grafana/dashboards/Mimecast/summary.json index 5f91a684..ebe68774 100644 --- a/backend/app/connectors/grafana/dashboards/Mimecast/summary.json +++ b/backend/app/connectors/grafana/dashboards/Mimecast/summary.json @@ -51,7 +51,7 @@ "panels": [ { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -118,7 +118,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -139,7 +139,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -281,7 +281,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -310,7 +310,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -391,7 +391,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -413,7 +413,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -480,7 +480,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -514,7 +514,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -578,7 
+578,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -612,7 +612,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -765,7 +765,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -797,7 +797,7 @@ "circleMinSize": 2, "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "decimals": 0, @@ -852,7 +852,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -875,7 +875,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -1017,7 +1017,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -1046,7 +1046,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -1120,7 +1120,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -1142,7 +1142,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -1216,7 +1216,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -1238,7 +1238,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -1492,7 +1492,7 @@ { "bucketAggs": [], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -1584,7 +1584,7 @@ "panels": [ { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -1651,7 +1651,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -1672,7 +1672,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -1814,7 +1814,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -1843,7 +1843,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -1924,7 +1924,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -1946,7 +1946,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -2013,7 +2013,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -2047,7 +2047,7 @@ }, 
{ "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -2189,7 +2189,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -2218,7 +2218,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -2371,7 +2371,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -2403,7 +2403,7 @@ "circleMinSize": 2, "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "decimals": 0, @@ -2458,7 +2458,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -2545,7 +2545,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -2579,7 +2579,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -2833,7 +2833,7 @@ { "bucketAggs": [], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -2925,7 +2925,7 @@ "panels": [ { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -2993,7 +2993,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3014,7 +3014,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -3157,7 +3157,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3186,7 +3186,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -3267,7 +3267,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3289,7 +3289,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -3357,7 +3357,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3391,7 +3391,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -3534,7 +3534,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3563,7 +3563,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -3717,7 +3717,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3749,7 +3749,7 @@ 
"circleMinSize": 2, "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "decimals": 0, @@ -3804,7 +3804,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3827,7 +3827,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -3892,7 +3892,7 @@ } ], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -3926,7 +3926,7 @@ }, { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "fieldConfig": { @@ -4180,7 +4180,7 @@ { "bucketAggs": [], "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "metrics": [ @@ -5552,7 +5552,7 @@ "list": [ { "datasource": { - "type": "elasticsearch", + "type": "grafana-opensearch-datasource", "uid": "replace_datasource_uid" }, "filters": [], diff --git a/backend/app/connectors/grafana/routes/reporting.py b/backend/app/connectors/grafana/routes/reporting.py new file mode 100644 index 00000000..06b4a34e --- /dev/null +++ b/backend/app/connectors/grafana/routes/reporting.py @@ -0,0 +1,208 @@ +import time +from datetime import datetime +from datetime import timedelta +from typing import List + +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Security +from loguru import logger +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.auth.utils import AuthHandler +from app.connectors.grafana.schema.reporting import GrafanaDashboardDetailsResponse +from app.connectors.grafana.schema.reporting import GrafanaDashboardPanelsResponse +from app.connectors.grafana.schema.reporting import GrafanaDashboardResponse +from app.connectors.grafana.schema.reporting import GrafanaGenerateIframeLinksRequest +from app.connectors.grafana.schema.reporting import GrafanaGenerateIframeLinksResponse +from app.connectors.grafana.schema.reporting import GrafanaLinksList +from app.connectors.grafana.schema.reporting import GrafanaOrganizationsResponse +from app.connectors.grafana.schema.reporting import Panel +from app.connectors.grafana.schema.reporting import TimeRange +from app.connectors.grafana.services.reporting import get_dashboard_details +from app.connectors.grafana.services.reporting import get_dashboards +from app.connectors.grafana.services.reporting import get_orgs +from app.connectors.models import Connectors +from app.db.db_session import get_db + +# App specific imports + + +grafana_reporting_router = APIRouter() + + +async def get_grafana_url(session: AsyncSession): + connector = await session.execute(select(Connectors).where(Connectors.connector_name == "Grafana")) + connector = connector.scalars().first() + return connector.connector_url + + +def calculate_unix_timestamps(time_range: TimeRange): + now = datetime.now() + if time_range.unit == "minutes": + start_time = now - timedelta(minutes=time_range.value) + elif time_range.unit == "hours": + start_time = now - timedelta(hours=time_range.value) + elif time_range.unit == "days": + start_time = now - timedelta(days=time_range.value) + + timestamp_from = int(time.mktime(start_time.timetuple())) 
* 1000
+    timestamp_to = int(time.mktime(now.timetuple())) * 1000
+
+    return timestamp_from, timestamp_to
+
+
+def generate_panel_urls(grafana_url: str, request: GrafanaGenerateIframeLinksRequest, timestamp_from: int, timestamp_to: int):
+    panel_links: List[GrafanaLinksList] = []
+    for panel_id in request.panel_ids:
+        panel_url = (
+            f"{grafana_url}/d-solo/{request.dashboard_uid}/{request.dashboard_title}"
+            f"?orgId={request.org_id}&from={timestamp_from}&to={timestamp_to}"
+            f"&panelId={panel_id}"
+        )
+        panel_links.append(GrafanaLinksList(panel_id=panel_id, panel_url=panel_url))
+    return panel_links
+
+
+@grafana_reporting_router.get(
+    "/orgs",
+    response_model=GrafanaOrganizationsResponse,
+    description="Get Grafana organizations",
+    dependencies=[Security(AuthHandler().require_any_scope("admin", "analyst"))],
+)
+async def get_grafana_orgs():
+    """
+    Endpoint to get Grafana organizations.
+
+    Collects the organizations configured in Grafana so that
+    reporting dashboards can be scoped to an organization.
+
+    Returns:
+        GrafanaOrganizationsResponse: The response containing the organizations collected from Grafana.
+    """
+    logger.info("Getting Grafana orgs")
+    orgs = await get_orgs()
+    return GrafanaOrganizationsResponse(
+        message="Organizations collected from Grafana",
+        orgs=orgs,
+        success=True,
+    )
+
+
+@grafana_reporting_router.get(
+    "/dashboards/{org_id}",
+    response_model=GrafanaDashboardResponse,
+    description="Get Grafana dashboards for reporting",
+    dependencies=[Security(AuthHandler().require_any_scope("admin", "analyst"))],
+)
+async def get_grafana_dashboards(org_id: int):
+    """
+    Endpoint to get Grafana dashboards.
+
+    Args:
+        org_id (int): The ID of the organization.
+
+    Returns:
+        GrafanaDashboardResponse: The response containing the dashboards for the organization.
+    """
+    dashboards = await get_dashboards(org_id=org_id)
+    return GrafanaDashboardResponse(
+        message="Dashboards collected from Grafana",
+        dashboards=dashboards,
+        success=True,
+    )
+
+
+@grafana_reporting_router.get(
+    "/dashboard/{dashboard_uid}",
+    response_model=GrafanaDashboardDetailsResponse,
+    description="Get Grafana dashboard",
+    dependencies=[Security(AuthHandler().require_any_scope("admin", "analyst"))],
+)
+async def get_grafana_dashboard_details(dashboard_uid: str):
+    """
+    Endpoint to get Grafana dashboard details.
+
+    Args:
+        dashboard_uid (str): The UID of the dashboard.
+
+    Returns:
+        GrafanaDashboardDetailsResponse: The response containing the dashboard details.
+    """
+    dashboard_details = await get_dashboard_details(dashboard_uid=dashboard_uid)
+
+    return GrafanaDashboardDetailsResponse(
+        message="Dashboard details collected from Grafana",
+        dashboard_details=dashboard_details,
+        success=True,
+    )
+
+
+@grafana_reporting_router.get(
+    "/dashboard_panels/{dashboard_uid}",
+    response_model=GrafanaDashboardPanelsResponse,
+    description="Get Grafana dashboard panels",
+    dependencies=[Security(AuthHandler().require_any_scope("admin", "analyst"))],
+)
+async def get_grafana_dashboard_panels(dashboard_uid: str):
+    """
+    Endpoint to get Grafana dashboard panels.
+
+    Args:
+        dashboard_uid (str): The UID of the dashboard.
+
+    Returns:
+        GrafanaDashboardPanelsResponse: The response containing the panels of the dashboard.
+    """
+    dashboard_details = await get_dashboard_details(dashboard_uid=dashboard_uid)
+    logger.info(f"Dashboard details: {dashboard_details}")
+
+    # Get the panel id and panel title from the dashboard details for each panel
+    panels = []
+    logger.info("Fetching panels from dashboard details")
+    for panel in dashboard_details["dashboard"]["panels"]:
+        if panel["type"] != "row":
+            panels.append(Panel(id=panel["id"], title=panel["title"]))
+    logger.info(f"Panels: {panels}")
+    return GrafanaDashboardPanelsResponse(
+        message="Panels collected from Grafana",
+        panels=panels,
+        success=True,
+    )
+
+
+@grafana_reporting_router.post(
+    "/generate_iframe_links",
+    response_model=GrafanaGenerateIframeLinksResponse,
+    description="Generate Grafana dashboard iframe links",
+    dependencies=[Security(AuthHandler().require_any_scope("admin", "analyst"))],
+)
+async def generate_grafana_iframe_links(
+    request: GrafanaGenerateIframeLinksRequest,
+    session: AsyncSession = Depends(get_db),
+):
+    """
+    Endpoint to generate Grafana dashboard iframe links.
+
+    Args:
+        request (GrafanaGenerateIframeLinksRequest): The request body containing the dashboard UID and organization ID.
+
+    Returns:
+        GrafanaGenerateIframeLinksResponse: The response containing the generated iframe links.
+    """
+    # get the Grafana URL from the database
+    grafana_url = await get_grafana_url(session)
+    logger.info(f"Grafana URL: {grafana_url}")
+
+    # calculate the Unix timestamps based on the current time and the provided time range
+    timestamp_from, timestamp_to = calculate_unix_timestamps(request.time_range)
+
+    # build the URL string for each panel_id
+    panel_urls = generate_panel_urls(grafana_url, request, timestamp_from, timestamp_to)
+
+    return GrafanaGenerateIframeLinksResponse(
+        message="Iframe links generated from Grafana",
+        links=panel_urls,
+        success=True,
+    )
diff --git a/backend/app/connectors/grafana/schema/dashboards.py b/backend/app/connectors/grafana/schema/dashboards.py
index 208e1132..50eb0137 100644
--- a/backend/app/connectors/grafana/schema/dashboards.py
+++ b/backend/app/connectors/grafana/schema/dashboards.py
@@ -70,6 +70,10 @@ class SapSiemDashboard(Enum):
     USERS_AUTH = ("SapSiem", "users_auth.json")
 
 
+class HuntressDashboard(Enum):
+    SUMMARY = ("Huntress", "summary.json")
+
+
 class DashboardProvisionRequest(BaseModel):
     dashboards: List[str] = Field(
         ...,
@@ -88,7 +92,12 @@ class DashboardProvisionRequest(BaseModel):
     @validator("dashboards", each_item=True)
     def check_dashboard_exists(cls, e):
         valid_dashboards = {
-            item.name: item for item in list(WazuhDashboard) + list(Office365Dashboard) + list(MimecastDashboard) + list(SapSiemDashboard)
+            item.name: item
+            for item in list(WazuhDashboard)
+            + list(Office365Dashboard)
+            + list(MimecastDashboard)
+            + list(SapSiemDashboard)
+            + list(HuntressDashboard)
         }
         if e not in valid_dashboards:
             raise ValueError(f'Dashboard identifier "{e}" is not recognized.')
diff --git a/backend/app/connectors/grafana/schema/reporting.py b/backend/app/connectors/grafana/schema/reporting.py
new file mode 100644
index 00000000..616e26f1
--- /dev/null
+++ b/backend/app/connectors/grafana/schema/reporting.py
@@ -0,0 +1,191 @@
+from typing import List
+from typing import Optional
+
+from fastapi import HTTPException
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import validator
+
+
+class GrafanaOrganizations(BaseModel):
+    id: int = Field(..., description="The ID of the organization.")
+    name: str = Field(..., description="The name of the organization.")
+
+
+class GrafanaOrganizationsResponse(BaseModel): + message: str = Field(..., description="The message from the response.") + orgs: List[GrafanaOrganizations] = Field(..., description="The organizations collected from Grafana.") + success: bool = Field(..., description="The success of the response.") + + +class GrafanaOrganizationDashboards(BaseModel): + id: int + uid: str + title: str + uri: str + url: str + slug: str + type: str + tags: List[str] + isStarred: bool + sortMeta: int + folderId: Optional[int] = None + folderUid: Optional[str] = None + folderTitle: Optional[str] = None + folderUrl: Optional[str] = None + + +class GrafanaDashboardResponse(BaseModel): + message: str = Field(..., description="The message from the response.") + dashboards: List[GrafanaOrganizationDashboards] = Field(..., description="The dashboards collected from Grafana.") + success: bool = Field(..., description="The success of the response.") + + +class Annotation(BaseModel): + builtIn: int + datasource: dict # More specific model can be created for datasource + enable: bool + hide: bool + iconColor: str + name: str + type: str + + +class Threshold(BaseModel): + color: str + value: Optional[float] + + +class FieldConfigDefaults(BaseModel): + color: dict # More specific model can be created for color + custom: dict # More specific model can be created for custom options + mappings: List[dict] # More specific model can be created for mappings + thresholds: dict # Utilize Threshold model + + +class FieldConfig(BaseModel): + defaults: FieldConfigDefaults + overrides: List[dict] # More specific model can be created for overrides + + +class GridPos(BaseModel): + h: int + w: int + x: int + y: int + + +class PanelOptions(BaseModel): + legend: dict # More specific model can be created for legend + tooltip: dict # More specific model can be created for tooltip + + +class Panel(BaseModel): + fieldConfig: Optional[FieldConfig] = Field(None, description="The field configuration for the panel.") + gridPos: Optional[GridPos] = Field(None, description="The grid position for the panel.") + id: int + options: Optional[PanelOptions] = Field(None, description="The options for the panel.") + title: str + type: Optional[str] = Field(None, description="The type of the panel.") + collapsed: Optional[bool] = None # Optional field for row panel + panels: List["Panel"] = [] # Nested list for row panel + + +class Templating(BaseModel): + list: List[dict] # More specific model can be created for template variables + + +class DashboardDetails(BaseModel): + annotations: dict # Use Annotation model for values in the list + editable: bool + fiscalYearStartMonth: int + graphTooltip: int + id: int + links: List[dict] # More specific model can be created for links + liveNow: bool + panels: List[Panel] + refresh: str + schemaVersion: int + tags: List[str] + templating: Templating + time: dict # More specific model can be created for time range + timepicker: dict # More specific model can be created for timepicker options + timezone: str + title: str + uid: str + version: int + weekStart: str + + +class MetaDetails(BaseModel): + type: str + canSave: bool + canEdit: bool + canAdmin: bool + canStar: bool + canDelete: bool + slug: str + url: str + expires: str + created: str + updated: str + updatedBy: str + createdBy: str + version: int + hasAcl: bool + isFolder: bool + folderId: int + folderUid: str + folderTitle: str + folderUrl: str + provisioned: bool + provisionedExternalId: Optional[str] = None + annotationsPermissions: dict # More specific 
model can be created for permissions + + +class GrafanaDashboardDetails(BaseModel): + meta: MetaDetails + dashboard: DashboardDetails + + +class GrafanaDashboardDetailsResponse(BaseModel): + message: str = Field(..., description="The message from the response.") + dashboard_details: GrafanaDashboardDetails = Field(..., description="The dashboard details collected from Grafana.") + success: bool = Field(..., description="The success of the response.") + + +class GrafanaDashboardPanelsResponse(BaseModel): + message: str = Field(..., description="The message from the response.") + panels: List[Panel] = Field(..., description="The panels collected from Grafana.") + success: bool = Field(..., description="The success of the response.") + + +class TimeRange(BaseModel): + value: int + unit: str + + @validator("unit") + def validate_unit(cls, unit): + valid_units = ["minutes", "hours", "days"] + if unit not in valid_units: + raise HTTPException(status_code=400, detail=f"Invalid time range unit: {unit}. Must be one of {valid_units}") + return unit + + +class GrafanaGenerateIframeLinksRequest(BaseModel): + org_id: int = Field(..., description="The ID of the organization.") + dashboard_title: str = Field(..., description="The title of the dashboard.") + dashboard_uid: str = Field(..., description="The UID of the dashboard.") + panel_ids: List[int] = Field(..., description="The IDs of the panels.") + time_range: TimeRange = Field(..., description="Time range in minutes, hours, or days") + + +class GrafanaLinksList(BaseModel): + panel_id: int + panel_url: str + + +class GrafanaGenerateIframeLinksResponse(BaseModel): + message: str = Field(..., description="The message from the response.") + links: List[GrafanaLinksList] = Field(..., description="The links collected from Grafana.") + success: bool = Field(..., description="The success of the response.") diff --git a/backend/app/connectors/grafana/services/dashboards.py b/backend/app/connectors/grafana/services/dashboards.py index a721b2ab..f7f0b034 100644 --- a/backend/app/connectors/grafana/services/dashboards.py +++ b/backend/app/connectors/grafana/services/dashboards.py @@ -7,6 +7,7 @@ from app.connectors.grafana.schema.dashboards import DashboardProvisionRequest from app.connectors.grafana.schema.dashboards import GrafanaDashboard from app.connectors.grafana.schema.dashboards import GrafanaDashboardResponse +from app.connectors.grafana.schema.dashboards import HuntressDashboard from app.connectors.grafana.schema.dashboards import MimecastDashboard from app.connectors.grafana.schema.dashboards import Office365Dashboard from app.connectors.grafana.schema.dashboards import SapSiemDashboard @@ -146,7 +147,12 @@ async def provision_dashboards( errors = [] valid_dashboards = { - item.name: item for item in list(WazuhDashboard) + list(Office365Dashboard) + list(MimecastDashboard) + list(SapSiemDashboard) + item.name: item + for item in list(WazuhDashboard) + + list(Office365Dashboard) + + list(MimecastDashboard) + + list(SapSiemDashboard) + + list(HuntressDashboard) } for dashboard_name in dashboard_request.dashboards: diff --git a/backend/app/connectors/grafana/services/reporting.py b/backend/app/connectors/grafana/services/reporting.py new file mode 100644 index 00000000..4f57c172 --- /dev/null +++ b/backend/app/connectors/grafana/services/reporting.py @@ -0,0 +1,73 @@ +from typing import List + +from fastapi import HTTPException +from loguru import logger + +from app.connectors.grafana.schema.reporting import GrafanaDashboardDetails +from 
app.connectors.grafana.schema.reporting import GrafanaOrganizationDashboards
+from app.connectors.grafana.schema.reporting import GrafanaOrganizations
+from app.connectors.grafana.utils.universal import create_grafana_client
+
+
+async def get_orgs() -> List[GrafanaOrganizations]:
+    """
+    Get all organizations from Grafana.
+
+    Uses the shared Grafana client to list the organizations
+    configured in Grafana; the route layer validates the result
+    against the GrafanaOrganizations model.
+
+    Returns:
+        list: The organizations collected from Grafana.
+
+    Raises:
+        HTTPException: If there is an error collecting the organizations.
+    """
+    logger.info("Getting organizations from Grafana")
+    try:
+        grafana_client = await create_grafana_client("Grafana")
+        orgs = grafana_client.organizations.list_organization()
+        return orgs
+    except Exception as e:
+        logger.error(f"Failed to collect organizations: {e}")
+        raise HTTPException(status_code=500, detail=f"Failed to collect organizations: {e}")
+
+
+async def get_dashboards(org_id: int) -> List[GrafanaOrganizationDashboards]:
+    """
+    Get dashboards from Grafana for the given organization.
+
+    Returns:
+        list: The dashboards collected from Grafana.
+    """
+    logger.info("Getting dashboards from Grafana")
+    try:
+        grafana_client = await create_grafana_client("Grafana")
+        logger.info(f"Switching to organization {org_id}")
+        grafana_client.user.switch_actual_user_organisation(org_id)
+        dashboards = grafana_client.search.search_dashboards()
+        return dashboards
+    except Exception as e:
+        logger.error(f"Failed to collect dashboards: {e}")
+        raise HTTPException(status_code=500, detail=f"Failed to collect dashboards: {e}")
+
+
+async def get_dashboard_details(dashboard_uid: str) -> GrafanaDashboardDetails:
+    """
+    Get dashboard details from Grafana.
+
+    Args:
+        dashboard_uid (str): The UID of the dashboard.
+
+    Returns:
+        dict: The response containing the dashboard details collected from Grafana.
+    """
+    logger.info("Getting dashboard details from Grafana")
+    try:
+        grafana_client = await create_grafana_client("Grafana")
+        dashboard_details = grafana_client.dashboard.get_dashboard(dashboard_uid)
+        return dashboard_details
+    except Exception as e:
+        logger.error(f"Failed to collect dashboard details: {e}")
+        raise HTTPException(status_code=500, detail=f"Failed to collect dashboard details: {e}")
diff --git a/backend/app/db/db_populate.py b/backend/app/db/db_populate.py
index a0f117ed..7ad78576 100644
--- a/backend/app/db/db_populate.py
+++ b/backend/app/db/db_populate.py
@@ -255,6 +255,7 @@ def get_available_integrations_list():
         ("Office365", "Integrate Office365 with SOCFortress."),
         ("Mimecast", "Integrate Mimecast with SOCFortress."),
         ("SAP SIEM", "Integrate SAP SIEM with SOCFortress."),
+        ("Huntress", "Integrate Huntress with SOCFortress."),
         # ... Add more available integrations as needed ...
     ]
 
@@ -348,6 +349,8 @@ async def get_available_integrations_auth_keys_list(session: AsyncSession):
         ("SAP SIEM", "SECRET_KEY"),
         ("SAP SIEM", "USER_KEY"),
         ("SAP SIEM", "API_DOMAIN"),
+        ("Huntress", "API_KEY"),
+        ("Huntress", "API_SECRET"),
         # ... Add more available integrations auth keys as needed ...
] diff --git a/backend/app/integrations/huntress/routes/huntress.py b/backend/app/integrations/huntress/routes/huntress.py new file mode 100644 index 00000000..2dea3db1 --- /dev/null +++ b/backend/app/integrations/huntress/routes/huntress.py @@ -0,0 +1,50 @@ +from fastapi import APIRouter +from fastapi import Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db.db_session import get_db +from app.integrations.huntress.schema.huntress import CollectHuntressRequest +from app.integrations.huntress.schema.huntress import HuntressAuthKeys +from app.integrations.huntress.schema.huntress import InvokeHuntressRequest +from app.integrations.huntress.schema.huntress import InvokeHuntressResponse +from app.integrations.huntress.services.collect import collect_huntress +from app.integrations.routes import find_customer_integration +from app.integrations.utils.utils import extract_auth_keys +from app.integrations.utils.utils import get_customer_integration_response + +integration_huntress_router = APIRouter() + + +@integration_huntress_router.post( + "", + response_model=InvokeHuntressResponse, + description="Pull down Huntress Events.", +) +async def collect_huntress_route(huntress_request: InvokeHuntressRequest, session: AsyncSession = Depends(get_db)): + """Pull down Huntress Events.""" + customer_integration_response = await get_customer_integration_response( + huntress_request.customer_code, + session, + ) + + customer_integration = await find_customer_integration( + huntress_request.customer_code, + huntress_request.integration_name, + customer_integration_response, + ) + + huntress_auth_keys = extract_auth_keys(customer_integration, service_name="Huntress") + + auth_keys = HuntressAuthKeys(**huntress_auth_keys) + + await collect_huntress( + request=( + CollectHuntressRequest( + customer_code=huntress_request.customer_code, + apiKey=auth_keys.API_KEY, + secretKey=auth_keys.API_SECRET, + ) + ), + ) + + return InvokeHuntressResponse(success=True, message="Huntress Events collected successfully.") diff --git a/backend/app/integrations/huntress/routes/provision.py b/backend/app/integrations/huntress/routes/provision.py new file mode 100644 index 00000000..c19c2bd7 --- /dev/null +++ b/backend/app/integrations/huntress/routes/provision.py @@ -0,0 +1,51 @@ +from fastapi import APIRouter +from fastapi import Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db.db_session import get_db +from app.integrations.huntress.schema.provision import ProvisionHuntressRequest +from app.integrations.huntress.schema.provision import ProvisionHuntressResponse +from app.integrations.huntress.services.provision import provision_huntress +from app.integrations.utils.utils import get_customer_integration_response +from app.schedulers.models.scheduler import CreateSchedulerRequest +from app.schedulers.scheduler import add_scheduler_jobs + +integration_huntress_provision_scheduler_router = APIRouter() + + +@integration_huntress_provision_scheduler_router.post( + "/provision", + response_model=ProvisionHuntressResponse, + description="Provision a Huntress integration.", +) +async def provision_huntress_route( + provision_huntress_request: ProvisionHuntressRequest, + session: AsyncSession = Depends(get_db), +) -> ProvisionHuntressResponse: + """ + Provisions a huntress integration. + + Args: + provision_huntress_request (ProvisionHuntressRequest): The request object containing the necessary data for provisioning. + session (AsyncSession, optional): The database session. 
Defaults to Depends(get_db). + + Returns: + ProvisionHuntressResponse: The response object indicating the success or failure of the provisioning process. + """ + # Check if the customer integration settings are available and can be provisioned + await get_customer_integration_response( + provision_huntress_request.customer_code, + session, + ) + await provision_huntress(provision_huntress_request, session) + await add_scheduler_jobs( + CreateSchedulerRequest( + function_name="invoke_huntress_integration_collection", + time_interval=provision_huntress_request.time_interval, + job_id="invoke_huntress_integration_collection", + ), + ) + return ProvisionHuntressResponse( + success=True, + message="Huntress integration provisioned successfully.", + ) diff --git a/backend/app/integrations/huntress/schema/huntress.py b/backend/app/integrations/huntress/schema/huntress.py new file mode 100644 index 00000000..84ec0dfa --- /dev/null +++ b/backend/app/integrations/huntress/schema/huntress.py @@ -0,0 +1,139 @@ +from typing import Dict +from typing import List +from typing import Optional + +from pydantic import BaseModel +from pydantic import Field + + +class InvokeHuntressRequest(BaseModel): + customer_code: str = Field( + ..., + description="The customer code.", + examples=["00002"], + ) + integration_name: str = Field( + "Huntress", + description="The integration name.", + examples=["Huntress"], + ) + + +class InvokeHuntressResponse(BaseModel): + success: bool = Field( + ..., + description="The success status.", + examples=[True], + ) + message: str = Field( + ..., + description="The message.", + examples=["Huntress Events collected successfully."], + ) + + +class HuntressAuthKeys(BaseModel): + API_KEY: str = Field( + ..., + description="The API key.", + examples=["123456"], + ) + API_SECRET: str = Field( + ..., + description="The secret key.", + examples=["123456"], + ) + + +class CollectHuntressRequest(BaseModel): + customer_code: str = Field( + ..., + description="The customer code.", + examples=["00002"], + ) + apiKey: str = Field( + ..., + description="The API key.", + examples=["123456"], + ) + secretKey: str = Field( + ..., + description="The secret key.", + examples=["123456"], + ) + + +class Remediation(BaseModel): + id: int + type: str + status: str + details: dict + completable_by_task_response: bool + completable_manually: bool + display_action: str + approved_at: Optional[str] + approved_by: Optional[dict] + completed_at: Optional[str] + + +class IndicatorCount(BaseModel): + footholds: Optional[int] = Field(0, description="The number of footholds.") + monitored_files: int = 0 + process_detections: int = 0 + ransomware_canaries: int = 0 + antivirus_detections: int = 0 + + +class ApprovedBy(BaseModel): + id: int + email: str + first_name: str + last_name: str + + +class Foothold(BaseModel): + id: Optional[int] # May not be present in all responses + display_name: str + service_name: str + command: str + file_path: str + virus_total_detections: str + virus_total_url: str + + +class IncidentReport(BaseModel): + id: int + status: str + summary: Optional[str] + body: str + updated_at: str + agent_id: Optional[int] + platform: str + status_updated_at: str + organization_id: Optional[int] + sent_at: str + account_id: int + subject: str + remediations: List[Remediation] + footholds: Optional[str] = Field(None, description="The footholds.") + severity: str + closed_at: Optional[str] = Field(None, description="The date the incident was closed.") + indicator_types: List[str] + indicator_counts: 
IndicatorCount
+
+    def to_dict(self) -> Dict:
+        return self.dict()
+
+
+class Pagination(BaseModel):
+    current_page: int
+    current_page_count: int
+    limit: int
+    total_count: int
+    next_page: Optional[int] = Field(None, description="The next page.")
+    next_page_url: Optional[str] = Field(None, description="The next page URL.")
+
+
+class HuntressIncidentResponse(BaseModel):
+    incident_reports: List[IncidentReport]
+    pagination: Pagination
diff --git a/backend/app/integrations/huntress/schema/provision.py b/backend/app/integrations/huntress/schema/provision.py
new file mode 100644
index 00000000..898b1087
--- /dev/null
+++ b/backend/app/integrations/huntress/schema/provision.py
@@ -0,0 +1,87 @@
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import root_validator
+
+
+class ProvisionHuntressRequest(BaseModel):
+    customer_code: str = Field(
+        ...,
+        description="The customer code.",
+        examples=["00002"],
+    )
+    time_interval: int = Field(
+        ...,
+        description="The time interval for the scheduler.",
+        examples=[5],
+    )
+    integration_name: str = Field(
+        "Huntress",
+        description="The integration name.",
+        examples=["Huntress"],
+    )
+
+    # ensure the `integration_name` is always set to "Huntress"
+    @root_validator(pre=True)
+    def set_integration_name(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        values["integration_name"] = "Huntress"
+        return values
+
+
+class ProvisionHuntressResponse(BaseModel):
+    success: bool
+    message: str
+
+
+# ! STREAMS ! #
+class StreamRule(BaseModel):
+    field: str
+    type: int
+    inverted: bool
+    value: str
+
+
+class HuntressEventStream(BaseModel):
+    title: str = Field(..., description="Title of the stream")
+    description: str = Field(..., description="Description of the stream")
+    index_set_id: str = Field(..., description="ID of the associated index set")
+    rules: List[StreamRule] = Field(..., description="List of rules for the stream")
+    matching_type: str = Field(..., description="Matching type for the rules")
+    remove_matches_from_default_stream: bool = Field(
+        ...,
+        description="Whether to remove matches from the default stream",
+    )
+    content_pack: Optional[str] = Field(
+        None,
+        description="Associated content pack, if any",
+    )
+
+    class Config:
+        schema_extra = {
+            "example": {
+                "title": "Huntress SIEM EVENTS - Example Company",
+                "description": "Huntress SIEM EVENTS - Example Company",
+                "index_set_id": "12345",
+                "rules": [
+                    {
+                        "field": "customer_code",
+                        "type": 1,
+                        "inverted": False,
+                        "value": "ExampleCode",
+                    },
+                    {
+                        "field": "integration",
+                        "type": 1,
+                        "inverted": False,
+                        "value": "huntress",
+                    },
+                ],
+                "matching_type": "AND",
+                "remove_matches_from_default_stream": True,
+                "content_pack": None,
+            },
+        }
diff --git a/backend/app/integrations/huntress/services/collect.py b/backend/app/integrations/huntress/services/collect.py
new file mode 100644
index 00000000..e59c15bc
--- /dev/null
+++ b/backend/app/integrations/huntress/services/collect.py
@@ -0,0 +1,86 @@
+import base64
+from typing import List
+
+import httpx
+from loguru import logger
+
+from app.connectors.wazuh_indexer.utils.universal import create_wazuh_indexer_client
+from app.integrations.huntress.schema.huntress import CollectHuntressRequest
+from app.integrations.huntress.schema.huntress import HuntressIncidentResponse
+from app.integrations.huntress.schema.huntress import IncidentReport
+from app.integrations.utils.event_shipper import
event_shipper +from app.integrations.utils.schema import EventShipperPayload + + +async def base64_encode(payload: CollectHuntressRequest) -> str: + """Base64 encode the payload.""" + payload = f"{payload.apiKey}:{payload.secretKey}" + payload_bytes = payload.encode("utf-8") + base64_bytes = base64.b64encode(payload_bytes) + base64_string = base64_bytes.decode("utf-8") + return base64_string + + +async def make_request(url: str, auth: str) -> HuntressIncidentResponse: + headers = {"Accept": "application/json", "Authorization": f"Basic {auth}"} + async with httpx.AsyncClient() as client: + response = await client.get(url, headers=headers) + return HuntressIncidentResponse(**response.json()) + + +async def get_next_page(response: HuntressIncidentResponse) -> str: + if response.pagination and response.pagination.next_page_url: + return response.pagination.next_page_url + return None + + +async def check_if_incident_exists(incident: IncidentReport) -> bool: + """Check if the incident exists in the Wazuh-Indexer.""" + es_client = await create_wazuh_indexer_client("Wazuh-Indexer") + results = es_client.search( + index="huntress*", + body={ + "size": 1000, + "query": {"bool": {"must": [{"term": {"id": incident.id}}]}}, + }, + ) + if results["hits"]["total"]["value"] > 0: + logger.info("Event already exists in Wazuh-Indexer...Skipping") + return True + return False + + +async def send_to_event_shipper(incident: IncidentReport, customer_code: str) -> None: + exists = await check_if_incident_exists(incident) + if not exists: + message = EventShipperPayload( + customer_code=customer_code, + integration="huntress", + version="1.0", + **incident.to_dict(), + ) + await event_shipper(message) + return None + + +async def process_incidents(incidents: List[IncidentReport], customer_code: str) -> None: + """Process a list of incidents.""" + for incident in incidents: + await send_to_event_shipper(incident, customer_code) + + +async def process_pages(url: str, auth: str, customer_code: str) -> None: + """Process all pages of incidents.""" + while url is not None: + response = await make_request(url, auth) + await process_incidents(response.incident_reports, customer_code) + url = await get_next_page(response) + + +async def collect_huntress(request: CollectHuntressRequest) -> None: + """Pull down Huntress Events.""" + logger.info(f"Collecting Huntress Events with request: {request}") + base64_string = await base64_encode(request) + logger.info(f"Base64 encoded string: {base64_string}") + url = "https://api.huntress.io/v1/incident_reports?page=1&limit=100" + await process_pages(url, base64_string, request.customer_code) diff --git a/backend/app/integrations/huntress/services/provision.py b/backend/app/integrations/huntress/services/provision.py new file mode 100644 index 00000000..ada02048 --- /dev/null +++ b/backend/app/integrations/huntress/services/provision.py @@ -0,0 +1,394 @@ +import json +from datetime import datetime + +from loguru import logger +from sqlalchemy import and_ +from sqlalchemy import update +from sqlalchemy.ext.asyncio import AsyncSession + +from app.connectors.grafana.schema.dashboards import DashboardProvisionRequest +from app.connectors.grafana.schema.dashboards import HuntressDashboard +from app.connectors.grafana.services.dashboards import provision_dashboards +from app.connectors.grafana.utils.universal import create_grafana_client +from app.connectors.graylog.services.management import start_stream +from app.connectors.graylog.utils.universal import send_post_request +from 
app.customer_provisioning.schema.grafana import GrafanaDatasource
+from app.customer_provisioning.schema.grafana import GrafanaDataSourceCreationResponse
+from app.customer_provisioning.schema.graylog import GraylogIndexSetCreationResponse
+from app.customer_provisioning.schema.graylog import StreamCreationResponse
+from app.customer_provisioning.schema.graylog import TimeBasedIndexSet
+from app.customer_provisioning.services.grafana import create_grafana_folder
+from app.customer_provisioning.services.grafana import get_opensearch_version
+from app.customers.routes.customers import get_customer
+from app.customers.routes.customers import get_customer_meta
+from app.integrations.huntress.schema.provision import HuntressEventStream
+from app.integrations.huntress.schema.provision import ProvisionHuntressRequest
+from app.integrations.huntress.schema.provision import ProvisionHuntressResponse
+from app.integrations.models.customer_integration_settings import CustomerIntegrations
+from app.integrations.routes import create_integration_meta
+from app.integrations.schema import CustomerIntegrationsMetaSchema
+from app.utils import get_connector_attribute
+
+
+################## ! GRAYLOG ! ##################
+async def build_index_set_config(
+    customer_code: str,
+    session: AsyncSession,
+) -> TimeBasedIndexSet:
+    """
+    Build the configuration for a time-based index set.
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session.
+
+    Returns:
+        TimeBasedIndexSet: The configured time-based index set.
+    """
+    return TimeBasedIndexSet(
+        title=f"HUNTRESS - {(await get_customer(customer_code, session)).customer.customer_name}",
+        description=f"HUNTRESS - {customer_code}",
+        index_prefix=f"huntress_{customer_code}",
+        rotation_strategy_class="org.graylog2.indexer.rotation.strategies.TimeBasedRotationStrategy",
+        rotation_strategy={
+            "type": "org.graylog2.indexer.rotation.strategies.TimeBasedRotationStrategyConfig",
+            "rotation_period": "P1D",
+            "rotate_empty_index_set": False,
+            "max_rotation_period": None,
+        },
+        retention_strategy_class="org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy",
+        retention_strategy={
+            "type": "org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig",
+            "max_number_of_indices": 30,
+        },
+        creation_date=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+        index_analyzer="standard",
+        shards=1,
+        replicas=0,
+        index_optimization_max_num_segments=1,
+        index_optimization_disabled=False,
+        writable=True,
+        field_type_refresh_interval=5000,
+    )
+
+
+# Function to send the POST request and handle the response
+async def send_index_set_creation_request(
+    index_set: TimeBasedIndexSet,
+) -> GraylogIndexSetCreationResponse:
+    """
+    Sends a request to create an index set in Graylog.
+
+    Args:
+        index_set (TimeBasedIndexSet): The index set to be created.
+
+    Returns:
+        GraylogIndexSetCreationResponse: The response from Graylog after creating the index set.
+    """
+    json_index_set = json.dumps(index_set.dict())
+    logger.info(f"json_index_set set: {json_index_set}")
+    response_json = await send_post_request(
+        endpoint="/api/system/indices/index_sets",
+        data=index_set.dict(),
+    )
+    return GraylogIndexSetCreationResponse(**response_json)
+
+
+async def create_index_set(
+    customer_code: str,
+    session: AsyncSession,
+) -> GraylogIndexSetCreationResponse:
+    """
+    Creates an index set for a new customer.
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session.
+
+    Returns:
+        GraylogIndexSetCreationResponse: The response object containing the result of the index set creation.
+    """
+    logger.info(f"Creating index set for customer {customer_code}")
+    index_set_config = await build_index_set_config(customer_code, session)
+    return await send_index_set_creation_request(index_set_config)
+
+
+# ! Event STREAMS ! #
+# Function to create event stream configuration
+async def build_event_stream_config(
+    customer_code: str,
+    index_set_id: str,
+    session: AsyncSession,
+) -> HuntressEventStream:
+    """
+    Builds the configuration for the Huntress event stream.
+
+    Args:
+        customer_code (str): The customer code.
+        index_set_id (str): The index set ID.
+        session (AsyncSession): The async session.
+
+    Returns:
+        HuntressEventStream: The configured Huntress event stream.
+    """
+    return HuntressEventStream(
+        title=f"HUNTRESS EVENTS - {(await get_customer(customer_code, session)).customer.customer_name}",
+        description=f"HUNTRESS EVENTS - {(await get_customer(customer_code, session)).customer.customer_name}",
+        index_set_id=index_set_id,
+        rules=[
+            {
+                "field": "integration",
+                "type": 1,
+                "inverted": False,
+                "value": "huntress",
+            },
+            {
+                "field": "customer_code",
+                "type": 1,
+                "inverted": False,
+                "value": f"{customer_code}",
+            },
+        ],
+        matching_type="AND",
+        remove_matches_from_default_stream=True,
+        content_pack=None,
+    )
+
+
+async def send_event_stream_creation_request(
+    event_stream: HuntressEventStream,
+) -> StreamCreationResponse:
+    """
+    Sends a request to create an event stream.
+
+    Args:
+        event_stream (HuntressEventStream): The event stream to be created.
+
+    Returns:
+        StreamCreationResponse: The response containing the created event stream.
+    """
+    json_event_stream = json.dumps(event_stream.dict())
+    logger.info(f"json_event_stream set: {json_event_stream}")
+    response_json = await send_post_request(
+        endpoint="/api/streams",
+        data=event_stream.dict(),
+    )
+    return StreamCreationResponse(**response_json)
+
+
+async def create_event_stream(
+    customer_code: str,
+    index_set_id: str,
+    session: AsyncSession,
+) -> StreamCreationResponse:
+    """
+    Creates an event stream for a customer.
+
+    Args:
+        customer_code (str): The customer code.
+        index_set_id (str): The ID of the index set.
+        session (AsyncSession): The async session.
+
+    Returns:
+        StreamCreationResponse: The result of the event stream creation request.
+    """
+    event_stream_config = await build_event_stream_config(
+        customer_code,
+        index_set_id,
+        session,
+    )
+    return await send_event_stream_creation_request(event_stream_config)
+
+
+#### ! GRAFANA ! ####
+async def create_grafana_datasource(
+    customer_code: str,
+    session: AsyncSession,
+) -> GrafanaDataSourceCreationResponse:
+    """
+    Creates a Grafana datasource for the specified customer.
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session.
+
+    Returns:
+        GrafanaDataSourceCreationResponse: The response containing the created datasource details.
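+
+    Usage sketch (mirrors how provision_huntress below consumes the response):
+
+        huntress_datasource_uid = (
+            await create_grafana_datasource(customer_code="00002", session=session)
+        ).datasource.uid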
+ """ + logger.info("Creating Grafana datasource") + grafana_client = await create_grafana_client("Grafana") + # Switch to the newly created organization + grafana_client.user.switch_actual_user_organisation( + (await get_customer_meta(customer_code, session)).customer_meta.customer_meta_grafana_org_id, + ) + datasource_payload = GrafanaDatasource( + name="HUNTRESS", + type="grafana-opensearch-datasource", + typeName="OpenSearch", + access="proxy", + url=await get_connector_attribute( + connector_id=1, + column_name="connector_url", + session=session, + ), + database=f"huntress_{customer_code}*", + basicAuth=True, + basicAuthUser=await get_connector_attribute( + connector_id=1, + column_name="connector_username", + session=session, + ), + secureJsonData={ + "basicAuthPassword": await get_connector_attribute( + connector_id=1, + column_name="connector_password", + session=session, + ), + }, + isDefault=False, + jsonData={ + "database": f"huntress_{customer_code}*", + "flavor": "opensearch", + "includeFrozen": False, + "logLevelField": "severity", + "logMessageField": "summary", + "maxConcurrentShardRequests": 5, + "pplEnabled": True, + "timeField": "timestamp", + "tlsSkipVerify": True, + "version": await get_opensearch_version(), + }, + readOnly=True, + ) + results = grafana_client.datasource.create_datasource( + datasource=datasource_payload.dict(), + ) + return GrafanaDataSourceCreationResponse(**results) + + +async def provision_huntress( + provision_huntress_request: ProvisionHuntressRequest, + session: AsyncSession, +) -> ProvisionHuntressResponse: + logger.info( + f"Provisioning Huntress integration for customer {provision_huntress_request.customer_code}.", + ) + + # Create Index Set + index_set_id = ( + await create_index_set( + customer_code=provision_huntress_request.customer_code, + session=session, + ) + ).data.id + logger.info(f"Index set: {index_set_id}") + # Create event stream + stream_id = ( + await create_event_stream( + provision_huntress_request.customer_code, + index_set_id, + session, + ) + ).data.stream_id + # Start stream + await start_stream(stream_id=stream_id) + + # Grafana Deployment + huntress_datasource_uid = ( + await create_grafana_datasource( + customer_code=provision_huntress_request.customer_code, + session=session, + ) + ).datasource.uid + grafana_huntress_folder_id = ( + await create_grafana_folder( + organization_id=( + await get_customer_meta( + provision_huntress_request.customer_code, + session, + ) + ).customer_meta.customer_meta_grafana_org_id, + folder_title="HUNTRESS", + ) + ).id + await provision_dashboards( + DashboardProvisionRequest( + dashboards=[dashboard.name for dashboard in HuntressDashboard], + organizationId=( + await get_customer_meta( + provision_huntress_request.customer_code, + session, + ) + ).customer_meta.customer_meta_grafana_org_id, + folderId=grafana_huntress_folder_id, + datasourceUid=huntress_datasource_uid, + ), + ) + await create_integration_meta_entry( + CustomerIntegrationsMetaSchema( + customer_code=provision_huntress_request.customer_code, + integration_name="Huntress", + graylog_input_id=None, + graylog_index_id=index_set_id, + graylog_stream_id=stream_id, + grafana_org_id=( + await get_customer_meta( + provision_huntress_request.customer_code, + session, + ) + ).customer_meta.customer_meta_grafana_org_id, + grafana_dashboard_folder_id=grafana_huntress_folder_id, + ), + session, + ) + await update_customer_integration_table( + provision_huntress_request.customer_code, + session, + ) + + return 
ProvisionHuntressResponse(
+        success=True,
+        message="Huntress integration provisioned successfully.",
+    )
+
+
+############## ! WRITE TO DB ! ##############
+async def create_integration_meta_entry(
+    customer_integration_meta: CustomerIntegrationsMetaSchema,
+    session: AsyncSession,
+) -> None:
+    """
+    Creates an entry for the customer integration meta in the database.
+
+    Args:
+        customer_integration_meta (CustomerIntegrationsMetaSchema): The customer integration meta object.
+        session (AsyncSession): The async session object for database operations.
+    """
+    await create_integration_meta(customer_integration_meta, session)
+    logger.info(
+        f"Integration meta entry created for customer {customer_integration_meta.customer_code}.",
+    )
+
+
+async def update_customer_integration_table(
+    customer_code: str,
+    session: AsyncSession,
+) -> None:
+    """
+    Updates the `customer_integrations` table to set the `deployed` column to True where the `customer_code`
+    matches the given customer code and the `integration_service_name` is "Huntress".
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session object for database operations.
+    """
+    await session.execute(
+        update(CustomerIntegrations)
+        .where(
+            and_(
+                CustomerIntegrations.customer_code == customer_code,
+                CustomerIntegrations.integration_service_name == "Huntress",
+            ),
+        )
+        .values(deployed=True),
+    )
+    await session.commit()
+
+    return None
diff --git a/backend/app/integrations/markdown/huntress.md b/backend/app/integrations/markdown/huntress.md
new file mode 100644
index 00000000..a2820041
--- /dev/null
+++ b/backend/app/integrations/markdown/huntress.md
@@ -0,0 +1,23 @@
+# [Huntress Integration](https://api.huntress.io/docs#introduction)
+
+Huntress is a cloud-based managed detection and response (MDR) solution that provides continuous monitoring and threat detection for endpoints. This integration allows you to ingest Huntress incidents into the SOCFortress SIEM stack.
+
+## Introduction
+
+The Huntress API follows a RESTful pattern. Requests are made via resource-oriented URLs as described in this document, and API responses are formatted as JSON data.
+
+To test out the Huntress APIs quickly, use the [Swagger Interface](https://api.huntress.io/docs/preview).
+
+## API Overview
+
+### Authentication
+
+To begin, generate your API Key at `<your_account>.huntress.io`. Once you are logged into your account on the Huntress site, check the dropdown menu at the top-right corner of the site header. You should see API Credentials among the options if your account has been granted access to the Huntress API. Click on the option to continue to the API Key generation page.
+
+Once on the API Key generation page, click on the green Setup button. You will be redirected to a page where you can click the Generate button to create a public and private key pair for Huntress API access. The inputs on the page will be filled in with your access credentials once you have done so.
+
+**Your API Private Key will only be visible at this stage of API Key generation. Be sure to save the value somewhere secure: once you navigate away from this page, it will no longer be accessible, and you must regenerate your API credentials if your secret key is lost.**
+
+If necessary, you can repeat the process to regenerate your API credentials with a new API Key and API Secret Key on the same API Key generation page, at `<your_account>.huntress.io/account/api_credentials`.
+
+The Huntress API implements basic access authentication. Once you have your API Key and API Secret Key, provide these values as a Base64 encoded string in every request to the Huntress API via the `Authorization` header. Your request header should look something like `Authorization: Basic [Base64Encode(<API key>:<API secret>)]`.
diff --git a/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py b/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py
index 4d1565e0..ba62c683 100644
--- a/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py
+++ b/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py
@@ -26,6 +26,7 @@ from app.integrations.monitoring_alert.schema.monitoring_alert import (
     MonitoringWazuhAlertsRequestModel,
 )
+from app.integrations.monitoring_alert.services.custom import analyze_custom_alert
 from app.integrations.monitoring_alert.services.office365_exchange import (
     analyze_office365_exchange_online_alerts,
 )
@@ -157,6 +158,56 @@ async def create_monitoring_alert(
     )
+
+
+@monitoring_alerts_router.post(
+    "/custom",
+    response_model=GraylogPostResponse,
+)
+async def create_custom_monitoring_alert(
+    monitoring_alert: GraylogPostRequest,
+    session: AsyncSession = Depends(get_db),
+) -> GraylogPostResponse:
+    """
+    Create a new monitoring alert. This receives the alert from Graylog, verifies the customer, and forwards it for analysis.
+
+    Args:
+        monitoring_alert (GraylogPostRequest): The monitoring alert details as posted by Graylog.
+        session (AsyncSession, optional): The database session. Defaults to Depends(get_db).
+
+    Returns:
+        GraylogPostResponse: The response indicating whether the alert was processed successfully.
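+
+    A minimal sketch of the webhook body this endpoint expects (values are
+    illustrative; the shape follows GraylogPostRequest, and event.fields must
+    include CUSTOMER_CODE):
+
+        {
+            "event_definition_id": "abc123",
+            "event": {
+                "fields": {"CUSTOMER_CODE": "00002", "source": "Wazuh"},
+                ...
+            }
+        }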
+ """ + logger.info(f"Creating monitoring alert: {monitoring_alert}") + logger.info(f"Found index name {monitoring_alert.event.alert_index}") + + for field in monitoring_alert.event.fields: + if field == "CUSTOMER_CODE": + customer_meta = await session.execute( + select(CustomersMeta).where( + CustomersMeta.customer_code == monitoring_alert.event.fields[field], + ), + ) + customer_meta = customer_meta.scalars().first() + + if not customer_meta: + logger.info(f"Getting customer meta for customer_meta_office365_organization_id: {monitoring_alert.event.fields[field]}") + customer_meta = await session.execute( + select(CustomersMeta).where( + CustomersMeta.customer_meta_office365_organization_id == monitoring_alert.event.fields[field], + ), + ) + customer_meta = customer_meta.scalars().first() + + if not customer_meta: + raise HTTPException(status_code=404, detail="Customer not found") + + await analyze_custom_alert(monitoring_alert, session) + + return GraylogPostResponse( + success=True, + message="Monitoring alert created successfully", + ) + + @monitoring_alerts_router.post( "/run_analysis/wazuh", response_model=AlertAnalysisResponse, diff --git a/backend/app/integrations/monitoring_alert/routes/provision.py b/backend/app/integrations/monitoring_alert/routes/provision.py index 53f6bb5f..8a4e73b1 100644 --- a/backend/app/integrations/monitoring_alert/routes/provision.py +++ b/backend/app/integrations/monitoring_alert/routes/provision.py @@ -1,19 +1,30 @@ +from typing import List + from fastapi import APIRouter +from fastapi import Depends from fastapi import HTTPException from loguru import logger +from sqlalchemy.ext.asyncio import AsyncSession from app.connectors.graylog.routes.events import get_all_event_definitions from app.connectors.graylog.schema.events import GraylogEventDefinitionsResponse +from app.connectors.graylog.services.streams import get_streams +from app.db.db_session import get_db +from app.integrations.monitoring_alert.routes.monitoring_alert import get_customer_meta from app.integrations.monitoring_alert.schema.provision import AvailableMonitoringAlerts from app.integrations.monitoring_alert.schema.provision import ( AvailableMonitoringAlertsResponse, ) +from app.integrations.monitoring_alert.schema.provision import ( + CustomMonitoringAlertProvisionModel, +) from app.integrations.monitoring_alert.schema.provision import ( ProvisionMonitoringAlertRequest, ) from app.integrations.monitoring_alert.schema.provision import ( ProvisionWazuhMonitoringAlertResponse, ) +from app.integrations.monitoring_alert.services.provision import provision_custom_alert from app.integrations.monitoring_alert.services.provision import ( provision_office365_exchange_online_alert, ) @@ -34,6 +45,23 @@ monitoring_alerts_provision_router = APIRouter() +async def return_stream_ids(stream_names: List[str]) -> List[str]: + """ + Return the stream IDs for the given stream names. + + Args: + stream_names (List[str]): A list of stream names. + + Returns: + List[str]: A list of stream IDs. 
+ """ + all_streams_response = await get_streams() + all_streams = all_streams_response.streams + stream_ids = [stream.id for stream in all_streams if stream.title in stream_names] + logger.info(f"Stream IDs collected: {stream_ids}") + return stream_ids + + # Define your provision functions async def invoke_provision_wazuh_monitoring_alert( request: ProvisionMonitoringAlertRequest, @@ -91,12 +119,20 @@ async def invoke_provision_office365_threat_intel_alert( ) +async def invoke_provision_custom_monitoring_alert( + request: CustomMonitoringAlertProvisionModel, +): + # Provision the custom monitoring alert + await provision_custom_alert(request) + + # Create a dictionary that maps alert names to provision functions PROVISION_FUNCTIONS = { "WAZUH_SYSLOG_LEVEL_ALERT": invoke_provision_wazuh_monitoring_alert, "SURICATA_ALERT_SEVERITY_1": invoke_provision_suricata_monitoring_alert, "OFFICE365_EXCHANGE_ONLINE": invoke_provision_office365_exchange_online_alert, "OFFICE365_THREAT_INTEL": invoke_provision_office365_threat_intel_alert, + "CUSTOM": invoke_provision_custom_monitoring_alert, # Add more alert names and functions as needed } @@ -173,6 +209,37 @@ async def provision_monitoring_alert_route( return ProvisionWazuhMonitoringAlertResponse(success=True, message=f"Monitoring alert {request.alert_name} provisioned successfully.") +@monitoring_alerts_provision_router.post( + "/provision/custom", + response_model=ProvisionWazuhMonitoringAlertResponse, + description="Provisions custom monitoring alerts.", +) +async def provision_custom_monitoring_alert_route( + request: CustomMonitoringAlertProvisionModel, + session: AsyncSession = Depends(get_db), +) -> ProvisionWazuhMonitoringAlertResponse: + await check_if_event_definition_exists(request.alert_name.replace("_", " ")) + customer_code = next((field.value for field in request.custom_fields if field.name == "CUSTOMER_CODE"), None) + await get_customer_meta(customer_code=customer_code, session=session) + + # Look up the provision function based on request.alert_name + provision_function = PROVISION_FUNCTIONS.get("CUSTOM") + + if provision_function is None: + raise HTTPException( + status_code=400, + detail=f"No provision function found for alert name {request.alert_name}", + ) + + stream_ids = await return_stream_ids(request.streams) + request.streams = stream_ids + + # Invoke the provision function + await provision_function(request) + + return ProvisionWazuhMonitoringAlertResponse(success=True, message=f"Monitoring alert {request.alert_name} provisioned successfully.") + + @monitoring_alerts_provision_router.post( "/provision/testing", response_model=ProvisionWazuhMonitoringAlertResponse, diff --git a/backend/app/integrations/monitoring_alert/schema/monitoring_alert.py b/backend/app/integrations/monitoring_alert/schema/monitoring_alert.py index 54552937..06e194fa 100644 --- a/backend/app/integrations/monitoring_alert/schema/monitoring_alert.py +++ b/backend/app/integrations/monitoring_alert/schema/monitoring_alert.py @@ -4,9 +4,11 @@ from typing import List from typing import Optional +from fastapi import HTTPException from pydantic import BaseModel from pydantic import Extra from pydantic import Field +from pydantic import validator from app.integrations.alert_creation.general.schema.alert import IrisAsset from app.integrations.alert_creation.general.schema.alert import IrisIoc @@ -110,7 +112,8 @@ class GraylogEvent(BaseModel): description="Indicates if the event is an alert", example=True, ) - fields: GraylogEventFields = Field(..., 
description="Custom fields for the event") + # fields: GraylogEventFields = Field(..., description="Custom fields for the event") + fields: Dict[str, Any] = Field(..., description="Custom fields for the event") group_by_fields: Dict[str, Any] = Field( ..., description="Fields used to group events", @@ -121,6 +124,19 @@ class GraylogEvent(BaseModel): def alert_index(self) -> str: return self.origin_context.split(":")[4] + @property + def alert_id(self) -> str: + return self.origin_context.split(":")[5] + + @validator("fields") + def check_customer_code(cls, fields): + if "CUSTOMER_CODE" not in fields: + raise HTTPException( + status_code=400, + detail="CUSTOMER_CODE is required in the fields", + ) + return fields + class GraylogPostRequest(BaseModel): event_definition_id: str = Field( @@ -346,6 +362,177 @@ def to_dict(self): return self.dict(exclude_none=True) +########### ! CUSTOM ALERTS SCHEMA ! ########### +class CustomSourceModel(BaseModel): + timestamp: str = Field(..., description="The timestamp of the alert.") + timestamp_utc: Optional[str] = Field( + ..., + description="The UTC timestamp of the alert.", + ) + time_field: Optional[str] = Field( + "timestamp", + description="The timefield of the alert to be used when creating the IRIS alert.", + ) + date: Optional[float] = Field( + None, + description="Date of the alert in Unix timestamp", + ) + alert_metadata_tag: Optional[str] = Field( + None, + description="Metadata tag for the alert", + ) + alert_gid: Optional[int] = Field(None, description="Alert group ID") + + class Config: + allow_population_by_field_name = True + extra = Extra.allow + + def to_dict(self): + return self.dict(exclude_none=True) + + +class CustomAlertModel(BaseModel): + _index: str + _id: str + _version: int + _source: CustomSourceModel + asset_type_id: Optional[int] = Field( + None, + description="The asset type id of the alert which is needed for when we add the asset to IRIS.", + ) + ioc_value: Optional[str] = Field( + None, + description="The IoC value of the alert which is needed for when we add the IoC to IRIS.", + ) + ioc_type: Optional[str] = Field( + None, + description="The IoC type of the alert which is needed for when we add the IoC to IRIS.", + ) + + class Config: + extra = Extra.allow + + def to_dict(self): + return self.dict(exclude_none=True) + + +########### ! Create Custom Alerts In IRIS Schemas ! 
########### +class CustomIrisAsset(BaseModel): + asset_name: Optional[str] = Field( + "Asset Does Not Apply to Custom Alerts", + description="Name of the asset", + example="Server01", + ) + asset_ip: Optional[str] = Field( + "Asset Does Not Apply to Custom Alerts", + description="IP address of the asset", + example="192.168.1.1", + ) + asset_description: Optional[str] = Field( + "Asset Does Not Apply to Custom Alerts", + description="Description of the asset", + example="Windows Server", + ) + asset_type_id: Optional[int] = Field( + 9, + description="Type ID of the asset", + example=1, + ) + + def to_dict(self): + return self.dict(exclude_none=True) + + +class CustomIrisIoc(BaseModel): + ioc_value: str = Field( + ..., + description="Value of the IoC", + example="www.google.com", + ) + ioc_description: str = Field( + ..., + description="Description of the IoC", + example="Google", + ) + ioc_tlp_id: int = Field(1, description="TLP ID of the IoC", example=1) + ioc_type_id: int = Field(20, description="Type ID of the IoC", example=20) + + +class CustomIrisAlertContext(Dict[str, Any]): + _source: CustomSourceModel + alert_id: str = Field(..., description="ID of the alert", example="123") + alert_name: str = Field( + ..., + description="Name of the alert", + example="Intrusion Detected", + ) + customer_iris_id: Optional[int] = Field( + None, + description="IRIS ID of the customer", + ) + customer_name: Optional[str] = Field( + None, + description="Name of the customer", + ) + customer_cases_index: Optional[str] = Field( + None, + description="IRIS case index name in the Wazuh-Indexer", + ) + time_field: Optional[str] = Field( + "timestamp_utc", + description="The timefield of the alert to be used when creating the IRIS alert.", + ) + + def to_dict(self): + return self.dict(exclude_none=True) + + +class CustomIrisAlertPayload(BaseModel): + alert_title: str = Field( + ..., + description="Title of the alert", + example="Intrusion Detected", + ) + alert_description: str = Field( + ..., + description="Description of the alert", + example="Intrusion Detected by Firewall", + ) + alert_source: str = Field(..., description="Source of the alert", example="Suricata") + assets: List[CustomIrisAsset] = Field(..., description="List of affected assets") + alert_status_id: int = Field(..., description="Status ID of the alert", example=3) + alert_severity_id: int = Field( + ..., + description="Severity ID of the alert", + example=5, + ) + alert_customer_id: int = Field( + ..., + description="Customer ID related to the alert", + example=1, + ) + alert_source_content: Dict[str, Any] = Field( + ..., + description="Original content from the alert source", + ) + alert_context: CustomIrisAlertContext = Field( + ..., + description="Contextual information about the alert", + ) + alert_iocs: Optional[List[IrisIoc]] = Field( + None, + description="List of IoCs related to the alert", + ) + alert_source_event_time: str = Field( + ..., + description="Timestamp of the alert", + example="2021-01-01T00:00:00.000Z", + ) + + def to_dict(self): + return self.dict(exclude_none=True) + + ########### ! SURICATA ALERTS SCHEMA ! 
########### class SuricataSourceModel(BaseModel): alert_signature: str = Field(..., description="Signature of the alert") diff --git a/backend/app/integrations/monitoring_alert/schema/provision.py b/backend/app/integrations/monitoring_alert/schema/provision.py index 6672e728..2a36a5bf 100644 --- a/backend/app/integrations/monitoring_alert/schema/provision.py +++ b/backend/app/integrations/monitoring_alert/schema/provision.py @@ -6,6 +6,7 @@ from fastapi import HTTPException from pydantic import BaseModel from pydantic import Field +from pydantic import root_validator from pydantic import validator @@ -182,3 +183,87 @@ class GraylogAlertProvisionModel(BaseModel): notification_settings: GraylogAlertProvisionNotificationSettings notifications: List[GraylogAlertProvisionNotification] alert: bool + + +class AlertPriority(Enum): + LOW = 1 + NORMAL = 2 + HIGH = 3 + + +class CustomFields(BaseModel): + name: str + value: str + + @validator("name") + def replace_spaces_with_underscores(cls, v): + return v.replace(" ", "_") + + +class CustomMonitoringAlertProvisionModel(BaseModel): + alert_name: str = Field( + ..., + description="The name of the alert to provision.", + example="WAZUH_SYSLOG_LEVEL_ALERT", + ) + alert_description: str = Field( + ..., + description=( + "The description of the alert to provision. This alert monitors the " + "SYSLOG_LEVEL field in the Wazuh logs. When the level is ALERT, it " + "triggers an alert that is created within DFIR-IRIS. Ensure that you " + "have a pipeline rule that sets the SYSLOG_LEVEL field to ALERT when " + "the Wazuh rule level is greater than 11." + ), + example=( + "This alert monitors the SYSLOG_LEVEL field in the Wazuh logs. When " + "the level is ALERT, it triggers an alert that is created within " + "DFIR-IRIS. Ensure that you have a pipeline rule that sets the " + "SYSLOG_LEVEL field to ALERT when the Wazuh rule level is greater than 11." 
+    ),
+    alert_priority: AlertPriority = Field(
+        ...,
+        description="The priority of the alert to provision.",
+        example=2,
+    )
+    search_query: str = Field(
+        ...,
+        description="The search query to use for the alert.",
+        example="syslog_type:wazuh AND syslog_level:alert",
+    )
+    streams: Optional[List[str]] = Field(
+        [],
+        description="The streams to use for the alert.",
+        example=["5f3e4c3b3f37b70001f3d7b3"],
+    )
+    custom_fields: List[CustomFields] = Field(
+        ...,
+        description="The custom fields to use for the alert.",
+        example=[{"name": "source", "value": "Wazuh"}],
+    )
+    search_within_ms: int = Field(
+        ...,
+        description="The time in milliseconds to search within for the alert.",
+        example=300000,
+    )
+    execute_every_ms: int = Field(
+        ...,
+        description="The time in milliseconds to execute the alert search.",
+        example=300000,
+    )
+
+    @root_validator
+    def check_customer_code(cls, values):
+        custom_fields = values.get("custom_fields")
+        if custom_fields is None:
+            raise HTTPException(
+                status_code=400,
+                detail="At least one custom field with name CUSTOMER_CODE is required",
+            )
+        if not any(field.name == "CUSTOMER_CODE" for field in custom_fields):
+            raise HTTPException(
+                status_code=400,
+                detail="At least one custom field with name CUSTOMER_CODE is required",
+            )
+        return values
diff --git a/backend/app/integrations/monitoring_alert/services/custom.py b/backend/app/integrations/monitoring_alert/services/custom.py
new file mode 100644
index 00000000..c582de65
--- /dev/null
+++ b/backend/app/integrations/monitoring_alert/services/custom.py
@@ -0,0 +1,215 @@
+from loguru import logger
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.connectors.dfir_iris.utils.universal import fetch_and_validate_data
+from app.connectors.dfir_iris.utils.universal import initialize_client_and_alert
+from app.connectors.wazuh_indexer.utils.universal import create_wazuh_indexer_client
+from app.integrations.alert_escalation.schema.general_alert import (
+    CreateAlertRequest as AddAlertRequest,
+)
+from app.integrations.alert_escalation.services.general_alert import (
+    add_alert_to_document,
+)
+from app.integrations.monitoring_alert.schema.monitoring_alert import (
+    AlertAnalysisResponse,
+)
+from app.integrations.monitoring_alert.schema.monitoring_alert import CustomAlertModel
+from app.integrations.monitoring_alert.schema.monitoring_alert import (
+    CustomIrisAlertContext,
+)
+from app.integrations.monitoring_alert.schema.monitoring_alert import (
+    CustomIrisAlertPayload,
+)
+from app.integrations.monitoring_alert.schema.monitoring_alert import GraylogPostRequest
+from app.utils import get_customer_alert_settings
+
+
+async def fetch_wazuh_indexer_details(alert_id: str, index: str) -> CustomAlertModel:
+    """
+    Fetch the Custom alert details from the Wazuh-Indexer.
+
+    Args:
+        alert_id (str): The alert ID.
+        index (str): The index.
+
+    Returns:
+        CustomAlertModel: The alert document retrieved from the Wazuh-Indexer.
+    """
+    logger.info(
+        f"Fetching Custom alert details for alert_id: {alert_id} and index: {index}",
+    )
+
+    es_client = await create_wazuh_indexer_client("Wazuh-Indexer")
+    response = es_client.get(index=index, id=alert_id)
+
+    return CustomAlertModel(**response)
+
+
+async def fetch_alert_details(alert: GraylogPostRequest) -> CustomAlertModel:
+    """Fetch the full alert document referenced by the Graylog webhook payload."""
+    logger.info(f"Analyzing custom alert: {alert.event.alert_id}")
+    alert_details = await fetch_wazuh_indexer_details(alert_id=alert.event.alert_id, index=alert.event.alert_index)
+    logger.info(f"Alert details: {alert_details}")
+    return alert_details
+
+
+async def build_alert_context_payload(
+    custom_details: GraylogPostRequest,
+    session: AsyncSession,
+) -> CustomIrisAlertContext:
+    """
+    Builds the payload for the alert context.
+
+    Args:
+        custom_details (GraylogPostRequest): The Graylog webhook payload carrying the custom fields.
+        session (AsyncSession): The async session.
+
+    Returns:
+        CustomIrisAlertContext: The built alert context payload.
+    """
+    logger.info(f"Building alert context payload for alert with custom details: {custom_details.event.fields}")
+    return CustomIrisAlertContext(
+        customer_iris_id=(
+            await get_customer_alert_settings(
+                customer_code=custom_details.event.fields["CUSTOMER_CODE"],
+                session=session,
+            )
+        ).iris_customer_id,
+        customer_name=(
+            await get_customer_alert_settings(
+                customer_code=custom_details.event.fields["CUSTOMER_CODE"],
+                session=session,
+            )
+        ).customer_name,
+        customer_cases_index=(
+            await get_customer_alert_settings(
+                customer_code=custom_details.event.fields["CUSTOMER_CODE"],
+                session=session,
+            )
+        ).iris_index,
+        alert_id=custom_details.event.alert_id,
+        alert_name=custom_details.event.message,
+        **custom_details.event.fields,
+    )
+
+
+async def build_alert_payload(
+    alert_details: CustomAlertModel,
+    custom_details: GraylogPostRequest,
+    session: AsyncSession,
+) -> CustomIrisAlertPayload:
+    """
+    Builds the payload for an alert based on the provided alert details, custom fields, and session.
+
+    Args:
+        alert_details (CustomAlertModel): The details of the alert.
+        custom_details (GraylogPostRequest): The Graylog webhook payload carrying the custom fields.
+        session (AsyncSession): The session used for database operations.
+
+    Returns:
+        CustomIrisAlertPayload: The built alert payload.
+    """
+    logger.info(f"Building alert payload for alert: {alert_details}")
+
+    context_payload = await build_alert_context_payload(
+        custom_details=custom_details,
+        session=session,
+    )
+
+    logger.info(f"Alert has context: {context_payload}")
+    return CustomIrisAlertPayload(
+        alert_title=custom_details.event.message,
+        alert_description=custom_details.event.message,
+        alert_source="COPILOT Custom ANALYSIS",
+        assets=[],
+        alert_status_id=3,
+        alert_severity_id=5,
+        alert_customer_id=(
+            await get_customer_alert_settings(
+                customer_code=custom_details.event.fields["CUSTOMER_CODE"],
+                session=session,
+            )
+        ).iris_customer_id,
+        alert_source_content=alert_details.to_dict(),
+        alert_context=context_payload,
+        alert_source_event_time=custom_details.event.timestamp,
+    )
+
+
+async def create_and_update_alert_in_iris(
+    alert_details: CustomAlertModel,
+    custom_details: GraylogPostRequest,
+    session: AsyncSession,
+) -> int:
+    """
+    Creates the alert, then updates the alert with the asset and IoC if available.
+
+    Args:
+        alert_details (CustomAlertModel): The details of the alert.
+        custom_details (GraylogPostRequest): The original Graylog webhook payload.
+        session (AsyncSession): The async session object.
+ + Returns: + int: The ID of the created alert in IRIS. + """ + logger.info(f"Received custom fields: {custom_details}") + + iris_alert_payload = await build_alert_payload( + alert_details=alert_details, + custom_details=custom_details, + session=session, + ) + + client, alert_client = await initialize_client_and_alert("DFIR-IRIS") + result = await fetch_and_validate_data( + client, + alert_client.add_alert, + iris_alert_payload.to_dict(), + ) + alert_id = result["data"]["alert_id"] + logger.info(f"Successfully created alert {alert_id} in IRIS.") + return alert_id + + +async def analyze_custom_alert( + monitoring_alerts: GraylogPostRequest, + session: AsyncSession, +) -> AlertAnalysisResponse: + """ + Analyze the given custom alerts. These are received straight from Graylog. + + 1. For each alert, extract the metadata from the Wazuh-Indexer. + 2. Build the alert context payload which is based on the custom fields set within the Graylog alert. + + Args: + monitoring_alerts (MonitoringAlerts): The monitoring alert details. + session (AsyncSession): The database session. + + Returns: + AlertAnalysisResponse: The analysis response. + """ + logger.info( + f"Analyzing custom alerts: alert_index: {monitoring_alerts.event.alert_index}, alert_id: {monitoring_alerts.event.alert_id}", + ) + alert_details = await fetch_alert_details(monitoring_alerts) + + iris_alert_id = await create_and_update_alert_in_iris( + alert_details, + custom_details=monitoring_alerts, + session=session, + ) + es_client = await create_wazuh_indexer_client("Wazuh-Indexer") + await add_alert_to_document( + es_client=es_client, + alert=AddAlertRequest( + alert_id=monitoring_alerts.event.alert_id, + index_name=monitoring_alerts.event.alert_index, + ), + soc_alert_id=iris_alert_id, + session=session, + ) + + return AlertAnalysisResponse( + success=True, + message="Custom alerts analyzed successfully", + ) diff --git a/backend/app/integrations/monitoring_alert/services/provision.py b/backend/app/integrations/monitoring_alert/services/provision.py index 93014f1d..b8b6fc9b 100644 --- a/backend/app/integrations/monitoring_alert/services/provision.py +++ b/backend/app/integrations/monitoring_alert/services/provision.py @@ -11,6 +11,9 @@ from app.connectors.graylog.services.collector import get_url_whitelist_entries from app.connectors.graylog.utils.universal import send_post_request from app.connectors.graylog.utils.universal import send_put_request +from app.integrations.monitoring_alert.schema.provision import ( + CustomMonitoringAlertProvisionModel, +) from app.integrations.monitoring_alert.schema.provision import ( GraylogAlertProvisionConfig, ) @@ -534,7 +537,7 @@ async def provision_office365_exchange_online_alert( notification_id = await get_notification_id("SEND TO COPILOT") await provision_alert_definition( GraylogAlertProvisionModel( - title="OFFICE365 EXCHANGE ONLINE ALERT", + title="OFFICE365 EXCHANGE ONLINE", description="Alert on Office365 Exchange Online alerts", priority=2, config=GraylogAlertProvisionConfig( @@ -653,7 +656,7 @@ async def provision_office365_threat_intel_alert( notification_id = await get_notification_id("SEND TO COPILOT") await provision_alert_definition( GraylogAlertProvisionModel( - title="OFFICE365 THREAT INTEL ALERT", + title="OFFICE365 THREAT INTEL", description="Alert on Office365 Threat Intel alerts", priority=2, config=GraylogAlertProvisionConfig( @@ -723,3 +726,101 @@ async def provision_office365_threat_intel_alert( success=True, message="Office365 Threat Intel monitoring alerts provisioned 
successfully", ) + + +async def provision_custom_alert(request: CustomMonitoringAlertProvisionModel) -> ProvisionWazuhMonitoringAlertResponse: + """ + Provisions custom monitoring alerts. + + Returns: + ProvisionWazuhMonitoringAlertResponse: The response indicating the success of provisioning the monitoring alerts. + """ + # + logger.info( + f"Invoking provision_custom_alert with request: {request.dict()}", + ) + notification_exists = await check_if_event_notification_exists("SEND TO COPILOT - CUSTOM") + if not notification_exists: + # ! Unfortunately Graylog does not support disabling SSL verification when sending webhooks + # ! Therefore, we need to send to API port of Copilot over HTTP + url_whitelisted = await check_if_url_whitelist_entry_exists( + f"http://{os.getenv('ALERT_FORWARDING_IP')}:5000/api/monitoring_alert/custom", + ) + if not url_whitelisted: + logger.info("Provisioning URL Whitelist") + whitelisted_urls = await build_url_whitelisted_entries( + whitelist_url_model=GraylogUrlWhitelistEntryConfig( + id=await generate_random_id(), + value=f"http://{os.getenv('ALERT_FORWARDING_IP')}:5000/api/monitoring_alert/custom", + title="SEND TO COPILOT - CUSTOM", + type="literal", + ), + ) + await provision_webhook_url_whitelist(whitelisted_urls) + + logger.info("Provisioning SEND TO COPILOT - CUSTOM Webhook") + notification_id = await provision_webhook( + GraylogAlertWebhookNotificationModel( + title="SEND TO COPILOT - CUSTOM", + description="Send alert to Copilot for custom alert", + config={ + "url": f"http://{os.getenv('ALERT_FORWARDING_IP')}:5000/api/monitoring_alert/custom", + "type": "http-notification-v1", + }, + ), + ) + logger.info(f"SEND TO COPILOT - CUSTOM Webhook provisioned with id: {notification_id}") + notification_id = await get_notification_id("SEND TO COPILOT - CUSTOM") + await provision_alert_definition( + GraylogAlertProvisionModel( + title=request.alert_name, + description=request.alert_description, + priority=request.alert_priority.value, + config=GraylogAlertProvisionConfig( + type="aggregation-v1", + query=f"{request.search_query}", + query_parameters=[], + streams=request.streams, + group_by=[], + series=[], + conditions={ + "expression": None, + }, + search_within_ms=await convert_seconds_to_milliseconds( + request.search_within_ms, + ), + execute_every_ms=await convert_seconds_to_milliseconds( + request.execute_every_ms, + ), + ), + field_spec={ + custom_field.name: GraylogAlertProvisionFieldSpecItem( + data_type="string", + providers=[ + GraylogAlertProvisionProvider( + type="template-v1", + template=f"${{source.{custom_field.value}}}", + require_values=True, + ), + ], + ) + for custom_field in request.custom_fields + }, + key_spec=[], + notification_settings=GraylogAlertProvisionNotificationSettings( + grace_period_ms=0, + backlog_size=None, + ), + notifications=[ + GraylogAlertProvisionNotification( + notification_id=notification_id, + ), + ], + alert=True, + ), + ) + + return ProvisionWazuhMonitoringAlertResponse( + success=True, + message="Custom monitoring alerts provisioned successfully", + ) diff --git a/backend/app/routers/grafana.py b/backend/app/routers/grafana.py index f9044190..36f20152 100644 --- a/backend/app/routers/grafana.py +++ b/backend/app/routers/grafana.py @@ -1,9 +1,11 @@ from fastapi import APIRouter from app.connectors.grafana.routes.dashboards import grafana_dashboards_router +from app.connectors.grafana.routes.reporting import grafana_reporting_router # Instantiate the APIRouter router = APIRouter() # Include the Shuffle related 
routes router.include_router(grafana_dashboards_router, prefix="/grafana", tags=["Grafana"]) +router.include_router(grafana_reporting_router, prefix="/reporting", tags=["Grafana-Reporting"]) diff --git a/backend/app/routers/huntress.py b/backend/app/routers/huntress.py new file mode 100644 index 00000000..9d1ee33b --- /dev/null +++ b/backend/app/routers/huntress.py @@ -0,0 +1,23 @@ +from fastapi import APIRouter + +from app.integrations.huntress.routes.huntress import integration_huntress_router +from app.integrations.huntress.routes.provision import ( + integration_huntress_provision_scheduler_router, +) + +# Instantiate the APIRouter +router = APIRouter() + +# Include the Huntress APIRouter +router.include_router( + integration_huntress_router, + prefix="/huntress", + tags=["huntress"], +) + +# Include the Huntress Provision APIRouter +router.include_router( + integration_huntress_provision_scheduler_router, + prefix="/huntress", + tags=["huntress"], +) diff --git a/backend/app/schedulers/scheduler.py b/backend/app/schedulers/scheduler.py index 6db9ed42..19931988 100644 --- a/backend/app/schedulers/scheduler.py +++ b/backend/app/schedulers/scheduler.py @@ -7,6 +7,7 @@ from app.schedulers.models.scheduler import CreateSchedulerRequest from app.schedulers.models.scheduler import JobMetadata from app.schedulers.services.agent_sync import agent_sync +from app.schedulers.services.invoke_huntress import invoke_huntress_integration_collect from app.schedulers.services.invoke_mimecast import invoke_mimecast_integration from app.schedulers.services.invoke_mimecast import invoke_mimecast_integration_ttp from app.schedulers.services.invoke_sap_siem import invoke_sap_siem_integration_collect @@ -103,6 +104,7 @@ def get_function_by_name(function_name: str): "invoke_sap_siem_integration_collection": invoke_sap_siem_integration_collect, "invoke_sap_siem_integration_suspicious_logins_analysis": invoke_sap_siem_integration_suspicious_logins_analysis, "invoke_sap_siem_integration_multiple_logins_same_ip_analysis": invoke_sap_siem_integration_multiple_logins_same_ip_analysis, + "invoke_huntress_integration_collection": invoke_huntress_integration_collect, # Add other function mappings here } return function_map.get( diff --git a/backend/app/schedulers/services/invoke_huntress.py b/backend/app/schedulers/services/invoke_huntress.py new file mode 100644 index 00000000..5dfcb3f4 --- /dev/null +++ b/backend/app/schedulers/services/invoke_huntress.py @@ -0,0 +1,54 @@ +from datetime import datetime + +from dotenv import load_dotenv +from loguru import logger +from sqlalchemy import select + +from app.db.db_session import get_db_session +from app.db.db_session import get_sync_db_session +from app.integrations.huntress.routes.huntress import collect_huntress_route +from app.integrations.huntress.schema.huntress import InvokeHuntressRequest +from app.integrations.huntress.schema.huntress import InvokeHuntressResponse +from app.integrations.models.customer_integration_settings import CustomerIntegrations +from app.schedulers.models.scheduler import JobMetadata +from app.schedulers.utils.universal import get_scheduled_job_metadata + +load_dotenv() + + +async def invoke_huntress_integration_collect() -> InvokeHuntressResponse: + """ + Invokes the Huntress integration collection. 
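+
+    This job is registered by the Huntress provision route; a rough sketch of
+    the expected scheduler wiring (names mirror routes/provision.py above):
+
+        await add_scheduler_jobs(
+            CreateSchedulerRequest(
+                function_name="invoke_huntress_integration_collection",
+                time_interval=provision_huntress_request.time_interval,
+                job_id="invoke_huntress_integration_collection",
+            ),
+        )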
+ """ + logger.info("Invoking Huntress integration collection.") + customer_codes = [] + async with get_db_session() as session: + stmt = select(CustomerIntegrations).where( + CustomerIntegrations.integration_service_name == "Huntress", + ) + result = await session.execute(stmt) + customer_codes = [row.customer_code for row in result.scalars()] + logger.info(f"customer_codes: {customer_codes}") + for customer_code in customer_codes: + await collect_huntress_route( + InvokeHuntressRequest( + customer_code=customer_code, + integration_name="Huntress", + time_range=f"{(await get_scheduled_job_metadata('invoke_huntress_integration_collection')).time_interval}m", + ), + session, + ) + # Close the session + await session.close() + with get_sync_db_session() as session: + # Synchronous ORM operations + job_metadata = session.query(JobMetadata).filter_by(job_id="invoke_huntress_integration_collection").one_or_none() + if job_metadata: + job_metadata.last_success = datetime.utcnow() + session.add(job_metadata) + session.commit() + else: + # Handle the case where job_metadata does not exist + print("JobMetadata for 'invoke_huntress_integration_collection' not found.") + + return InvokeHuntressResponse(success=True, message="Huntress integration invoked.") diff --git a/backend/copilot.py b/backend/copilot.py index 6b5073b6..8f92a9d8 100644 --- a/backend/copilot.py +++ b/backend/copilot.py @@ -37,6 +37,7 @@ from app.routers import grafana from app.routers import graylog from app.routers import healthcheck +from app.routers import huntress from app.routers import influxdb from app.routers import integrations from app.routers import logs @@ -121,6 +122,7 @@ api_router.include_router(sap_siem.router) api_router.include_router(stack_provisioning.router) api_router.include_router(active_response.router) +api_router.include_router(huntress.router) # Include the APIRouter in the FastAPI app app.include_router(api_router) diff --git a/frontend/package.json b/frontend/package.json index 8c4f05ae..1721b565 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -39,8 +39,8 @@ "@fontsource/lexend": "^5.0.18", "@fontsource/public-sans": "^5.0.16", "@popperjs/core": "^2.11.8", - "@vueuse/components": "^10.8.0", - "@vueuse/core": "^10.8.0", + "@vueuse/components": "^10.9.0", + "@vueuse/core": "^10.9.0", "apexcharts": "^3.46.0", "bytes": "^3.1.2", "colord": "^2.9.3", @@ -53,13 +53,13 @@ "lodash": "^4.17.21", "markdown-it-highlightjs": "^4.0.1", "mitt": "^3.0.1", - "naive-ui": "^2.38.0", + "naive-ui": "^2.38.1", "password-validator": "^5.3.0", "pinia": "^2.1.7", "pinia-plugin-persistedstate": "^3.2.1", "secure-ls": "^1.2.6", "validator": "^13.11.0", - "vue": "^3.4.19", + "vue": "^3.4.20", "vue-advanced-cropper": "^2.8.8", "vue-highlight-words": "^3.0.1", "vue-i18n": "^9.9.1", @@ -91,7 +91,7 @@ "@vue/tsconfig": "^0.5.1", "autoprefixer": "^10.4.17", "cypress": "^13.6.6", - "eslint": "^8.56.0", + "eslint": "^8.57.0", "eslint-plugin-cypress": "^2.15.1", "eslint-plugin-vue": "^9.22.0", "fs-extra": "^11.2.0", @@ -111,7 +111,7 @@ "typescript": "~5.3.3", "unplugin-vue-components": "^0.26.0", "vite": "^5.1.4", - "vite-bundle-analyzer": "^0.8.0", + "vite-bundle-analyzer": "^0.8.1", "vite-bundle-visualizer": "^1.0.1", "vite-svg-loader": "^5.1.0", "vitest": "^1.3.1", diff --git a/frontend/src/api/monitoringAlerts.ts b/frontend/src/api/monitoringAlerts.ts index aae31ca7..9e34b64f 100644 --- a/frontend/src/api/monitoringAlerts.ts +++ b/frontend/src/api/monitoringAlerts.ts @@ -7,6 +7,26 @@ export interface 
ProvisionsMonitoringAlertParams { executeEvery: number } +export enum CustomProvisionPriority { + "LOW" = 1, + "MEDIUM" = 2, + "HIGH" = 3 +} + +export interface CustomProvisionPayload { + alert_name: string + alert_description: string + alert_priority: CustomProvisionPriority + search_query: string + streams: { [key: string]: any } + custom_fields: { + name: string + value: string + }[] + search_within_ms: number + execute_every_ms: number +} + export default { getAvailableMonitoringAlerts() { return HttpClient.get( @@ -19,5 +39,8 @@ export default { execute_every: params.executeEvery, alert_name: alertName }) + }, + customProvision(payload: CustomProvisionPayload) { + return HttpClient.post(`/monitoring_alert/provision/custom`, payload) } } diff --git a/frontend/src/components/activeResponse/ActiveResponseInvokeForm.vue b/frontend/src/components/activeResponse/ActiveResponseInvokeForm.vue index 6ccdfafd..f835012d 100644 --- a/frontend/src/components/activeResponse/ActiveResponseInvokeForm.vue +++ b/frontend/src/components/activeResponse/ActiveResponseInvokeForm.vue @@ -119,6 +119,13 @@ function getClearForm(): InvokeForm { } function reset() { + if (!loading.value) { + resetForm() + formRef.value?.restoreValidation() + } +} + +function resetForm() { form.value = getClearForm() } @@ -154,7 +161,7 @@ function submit() { if (res.data.success) { message.success(res.data?.message || "Active Response invoked successfully") emit("submitted") - reset() + resetForm() } else { message.warning(res.data?.message || "An error occurred. Please try again later.") } diff --git a/frontend/src/components/customers/CustomerMetaForm.vue b/frontend/src/components/customers/CustomerMetaForm.vue index f46c3f9d..c012591f 100644 --- a/frontend/src/components/customers/CustomerMetaForm.vue +++ b/frontend/src/components/customers/CustomerMetaForm.vue @@ -254,6 +254,13 @@ function getPayload(meta: CustomerMetaExt): CustomerMeta { } function reset() { + if (!loading.value) { + resetForm() + formRef.value?.restoreValidation() + } +} + +function resetForm() { form.value = getClearForm() } @@ -267,7 +274,7 @@ function submit() { if (res.data.success) { emit("submitted", res.data.customer_meta) if (resetOnSubmit.value) { - reset() + resetForm() } } else { message.warning(res.data?.message || "An error occurred. 
Please try again later.") diff --git a/frontend/src/components/customers/provision/CustomerDefaultSettingsButton.vue b/frontend/src/components/customers/provision/CustomerDefaultSettingsButton.vue index 87f66d0e..d45d496b 100644 --- a/frontend/src/components/customers/provision/CustomerDefaultSettingsButton.vue +++ b/frontend/src/components/customers/provision/CustomerDefaultSettingsButton.vue @@ -15,7 +15,7 @@ :bordered="false" segmented > - + @@ -23,7 +23,7 @@ import { ref, watch } from "vue" import { NButton, NModal } from "naive-ui" import Icon from "@/components/common/Icon.vue" -import CustomerDefaultSettingForm from "./CustomerDefaultSettingForm.vue" +import CustomerDefaultSettingsForm from "./CustomerDefaultSettingsForm.vue" const SettingsIcon = "carbon:settings-edit" const settingsFormCTX = ref<{ load: () => void } | null>(null) diff --git a/frontend/src/components/customers/provision/CustomerDefaultSettingForm.vue b/frontend/src/components/customers/provision/CustomerDefaultSettingsForm.vue similarity index 96% rename from frontend/src/components/customers/provision/CustomerDefaultSettingForm.vue rename to frontend/src/components/customers/provision/CustomerDefaultSettingsForm.vue index a34a1dd1..391b963a 100644 --- a/frontend/src/components/customers/provision/CustomerDefaultSettingForm.vue +++ b/frontend/src/components/customers/provision/CustomerDefaultSettingsForm.vue @@ -149,10 +149,15 @@ function getClearForm(settings?: Omit function reset() { if (!loading.value) { - form.value = getClearForm() + resetForm() + formRef.value?.restoreValidation() } } +function resetForm() { + form.value = getClearForm() +} + function submit() { submittingDefaultSettings.value = true @@ -169,6 +174,7 @@ function submit() { .then(res => { if (res.data.success) { isNew.value = false + message.success(res.data?.message || "Customer Provisioning Default Settings updated successfully") } else { message.warning(res.data?.message || "An error occurred. Please try again later.") } diff --git a/frontend/src/components/graylog/MonitoringAlerts/CustomAlertButton.vue b/frontend/src/components/graylog/MonitoringAlerts/CustomAlertButton.vue new file mode 100644 index 00000000..a181e09b --- /dev/null +++ b/frontend/src/components/graylog/MonitoringAlerts/CustomAlertButton.vue @@ -0,0 +1,39 @@ + + + diff --git a/frontend/src/components/graylog/MonitoringAlerts/CustomAlertForm.vue b/frontend/src/components/graylog/MonitoringAlerts/CustomAlertForm.vue new file mode 100644 index 00000000..bcee1da4 --- /dev/null +++ b/frontend/src/components/graylog/MonitoringAlerts/CustomAlertForm.vue @@ -0,0 +1,437 @@ + + + + + diff --git a/frontend/src/components/graylog/MonitoringAlerts/List.vue b/frontend/src/components/graylog/MonitoringAlerts/List.vue index b204a206..69c27a25 100644 --- a/frontend/src/components/graylog/MonitoringAlerts/List.vue +++ b/frontend/src/components/graylog/MonitoringAlerts/List.vue @@ -23,6 +23,7 @@ +
{{ asset.asset_name }}
-