diff --git a/README.md b/README.md index 89820c3e..24b7e931 100644 --- a/README.md +++ b/README.md @@ -63,7 +63,7 @@ systemctl restart docker ```bash # Clone the CoPilot repository -wget https://raw.githubusercontent.com/socfortress/CoPilot/v0.0.2/docker-compose.yml +wget https://raw.githubusercontent.com/socfortress/CoPilot/v0.0.3/docker-compose.yml # Edit the docker-compose.yml file to set the server name and/or the services you want to use diff --git a/backend/app/connectors/dfir_iris/utils/universal.py b/backend/app/connectors/dfir_iris/utils/universal.py index 9d4167aa..a290bb9b 100644 --- a/backend/app/connectors/dfir_iris/utils/universal.py +++ b/backend/app/connectors/dfir_iris/utils/universal.py @@ -109,6 +109,7 @@ async def fetch_and_parse_data( session: ClientSession, action: Callable, *args, + **kwargs: Optional[Any], ) -> Dict[str, Union[bool, Optional[Dict]]]: """ Fetches and parses data from DFIR-IRIS using a specified action. @@ -122,8 +123,8 @@ async def fetch_and_parse_data( dict: A dictionary containing the success status and either the fetched data or None if the operation was unsuccessful. """ try: - logger.info(f"Executing {action.__name__}... on args: {args}") - status = action(*args) + logger.info(f"Executing {action.__name__}... on args: {args} and kwargs: {kwargs}") + status = action(*args, **kwargs) assert_api_resp(status, soft_fail=False) data = get_data_from_resp(status) logger.info(f"Successfully executed {action.__name__}") @@ -223,7 +224,7 @@ def handle_error(error_message: str, status_code: int = 500): raise HTTPException(status_code=status_code, detail=error_message) -async def fetch_and_validate_data(client: Any, func: Callable, *args: Any) -> Dict: +async def fetch_and_validate_data(client: Any, func: Callable, *args: Any, **kwargs: Optional[Any]) -> Dict: """ Fetches and validates data using the provided client, function, and arguments. @@ -238,7 +239,7 @@ async def fetch_and_validate_data(client: Any, func: Callable, *args: Any) -> Di Raises: Exception: If the data fetching fails. 
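+
+    Example (illustrative only; assumes an initialized DFIR-IRIS client and a
+    case handler whose method accepts keyword arguments):
+
+        result = await fetch_and_validate_data(client, case.get_case, cid=1)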
""" - result = await fetch_and_parse_data(client, func, *args) + result = await fetch_and_parse_data(client, func, *args, **kwargs) if not result["success"]: handle_error(f"Failed to fetch data: {result['message']}") return result diff --git a/backend/app/connectors/grafana/dashboards/SapSiem/users_auth.json b/backend/app/connectors/grafana/dashboards/SapSiem/users_auth.json new file mode 100644 index 00000000..a42bb6c4 --- /dev/null +++ b/backend/app/connectors/grafana/dashboards/SapSiem/users_auth.json @@ -0,0 +1,2129 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": false, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [ + { + "asDropdown": true, + "icon": "external link", + "includeVars": true, + "keepTime": true, + "tags": ["SAP"], + "targetBlank": true, + "title": "", + "type": "dashboards" + } + ], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "super-light-red", + "value": null + } + ] + }, + "unit": "locale" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 0, + "y": 0 + }, + "id": 43, + "links": [], + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "bucketAggs": [ + { + "field": "event_timestamp", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "query": "_exists_:errCode AND logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "AUTH ERRORS", + "type": "stat" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [] + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 4, + "y": 0 + }, + "id": 53, + "options": { + "legend": { + "displayMode": "table", + "placement": "right", + "showLegend": false + }, + "pieType": "donut", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.1", + "targets": [ + { + "bucketAggs": [ + { + "fake": true, + "field": "errDetails", + "id": "4", + "settings": { + "min_doc_count": 1, + "order": "desc", + "orderBy": "_count", + "size": "10" + }, + "type": "terms" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + 
}, + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "query": "_exists_:errCode AND logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS BY ERROR (Top 10)", + "transformations": [], + "type": "piechart" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "displayName", + "value": "EVENTS" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": -1 + }, + { + "id": "custom.align" + }, + { + "id": "thresholds", + "value": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": null + } + ] + } + } + ] + } + ] + }, + "gridPos": { + "h": 7, + "w": 6, + "x": 8, + "y": 0 + }, + "id": 54, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": ["sum"], + "show": false + }, + "showHeader": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "bucketAggs": [ + { + "fake": true, + "field": "errDetails", + "id": "4", + "settings": { + "min_doc_count": 1, + "order": "desc", + "orderBy": "_count", + "size": "0" + }, + "type": "terms" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "query": "logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS BY ERROR (Top 10)", + "transformations": [ + { + "id": "merge", + "options": { + "reducers": [] + } + }, + { + "id": "organize", + "options": { + "excludeByName": {}, + "indexByName": {}, + "renameByName": { + "Count": "EVENTS", + "data_errDetails": "ERROR", + "data_office_365_Operation": "OPERATION" + } + } + } + ], + "type": "table" + }, + { + "circleMaxSize": 30, + "circleMinSize": 2, + "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "decimals": 0, + "esMetric": "Count", + "gridPos": { + "h": 7, + "w": 10, + "x": 14, + "y": 0 + }, + "hideEmpty": false, + "hideZero": false, + "id": 67, + "initialZoom": 1, + "locationData": "countries", + "mapCenter": "(0°, 0°)", + "mapCenterLatitude": 0, + "mapCenterLongitude": 0, + "maxDataPoints": 1, + "mouseWheelZoom": false, + "showLegend": true, + "stickyLabels": false, + "tableQueryOptions": { + "geohashField": "geohash", + "latitudeField": "latitude", + "longitudeField": "longitude", + "metricField": "metric", + "queryType": "geohash" + }, + "targets": [ + { + "bucketAggs": [ + { + "fake": true, + "field": "ip_country_code", + "id": "3", + "settings": { + "min_doc_count": 1, + "order": "desc", + "orderBy": "_term", + "size": "0" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "field": "select field", + 
"id": "1", + "type": "count" + } + ], + "query": "NOT errCode:0 AND logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "thresholds": "0,10", + "title": "FAILED AUTH EVENTS BY COUNTRY", + "type": "grafana-worldmap-panel", + "unitPlural": "", + "unitSingle": "", + "valueName": "total" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": null + } + ] + }, + "unit": "locale" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 0, + "y": 7 + }, + "id": 61, + "links": [], + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "bucketAggs": [ + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "query": "logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "TOTAL EVENTS", + "type": "stat" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 13, + "w": 20, + "x": 4, + "y": 7 + }, + "id": 66, + "options": { + "legend": { + "calcs": [], + "displayMode": "table", + "placement": "right", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "params_loginID", + "id": "3", + "settings": { + "min_doc_count": "1", + "order": "desc", + "orderBy": "_count", + "size": "10" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "TOP 10 USERS - HISTOGRAM", + "transparent": true, + "type": "timeseries" + }, + { + "datasource": { 
+ "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "blue", + "value": null + } + ] + }, + "unit": "locale" + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 0, + "y": 14 + }, + "id": 18, + "links": [], + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "text": {}, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "bucketAggs": [ + { + "$$hashKey": "object:331", + "field": "timestamp", + "id": "2", + "settings": { + "interval": "365d", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "field": "params_loginID", + "id": "1", + "type": "cardinality" + } + ], + "query": "logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "SAP - TOTAL ACCOUNTS", + "type": "stat" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 12, + "w": 9, + "x": 0, + "y": 20 + }, + "id": 57, + "options": { + "displayMode": "gradient", + "maxVizHeight": 300, + "minVizHeight": 10, + "minVizWidth": 0, + "namePlacement": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": ["sum"], + "fields": "", + "values": false + }, + "showUnfilled": true, + "sizing": "auto", + "valueMode": "color" + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "alias": "", + "bucketAggs": [ + { + "field": "userAgent_raw", + "id": "2", + "settings": { + "min_doc_count": "1", + "order": "desc", + "orderBy": "_count", + "size": "15" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "3", + "settings": { + "interval": "auto", + "min_doc_count": "1", + "timeZone": "utc", + "trimEdges": "0" + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "id": "1", + "type": "count" + } + ], + "query": "logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "USER AGENTS", + "type": "bargauge" + }, + { + "circleMaxSize": 30, + "circleMinSize": 2, + "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "decimals": 0, + "esMetric": "Count", + "gridPos": { + "h": 12, + "w": 7, + "x": 9, + "y": 20 + }, + "hideEmpty": false, + "hideZero": false, + "id": 50, + "initialZoom": 1, + "locationData": "countries", + "mapCenter": "(0°, 0°)", + "mapCenterLatitude": 0, + "mapCenterLongitude": 0, + "maxDataPoints": 1, + "mouseWheelZoom": false, + "showLegend": true, + "stickyLabels": false, + "tableQueryOptions": { + "geohashField": "geohash", + "latitudeField": "latitude", + 
"longitudeField": "longitude", + "metricField": "metric", + "queryType": "geohash" + }, + "targets": [ + { + "bucketAggs": [ + { + "fake": true, + "field": "ip_country_code", + "id": "3", + "settings": { + "min_doc_count": 1, + "order": "desc", + "orderBy": "_term", + "size": "0" + }, + "type": "terms" + }, + { + "field": "timestamp", + "id": "2", + "settings": { + "interval": "auto", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "query": "logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "thresholds": "0,10", + "title": "ALL AUTH EVENTS BY COUNTRY", + "type": "grafana-worldmap-panel", + "unitPlural": "", + "unitSingle": "", + "valueName": "total" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "orange", + "value": null + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Count" + }, + "properties": [ + { + "id": "displayName", + "value": "EVENTS" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": -1 + }, + { + "id": "custom.align" + }, + { + "id": "custom.cellOptions", + "value": { + "type": "color-text" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "AD ACCOUNT" + }, + "properties": [ + { + "id": "custom.width", + "value": 219 + }, + { + "id": "custom.cellOptions", + "value": { + "type": "color-text" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "data_office_365_LogonError" + }, + "properties": [ + { + "id": "custom.width", + "value": 326 + }, + { + "id": "custom.cellOptions", + "value": { + "type": "color-text" + } + }, + { + "id": "links", + "value": [ + { + "targetBlank": true, + "title": "MICROSOFT LOGON ERROR", + "url": "https://docs.microsoft.com/en-us/azure/active-directory/develop/reference-aadsts-error-codes" + } + ] + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "data_office_365_ActorIpAddress_country_code" + }, + "properties": [ + { + "id": "custom.cellOptions", + "value": { + "type": "color-text" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "COUNTRY" + }, + "properties": [ + { + "id": "custom.width", + "value": 86 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "ERROR" + }, + "properties": [ + { + "id": "custom.width", + "value": 399 + } + ] + } + ] + }, + "gridPos": { + "h": 12, + "w": 8, + "x": 16, + "y": 20 + }, + "id": 58, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "fields": "", + "reducer": ["sum"], + "show": false + }, + "showHeader": true, + "sortBy": [] + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "bucketAggs": [ + { + "fake": true, + "field": "params_loginID", + "id": "4", + "settings": { + "min_doc_count": 1, + "order": "desc", + "orderBy": "_count", + "size": "0" + }, + "type": "terms" + }, + { + "field": "errDetails", + "id": "5", + "settings": { + "min_doc_count": "1", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, + { + "field": "ip_country_code", + "id": "6", + 
"settings": { + "min_doc_count": "1", + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + } + ], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "field": "select field", + "id": "1", + "type": "count" + } + ], + "query": "_exists_:errCode AND logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "FAILED AUTHS BY ACCOUNT AND ERROR", + "transformations": [ + { + "id": "merge", + "options": { + "reducers": [] + } + }, + { + "id": "organize", + "options": { + "excludeByName": {}, + "indexByName": { + "Count": 3, + "data_errDetails": 2, + "data_ip_country_code": 1, + "data_userKeyDetails_name": 0 + }, + "renameByName": { + "Count": "EVENTS", + "data_errDetails": "ERROR", + "data_ip_country_code": "COUNTRY", + "data_office_365_ActorIpAddress_country_code": "COUNTRY", + "data_office_365_LogonError": "ERROR", + "data_office_365_Operation": "WORKLOAD", + "data_office_365_UserId": "AD ACCOUNT", + "data_userKeyDetails_name": "ACCT" + } + } + } + ], + "type": "table" + }, + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "custom": { + "align": "auto", + "cellOptions": { + "type": "auto" + }, + "filterable": true, + "inspect": false + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "rule_level" + }, + "properties": [ + { + "id": "displayName", + "value": "RULE LEVEL" + }, + { + "id": "unit", + "value": "short" + }, + { + "id": "decimals", + "value": -1 + }, + { + "id": "custom.cellOptions", + "value": { + "mode": "gradient", + "type": "color-background" + } + }, + { + "id": "custom.align" + }, + { + "id": "thresholds", + "value": { + "mode": "absolute", + "steps": [ + { + "color": "#37872D", + "value": null + }, + { + "color": "red", + "value": 7 + }, + { + "color": "rgba(245, 54, 54, 0.9)", + "value": 12 + } + ] + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Date/Time" + }, + "properties": [ + { + "id": "custom.width", + "value": 242 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "MITRE TACTIC" + }, + "properties": [ + { + "id": "custom.width", + "value": 332 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "IP ADDRESS" + }, + "properties": [ + { + "id": "custom.width", + "value": 163 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "MITRE TECHNIQUE" + }, + "properties": [ + { + "id": "custom.width", + "value": 312 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "EVENT ID" + }, + "properties": [ + { + "id": "links", + "value": [ + { + "targetBlank": true, + "title": "VIEW EVENT DETAILS", + "url": "https://grafana.detacon.cloud/explore?left=%7B%22datasource%22:%22SAP_SIEM%22,%22queries%22:%5B%7B%22refId%22:%22A%22,%22query%22:%22_id:${__value.text}%22,%22alias%22:%22%22,%22metrics%22:%5B%7B%22id%22:%221%22,%22type%22:%22logs%22,%22settings%22:%7B%22limit%22:%22500%22%7D%7D%5D,%22bucketAggs%22:%5B%5D,%22timeField%22:%22timestamp%22%7D%5D,%22range%22:%7B%22from%22:%22now-6h%22,%22to%22:%22now%22%7D%7D" + } + ] + }, + { + "id": "custom.width", + "value": 336 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "RESULT" + }, + "properties": [ + { + "id": 
"custom.width", + "value": 118 + }, + { + "id": "mappings", + "value": [ + { + "options": { + "8": { + "color": "red", + "index": 2 + }, + "Failed": { + "color": "orange", + "index": 1 + }, + "Success": { + "color": "green", + "index": 0 + } + }, + "type": "value" + } + ] + }, + { + "id": "custom.cellOptions", + "value": { + "type": "color-text" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "SRC IP" + }, + "properties": [ + { + "id": "custom.width", + "value": 265 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "ACCOUNT" + }, + "properties": [ + { + "id": "custom.width", + "value": 343 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "EVENT TIME" + }, + "properties": [ + { + "id": "custom.width", + "value": 195 + } + ] + } + ] + }, + "gridPos": { + "h": 16, + "w": 24, + "x": 0, + "y": 32 + }, + "id": 27, + "options": { + "cellHeight": "sm", + "footer": { + "countRows": false, + "enablePagination": true, + "fields": "", + "reducer": ["sum"], + "show": false + }, + "showHeader": true, + "sortBy": [ + { + "desc": true, + "displayName": "RULE LEVEL" + } + ] + }, + "pluginVersion": "10.2.3", + "targets": [ + { + "bucketAggs": [], + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "metrics": [ + { + "id": "1", + "settings": { + "size": "250" + }, + "type": "raw_data" + } + ], + "query": "logSource:$log_source", + "refId": "A", + "timeField": "timestamp" + } + ], + "title": "EVENTS", + "transformations": [ + { + "id": "organize", + "options": { + "excludeByName": { + "@metadata_beat": true, + "@metadata_type": true, + "@metadata_version": true, + "IMPHASH": true, + "MD5": true, + "SHA1": true, + "SHA256": true, + "_id": false, + "_index": true, + "_type": true, + "agent_ephemeral_id": true, + "agent_hostname": true, + "agent_id": true, + "agent_ip": true, + "agent_ip_city_name": true, + "agent_ip_country_code": true, + "agent_ip_geolocation": true, + "agent_labels_customer": true, + "agent_name": true, + "agent_type": true, + "agent_version": true, + "beats_type": true, + "cluster_name": true, + "cluster_node": true, + "collector_node_id": true, + "data_XffFirstIp": true, + "data_XffFirstIp_city_name": true, + "data_XffFirstIp_country_code": true, + "data_XffFirstIp_geolocation": true, + "data_alert_action": true, + "data_alert_category": true, + "data_alert_gid": true, + "data_alert_rev": true, + "data_alert_severity": true, + "data_alert_signature": true, + "data_alert_signature_id": true, + "data_apikey": true, + "data_app_proto": true, + "data_audit_auid": true, + "data_audit_command": true, + "data_audit_euid": true, + "data_audit_exe": true, + "data_audit_gid": true, + "data_audit_id": true, + "data_audit_pid": true, + "data_audit_res": true, + "data_audit_session": true, + "data_audit_type": true, + "data_audit_uid": true, + "data_callID": true, + "data_dest_ip": true, + "data_dest_port": true, + "data_dstuser": true, + "data_endpoint": true, + "data_errCode": true, + "data_event_type": true, + "data_extra_data": true, + "data_file": true, + "data_flow_bytes_toclient": true, + "data_flow_bytes_toserver": true, + "data_flow_id": true, + "data_flow_pkts_toclient": true, + "data_flow_pkts_toserver": true, + "data_flow_start": true, + "data_httpReq_SDK": true, + "data_httpReq_country": true, + "data_http_http_content_type": true, + "data_http_http_port": true, + "data_http_length": true, + "data_http_status": true, + "data_http_url": true, + "data_id": true, + "data_in_iface": true, + 
"data_integration": true, + "data_ip_city_name": true, + "data_ip_geolocation": true, + "data_label": true, + "data_metadata_flowbits": true, + "data_metadata_flowints_http_anomaly_count": true, + "data_metadata_flowints_tcp_retransmission_count": true, + "data_office_365_Actor": true, + "data_office_365_ActorContextId": true, + "data_office_365_ActorIpAddress": false, + "data_office_365_ActorIpAddress_city_name": true, + "data_office_365_ActorIpAddress_country_code": true, + "data_office_365_ActorIpAddress_geolocation": true, + "data_office_365_ApplicationId": true, + "data_office_365_AzureActiveDirectoryEventType": true, + "data_office_365_ClientIP": true, + "data_office_365_ClientIP_city_name": true, + "data_office_365_ClientIP_country_code": true, + "data_office_365_ClientIP_geolocation": true, + "data_office_365_CreationTime": false, + "data_office_365_DeviceProperties": true, + "data_office_365_ErrorNumber": true, + "data_office_365_ExtendedProperties": true, + "data_office_365_Id": true, + "data_office_365_InterSystemsId": true, + "data_office_365_IntraSystemId": true, + "data_office_365_LogonError": true, + "data_office_365_ModifiedProperties": true, + "data_office_365_ObjectId": true, + "data_office_365_Operation": true, + "data_office_365_OrganizationId": true, + "data_office_365_RecordType": true, + "data_office_365_ResultStatus": false, + "data_office_365_Target": true, + "data_office_365_TargetContextId": true, + "data_office_365_UserId": false, + "data_office_365_UserKey": true, + "data_office_365_UserType": true, + "data_office_365_Version": true, + "data_office_365_Workload": true, + "data_osquery_action": true, + "data_osquery_calendarTime": true, + "data_osquery_columns_address": true, + "data_osquery_columns_address_city_name": true, + "data_osquery_columns_address_country_code": true, + "data_osquery_columns_address_geolocation": true, + "data_osquery_columns_cmdline": true, + "data_osquery_columns_cwd": true, + "data_osquery_columns_description": true, + "data_osquery_columns_directory": true, + "data_osquery_columns_disk_bytes_read": true, + "data_osquery_columns_disk_bytes_written": true, + "data_osquery_columns_egid": true, + "data_osquery_columns_euid": true, + "data_osquery_columns_family": true, + "data_osquery_columns_fd": true, + "data_osquery_columns_gid": true, + "data_osquery_columns_gid_signed": true, + "data_osquery_columns_host": true, + "data_osquery_columns_interface": true, + "data_osquery_columns_local_address": true, + "data_osquery_columns_local_address_city_name": true, + "data_osquery_columns_local_address_country_code": true, + "data_osquery_columns_local_address_geolocation": true, + "data_osquery_columns_local_port": true, + "data_osquery_columns_mac": true, + "data_osquery_columns_name": true, + "data_osquery_columns_net_namespace": true, + "data_osquery_columns_nice": true, + "data_osquery_columns_on_disk": true, + "data_osquery_columns_parent": true, + "data_osquery_columns_path": true, + "data_osquery_columns_pgroup": true, + "data_osquery_columns_pid": true, + "data_osquery_columns_port": true, + "data_osquery_columns_protocol": true, + "data_osquery_columns_remote_address": true, + "data_osquery_columns_remote_address_city_name": true, + "data_osquery_columns_remote_address_country_code": true, + "data_osquery_columns_remote_address_geolocation": true, + "data_osquery_columns_remote_port": true, + "data_osquery_columns_resident_size": true, + "data_osquery_columns_root": true, + "data_osquery_columns_sgid": true, + 
"data_osquery_columns_shell": true, + "data_osquery_columns_socket": true, + "data_osquery_columns_start_time": true, + "data_osquery_columns_state": true, + "data_osquery_columns_suid": true, + "data_osquery_columns_system_time": true, + "data_osquery_columns_threads": true, + "data_osquery_columns_time_utc": true, + "data_osquery_columns_total_size": true, + "data_osquery_columns_tty": true, + "data_osquery_columns_type": true, + "data_osquery_columns_uid": true, + "data_osquery_columns_uid_signed": true, + "data_osquery_columns_user": true, + "data_osquery_columns_user_time": true, + "data_osquery_columns_username": true, + "data_osquery_columns_wired_size": true, + "data_osquery_counter": true, + "data_osquery_decorations_host_uuid": true, + "data_osquery_decorations_hostname": true, + "data_osquery_epoch": true, + "data_osquery_hostIdentifier": true, + "data_osquery_name": true, + "data_osquery_numerics": true, + "data_osquery_unixTime": true, + "data_params_apiKey": true, + "data_params_include": true, + "data_params_loginID": true, + "data_params_password": true, + "data_params_secret": true, + "data_params_userKey": true, + "data_proto": true, + "data_response": true, + "data_restrictions_ipBlacklistRestricted": true, + "data_restrictions_ipBlacklistRestrictionEnforced": true, + "data_restrictions_ipWhitelistRestricted": true, + "data_restrictions_ipWhitelistRestrictionEnforced": true, + "data_sca_check_command": true, + "data_sca_check_compliance_cis": true, + "data_sca_check_compliance_cis_csc": true, + "data_sca_check_compliance_gdpr_IV": true, + "data_sca_check_compliance_gpg_13": true, + "data_sca_check_compliance_hipaa": true, + "data_sca_check_compliance_nist_800_53": true, + "data_sca_check_compliance_pci_dss": true, + "data_sca_check_compliance_tsc": true, + "data_sca_check_description": true, + "data_sca_check_id": true, + "data_sca_check_previous_result": true, + "data_sca_check_rationale": true, + "data_sca_check_remediation": true, + "data_sca_check_result": true, + "data_sca_check_title": true, + "data_sca_description": true, + "data_sca_failed": true, + "data_sca_file": true, + "data_sca_invalid": true, + "data_sca_passed": true, + "data_sca_policy": true, + "data_sca_policy_id": true, + "data_sca_scan_id": true, + "data_sca_score": true, + "data_sca_total_checks": true, + "data_sca_type": true, + "data_script": true, + "data_serverIP_city_name": true, + "data_serverIP_country_code": false, + "data_serverIP_geolocation": true, + "data_src_ip": true, + "data_src_ip_city_name": true, + "data_src_ip_country_code": true, + "data_src_ip_geolocation": true, + "data_src_port": true, + "data_srcip": true, + "data_srcip_city_name": true, + "data_srcip_country_code": true, + "data_srcip_geolocation": true, + "data_srcuser": true, + "data_status_code": true, + "data_timestamp": true, + "data_title": true, + "data_tls_session_resumed": true, + "data_tls_version": true, + "data_tx_id": true, + "data_type": true, + "data_uid": true, + "data_userAgent_platform": true, + "data_userAgent_raw": true, + "data_userAgent_version": true, + "data_userKey": true, + "data_userKeyDetails_name": true, + "data_win_eventXML_binaryData": true, + "data_win_eventXML_binaryDataSize": true, + "data_win_eventXML_param1": true, + "data_win_eventdata_authenticationPackageName": true, + "data_win_eventdata_callTrace": true, + "data_win_eventdata_commandLine": true, + "data_win_eventdata_company": true, + "data_win_eventdata_creationUtcTime": true, + "data_win_eventdata_currentDirectory": true, + 
"data_win_eventdata_description": true, + "data_win_eventdata_destinationHostname": true, + "data_win_eventdata_destinationIp": true, + "data_win_eventdata_destinationIp_city_name": true, + "data_win_eventdata_destinationIp_country_code": true, + "data_win_eventdata_destinationIp_geolocation": true, + "data_win_eventdata_destinationIsIpv6": true, + "data_win_eventdata_destinationPort": true, + "data_win_eventdata_destinationPortName": true, + "data_win_eventdata_details": true, + "data_win_eventdata_elevatedToken": true, + "data_win_eventdata_eventType": true, + "data_win_eventdata_fileVersion": true, + "data_win_eventdata_fileVersion_city_name": true, + "data_win_eventdata_fileVersion_country_code": true, + "data_win_eventdata_fileVersion_geolocation": true, + "data_win_eventdata_grantedAccess": true, + "data_win_eventdata_hashes": true, + "data_win_eventdata_image": true, + "data_win_eventdata_imageLoaded": true, + "data_win_eventdata_impersonationLevel": true, + "data_win_eventdata_initiated": true, + "data_win_eventdata_integrityLevel": true, + "data_win_eventdata_ipAddress": true, + "data_win_eventdata_ipPort": true, + "data_win_eventdata_keyLength": true, + "data_win_eventdata_logonGuid": true, + "data_win_eventdata_logonId": true, + "data_win_eventdata_logonProcessName": true, + "data_win_eventdata_logonType": true, + "data_win_eventdata_originalFileName": true, + "data_win_eventdata_param1": true, + "data_win_eventdata_param2": true, + "data_win_eventdata_param3": true, + "data_win_eventdata_param4": true, + "data_win_eventdata_parentCommandLine": true, + "data_win_eventdata_parentImage": true, + "data_win_eventdata_parentProcessGuid": true, + "data_win_eventdata_parentProcessId": true, + "data_win_eventdata_processGuid": true, + "data_win_eventdata_processId": true, + "data_win_eventdata_processName": true, + "data_win_eventdata_product": true, + "data_win_eventdata_protocol": true, + "data_win_eventdata_queryName": true, + "data_win_eventdata_queryResults": true, + "data_win_eventdata_queryStatus": true, + "data_win_eventdata_ruleName": true, + "data_win_eventdata_serviceName": true, + "data_win_eventdata_serviceSid": true, + "data_win_eventdata_signature": true, + "data_win_eventdata_signatureStatus": true, + "data_win_eventdata_signed": true, + "data_win_eventdata_sourceHostname": true, + "data_win_eventdata_sourceImage": true, + "data_win_eventdata_sourceIp": true, + "data_win_eventdata_sourceIp_city_name": true, + "data_win_eventdata_sourceIp_country_code": true, + "data_win_eventdata_sourceIp_geolocation": true, + "data_win_eventdata_sourceIsIpv6": true, + "data_win_eventdata_sourcePort": true, + "data_win_eventdata_sourceProcessGUID": true, + "data_win_eventdata_sourceProcessId": true, + "data_win_eventdata_sourceThreadId": true, + "data_win_eventdata_status": true, + "data_win_eventdata_subjectDomainName": true, + "data_win_eventdata_subjectLogonId": true, + "data_win_eventdata_subjectUserName": true, + "data_win_eventdata_subjectUserSid": true, + "data_win_eventdata_targetDomainName": true, + "data_win_eventdata_targetFilename": true, + "data_win_eventdata_targetImage": true, + "data_win_eventdata_targetLinkedLogonId": true, + "data_win_eventdata_targetLogonId": true, + "data_win_eventdata_targetObject": true, + "data_win_eventdata_targetProcessGUID": true, + "data_win_eventdata_targetProcessId": true, + "data_win_eventdata_targetUserName": true, + "data_win_eventdata_targetUserSid": true, + "data_win_eventdata_terminalSessionId": true, + 
"data_win_eventdata_ticketEncryptionType": true, + "data_win_eventdata_ticketOptions": true, + "data_win_eventdata_user": true, + "data_win_eventdata_utcTime": true, + "data_win_eventdata_virtualAccount": true, + "data_win_system_channel": true, + "data_win_system_computer": true, + "data_win_system_eventID": true, + "data_win_system_eventRecordID": true, + "data_win_system_eventSourceName": true, + "data_win_system_keywords": true, + "data_win_system_level": true, + "data_win_system_message": true, + "data_win_system_opcode": true, + "data_win_system_processID": true, + "data_win_system_providerGuid": true, + "data_win_system_providerName": true, + "data_win_system_severityValue": true, + "data_win_system_systemTime": true, + "data_win_system_task": true, + "data_win_system_threadID": true, + "data_win_system_version": true, + "decoder_name": true, + "decoder_parent": true, + "dns_query": true, + "dns_query_threat_indicated": true, + "dst_ip": true, + "dst_ip_city_name": true, + "dst_ip_country_code": true, + "dst_ip_geolocation": true, + "dst_ip_threat_indicated": true, + "dst_port": true, + "ecs_version": true, + "error": true, + "event_hash": true, + "file_path": true, + "firewall_rule_name": true, + "full_log": false, + "gl2_accounted_message_size": true, + "gl2_message_id": true, + "gl2_processing_error": true, + "gl2_remote_ip": true, + "gl2_remote_port": true, + "gl2_source_collector": true, + "gl2_source_input": true, + "gl2_source_node": true, + "hash_md5": true, + "hash_sha1": true, + "hash_sha256": true, + "highlight": true, + "host_architecture": true, + "host_containerized": true, + "host_hostname": true, + "host_id": true, + "host_ip": true, + "host_mac": true, + "host_name": true, + "host_os_codename": true, + "host_os_kernel": true, + "host_os_name": true, + "host_os_platform": true, + "host_os_version": true, + "hostname": true, + "id": true, + "input_type": true, + "level": true, + "location": true, + "log_file_path": true, + "log_offset": true, + "manager_name": true, + "message": true, + "module": true, + "msg_timestamp": true, + "parent_process_cmd_line": true, + "parent_process_id": true, + "parent_process_image": true, + "pid": true, + "predecoder_hostname": true, + "predecoder_program_name": true, + "predecoder_timestamp": true, + "previous_log": true, + "previous_output": true, + "process_cmd_line": true, + "process_id": true, + "process_image": true, + "process_name": true, + "protocol": true, + "rule_cis": true, + "rule_cis_csc": true, + "rule_firedtimes": true, + "rule_gdpr": true, + "rule_gdpr_IV": true, + "rule_gpg13": true, + "rule_gpg_13": true, + "rule_group1": true, + "rule_groups": true, + "rule_hipaa": true, + "rule_id": true, + "rule_info": true, + "rule_mail": true, + "rule_mitre_id": true, + "rule_mitre_tactic": false, + "rule_nist_800_53": true, + "rule_pci_dss": true, + "rule_tsc": true, + "scanid": true, + "service": true, + "software_package": true, + "software_vendor": true, + "sort": true, + "source": true, + "src_ip": true, + "src_ip_city_name": true, + "src_ip_country_code": true, + "src_ip_geolocation": true, + "src_port": true, + "streams": true, + "syscheck_attrs_after": true, + "syscheck_audit_effective_user_id": true, + "syscheck_audit_effective_user_name": true, + "syscheck_audit_group_id": true, + "syscheck_audit_group_name": true, + "syscheck_audit_login_user_id": true, + "syscheck_audit_login_user_name": true, + "syscheck_audit_process_cwd": true, + "syscheck_audit_process_id": true, + "syscheck_audit_process_name": true, + 
"syscheck_audit_process_parent_cwd": true, + "syscheck_audit_process_parent_name": true, + "syscheck_audit_process_ppid": true, + "syscheck_audit_user_id": true, + "syscheck_audit_user_name": true, + "syscheck_changed_attributes": true, + "syscheck_event": true, + "syscheck_gid_after": true, + "syscheck_gname_after": true, + "syscheck_hard_links": true, + "syscheck_inode_after": true, + "syscheck_inode_before": true, + "syscheck_md5_after": true, + "syscheck_md5_before": true, + "syscheck_mode": true, + "syscheck_mtime_after": true, + "syscheck_mtime_before": true, + "syscheck_path": true, + "syscheck_perm_after": true, + "syscheck_perm_before": true, + "syscheck_sha1_after": true, + "syscheck_sha1_before": true, + "syscheck_sha256_after": true, + "syscheck_sha256_before": true, + "syscheck_size_after": true, + "syscheck_size_before": true, + "syscheck_uid_after": true, + "syscheck_uname_after": true, + "syscheck_win_perm_after": true, + "syscheck_win_perm_after_0_allowed": true, + "syscheck_win_perm_after_0_name": true, + "syscheck_win_perm_after_1_allowed": true, + "syscheck_win_perm_after_1_name": true, + "syscheck_win_perm_after_2_allowed": true, + "syscheck_win_perm_after_2_name": true, + "syscheck_win_perm_after_3_allowed": true, + "syscheck_win_perm_after_3_name": true, + "syslog_customer": true, + "syslog_level": true, + "syslog_tag": true, + "syslog_type": true, + "sysmon_event_description": true, + "threat_ids": true, + "threat_indicated": true, + "threat_names": true, + "time": true, + "timestamp": false, + "timestamp_utc": true, + "true": true, + "user_name": true, + "win_registry_key": true, + "win_system_eventID": true, + "windows_auth_package": true, + "windows_domain": true, + "windows_event_id": true, + "windows_event_severity": true, + "windows_logon_type": true + }, + "indexByName": { + "_id": 2, + "_index": 10, + "_type": 11, + "agent_id": 12, + "agent_name": 5, + "cluster_name": 13, + "cluster_node": 14, + "data_office_365_Actor": 15, + "data_office_365_ActorContextId": 16, + "data_office_365_ActorIpAddress": 4, + "data_office_365_ActorIpAddress_city_name": 17, + "data_office_365_ActorIpAddress_country_code": 18, + "data_office_365_ActorIpAddress_geolocation": 19, + "data_office_365_ApplicationId": 20, + "data_office_365_AzureActiveDirectoryEventType": 21, + "data_office_365_ClientIP": 22, + "data_office_365_ClientIP_city_name": 23, + "data_office_365_ClientIP_country_code": 24, + "data_office_365_ClientIP_geolocation": 25, + "data_office_365_CreationTime": 0, + "data_office_365_DeviceProperties": 26, + "data_office_365_ErrorNumber": 27, + "data_office_365_ExtendedProperties": 28, + "data_office_365_Id": 29, + "data_office_365_InterSystemsId": 30, + "data_office_365_IntraSystemId": 31, + "data_office_365_LogonError": 32, + "data_office_365_ModifiedProperties": 33, + "data_office_365_ObjectId": 34, + "data_office_365_Operation": 42, + "data_office_365_OrganizationId": 35, + "data_office_365_RecordType": 36, + "data_office_365_ResultStatus": 6, + "data_office_365_Target": 37, + "data_office_365_TargetContextId": 38, + "data_office_365_UserId": 3, + "data_office_365_UserKey": 39, + "data_office_365_UserType": 40, + "data_office_365_Version": 41, + "data_office_365_Workload": 66, + "decoder_name": 43, + "gl2_accounted_message_size": 44, + "gl2_message_id": 45, + "gl2_processing_error": 46, + "gl2_remote_ip": 47, + "gl2_remote_port": 48, + "gl2_source_input": 49, + "gl2_source_node": 50, + "highlight": 51, + "id": 52, + "location": 53, + "manager_name": 54, + "message": 55, 
+ "rule_description": 7, + "rule_firedtimes": 56, + "rule_group1": 57, + "rule_groups": 58, + "rule_id": 9, + "rule_level": 8, + "rule_mail": 59, + "sort": 60, + "source": 61, + "streams": 62, + "syslog_level": 63, + "syslog_type": 64, + "timestamp": 1, + "true": 65 + }, + "renameByName": { + "_id": "EVENT ID", + "agent_name": "", + "data_errDetails": "ERROR", + "data_errMessage": "ERROR MSG", + "data_ip": "IP", + "data_ip_country_code": "COUNTRY", + "data_message": "MSG", + "data_office_365_ActorIpAddress": "SRC IP", + "data_office_365_CreationTime": "EVENT TIME", + "data_office_365_DeviceProperties_BrowserType": "BROWSER", + "data_office_365_DeviceProperties_DisplayName": "HOSTNAME", + "data_office_365_DeviceProperties_OS": "OS", + "data_office_365_ExtendedProperties_RequestType": "REQ TYPE", + "data_office_365_ExtendedProperties_ResultStatusDetail": "RESULT", + "data_office_365_ExtendedProperties_UserAgent": "AGENT", + "data_office_365_ExtendedProperties_UserAuthenticationMethod": "AUTH METHOD", + "data_office_365_ResultStatus": "RESULT", + "data_office_365_UserId": "ACCOUNT", + "data_serverIP": "SERVER IP", + "data_serverIP_country_code": "SERVER COUNTRY", + "data_serverIP_geolocation": "", + "data_status_code": "", + "data_userAgent_browser": "BROWSER", + "data_userAgent_os": "OS", + "rule_description": "DESCRIPTION", + "rule_id": "RULE ID", + "rule_level": "", + "rule_mitre_tactic": "MITRE TACTIC", + "rule_mitre_technique": "MITRE TECHNIQUE", + "timestamp": "Date/Time" + } + } + } + ], + "transparent": true, + "type": "table" + } + ], + "refresh": "", + "schemaVersion": 39, + "tags": ["SAP"], + "templating": { + "list": [ + { + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "filters": [], + "hide": 0, + "label": "", + "name": "Filters", + "skipUrlSync": false, + "type": "adhoc" + }, + { + "current": { + "selected": true, + "text": ["All"], + "value": ["$__all"] + }, + "datasource": { + "type": "grafana-opensearch-datasource", + "uid": "replace_datasource_uid" + }, + "definition": "{ \"find\": \"terms\", \"field\": \"logSource\", \"query\": \"\"}", + "hide": 1, + "includeAll": true, + "label": "LOG SOURCE", + "multi": true, + "name": "log_source", + "options": [], + "query": "{ \"find\": \"terms\", \"field\": \"logSource\", \"query\": \"\"}", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + } + ] + }, + "time": { + "from": "now-24h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], + "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] + }, + "timezone": "", + "title": "SAP - USERS AUTH", + "version": 5, + "weekStart": "" +} diff --git a/backend/app/connectors/grafana/schema/dashboards.py b/backend/app/connectors/grafana/schema/dashboards.py index b477949e..208e1132 100644 --- a/backend/app/connectors/grafana/schema/dashboards.py +++ b/backend/app/connectors/grafana/schema/dashboards.py @@ -66,6 +66,10 @@ class MimecastDashboard(Enum): SUMMARY = ("Mimecast", "summary.json") +class SapSiemDashboard(Enum): + USERS_AUTH = ("SapSiem", "users_auth.json") + + class DashboardProvisionRequest(BaseModel): dashboards: List[str] = Field( ..., @@ -83,7 +87,9 @@ class DashboardProvisionRequest(BaseModel): @validator("dashboards", each_item=True) def check_dashboard_exists(cls, e): - valid_dashboards = {item.name: item for item in list(WazuhDashboard) + list(Office365Dashboard)} + valid_dashboards = { + 
item.name: item for item in list(WazuhDashboard) + list(Office365Dashboard) + list(MimecastDashboard) + list(SapSiemDashboard) + } if e not in valid_dashboards: raise ValueError(f'Dashboard identifier "{e}" is not recognized.') return e diff --git a/backend/app/connectors/grafana/services/dashboards.py b/backend/app/connectors/grafana/services/dashboards.py index 91138ee1..a721b2ab 100644 --- a/backend/app/connectors/grafana/services/dashboards.py +++ b/backend/app/connectors/grafana/services/dashboards.py @@ -9,6 +9,7 @@ from app.connectors.grafana.schema.dashboards import GrafanaDashboardResponse from app.connectors.grafana.schema.dashboards import MimecastDashboard from app.connectors.grafana.schema.dashboards import Office365Dashboard +from app.connectors.grafana.schema.dashboards import SapSiemDashboard from app.connectors.grafana.schema.dashboards import WazuhDashboard from app.connectors.grafana.utils.universal import create_grafana_client @@ -144,7 +145,9 @@ async def provision_dashboards( provisioned_dashboards = [] errors = [] - valid_dashboards = {item.name: item for item in list(WazuhDashboard) + list(Office365Dashboard) + list(MimecastDashboard)} + valid_dashboards = { + item.name: item for item in list(WazuhDashboard) + list(Office365Dashboard) + list(MimecastDashboard) + list(SapSiemDashboard) + } for dashboard_name in dashboard_request.dashboards: dashboard_enum = valid_dashboards[dashboard_name] diff --git a/backend/app/db/all_models.py b/backend/app/db/all_models.py index 08bfec29..0a84900b 100644 --- a/backend/app/db/all_models.py +++ b/backend/app/db/all_models.py @@ -13,3 +13,4 @@ from app.integrations.models.customer_integration_settings import CustomerIntegrations from app.schedulers.models.scheduler import JobMetadata from app.integrations.monitoring_alert.models.monitoring_alert import MonitoringAlerts +from app.integrations.sap_siem.models.sap_siem import SapSiemMultipleLogins diff --git a/backend/app/db/db_populate.py b/backend/app/db/db_populate.py index 8b62fe85..a0f117ed 100644 --- a/backend/app/db/db_populate.py +++ b/backend/app/db/db_populate.py @@ -129,16 +129,6 @@ def get_connectors_list(): "Connection to Graylog GELF Input to receive events from integrations. Make sure you have created a GELF Input in Graylog.", "GELF_INPUT_PORT", ), - ( - "Alert Creation Provisioning", - "3", - "host_only", - ( - "Connection to Alert Creation Provisioning. Make sure you have " - "deployed the Alert Creation Provisioning Application provided by " - "SOCFortress: https://github.com/socfortress/Customer-Provisioning-Alert" - ), - ), # ... Add more connectors as needed ... ] @@ -238,6 +228,9 @@ def load_markdown_for_integration(integration_name: str) -> str: str: The content of the markdown file. """ # file_path = os.path.join("integrations_markdown", f"{integration_name.lower()}.md") + # if space in the integration name, replace it with underscore + if " " in integration_name: + integration_name = integration_name.replace(" ", "_") file_path = os.path.join( "app", "integrations", @@ -261,6 +254,7 @@ def get_available_integrations_list(): available_integrations = [ ("Office365", "Integrate Office365 with SOCFortress."), ("Mimecast", "Integrate Mimecast with SOCFortress."), + ("SAP SIEM", "Integrate SAP SIEM with SOCFortress."), # ... Add more available integrations as needed ... 
] @@ -350,6 +344,10 @@ async def get_available_integrations_auth_keys_list(session: AsyncSession): ("Mimecast", "EMAIL_ADDRESS"), ("Mimecast", "ACCESS_KEY"), ("Mimecast", "SECRET_KEY"), + ("SAP SIEM", "API_KEY"), + ("SAP SIEM", "SECRET_KEY"), + ("SAP SIEM", "USER_KEY"), + ("SAP SIEM", "API_DOMAIN"), # ... Add more available integrations auth keys as needed ... ] diff --git a/backend/app/db/db_setup.py b/backend/app/db/db_setup.py index 48fde787..e8fa7623 100644 --- a/backend/app/db/db_setup.py +++ b/backend/app/db/db_setup.py @@ -1,4 +1,6 @@ from loguru import logger +from sqlalchemy import text +from sqlalchemy.exc import OperationalError from sqlalchemy.ext.asyncio import AsyncSession # ! New with Async @@ -33,6 +35,34 @@ async def create_tables(async_engine): await add_connectors_if_not_exist(session) +async def update_tables(async_engine): + """ + Updates tables in the database. Needed for adding new columns to existing tables. + + Args: + async_engine (AsyncEngine): The async engine to connect to the database. + + Returns: + None + """ + logger.info("Updating tables") + + # Define the new columns to be added + new_columns = {"scheduled_job_metadata": ["extra_data TEXT"]} + + async with async_engine.begin() as conn: + for table_name, columns in new_columns.items(): + for column in columns: + alter_table_query = text(f"ALTER TABLE {table_name} ADD COLUMN {column}") + try: + await conn.execute(alter_table_query) + except OperationalError as e: + if "duplicate column name" in str(e): + logger.info(f"Column {column} already exists in {table_name}") + else: + raise + + async def create_roles(async_engine): """ Creates roles in the database. diff --git a/backend/app/integrations/markdown/sap_siem.md b/backend/app/integrations/markdown/sap_siem.md new file mode 100644 index 00000000..130ea7d5 --- /dev/null +++ b/backend/app/integrations/markdown/sap_siem.md @@ -0,0 +1 @@ +# [SAP SIEM](https://help.sap.com/docs/SAP_CUSTOMER_DATA_CLOUD/8b8d6fffe113457094a17701f63e3d6a/4143815a70b21014bbc5a10ce4041860.html) diff --git a/backend/app/integrations/mimecast/routes/mimecast.py b/backend/app/integrations/mimecast/routes/mimecast.py index 3f926a44..7d4a0895 100644 --- a/backend/app/integrations/mimecast/routes/mimecast.py +++ b/backend/app/integrations/mimecast/routes/mimecast.py @@ -13,7 +13,7 @@ from app.integrations.mimecast.services.mimecast import get_ttp_urls from app.integrations.mimecast.services.mimecast import invoke_mimecast from app.integrations.routes import find_customer_integration -from app.integrations.utils.utils import extract_mimecast_auth_keys +from app.integrations.utils.utils import extract_auth_keys from app.integrations.utils.utils import get_customer_integration_response integration_mimecast_router = APIRouter() @@ -57,7 +57,7 @@ async def invoke_mimecast_route( customer_integration_response, ) - mimecast_auth_keys = extract_mimecast_auth_keys(customer_integration) + mimecast_auth_keys = extract_auth_keys(customer_integration, service_name="Mimecast") auth_keys = MimecastAuthKeys(**mimecast_auth_keys) @@ -87,7 +87,7 @@ async def mimecast_ttp_url_route( customer_integration_response, ) - mimecast_auth_keys = extract_mimecast_auth_keys(customer_integration) + mimecast_auth_keys = extract_auth_keys(customer_integration, service_name="Mimecast") auth_keys = MimecastAuthKeys(**mimecast_auth_keys) diff --git a/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py b/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py index 28a4b43a..1a20ffa4 100644 --- 
a/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py
+++ b/backend/app/integrations/monitoring_alert/routes/monitoring_alert.py
@@ -1,4 +1,5 @@
 from typing import List
+from typing import Optional

 from fastapi import APIRouter
 from fastapi import Depends
@@ -27,6 +28,12 @@
 )
 from app.integrations.monitoring_alert.services.suricata import analyze_suricata_alerts
 from app.integrations.monitoring_alert.services.wazuh import analyze_wazuh_alerts
+from app.integrations.sap_siem.services.sap_siem_multiple_logins import (
+    sap_siem_multiple_logins_same_ip,
+)
+from app.integrations.sap_siem.services.sap_siem_suspicious_logins import (
+    sap_siem_suspicious_logins,
+)

 monitoring_alerts_router = APIRouter()

@@ -222,3 +229,71 @@ async def run_suricata_analysis(
         success=True,
         message="Analysis completed successfully",
     )
+
+
+@monitoring_alerts_router.post(
+    "/run_analysis/sap_siem/suspicious_logins",
+    response_model=AlertAnalysisResponse,
+)
+async def run_sap_siem_suspicious_logins_analysis(
+    threshold: Optional[int] = 3,
+    session: AsyncSession = Depends(get_db),
+) -> AlertAnalysisResponse:
+    """
+    This route is used to run analysis on the monitoring alerts.
+
+    1. Get all the monitoring alerts from the database where the alert_source
+    is SAP SIEM.
+
+    2. Call the sap_siem_suspicious_logins function to analyze the alerts.
+
+    Args:
+        threshold (Optional[int], optional): The detection threshold for the analysis. Defaults to 3.
+        session (AsyncSession, optional): The database session. Defaults to Depends(get_db).
+
+    Returns:
+        AlertAnalysisResponse: The response containing the analysis results.
+    """
+    logger.info("Running analysis for SAP SIEM suspicious logins")
+
+    # Call the sap_siem_suspicious_logins function to analyze the alerts
+    await sap_siem_suspicious_logins(threshold=threshold, session=session)
+
+    return AlertAnalysisResponse(
+        success=True,
+        message="Analysis completed successfully",
+    )
+
+
+@monitoring_alerts_router.post(
+    "/run_analysis/sap_siem/multiple_logins",
+    response_model=AlertAnalysisResponse,
+)
+async def run_sap_siem_multiple_logins_same_ip_analysis(
+    threshold: Optional[int] = 1,
+    session: AsyncSession = Depends(get_db),
+) -> AlertAnalysisResponse:
+    """
+    This route is used to run analysis on the monitoring alerts.
+
+    1. Get all the monitoring alerts from the database where the alert_source
+    is SAP SIEM.
+
+    2. Call the sap_siem_multiple_logins_same_ip function to analyze the alerts.
+
+    Args:
+        threshold (Optional[int], optional): The detection threshold for the analysis. Defaults to 1.
+        session (AsyncSession, optional): The database session. Defaults to Depends(get_db).
+
+    Returns:
+        AlertAnalysisResponse: The response containing the analysis results.
+    """
+    logger.info("Running analysis for SAP SIEM multiple logins")
+
+    # Call the sap_siem_multiple_logins_same_ip function to analyze the alerts
+    await sap_siem_multiple_logins_same_ip(threshold=threshold, session=session)
+
+    return AlertAnalysisResponse(
+        success=True,
+        message="Analysis completed successfully",
+    )
diff --git a/backend/app/integrations/monitoring_alert/routes/provision.py b/backend/app/integrations/monitoring_alert/routes/provision.py
index d21421cf..f4d39eb8 100644
--- a/backend/app/integrations/monitoring_alert/routes/provision.py
+++ b/backend/app/integrations/monitoring_alert/routes/provision.py
@@ -152,7 +152,7 @@ async def provision_monitoring_alert_testing_route(
     Used for testing purposes.
""" message = EventShipperPayload( - customer_code="replace_me", + customer_code="00002", integration="testing", version="1.0", **request, diff --git a/backend/app/integrations/monitoring_alert/services/suricata.py b/backend/app/integrations/monitoring_alert/services/suricata.py index aa127eb6..6009a947 100644 --- a/backend/app/integrations/monitoring_alert/services/suricata.py +++ b/backend/app/integrations/monitoring_alert/services/suricata.py @@ -70,9 +70,6 @@ async def construct_alert_source_link( """ logger.info(f"Constructing alert source link for alert: {alert_details}") query_string = f"%22query%22:%22alert_signature_id:%5C%22{alert_details.alert_id}%5C%22%20AND%20" - # ! TODO: REMOVE ONCE TESTING IS COMPLETE - if alert_details.agent_labels_customer == "WCPS": - alert_details.agent_labels_customer = "00002" grafana_url = ( await get_customer_alert_settings( customer_code=alert_details.agent_labels_customer, diff --git a/backend/app/integrations/routes.py b/backend/app/integrations/routes.py index 4905852d..e8fe935a 100644 --- a/backend/app/integrations/routes.py +++ b/backend/app/integrations/routes.py @@ -613,7 +613,6 @@ async def get_customer_integrations_by_customer_code( ) result = await session.execute(stmt) customer_integrations = result.scalars().unique().all() - logger.info(f"customer_integrations: {customer_integrations}") return CustomerIntegrationsResponse( available_integrations=customer_integrations, message="Customer integrations successfully retrieved.", diff --git a/backend/app/integrations/sap_siem/models/sap_siem.py b/backend/app/integrations/sap_siem/models/sap_siem.py new file mode 100644 index 00000000..f703723c --- /dev/null +++ b/backend/app/integrations/sap_siem/models/sap_siem.py @@ -0,0 +1,23 @@ +from datetime import datetime +from typing import Optional + +from sqlmodel import Field +from sqlmodel import SQLModel + + +class SapSiemMultipleLogins(SQLModel, table=True): + """ + Represents the SAP SIEM multiple logins table. + Table is used to track when an IP has successfully logged in with multiple loginIDs. + Used in the SAP SIEM integration. 
+    """
+
+    __tablename__ = "sap_siem_multiple_logins"
+    id: Optional[int] = Field(primary_key=True)
+    ip: str = Field(description="The IP involved in the case.")
+    last_case_created_timestamp: datetime = Field(
+        description="Timestamp of the last case created.",
+    )
+    associated_loginIDs: str = Field(
+        description="Comma-separated loginIDs associated with this IP.",
+    )
diff --git a/backend/app/integrations/sap_siem/routes/provision.py b/backend/app/integrations/sap_siem/routes/provision.py
new file mode 100644
index 00000000..eb64211b
--- /dev/null
+++ b/backend/app/integrations/sap_siem/routes/provision.py
@@ -0,0 +1,65 @@
+from fastapi import APIRouter
+from fastapi import Depends
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.db.db_session import get_db
+from app.integrations.sap_siem.schema.provision import ProvisionSapSiemRequest
+from app.integrations.sap_siem.schema.provision import ProvisionSapSiemResponse
+from app.integrations.sap_siem.services.provision import provision_sap_siem
+from app.integrations.utils.utils import get_customer_integration_response
+from app.schedulers.models.scheduler import CreateSchedulerRequest
+from app.schedulers.scheduler import add_scheduler_jobs
+
+integration_sap_siem_provision_scheduler_router = APIRouter()
+
+
+@integration_sap_siem_provision_scheduler_router.post(
+    "/provision",
+    response_model=ProvisionSapSiemResponse,
+    description="Provision a SAP SIEM integration.",
+)
+async def provision_sap_siem_route(
+    provision_sap_siem_request: ProvisionSapSiemRequest,
+    session: AsyncSession = Depends(get_db),
+) -> ProvisionSapSiemResponse:
+    """
+    Provisions a SAP SIEM integration.
+
+    Args:
+        provision_sap_siem_request (ProvisionSapSiemRequest): The request object containing the necessary data for provisioning.
+        session (AsyncSession, optional): The database session. Defaults to Depends(get_db).
+
+    Returns:
+        ProvisionSapSiemResponse: The response object indicating the success or failure of the provisioning process.
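+
+    Example request body (values are illustrative; "00002" and 5 follow the
+    examples declared on ProvisionSapSiemRequest):
+        {"customer_code": "00002", "time_interval": 5}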
+ """ + # Check if the customer integration settings are available and can be provisioned + await get_customer_integration_response( + provision_sap_siem_request.customer_code, + session, + ) + await provision_sap_siem(provision_sap_siem_request, session) + await add_scheduler_jobs( + CreateSchedulerRequest( + function_name="invoke_sap_siem_integration_collection", + time_interval=provision_sap_siem_request.time_interval, + job_id="invoke_sap_siem_integration_collection", + ), + ) + await add_scheduler_jobs( + CreateSchedulerRequest( + function_name="invoke_sap_siem_integration_suspicious_logins_analysis", + time_interval=provision_sap_siem_request.time_interval, + job_id="invoke_sap_siem_integration_suspicious_logins_analysis", + ), + ) + await add_scheduler_jobs( + CreateSchedulerRequest( + function_name="invoke_sap_siem_integration_multiple_logins_same_ip_analysis", + time_interval=provision_sap_siem_request.time_interval, + job_id="invoke_sap_siem_integration_multiple_logins_same_ip_analysis", + ), + ) + return ProvisionSapSiemResponse( + success=True, + message="SAP SIEM integration provisioned successfully.", + ) diff --git a/backend/app/integrations/sap_siem/routes/sap_siem.py b/backend/app/integrations/sap_siem/routes/sap_siem.py new file mode 100644 index 00000000..3ebfe2ec --- /dev/null +++ b/backend/app/integrations/sap_siem/routes/sap_siem.py @@ -0,0 +1,71 @@ +from fastapi import APIRouter +from fastapi import Depends +from loguru import logger +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db.db_session import get_db +from app.integrations.routes import find_customer_integration +from app.integrations.sap_siem.schema.sap_siem import CollectSapSiemRequest +from app.integrations.sap_siem.schema.sap_siem import InvokeSapSiemRequest +from app.integrations.sap_siem.schema.sap_siem import InvokeSAPSiemResponse +from app.integrations.sap_siem.schema.sap_siem import SapSiemAuthKeys +from app.integrations.sap_siem.services.collect import collect_sap_siem +from app.integrations.utils.utils import extract_auth_keys +from app.integrations.utils.utils import get_customer_integration_response + +integration_sap_siem_router = APIRouter() + + +@integration_sap_siem_router.post( + "", + response_model=InvokeSAPSiemResponse, + description="Pull down SAP SIEM Events.", +) +async def collect_sap_siem_route(sap_siem_request: InvokeSapSiemRequest, session: AsyncSession = Depends(get_db)): + """Pull down SAP SIEM Events.""" + customer_integration_response = await get_customer_integration_response( + sap_siem_request.customer_code, + session, + ) + + customer_integration = await find_customer_integration( + sap_siem_request.customer_code, + sap_siem_request.integration_name, + customer_integration_response, + ) + + sap_siem_auth_keys = extract_auth_keys(customer_integration, service_name="SAP SIEM") + + logger.info(f"SAP SIEM Auth Keys: {sap_siem_auth_keys}") + + auth_keys = SapSiemAuthKeys(**sap_siem_auth_keys) + # if multiple apiKey values are present, make a loop to iterate through them + # and collect the data for each apiKey + if "," in auth_keys.API_KEY: + api_keys = auth_keys.API_KEY.split(",") + for key in api_keys: + collect_sap_siem_request = CollectSapSiemRequest( + apiKey=key, + secretKey=auth_keys.SECRET_KEY, + userKey=auth_keys.USER_KEY, + apiDomain=auth_keys.API_DOMAIN, + threshold=sap_siem_request.threshold, + lower_bound=sap_siem_request.lower_bound, + upper_bound=sap_siem_request.upper_bound, + customer_code=sap_siem_request.customer_code, + ) + await 
collect_sap_siem(sap_siem_request=collect_sap_siem_request)
+    else:
+        collect_sap_siem_request = CollectSapSiemRequest(
+            apiKey=auth_keys.API_KEY,
+            secretKey=auth_keys.SECRET_KEY,
+            userKey=auth_keys.USER_KEY,
+            apiDomain=auth_keys.API_DOMAIN,
+            threshold=sap_siem_request.threshold,
+            lower_bound=sap_siem_request.lower_bound,
+            upper_bound=sap_siem_request.upper_bound,
+            customer_code=sap_siem_request.customer_code,
+        )
+        await collect_sap_siem(sap_siem_request=collect_sap_siem_request)
+
+    return InvokeSAPSiemResponse(success=True, message="SAP SIEM Events collected successfully.")
diff --git a/backend/app/integrations/sap_siem/schema/provision.py b/backend/app/integrations/sap_siem/schema/provision.py
new file mode 100644
index 00000000..7809b732
--- /dev/null
+++ b/backend/app/integrations/sap_siem/schema/provision.py
@@ -0,0 +1,87 @@
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import root_validator
+
+
+class ProvisionSapSiemRequest(BaseModel):
+    customer_code: str = Field(
+        ...,
+        description="The customer code.",
+        examples=["00002"],
+    )
+    time_interval: int = Field(
+        ...,
+        description="The time interval for the scheduler.",
+        examples=[5],
+    )
+    integration_name: str = Field(
+        "SAP SIEM",
+        description="The integration name.",
+        examples=["SAP SIEM"],
+    )
+
+    # ensure the `integration_name` is always set to "SAP SIEM"
+    @root_validator(pre=True)
+    def set_integration_name(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        values["integration_name"] = "SAP SIEM"
+        return values
+
+
+class ProvisionSapSiemResponse(BaseModel):
+    success: bool
+    message: str
+
+
+# ! STREAMS ! #
+class StreamRule(BaseModel):
+    field: str
+    type: int
+    inverted: bool
+    value: str
+
+
+class SapSiemEventStream(BaseModel):
+    title: str = Field(..., description="Title of the stream")
+    description: str = Field(..., description="Description of the stream")
+    index_set_id: str = Field(..., description="ID of the associated index set")
+    rules: List[StreamRule] = Field(..., description="List of rules for the stream")
+    matching_type: str = Field(..., description="Matching type for the rules")
+    remove_matches_from_default_stream: bool = Field(
+        ...,
+        description="Whether to remove matches from the default stream",
+    )
+    content_pack: Optional[str] = Field(
+        None,
+        description="Associated content pack, if any",
+    )
+
+    class Config:
+        schema_extra = {
+            "example": {
+                "title": "SAP SIEM EVENTS - Example Company",
+                "description": "SAP SIEM EVENTS - Example Company",
+                "index_set_id": "12345",
+                "rules": [
+                    {
+                        "field": "customer_code",
+                        "type": 1,
+                        "inverted": False,
+                        "value": "ExampleCode",
+                    },
+                    {
+                        "field": "integration",
+                        "type": 1,
+                        "inverted": False,
+                        "value": "sap_siem",
+                    },
+                ],
+                "matching_type": "AND",
+                "remove_matches_from_default_stream": True,
+                "content_pack": None,
+            },
+        }
diff --git a/backend/app/integrations/sap_siem/schema/sap_siem.py b/backend/app/integrations/sap_siem/schema/sap_siem.py
new file mode 100644
index 00000000..650f2806
--- /dev/null
+++ b/backend/app/integrations/sap_siem/schema/sap_siem.py
@@ -0,0 +1,383 @@
+from datetime import datetime
+from datetime import timedelta
+from enum import Enum
+from typing import Dict
+from typing import List
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import root_validator
+
+
+class InvokeSapSiemRequest(BaseModel):
+
customer_code: str = Field( + ..., + description="The customer code.", + examples=["00002"], + ) + integration_name: str = Field( + "SAP SIEM", + description="The integration name.", + examples=["SAP SIEM"], + ) + threshold: Optional[int] = Field( + 3, + description="Number of 'Invalid LoginID' before the first 'OK'", + ) + time_range: Optional[str] = Field( + "15m", + pattern="^[1-9][0-9]*[mhdw]$", + description="Time range for the query (1m, 1h, 1d, 1w)", + ) + + lower_bound: str = None + upper_bound: str = None + + @root_validator(pre=True) + def set_time_bounds(cls, values): + time_range = values.get("time_range") + if time_range: + unit = time_range[-1] + amount = int(time_range[:-1]) + + now = datetime.utcnow() + + if unit == "m": + lower_bound = now - timedelta(minutes=amount) + elif unit == "h": + lower_bound = now - timedelta(hours=amount) + elif unit == "d": + lower_bound = now - timedelta(days=amount) + elif unit == "w": + lower_bound = now - timedelta(weeks=amount) + + values["lower_bound"] = lower_bound.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" + values["upper_bound"] = now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" + return values + + +class InvokeSAPSiemResponse(BaseModel): + success: bool + message: str + + +class SapSiemAuthKeys(BaseModel): + API_KEY: str = Field( + ..., + description="YOUR API KEY", + examples=["3_yUWT3uDMs9E1N87r4Ey"], + ) + SECRET_KEY: str = Field( + ..., + description="YOUR SECRET KEY", + examples=["4ijD6uMCca"], + ) + USER_KEY: Optional[str] = Field( + None, + description="YOUR USER KEY", + examples=["AK9zAL"], + ) + API_DOMAIN: str = Field( + ..., + description="YOUR API DOMAIN", + examples=["audit.eu1.gigya.com"], + ) + + +class CollectSapSiemRequest(BaseModel): + customer_code: str = Field( + ..., + description="The customer code.", + examples=["00002"], + ) + apiKey: str = Field(..., description="API key for authorization") + secretKey: str = Field(..., description="Secret key for authorization") + userKey: str = Field(..., description="User key for identification") + apiDomain: str = Field(..., description="API domain") + threshold: Optional[int] = Field( + 1, + description="Number of 'Invalid LoginID' before the first 'OK'", + ) + lower_bound: str = None + upper_bound: str = None + + +######### ! SAP API RESPONSE ! 
######### +class HttpReq(BaseModel): + SDK: str = Field( + ..., + description="Software Development Kit used for the HTTP request", + ) + country: str = Field(..., description="Country code") + + +class Params(BaseModel): + clientContext: Optional[str] = Field(None, description="Client context information") + include: Optional[str] = Field(None, description="Data to include in the response") + password: str = Field(..., description="Password for the user") + loginID: str = Field(..., description="Login ID of the user") + apiKey: str = Field(..., description="API key for authorization") + format: Optional[str] = Field(None, description="Response format") + secret: Optional[str] = Field(None, description="Secret key for authorization") + userKey: Optional[str] = Field(None, description="User key for identification") + + +class UserAgent(BaseModel): + os: str = Field(..., description="Operating system of the user") + browser: str = Field(..., description="Browser used by the user") + raw: Optional[str] = Field(None, description="Raw user agent string") + version: str = Field(..., description="Browser version") + platform: str = Field(..., description="Platform type (desktop/mobile)") + + +class UserKeyDetails(BaseModel): + name: Optional[str] = Field(None, description="Name of the user") + emailDomain: Optional[str] = Field(None, description="Email domain of the user") + + +class Restrictions(BaseModel): + ipWhitelistRestricted: bool = Field( + ..., + description="Is IP whitelisting restricted", + ) + ipWhitelistRestrictionEnforced: bool = Field( + ..., + description="Is IP whitelist restriction enforced", + ) + ipBlacklistRestricted: bool = Field( + ..., + description="Is IP blacklisting restricted", + ) + ipBlacklistRestrictionEnforced: bool = Field( + ..., + description="Is IP blacklist restriction enforced", + ) + + +class Result(BaseModel): + callID: str = Field(..., description="Unique identifier for the call") + authType: Optional[str] = Field(None, description="Type of authentication used") + timestamp: str = Field( + ..., + alias="@timestamp", + description="Timestamp of the event", + ) + errCode: str = Field(..., description="Error code") + errDetails: Optional[str] = Field(None, description="Detailed error message") + errMessage: str = Field(..., description="Error message") + endpoint: str = Field(..., description="API endpoint hit") + userKey: Optional[str] = Field(None, description="User key for identification") + httpReq: HttpReq = Field(..., description="HTTP request details") + ip: str = Field(..., description="IP address of the user") + serverIP: str = Field(..., description="Server IP address") + params: Params = Field(..., description="Parameters passed in the request") + uid: Optional[str] = Field( + "No uid found", + description="Unique identifier for the user", + ) + apikey: str = Field(..., description="API key used for the request") + userAgent: UserAgent = Field(..., description="User agent details") + userKeyDetails: Optional[UserKeyDetails] = Field( + None, + description="Details related to user key", + ) + XffFirstIp: str = Field(..., description="First IP in the X-Forwarded-For header") + restrictions: Restrictions = Field(..., description="IP restrictions") + riskScore: Optional[str] = Field( + None, + description="Risk score associated with the request", + ) + event_timestamp: Optional[datetime] = Field( + None, + description="Timestamp of the event", + ) + case_created: Optional[str] = Field( + "False", + description="Whether a case has been created for the 
event", + ) + event_analyzed: Optional[str] = Field( + "False", + description="Whether the event has been analyzed", + ) + event_analyzed_multiple_logins: Optional[str] = Field( + "False", + description="Whether the event has been analyzed for multiple logins", + ) + + +class SapSiemResponseBody(BaseModel): + results: List[Result] = Field(..., description="List of result objects") + totalCount: int = Field(..., description="Total count of results") + statusCode: int = Field(..., description="Status code of the response") + errorCode: int = Field(..., description="Error code of the response") + statusReason: str = Field(..., description="Status reason of the response") + callId: str = Field(..., description="Unique identifier for the overall call") + time: str = Field(..., description="Time of the response") + objectsCount: int = Field(..., description="Count of objects in results") + + +#### ! WAZUH INDEXER RESULTS ! #### + + +class SapSiemSource(BaseModel): + logSource: Optional[str] = Field(None, description="The source of the log") + params_loginID: str = Field(..., description="The login ID of the user") + errCode: str = Field(..., description="The error code") + ip: str = Field(..., description="The IP address of the user") + httpReq_country: str = Field(..., description="The country from which the HTTP request originated") + event_timestamp: str = Field(..., description="The timestamp of the event") + errMessage: Optional[str] = Field(None, description="The error message") + customer_code: str = Field(..., description="The customer code") + errDetails: Optional[str] = Field(None, description="Detailed error message") + + +class SapSiemHit(BaseModel): + index: str = Field(..., description="The index of the hit", alias="_index") + id: str = Field(..., description="The ID of the hit", alias="_id") + score: Optional[float] = Field(None, description="The score of the hit", alias="_score") + source: SapSiemSource = Field(..., description="The source data of the hit", alias="_source") + sort: Optional[List[int]] = Field(None, description="The sort order of the hit") + + +class SapSiemTotal(BaseModel): + value: int = Field(..., description="The total number of hits") + relation: str = Field(..., description="The relation of the total hits") + + +class SapSiemHits(BaseModel): + total: SapSiemTotal = Field(..., description="The total hits data") + max_score: Optional[float] = Field(None, description="The maximum score among the hits") + hits: List[SapSiemHit] = Field(..., description="The list of hits") + + +class SapSiemShards(BaseModel): + total: int = Field(..., description="The total number of shards") + successful: int = Field(..., description="The number of successful shards") + skipped: int = Field(..., description="The number of skipped shards") + failed: int = Field(..., description="The number of failed shards") + + +class SapSiemWazuhIndexerResponse(BaseModel): + scroll_id: Optional[str] = Field(None, description="The scroll ID", alias="_scroll_id") + took: int = Field(..., description="The time it took to execute the request") + timed_out: bool = Field(..., description="Whether the request timed out") + shards: SapSiemShards = Field(..., description="The shards data", alias="_shards") + hits: SapSiemHits = Field(..., description="The hits data") + + +class SuspiciousLogin(BaseModel): + customer_code: str + logSource: Optional[str] = Field(None) + loginID: str + country: Optional[str] + ip: str + event_timestamp: str + errMessage: str + index: Optional[str] = Field(None, 
description="The index of the hit", alias="_index") + id: Optional[str] = Field(None, description="The ID of the hit", alias="_id") + errDetails: Optional[str] = Field(None, description="Detailed error message") + + +class ErrCode(Enum): + """ + Error codes for SAP SIEM + """ + + INVALID_LOGIN_ID = "403042" # Invalid LoginID + IP_BLOCKED = "403051" # IP is blocked + ACCOUNT_TEMPORARILY_LOCKED = "403120" # Account temporarily locked + OK = "0" # Successful login + + +################# ! IRIS CASE CREATION SCHEMA ! ################# +class IrisCasePayload(BaseModel): + case_name: str = Field(..., description="The name of the case.") + case_description: str = Field(..., description="The description of the case.") + case_customer: int = Field(1, description="The customer of the case.") + case_classification: int = Field( + 1, + description="The classification of the case.", + ) + soc_id: str = Field("1", description="The SOC ID of the case.") + custom_attributes: Optional[Dict] = Field( + None, + description="The custom attributes of the case.", + ) + create_customer: bool = Field( + False, + description="The create customer flag of the case.", + ) + + def to_dict(self): + return self.dict(exclude_none=True) + + +class ModificationHistoryEntry(BaseModel): + user: str + user_id: int + action: str + + +class CaseData(BaseModel): + case_id: int + open_date: str + modification_history: Dict[str, ModificationHistoryEntry] + close_date: Optional[str] + case_description: str + classification_id: int + case_soc_id: str + case_name: str + custom_attributes: Optional[Dict[str, str]] + case_uuid: str + review_status_id: Optional[int] + state_id: int + case_customer: int + reviewer_id: Optional[int] + user_id: int + owner_id: int + closing_note: Optional[str] + status_id: int + + +class CaseResponse(BaseModel): + success: bool + data: CaseData + + +################# ! IRIS ASSET ADD SCHEMA ! 
################# +class AddAssetModel(BaseModel): + name: str + asset_type: int + analysis_status: int = Field( + None, + description="The analysis status ID of the asset.", + ) + compromise_status: int = Field( + None, + description="The asset compromise status ID of the asset.", + ) + asset_tags: Optional[List[str]] = Field( + None, + description="The asset tags of the asset.", + ) + description: Optional[str] = Field( + None, + description="The asset description of the asset.", + ) + asset_domain: Optional[str] = Field( + None, + description="The asset domain of the asset.", + ) + ip: Optional[str] = Field(None, description="The asset IP of the asset.") + ioc_links: Optional[List[int]] = Field( + None, + description="The IoC links of the asset.", + ) + custom_attributes: Optional[Dict[str, str]] = Field( + None, + description="The custom attributes of the asset.", + ) + + def to_dict(self): + return self.dict(exclude_none=True) diff --git a/backend/app/integrations/sap_siem/services/collect.py b/backend/app/integrations/sap_siem/services/collect.py new file mode 100644 index 00000000..ee0782ea --- /dev/null +++ b/backend/app/integrations/sap_siem/services/collect.py @@ -0,0 +1,85 @@ +import requests +from loguru import logger + +from app.integrations.sap_siem.schema.sap_siem import CollectSapSiemRequest +from app.integrations.sap_siem.schema.sap_siem import InvokeSAPSiemResponse +from app.integrations.sap_siem.schema.sap_siem import SapSiemResponseBody +from app.integrations.utils.event_shipper import event_shipper +from app.integrations.utils.schema import EventShipperPayload + + +def build_request_payload(sap_siem_request: CollectSapSiemRequest) -> dict: + return { + "apiKey": sap_siem_request.apiKey, + "secret": sap_siem_request.secretKey, + "userKey": sap_siem_request.userKey, + "query": f"SELECT * FROM auditLog WHERE endpoint = 'accounts.login' and @timestamp >= '{sap_siem_request.lower_bound}' " + f"and @timestamp < '{sap_siem_request.upper_bound}'", + } + + +async def make_request(sap_siem_request: CollectSapSiemRequest) -> SapSiemResponseBody: + """ + Makes a request to the SAP SIEM integration. + + Args: + sap_siem_request (CollectSapSiemRequest): The request payload containing the necessary information for the SAP SIEM integration. + + Returns: + SapSiemResponseBody: The response model containing the result of the SAP SIEM integration invocation. + + Raises: + HTTPException: If the SAP SIEM integration fails. + """ + logger.info("Making request to SAP SIEM") + form_data = build_request_payload(sap_siem_request) + response = requests.post( + f"https://{sap_siem_request.apiDomain}/audit.search", + data=form_data, + ) + return SapSiemResponseBody(**response.json()) + + +async def send_to_event_shipper(message: EventShipperPayload) -> None: + """ + Sends the message to the event shipper. + + Args: + message (EventShipperPayload): The message to send to the event shipper. + """ + await event_shipper(message) + + +async def collect_sap_siem(sap_siem_request: CollectSapSiemRequest) -> InvokeSAPSiemResponse: + """ + Collects SAP SIEM events. + + Args: + sap_siem_request (CollectSapSiemRequest): The request payload containing the necessary information for the SAP SIEM integration. + + Returns: + InvokeSAPSiemResponse: The response model containing the result of the SAP SIEM integration invocation. + + Raises: + HTTPException: If the SAP SIEM integration fails. 
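+
+    Example (hypothetical values, mirroring the examples declared on
+    SapSiemAuthKeys; the bounds are illustrative timestamps):
+        >>> request = CollectSapSiemRequest(
+        ...     customer_code="00002",
+        ...     apiKey="3_yUWT3uDMs9E1N87r4Ey",
+        ...     secretKey="4ijD6uMCca",
+        ...     userKey="AK9zAL",
+        ...     apiDomain="audit.eu1.gigya.com",
+        ...     lower_bound="2024-01-01T00:00:00.000Z",
+        ...     upper_bound="2024-01-01T00:15:00.000Z",
+        ... )
+        >>> await collect_sap_siem(sap_siem_request=request)
+        InvokeSAPSiemResponse(success=True, message='SAP SIEM Events collected successfully')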
+    """
+    logger.info(f"Collecting SAP SIEM Events for customer_code: {sap_siem_request.customer_code}")
+
+    results = await make_request(sap_siem_request)
+
+    for result in results.results:
+        # write the `timestamp` field as `event_timestamp`
+        result.event_timestamp = result.timestamp
+        await send_to_event_shipper(
+            EventShipperPayload(
+                customer_code=sap_siem_request.customer_code,
+                integration="sap_siem",
+                version="1.0",
+                **result.dict(),
+            ),
+        )
+
+    return InvokeSAPSiemResponse(
+        success=True,
+        message="SAP SIEM Events collected successfully",
+    )
diff --git a/backend/app/integrations/sap_siem/services/provision.py b/backend/app/integrations/sap_siem/services/provision.py
new file mode 100644
index 00000000..325b8c11
--- /dev/null
+++ b/backend/app/integrations/sap_siem/services/provision.py
@@ -0,0 +1,404 @@
+import json
+from datetime import datetime
+
+from loguru import logger
+from sqlalchemy import and_
+from sqlalchemy import update
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.connectors.grafana.schema.dashboards import DashboardProvisionRequest
+from app.connectors.grafana.schema.dashboards import SapSiemDashboard
+from app.connectors.grafana.services.dashboards import provision_dashboards
+from app.connectors.grafana.utils.universal import create_grafana_client
+from app.connectors.graylog.services.management import start_stream
+from app.connectors.graylog.utils.universal import send_post_request
+from app.customer_provisioning.schema.grafana import GrafanaDatasource
+from app.customer_provisioning.schema.grafana import GrafanaDataSourceCreationResponse
+from app.customer_provisioning.schema.graylog import GraylogIndexSetCreationResponse
+from app.customer_provisioning.schema.graylog import StreamCreationResponse
+from app.customer_provisioning.schema.graylog import TimeBasedIndexSet
+from app.customer_provisioning.services.grafana import create_grafana_folder
+from app.customer_provisioning.services.grafana import get_opensearch_version
+from app.customers.routes.customers import get_customer
+from app.customers.routes.customers import get_customer_meta
+from app.integrations.models.customer_integration_settings import CustomerIntegrations
+from app.integrations.routes import create_integration_meta
+from app.integrations.sap_siem.schema.provision import ProvisionSapSiemRequest
+from app.integrations.sap_siem.schema.provision import ProvisionSapSiemResponse
+from app.integrations.sap_siem.schema.provision import SapSiemEventStream
+from app.integrations.schema import CustomerIntegrationsMetaSchema
+from app.utils import get_connector_attribute
+
+
+################## ! GRAYLOG ! ##################
+async def build_index_set_config(
+    customer_code: str,
+    session: AsyncSession,
+) -> TimeBasedIndexSet:
+    """
+    Build the configuration for a time-based index set.
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session.
+
+    Returns:
+        TimeBasedIndexSet: The configured time-based index set.
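+
+    Example (illustrative): for customer_code "00002" this yields an index
+    set with index_prefix "sap_siem_00002", daily rotation ("P1D"), and a
+    retention window of 30 indices.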
+    """
+    return TimeBasedIndexSet(
+        title=f"SAP SIEM - {(await get_customer(customer_code, session)).customer.customer_name}",
+        description=f"SAP SIEM - {customer_code}",
+        index_prefix=f"sap_siem_{customer_code}",
+        rotation_strategy_class="org.graylog2.indexer.rotation.strategies.TimeBasedRotationStrategy",
+        rotation_strategy={
+            "type": "org.graylog2.indexer.rotation.strategies.TimeBasedRotationStrategyConfig",
+            "rotation_period": "P1D",
+            "rotate_empty_index_set": False,
+            "max_rotation_period": None,
+        },
+        retention_strategy_class="org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy",
+        retention_strategy={
+            "type": "org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig",
+            "max_number_of_indices": 30,
+        },
+        creation_date=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+        index_analyzer="standard",
+        shards=1,
+        replicas=0,
+        index_optimization_max_num_segments=1,
+        index_optimization_disabled=False,
+        writable=True,
+        field_type_refresh_interval=5000,
+    )
+
+
+# Function to send the POST request and handle the response
+async def send_index_set_creation_request(
+    index_set: TimeBasedIndexSet,
+) -> GraylogIndexSetCreationResponse:
+    """
+    Sends a request to create an index set in Graylog.
+
+    Args:
+        index_set (TimeBasedIndexSet): The index set to be created.
+
+    Returns:
+        GraylogIndexSetCreationResponse: The response from Graylog after creating the index set.
+    """
+    json_index_set = json.dumps(index_set.dict())
+    logger.info(f"json_index_set: {json_index_set}")
+    response_json = await send_post_request(
+        endpoint="/api/system/indices/index_sets",
+        data=index_set.dict(),
+    )
+    return GraylogIndexSetCreationResponse(**response_json)
+
+
+async def create_index_set(
+    customer_code: str,
+    session: AsyncSession,
+) -> GraylogIndexSetCreationResponse:
+    """
+    Creates an index set for a new customer.
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session.
+
+    Returns:
+        GraylogIndexSetCreationResponse: The response object containing the result of the index set creation.
+    """
+    logger.info(f"Creating index set for customer {customer_code}")
+    index_set_config = await build_index_set_config(customer_code, session)
+    return await send_index_set_creation_request(index_set_config)
+
+
+# ! Event STREAMS ! #
+# Function to create event stream configuration
+async def build_event_stream_config(
+    customer_code: str,
+    index_set_id: str,
+    session: AsyncSession,
+) -> SapSiemEventStream:
+    """
+    Builds the configuration for a SAP SIEM event stream.
+
+    Args:
+        customer_code (str): The customer code.
+        index_set_id (str): The index set ID.
+        session (AsyncSession): The async session.
+
+    Returns:
+        SapSiemEventStream: The configured SAP SIEM event stream.
+    """
+    return SapSiemEventStream(
+        title=f"SAP SIEM EVENTS - {(await get_customer(customer_code, session)).customer.customer_name}",
+        description=f"SAP SIEM EVENTS - {(await get_customer(customer_code, session)).customer.customer_name}",
+        index_set_id=index_set_id,
+        rules=[
+            {
+                "field": "integration",
+                "type": 1,
+                "inverted": False,
+                "value": "sap_siem",
+            },
+            {
+                "field": "customer_code",
+                "type": 1,
+                "inverted": False,
+                "value": f"{customer_code}",
+            },
+        ],
+        matching_type="AND",
+        remove_matches_from_default_stream=True,
+        content_pack=None,
+    )
+
+
+async def send_event_stream_creation_request(
+    event_stream: SapSiemEventStream,
+) -> StreamCreationResponse:
+    """
+    Sends a request to create an event stream.
+
+    Args:
+        event_stream (SapSiemEventStream): The event stream to be created.
+
+    Returns:
+        StreamCreationResponse: The response containing the created event stream.
+    """
+    json_event_stream = json.dumps(event_stream.dict())
+    logger.info(f"json_event_stream: {json_event_stream}")
+    response_json = await send_post_request(
+        endpoint="/api/streams",
+        data=event_stream.dict(),
+    )
+    return StreamCreationResponse(**response_json)
+
+
+async def create_event_stream(
+    customer_code: str,
+    index_set_id: str,
+    session: AsyncSession,
+) -> StreamCreationResponse:
+    """
+    Creates an event stream for a customer.
+
+    Args:
+        customer_code (str): The customer code.
+        index_set_id (str): The ID of the index set.
+        session (AsyncSession): The async session.
+
+    Returns:
+        The result of the event stream creation request.
+    """
+    event_stream_config = await build_event_stream_config(
+        customer_code,
+        index_set_id,
+        session,
+    )
+    return await send_event_stream_creation_request(event_stream_config)
+
+
+#### ! GRAFANA ! ####
+async def create_grafana_datasource(
+    customer_code: str,
+    session: AsyncSession,
+) -> GrafanaDataSourceCreationResponse:
+    """
+    Creates a Grafana datasource for the specified customer.
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session.
+
+    Returns:
+        GrafanaDataSourceCreationResponse: The response containing the created datasource details.
+    """
+    logger.info("Creating Grafana datasource")
+    grafana_client = await create_grafana_client("Grafana")
+    # Switch to the customer's Grafana organization
+    grafana_client.user.switch_actual_user_organisation(
+        (await get_customer_meta(customer_code, session)).customer_meta.customer_meta_grafana_org_id,
+    )
+    datasource_payload = GrafanaDatasource(
+        name="SAP SIEM",
+        type="grafana-opensearch-datasource",
+        typeName="OpenSearch",
+        access="proxy",
+        url=await get_connector_attribute(
+            connector_id=1,
+            column_name="connector_url",
+            session=session,
+        ),
+        database=f"sap_siem_{customer_code}*",
+        basicAuth=True,
+        basicAuthUser=await get_connector_attribute(
+            connector_id=1,
+            column_name="connector_username",
+            session=session,
+        ),
+        secureJsonData={
+            "basicAuthPassword": await get_connector_attribute(
+                connector_id=1,
+                column_name="connector_password",
+                session=session,
+            ),
+        },
+        isDefault=False,
+        jsonData={
+            "database": f"sap_siem_{customer_code}*",
+            "flavor": "opensearch",
+            "includeFrozen": False,
+            "logLevelField": "level",
+            "logMessageField": "params_loginID",
+            "maxConcurrentShardRequests": 5,
+            "pplEnabled": True,
+            "timeField": "timestamp",
+            "tlsSkipVerify": True,
+            "version": await get_opensearch_version(),
+        },
+        readOnly=True,
+    )
+    results = grafana_client.datasource.create_datasource(
+        datasource=datasource_payload.dict(),
+    )
+    return GrafanaDataSourceCreationResponse(**results)
+
+
+async def provision_sap_siem(
+    provision_sap_siem_request: ProvisionSapSiemRequest,
+    session: AsyncSession,
+) -> ProvisionSapSiemResponse:
+    """
+    Provisions SAP SIEM integration for a customer.
+
+    Args:
+        provision_sap_siem_request (ProvisionSapSiemRequest): The request object containing the necessary information for provisioning.
+        session (AsyncSession, optional): The database session. Defaults to Depends(get_db).
+
+    Returns:
+        ProvisionSapSiemResponse: The response object containing the result of the provisioning.
+    """
+    logger.info(
+        f"Provisioning SAP SIEM integration for customer {provision_sap_siem_request.customer_code}.",
+    )
+
+    # Create Index Set
+    index_set_id = (
+        await create_index_set(
+            customer_code=provision_sap_siem_request.customer_code,
+            session=session,
+        )
+    ).data.id
+    logger.info(f"Index set: {index_set_id}")
+    # Create event stream
+    stream_id = (
+        await create_event_stream(
+            provision_sap_siem_request.customer_code,
+            index_set_id,
+            session,
+        )
+    ).data.stream_id
+    # Start stream
+    await start_stream(stream_id=stream_id)
+
+    # Grafana Deployment
+    sap_siem_datasource_uid = (
+        await create_grafana_datasource(
+            customer_code=provision_sap_siem_request.customer_code,
+            session=session,
+        )
+    ).datasource.uid
+    grafana_sap_siem_folder_id = (
+        await create_grafana_folder(
+            organization_id=(
+                await get_customer_meta(
+                    provision_sap_siem_request.customer_code,
+                    session,
+                )
+            ).customer_meta.customer_meta_grafana_org_id,
+            folder_title="SAP SIEM",
+        )
+    ).id
+    await provision_dashboards(
+        DashboardProvisionRequest(
+            dashboards=[dashboard.name for dashboard in SapSiemDashboard],
+            organizationId=(
+                await get_customer_meta(
+                    provision_sap_siem_request.customer_code,
+                    session,
+                )
+            ).customer_meta.customer_meta_grafana_org_id,
+            folderId=grafana_sap_siem_folder_id,
+            datasourceUid=sap_siem_datasource_uid,
+        ),
+    )
+    await create_integration_meta_entry(
+        CustomerIntegrationsMetaSchema(
+            customer_code=provision_sap_siem_request.customer_code,
+            integration_name="SAP SIEM",
+            graylog_input_id=None,
+            graylog_index_id=index_set_id,
+            graylog_stream_id=stream_id,
+            grafana_org_id=(
+                await get_customer_meta(
+                    provision_sap_siem_request.customer_code,
+                    session,
+                )
+            ).customer_meta.customer_meta_grafana_org_id,
+            grafana_dashboard_folder_id=grafana_sap_siem_folder_id,
+        ),
+        session,
+    )
+    await update_customer_integration_table(
+        provision_sap_siem_request.customer_code,
+        session,
+    )
+
+    return ProvisionSapSiemResponse(
+        success=True,
+        message="SAP SIEM integration provisioned successfully.",
+    )
+
+
+############## ! WRITE TO DB ! ##############
+async def create_integration_meta_entry(
+    customer_integration_meta: CustomerIntegrationsMetaSchema,
+    session: AsyncSession,
+) -> None:
+    """
+    Creates an entry for the customer integration meta in the database.
+
+    Args:
+        customer_integration_meta (CustomerIntegrationsMetaSchema): The customer integration meta object.
+        session (AsyncSession): The async session object for database operations.
+    """
+    await create_integration_meta(customer_integration_meta, session)
+    logger.info(
+        f"Integration meta entry created for customer {customer_integration_meta.customer_code}.",
+    )
+
+
+async def update_customer_integration_table(
+    customer_code: str,
+    session: AsyncSession,
+) -> None:
+    """
+    Updates the `customer_integrations` table to set the `deployed` column to True where the `customer_code`
+    matches the given customer code and the `integration_service_name` is "SAP SIEM".
+
+    Args:
+        customer_code (str): The customer code.
+        session (AsyncSession): The async session object for database operations.
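+
+    Roughly equivalent SQL (illustrative only; the actual table and column
+    names come from the CustomerIntegrations model):
+        UPDATE customer_integrations
+        SET deployed = TRUE
+        WHERE customer_code = :customer_code
+          AND integration_service_name = 'SAP SIEM';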
+ """ + await session.execute( + update(CustomerIntegrations) + .where( + and_( + CustomerIntegrations.customer_code == customer_code, + CustomerIntegrations.integration_service_name == "SAP SIEM", + ), + ) + .values(deployed=True), + ) + await session.commit() + + return None diff --git a/backend/app/integrations/sap_siem/services/sap_siem_multiple_logins.py b/backend/app/integrations/sap_siem/services/sap_siem_multiple_logins.py new file mode 100644 index 00000000..7189e9f8 --- /dev/null +++ b/backend/app/integrations/sap_siem/services/sap_siem_multiple_logins.py @@ -0,0 +1,529 @@ +from collections import defaultdict +from datetime import datetime +from typing import List +from typing import Set + +from fastapi import HTTPException +from loguru import logger +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.connectors.dfir_iris.utils.universal import fetch_and_validate_data +from app.connectors.dfir_iris.utils.universal import initialize_client_and_case +from app.connectors.wazuh_indexer.utils.universal import create_wazuh_indexer_client +from app.integrations.sap_siem.models.sap_siem import SapSiemMultipleLogins +from app.integrations.sap_siem.schema.sap_siem import AddAssetModel +from app.integrations.sap_siem.schema.sap_siem import CaseResponse +from app.integrations.sap_siem.schema.sap_siem import InvokeSAPSiemResponse +from app.integrations.sap_siem.schema.sap_siem import IrisCasePayload +from app.integrations.sap_siem.schema.sap_siem import SapSiemWazuhIndexerResponse +from app.integrations.sap_siem.schema.sap_siem import SuspiciousLogin +from app.utils import get_customer_alert_settings + +# Global set to keep track of IPs that have already been checked +checked_ips = set() + + +async def handle_common_suspicious_login_tasks( + suspicious_login, + unique_instances, + case_ids, + create_case_fn, + session: AsyncSession, +): + """ + Handles common tasks for suspicious logins. + + Args: + suspicious_login: The suspicious login object. + unique_instances: List of unique instances. + case_ids: List of case IDs. + create_case_fn: Function to create a case. + session: The async session. + + Returns: + None + """ + case = await create_case_fn(suspicious_login, session) + case_ids.append(case.data.case_id) + user_activity = await collect_user_activity(suspicious_login) + await handle_user_activity(user_activity, unique_instances, case.data.case_id) + await mark_as_checked(suspicious_login) + + +async def handle_suspicious_login_multiple(suspicious_login, unique_instances, case_ids, session: AsyncSession): + """ + Handles suspicious login events with multiple logins. + + Args: + suspicious_login: The suspicious login event. + unique_instances: List of unique instances of the suspicious login event. + case_ids: List of case IDs associated with the suspicious login event. + session: The database session. + + Returns: + None + """ + await handle_common_suspicious_login_tasks( + suspicious_login, + unique_instances, + case_ids, + create_iris_case_multiple, + session, + ) + await update_event_analyzed_multiple_logins_flag(suspicious_login.id, suspicious_login.index) + + +async def update_event_analyzed_multiple_logins_flag(id: str, index: str): + """ + Update the event_analyzed_multiple_logins flag in the Elasticsearch document to True. 
+
+    :param id: The document ID of the suspicious login to update
+    :param index: The index containing the document
+
+    :return: None
+    """
+    es_client = await create_wazuh_indexer_client("Wazuh-Indexer")
+    try:
+        es_client.update(
+            index=index,
+            id=id,
+            body={
+                "doc": {
+                    "event_analyzed_multiple_logins": "True",
+                },
+            },
+        )
+        logger.info(f"Updated event_analyzed_multiple_logins flag for suspicious login: {id}")
+    except Exception as e:
+        logger.error(
+            f"Failed to update case created flag {e}",
+        )
+        # Attempt to remove read-only block
+        try:
+            es_client.indices.put_settings(
+                index=index,
+                body={"index.blocks.write": None},
+            )
+            logger.info(
+                f"Removed read-only block from index {index}. Retrying update.",
+            )
+
+            # Retry the update operation
+            es_client.update(
+                index=index,
+                id=id,
+                body={"doc": {"event_analyzed_multiple_logins": "True"}},
+            )
+            logger.info(
+                f"Added event_analyzed_multiple_logins flag to index {index} for suspicious login: {id}",
+            )
+
+            # Re-enable the write block
+            es_client.indices.put_settings(
+                index=index,
+                body={"index.blocks.write": True},
+            )
+        except Exception as e2:
+            logger.error(
+                f"Failed to remove read-only block from index {index}: {e2}",
+            )
+            return False
+
+
+async def mark_as_checked(suspicious_login):
+    """
+    Marks a suspicious login as checked by adding it to the set of checked IPs.
+
+    Args:
+        suspicious_login (Login): The suspicious login object to mark as checked.
+
+    Returns:
+        None
+    """
+    checked_ips.add((suspicious_login.loginID, suspicious_login.ip))
+
+
+async def handle_user_activity(user_activity: SapSiemWazuhIndexerResponse, unique_instances, case_id):
+    """
+    Handles user activity by processing each hit in the user_activity and performing the following steps:
+    1. Extracts relevant information from the hit.
+    2. Checks if the current activity is already present in the unique_instances set.
+    3. If not present, adds the user activity to the IRIS case.
+    4. Creates an asset payload using the current activity.
+    5. Updates the case with the asset payload.
+    6. Updates the event analyzed multiple logins flag for the hit.
+    7. Adds the current activity to the unique_instances set.
+
+    Parameters:
+    - user_activity (SapSiemWazuhIndexerResponse): The user activity to be processed.
+    - unique_instances (set): A set containing unique instances of user activity.
+    - case_id (str): The ID of the IRIS case.
+
+    Returns:
+    None
+    """
+    for hit in user_activity.hits.hits:
+        current_activity = {
+            "loginID": hit.source.params_loginID,
+            "ip": hit.source.ip,
+            "country": hit.source.httpReq_country,
+            "errMessage": hit.source.errMessage,
+            "event_timestamp": hit.source.event_timestamp,
+            "customer_code": hit.source.customer_code,
+            "errDetails": hit.source.errDetails,
+        }
+        current_activity_frozenset = frozenset(current_activity.items())
+        if current_activity_frozenset not in unique_instances:
+            logger.info(f"Adding user activity to IRIS case: {current_activity}")
+            current_asset = SuspiciousLogin(**current_activity)
+            asset_payload = create_asset_payload(asset=current_asset)
+            logger.info(f"Asset Payload: {asset_payload}")
+            await update_case_with_asset(case_id, asset_payload)
+            await update_event_analyzed_multiple_logins_flag(hit.id, hit.index)
+            unique_instances.add(current_activity_frozenset)
+
+
+def create_asset_payload(asset: SuspiciousLogin):
+    """
+    Create a payload for adding an asset based on a SuspiciousLogin object.
+
+    Args:
+        asset (SuspiciousLogin): The SuspiciousLogin object containing the asset details.
+
+    Returns:
+        AddAssetModel: The payload for adding the asset.
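+
+    Example (illustrative): a login with errMessage "OK" produces an asset
+    marked compromise_status=1; any other errMessage omits the compromise
+    status and only sets analysis_status=2.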
+ + """ + if asset.errMessage == "OK": + return AddAssetModel( + name=asset.loginID, + ip=asset.ip, + description=f"Country: {asset.country}\n\nMessage: {asset.errDetails}\n\nTimestamp: {asset.event_timestamp}", + asset_type=1, + compromise_status=1, + analysis_status=2, + ) + return AddAssetModel( + name=asset.loginID, + ip=asset.ip, + description=f"Country: {asset.country}\n\nMessage: {asset.errDetails}\n\nTimestamp: {asset.event_timestamp}", + asset_type=1, + analysis_status=2, + ) + + +async def update_case_with_asset(case_id: str, asset_payload): + """ + Update the case with the asset information. + + :param case_id: The ID of the case to update + :param asset_payload: The payload to update the case with + + :return: None + """ + logger.info(f"Updating IRIS case {case_id} with asset: {asset_payload}") + client, case_client = await initialize_client_and_case("DFIR-IRIS") + return await fetch_and_validate_data( + client, + case_client.add_asset, + cid=case_id, + **asset_payload.to_dict(), + ) + + +async def create_iris_case_multiple(suspicious_login: SuspiciousLogin, session: AsyncSession) -> CaseResponse: + """ + Creates an IRIS case for multiple logins with the same IP address. + + Args: + suspicious_login (SuspiciousLogin): The suspicious login information. + session (AsyncSession): The async session for database operations. + + Returns: + CaseResponse: The response containing the created case information. + """ + logger.info(f"Creating IRIS case same IP with multiple users: {suspicious_login}") + case_name = ( + f"Log Source: {suspicious_login.logSource} SAP SIEM. " f"IP Address: {suspicious_login.ip} found logging in with multiple users." + ) + + case_description = ( + f"Log Source: {suspicious_login.logSource}\n\n" + f"IP Address: {suspicious_login.ip}\n\n" + f"Country: {suspicious_login.country}\n\n" + f"Timestamp: {suspicious_login.event_timestamp}" + ) + + case_customer = (await get_customer_alert_settings(suspicious_login.customer_code, session=session)).iris_customer_id + + payload = IrisCasePayload( + case_name=case_name, + case_description=case_description, + case_customer=case_customer, + case_classification=18, + soc_id="1", + create_customer=False, + ) + client, case_client = await initialize_client_and_case("DFIR-IRIS") + result = await fetch_and_validate_data( + client, + case_client.add_case, + **payload.to_dict(), + ) + await update_event_analyzed_multiple_logins_flag(suspicious_login.id, suspicious_login.index) + + return CaseResponse(**result) + + +async def collect_user_activity(suspicious_logins: SuspiciousLogin) -> SapSiemWazuhIndexerResponse: + """ + Collect the IP addresses of the suspicious logins and query the database for all activity from those IP addresses. + Collects a max of 1000 records. + + :param suspicious_logins: A list of suspicious logins + + :return: List of the user Activity collected from the sap_siem table + """ + es_client = await create_wazuh_indexer_client("Wazuh-Indexer") + results = es_client.search( + index="integrations_*", + body={ + "size": 1000, + "query": {"bool": {"must": [{"term": {"ip": suspicious_logins.ip}}]}}, + }, + ) + return SapSiemWazuhIndexerResponse(**results) + + +async def get_initial_search_results(es_client): + """ + Retrieves the initial search results from Elasticsearch. + + Args: + es_client (Elasticsearch): The Elasticsearch client. + + Returns: + dict: The search results. 
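+
+    Note (illustrative): the 1m scroll context allows paging past the
+    initial 1000 documents via get_next_batch_of_results.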
+ """ + return es_client.search( + index="integrations_*", + body={ + "size": 1000, + "query": {"bool": {"must": [{"term": {"errMessage": "OK"}}, {"term": {"event_analyzed_multiple_logins": "False"}}]}}, + "sort": [{"event_timestamp": {"order": "asc"}}], + }, + scroll="1m", + ) + + +async def get_next_batch_of_results(es_client, scroll_id): + """ + Retrieves the next batch of results using the provided Elasticsearch client and scroll ID. + + Args: + es_client (Elasticsearch): The Elasticsearch client. + scroll_id (str): The scroll ID to retrieve the next batch of results. + + Returns: + dict: The next batch of results. + """ + return es_client.scroll(scroll_id=scroll_id, scroll="1m") + + +async def process_hits(hits, ip_to_login_ids, suspicious_activity): + """ + Process the hits received from SAP SIEM and update the IP to login IDs mapping and suspicious activity. + + Args: + hits (list): List of hits received from SAP SIEM. + ip_to_login_ids (dict): Dictionary mapping IP addresses to login IDs. + suspicious_activity (dict): Dictionary mapping IP addresses to a list of suspicious login objects. + + Returns: + None + """ + for hit in hits: + if hit.source.errMessage == "OK": + # Convert loginID to lowercase before comparing + login_id = hit.source.params_loginID.lower() + ip_to_login_ids[hit.source.ip].add(login_id) + + suspicious_login = SuspiciousLogin( + _index=hit.index, + _id=hit.id, + customer_code=hit.source.customer_code, + logSource=hit.source.logSource, + loginID=hit.source.params_loginID, + country=hit.source.httpReq_country, + ip=hit.source.ip, + event_timestamp=hit.source.event_timestamp, + errMessage=hit.source.errMessage, + errDetails=hit.source.errDetails, + ) + + suspicious_activity[hit.source.ip].append(suspicious_login) + + +async def check_multiple_successful_logins_by_ip(threshold: int) -> List[SuspiciousLogin]: + """ + Checks for multiple successful logins by IP address. + + Args: + threshold (int): The minimum number of logins required to be considered suspicious. + + Returns: + List[SuspiciousLogin]: A list of suspicious login objects. + """ + ip_to_login_ids = defaultdict(set) + suspicious_activity = defaultdict(list) + + es_client = await create_wazuh_indexer_client("Wazuh-Indexer") + scroll_id = None + + while True: + if scroll_id is None: + results = await get_initial_search_results(es_client) + else: + results = await get_next_batch_of_results(es_client, scroll_id) + + if not results["hits"]["hits"]: + break + + results = SapSiemWazuhIndexerResponse(**results) + await process_hits(results.hits.hits, ip_to_login_ids, suspicious_activity) + + scroll_id = results.scroll_id + + # Clear the scroll when you're done to free up resources + if scroll_id is not None: + es_client.clear_scroll(scroll_id=scroll_id) + + suspicious_activity = {ip: results for ip, results in suspicious_activity.items() if len(ip_to_login_ids[ip]) > threshold} + + return [login for sublist in suspicious_activity.values() for login in sublist] + + +async def get_suspicious_ips(threshold: int) -> List[SuspiciousLogin]: + """ + Retrieves a list of suspicious login attempts based on the specified threshold. + + Args: + threshold (int): The number of successful logins from the same IP address that is considered suspicious. + + Returns: + List[SuspiciousLogin]: A list of SuspiciousLogin objects representing the suspicious login attempts. 
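+
+    Example (hypothetical values; the IP and loginIDs are made up):
+        >>> logins = await get_suspicious_ips(threshold=1)
+        >>> {(login.ip, login.loginID) for login in logins}
+        {('203.0.113.10', 'alice'), ('203.0.113.10', 'bob')}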
+ """ + return await check_multiple_successful_logins_by_ip(threshold=threshold) + + +async def get_existing_database_record(session: AsyncSession, ip: str) -> SapSiemMultipleLogins: + """ + Retrieves an existing database record for the given IP address. + + Args: + session (AsyncSession): The async session object for database operations. + ip (str): The IP address to search for. + + Returns: + SapSiemMultipleLogins: The database record matching the IP address, or None if not found. + """ + result = await session.execute(select(SapSiemMultipleLogins).where(SapSiemMultipleLogins.ip == ip)) + return result.scalar_one_or_none() if result is not None else None + + +def update_existing_database_record(existing_case: SapSiemMultipleLogins, new_login_ids: Set[str]) -> None: + """ + Update the existing database record for a SapSiemMultipleLogins case with new login IDs. + + Args: + existing_case (SapSiemMultipleLogins): The existing database record to be updated. + new_login_ids (Set[str]): The new login IDs to be added to the existing record. + + Returns: + None + """ + existing_loginIDs = set(existing_case.associated_loginIDs.split(",")) + if not new_login_ids.issubset(existing_loginIDs): + updated_login_ids = existing_loginIDs.union(new_login_ids) + existing_case.associated_loginIDs = ",".join(updated_login_ids) + existing_case.last_case_created_timestamp = datetime.now() + + +def create_new_database_record(ip: str, new_login_ids: Set[str]) -> SapSiemMultipleLogins: + """ + Creates a new database record for SAP SIEM multiple logins. + + Args: + ip (str): The IP address associated with the multiple logins. + new_login_ids (Set[str]): The set of new login IDs. + + Returns: + SapSiemMultipleLogins: The newly created database record. + """ + return SapSiemMultipleLogins( + ip=ip, + last_case_created_timestamp=datetime.now(), + associated_loginIDs=",".join(new_login_ids), + ) + + +async def sap_siem_multiple_logins_same_ip(threshold: int, session: AsyncSession) -> InvokeSAPSiemResponse: + """ + Finds same IP with multiple users and handles suspicious logins. + + Args: + threshold (int): The threshold value for determining suspicious logins. + session (AsyncSession): The database session. + + Returns: + InvokeSAPSiemResponse: The response indicating the success of the operation. 
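+
+    Example (hypothetical invocation; `session` is assumed to be an open
+    AsyncSession):
+        >>> response = await sap_siem_multiple_logins_same_ip(threshold=1, session=session)
+        >>> response.message
+        'SAP SIEM multiple logins invoked.'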
+ """ + logger.info("Finding same IP with multiple users") + + suspicious_ips = await get_suspicious_ips(threshold) + logger.info(f"Suspicious IPs: {suspicious_ips}") + + unique_instances = set() + case_ids = [] + # Dictionary to aggregate suspicious logins by IP + aggregated_logins_by_ip = defaultdict(list) + + for suspicious_login in suspicious_ips: + aggregated_logins_by_ip[suspicious_login.ip].append(suspicious_login) + + for ip, associated_logins in aggregated_logins_by_ip.items(): + logger.info(f"IP: {ip}, Associated Logins: {associated_logins}") + if session is not None: + existing_case = await get_existing_database_record(session, ip) + + new_login_ids = {login.loginID for login in associated_logins} + if existing_case: + logger.info(f"Updating existing database record: {existing_case}") + update_existing_database_record(existing_case, new_login_ids) + else: + logger.info(f"Creating new case for IP: {ip}") + new_case = create_new_database_record(ip, new_login_ids) + session.add(new_case) + + # Create a single new IRIS case for this IP + # Modify this to include information from all associated_logins + await handle_suspicious_login_multiple( + associated_logins[0], + unique_instances, + case_ids, + session=session, + ) + else: + raise HTTPException( + status_code=500, + detail="Failed to create IRIS case", + ) + await session.commit() + + # Clear the global set + checked_ips.clear() + + return InvokeSAPSiemResponse( + success=True, + message="SAP SIEM multiple logins invoked.", + ) diff --git a/backend/app/integrations/sap_siem/services/sap_siem_suspicious_logins.py b/backend/app/integrations/sap_siem/services/sap_siem_suspicious_logins.py new file mode 100644 index 00000000..cce68406 --- /dev/null +++ b/backend/app/integrations/sap_siem/services/sap_siem_suspicious_logins.py @@ -0,0 +1,401 @@ +from typing import List + +from loguru import logger +from sqlalchemy.ext.asyncio import AsyncSession + +from app.connectors.dfir_iris.utils.universal import fetch_and_validate_data +from app.connectors.dfir_iris.utils.universal import initialize_client_and_case +from app.connectors.wazuh_indexer.utils.universal import create_wazuh_indexer_client +from app.integrations.sap_siem.schema.sap_siem import AddAssetModel +from app.integrations.sap_siem.schema.sap_siem import CaseResponse +from app.integrations.sap_siem.schema.sap_siem import ErrCode +from app.integrations.sap_siem.schema.sap_siem import InvokeSAPSiemResponse +from app.integrations.sap_siem.schema.sap_siem import IrisCasePayload +from app.integrations.sap_siem.schema.sap_siem import SapSiemWazuhIndexerResponse +from app.integrations.sap_siem.schema.sap_siem import SuspiciousLogin +from app.utils import get_customer_alert_settings + +# global set to keep track of checked IPs +checked_ips = set() + + +async def check_for_suspicious_login(hit, last_invalid_login, suspicious_logins, threshold): + logSource = hit.source.logSource + loginID = hit.source.params_loginID + errCode = hit.source.errCode + country = hit.source.httpReq_country + ip = hit.source.ip + event_timestamp = hit.source.event_timestamp + customer_code = hit.source.customer_code + index = hit.index + id = hit.id + logger.info(f"Checking loginID: {loginID} with errCode: {errCode} and IP: {ip} and index: {index} and id: {id}") + + if ip not in last_invalid_login: + last_invalid_login[ip] = {"count": 0, "event_timestamp": None} + + if errCode in [e.value for e in ErrCode] and errCode != ErrCode.OK.value: + logger.info(f"Found invalid login: {loginID} with IP: {ip} and 
+async def check_for_suspicious_login(hit, last_invalid_login, suspicious_logins, threshold):
+    """
+    Inspect a single indexer hit, updating the per-IP failure counter and appending a
+    SuspiciousLogin when a successful login follows at least `threshold` failures.
+    """
+    logSource = hit.source.logSource
+    loginID = hit.source.params_loginID
+    errCode = hit.source.errCode
+    country = hit.source.httpReq_country
+    ip = hit.source.ip
+    event_timestamp = hit.source.event_timestamp
+    customer_code = hit.source.customer_code
+    index = hit.index
+    id = hit.id
+    logger.info(f"Checking loginID: {loginID} with errCode: {errCode} and IP: {ip} and index: {index} and id: {id}")
+
+    if ip not in last_invalid_login:
+        last_invalid_login[ip] = {"count": 0, "event_timestamp": None}
+
+    if errCode in [e.value for e in ErrCode] and errCode != ErrCode.OK.value:
+        logger.info(f"Found invalid login: {loginID} with IP: {ip} and errCode: {errCode}")
+        last_invalid_login[ip]["count"] += 1
+        last_invalid_login[ip]["event_timestamp"] = event_timestamp
+
+    logger.info(f"Checking if last_invalid_login[ip]['count'] >= threshold: {last_invalid_login[ip]['count']} >= {threshold}")
+    logger.info(f"Hit: {hit}")
+    if errCode == ErrCode.OK.value and last_invalid_login[ip]["count"] >= threshold:
+        logger.info(f"Found suspicious login: {loginID} with IP: {ip} and errCode: {errCode}")
+        suspicious_login = SuspiciousLogin(
+            _index=index,
+            _id=id,
+            customer_code=customer_code,
+            logSource=logSource,
+            loginID=loginID,
+            country=country,
+            ip=ip,
+            event_timestamp=event_timestamp,
+            errMessage=str(errCode),
+            errDetails=hit.source.errDetails,
+        )
+        suspicious_logins.append(suspicious_login)
+        last_invalid_login[ip] = {"count": 0, "event_timestamp": None}
+
+
+async def find_suspicious_logins(threshold: int) -> List[SuspiciousLogin]:
+    """
+    Scroll through unprocessed SAP SIEM events in time order and collect suspicious logins.
+    """
+    es_client = await create_wazuh_indexer_client("Wazuh-Indexer")
+    scroll_id = None
+    suspicious_logins = []
+    last_invalid_login = {}
+
+    while True:
+        if scroll_id is None:
+            # Initial search
+            results = es_client.search(
+                # ! TODO: change to sap_siem index when ready for deploy
+                index="integrations_*",
+                body={
+                    "size": 10,
+                    "query": {"bool": {"must": [{"term": {"case_created": "False"}}, {"term": {"event_analyzed": "False"}}]}},
+                    "_source": ["logSource", "params_loginID", "errCode", "httpReq_country", "ip", "event_timestamp", "customer_code", "errDetails"],
+                    "sort": [{"event_timestamp": {"order": "asc"}}],
+                },
+                scroll="1m",  # Keep the search context open for 1 minute
+            )
+        else:
+            # Get the next batch of results
+            results = es_client.scroll(scroll_id=scroll_id, scroll="1m")
+
+        # If there are no more results, break the loop
+        if not results["hits"]["hits"]:
+            logger.info("No more results")
+            break
+        else:
+            logger.info(f"Results: {results}")
+
+        results = SapSiemWazuhIndexerResponse(**results)
+        for hit in results.hits.hits:
+            logger.info(f"Hit: {hit}")
+            await check_for_suspicious_login(hit, last_invalid_login, suspicious_logins, threshold=threshold)
+            await update_event_analyzed_flag(hit.id, hit.index)
+
+        # Update the scroll ID
+        scroll_id = results.scroll_id
+
+    # Clear the scroll when you're done to free up resources
+    if scroll_id is not None:
+        es_client.clear_scroll(scroll_id=scroll_id)
+
+    return suspicious_logins
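`find_suspicious_logins` pages through the index with the scroll API. One robustness note: if an exception escapes the loop, the scroll context above is never cleared. A generic sketch of the same pattern with the cleanup moved into a `finally` block (index name and page size are placeholders, not the integration's real values):

```python
def scan_all(es_client, index="integrations_*", page=10):
    """Yield every hit from `index`, keeping the scroll context open 1m per page."""
    resp = es_client.search(index=index, body={"size": page, "query": {"match_all": {}}}, scroll="1m")
    try:
        while resp["hits"]["hits"]:
            yield from resp["hits"]["hits"]
            resp = es_client.scroll(scroll_id=resp["_scroll_id"], scroll="1m")
    finally:
        # Runs even if the consumer stops early or an exception is raised mid-loop.
        es_client.clear_scroll(scroll_id=resp["_scroll_id"])
```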
+
+
+async def collect_user_activity(suspicious_login: SuspiciousLogin) -> SapSiemWazuhIndexerResponse:
+    """
+    Query the indexer for all activity from the suspicious login's IP address.
+    Collects a max of 1000 records.
+
+    :param suspicious_login: The suspicious login whose IP address is queried
+
+    :return: The user activity collected from the sap_siem index
+    """
+    es_client = await create_wazuh_indexer_client("Wazuh-Indexer")
+    results = es_client.search(
+        index="integrations_*",
+        body={
+            "size": 1000,
+            "query": {"bool": {"must": [{"term": {"ip": suspicious_login.ip}}]}},
+        },
+    )
+    return SapSiemWazuhIndexerResponse(**results)
+
+
+def create_asset_payload(asset: SuspiciousLogin) -> AddAssetModel:
+    # A successful login ("OK") from the flagged IP is treated as a compromise.
+    if asset.errMessage == "OK":
+        return AddAssetModel(
+            name=asset.loginID,
+            ip=asset.ip,
+            description=f"Country: {asset.country}\n\nMessage: {asset.errDetails}\n\nTimestamp: {asset.event_timestamp}",
+            asset_type=1,
+            compromise_status=1,
+            analysis_status=2,
+        )
+    return AddAssetModel(
+        name=asset.loginID,
+        ip=asset.ip,
+        description=f"Country: {asset.country}\n\nMessage: {asset.errDetails}\n\nTimestamp: {asset.event_timestamp}",
+        asset_type=1,
+        analysis_status=2,
+    )
+
+
+async def update_case_with_asset(case_id: str, asset_payload):
+    """
+    Update the case with the asset information.
+
+    :param case_id: The ID of the case to update
+    :param asset_payload: The payload to update the case with
+
+    :return: The validated response from DFIR-IRIS
+    """
+    logger.info(f"Updating IRIS case {case_id} with asset: {asset_payload}")
+    client, case_client = await initialize_client_and_case("DFIR-IRIS")
+    return await fetch_and_validate_data(
+        client,
+        case_client.add_asset,
+        cid=case_id,
+        **asset_payload.to_dict(),
+    )
+
+
+async def handle_user_activity(user_activity: SapSiemWazuhIndexerResponse, unique_instances, case_id):
+    for hit in user_activity.hits.hits:
+        current_activity = {
+            "loginID": hit.source.params_loginID,
+            "ip": hit.source.ip,
+            "country": hit.source.httpReq_country,
+            "errMessage": hit.source.errMessage,
+            "event_timestamp": hit.source.event_timestamp,
+            "customer_code": hit.source.customer_code,
+            "errDetails": hit.source.errDetails,
+        }
+        # Deduplicate activity entries via a hashable snapshot of the fields above
+        current_activity_frozenset = frozenset(current_activity.items())
+        if current_activity_frozenset not in unique_instances:
+            logger.info(f"Adding user activity to IRIS case: {current_activity}")
+            current_asset = SuspiciousLogin(**current_activity)
+            asset_payload = create_asset_payload(asset=current_asset)
+            logger.info(f"Asset Payload: {asset_payload}")
+            await update_case_with_asset(case_id, asset_payload)
+            unique_instances.add(current_activity_frozenset)
+
+
+async def mark_as_checked(suspicious_login):
+    checked_ips.add((suspicious_login.loginID, suspicious_login.ip))
+
+
+async def handle_common_suspicious_login_tasks(
+    suspicious_login,
+    unique_instances,
+    case_ids,
+    create_case_fn,
+    session: AsyncSession,
+):
+    logger.info(f"Handling common suspicious login tasks: {suspicious_login}")
+    case = await create_case_fn(suspicious_login, session=session)
+    logger.info(f"Case: {case}")
+    case_ids.append(case.data.case_id)
+    user_activity = await collect_user_activity(suspicious_login)
+    logger.info(f"User Activity: {user_activity}")
+    await handle_user_activity(user_activity, unique_instances, case.data.case_id)
+    await mark_as_checked(suspicious_login)
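`update_case_with_asset` relies on `fetch_and_validate_data` forwarding `*args`/`**kwargs` straight through to the DFIR-IRIS client callable, so the call above effectively expands to `case_client.add_asset(cid=case_id, name=..., ip=..., description=..., ...)`. A hypothetical invocation, with invented values mirroring the fields `create_asset_payload` sets:

```python
# Assumes an async context; AddAssetModel fields mirror create_asset_payload above.
payload = AddAssetModel(
    name="jdoe",
    ip="198.51.100.7",
    description="Country: DE\n\nMessage: stub\n\nTimestamp: 2024-01-01T00:00:00",
    asset_type=1,
    analysis_status=2,
)
result = await update_case_with_asset(case_id="42", asset_payload=payload)
```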
+
+
+async def handle_suspicious_login(suspicious_login, unique_instances, case_ids, session: AsyncSession):
+    logger.info(f"Handling suspicious login: {suspicious_login}")
+    await handle_common_suspicious_login_tasks(suspicious_login, unique_instances, case_ids, create_iris_case, session)
+    logger.info(f"Marking suspicious login as checked: {suspicious_login}")
+    await update_case_created_flag(
+        id=suspicious_login.id,
+        index=suspicious_login.index,
+    )
+
+
+async def update_case_created_flag(id: str, index: str):
+    """
+    Update the case_created flag in the Elasticsearch document to True.
+
+    :param id: The document ID of the suspicious login to update
+    :param index: The index containing the document
+
+    :return: None on success, False if the update could not be completed
+    """
+    es_client = await create_wazuh_indexer_client("Wazuh-Indexer")
+    try:
+        es_client.update(
+            index=index,
+            id=id,
+            body={
+                "doc": {
+                    "case_created": "True",
+                },
+            },
+        )
+        logger.info(f"Updated case_created flag for suspicious login: {id}")
+    except Exception as e:
+        logger.error(
+            f"Failed to update case created flag {e}",
+        )
+        # Attempt to remove read-only block
+        try:
+            es_client.indices.put_settings(
+                index=index,
+                body={"index.blocks.write": None},
+            )
+            logger.info(
+                f"Removed read-only block from index {index}. Retrying update.",
+            )
+
+            # Retry the update operation
+            es_client.update(
+                index=index,
+                id=id,
+                body={"doc": {"case_created": "True"}},
+            )
+            logger.info(
+                f"Added case_created flag to index {index} for suspicious login: {id}",
+            )
+
+            # Reenable the write block
+            es_client.indices.put_settings(
+                index=index,
+                body={"index.blocks.write": True},
+            )
+        except Exception as e2:
+            logger.error(
+                f"Failed to remove read-only block from index {index}: {e2}",
+            )
+            return False
+
+
+async def update_event_analyzed_flag(id: str, index: str):
+    """
+    Update the event_analyzed flag in the Elasticsearch document to True.
+
+    :param id: The document ID of the suspicious login to update
+    :param index: The index containing the document
+
+    :return: None on success, False if the update could not be completed
+    """
+    es_client = await create_wazuh_indexer_client("Wazuh-Indexer")
+    try:
+        es_client.update(
+            index=index,
+            id=id,
+            body={
+                "doc": {
+                    "event_analyzed": "True",
+                },
+            },
+        )
+        logger.info(f"Updated event_analyzed flag for suspicious login: {id}")
+    except Exception as e:
+        logger.error(
+            f"Failed to update event analyzed flag {e}",
+        )
+        # Attempt to remove read-only block
+        try:
+            es_client.indices.put_settings(
+                index=index,
+                body={"index.blocks.write": None},
+            )
+            logger.info(
+                f"Removed read-only block from index {index}. Retrying update.",
+            )
+
+            # Retry the update operation
+            es_client.update(
+                index=index,
+                id=id,
+                body={"doc": {"event_analyzed": "True"}},
+            )
+            logger.info(
+                f"Added event_analyzed flag to index {index} for suspicious login: {id}",
+            )
+
+            # Reenable the write block
+            es_client.indices.put_settings(
+                index=index,
+                body={"index.blocks.write": True},
+            )
+        except Exception as e2:
+            logger.error(
+                f"Failed to remove read-only block from index {index}: {e2}",
+            )
+            return False
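`update_case_created_flag` and `update_event_analyzed_flag` differ only in the field they set. A possible shared helper (hypothetical, not part of this change) that keeps the write-block retry in one place; it assumes the same `create_wazuh_indexer_client` and `logger` already imported in this module:

```python
async def _update_flag(index: str, id: str, field: str) -> bool:
    """Set `field` to "True" on one document, retrying once around a write block."""
    es_client = await create_wazuh_indexer_client("Wazuh-Indexer")
    body = {"doc": {field: "True"}}
    try:
        es_client.update(index=index, id=id, body=body)
        return True
    except Exception as first_err:
        try:
            # Clear the write block, retry once, then restore the block.
            es_client.indices.put_settings(index=index, body={"index.blocks.write": None})
            es_client.update(index=index, id=id, body=body)
            es_client.indices.put_settings(index=index, body={"index.blocks.write": True})
            return True
        except Exception as retry_err:
            logger.error(f"Failed to update {field} on {index}/{id}: {first_err}; retry: {retry_err}")
            return False
```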
+
+
+async def create_iris_case(suspicious_login: SuspiciousLogin, session: AsyncSession) -> CaseResponse:
+    """
+    Create a case in IRIS for the suspicious activity.
+
+    :param suspicious_login: The suspicious login that triggered the case
+    :param session: The database session used to look up customer settings
+
+    :return: The case creation response from DFIR-IRIS
+    """
+    logger.info(f"Creating IRIS case for suspicious activity: {suspicious_login}")
+    case_name = (
+        f"Log Source: {suspicious_login.logSource} "
+        f"Potential SAP SIEM Unauthorized Access: "
+        f"{suspicious_login.loginID} from {suspicious_login.ip}"
+    )
+
+    case_description = (
+        f"Log Source: {suspicious_login.logSource}\n\n"
+        f"IP Address: {suspicious_login.ip}\n\n"
+        f"Country: {suspicious_login.country}\n\n"
+        f"Timestamp: {suspicious_login.event_timestamp}"
+    )
+
+    case_customer = (await get_customer_alert_settings(suspicious_login.customer_code, session=session)).iris_customer_id
+
+    payload = IrisCasePayload(
+        case_name=case_name,
+        case_description=case_description,
+        case_customer=case_customer,
+        case_classification=18,
+        soc_id="1",
+        create_customer=False,
+    )
+    client, case_client = await initialize_client_and_case("DFIR-IRIS")
+    result = await fetch_and_validate_data(
+        client,
+        case_client.add_case,
+        **payload.dict(),
+    )
+
+    return CaseResponse(**result)
+
+
+async def sap_siem_suspicious_logins(threshold: int, session: AsyncSession) -> InvokeSAPSiemResponse:
+    """
+    Runs the suspicious logins analysis over the collected SAP SIEM events.
+
+    Args:
+        threshold (int): The number of failed logins from an IP after which a successful login is flagged.
+        session (AsyncSession): The database session.
+
+    Returns:
+        InvokeSAPSiemResponse: The response model containing the result of the analysis.
+
+    Raises:
+        HTTPException: If the SAP SIEM integration fails.
+    """
+    logger.info("Checking for suspicious logins")
+
+    suspicious_logins = await find_suspicious_logins(threshold=threshold)
+    logger.info(f"Suspicious Logins: {suspicious_logins}")
+
+    unique_instances = set()
+    case_ids = []
+    for suspicious_login in suspicious_logins:
+        await handle_suspicious_login(suspicious_login, unique_instances, case_ids, session=session)
+
+    # Clear the global set
+    checked_ips.clear()
+    return InvokeSAPSiemResponse(
+        success=True,
+        message="SAP SIEM suspicious logins analysis completed.",
+    )
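Taken together with the multiple-logins service earlier in this patch, the two analyses can be driven manually as below. This is a hypothetical driver sketch: it assumes an async context and borrows the `get_db_session` helper that the scheduler services later in this diff use; the thresholds mirror the scheduler defaults (3 and 1):

```python
async with get_db_session() as session:
    await sap_siem_suspicious_logins(threshold=3, session=session)
    await sap_siem_multiple_logins_same_ip(threshold=1, session=session)
```

In production these entry points are not called directly; the scheduler services added later in this diff invoke them per customer on a timer.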
""" - mimecast_auth_keys = {} + auth_keys = {} try: for subscription in customer_integration.integration_subscriptions: - if subscription.integration_service.service_name == "Mimecast": + if subscription.integration_service.service_name == service_name: for auth_key in subscription.integration_auth_keys: - mimecast_auth_keys[auth_key.auth_key_name] = auth_key.auth_value - if not mimecast_auth_keys: + auth_keys[auth_key.auth_key_name] = auth_key.auth_value + if not auth_keys: raise HTTPException( status_code=404, - detail="No auth keys found for Mimecast integration. Please create auth keys for Mimecast integration.", + detail=f"No auth keys found for {service_name} integration. Please create auth keys for {service_name} integration.", ) except Exception as e: - logger.error(f"Error extracting auth keys for Mimecast integration: {e}") + logger.error(f"Error extracting auth keys for {service_name} integration: {e}") raise HTTPException( status_code=404, - detail="No auth keys found for Mimecast integration. Please create auth keys for Mimecast integration.", + detail=f"No auth keys found for {service_name} integration. Please create auth keys for {service_name} integration.", ) - return mimecast_auth_keys + return auth_keys diff --git a/backend/app/routers/sap_siem.py b/backend/app/routers/sap_siem.py new file mode 100644 index 00000000..d0cf6ddb --- /dev/null +++ b/backend/app/routers/sap_siem.py @@ -0,0 +1,23 @@ +from fastapi import APIRouter + +from app.integrations.sap_siem.routes.provision import ( + integration_sap_siem_provision_scheduler_router, +) +from app.integrations.sap_siem.routes.sap_siem import integration_sap_siem_router + +# Instantiate the APIRouter +router = APIRouter() + +# Include the SAP SIEM related routes +router.include_router( + integration_sap_siem_router, + prefix="/sap_siem", + tags=["sap_siem"], +) + +# Include the SAP SIEM provisioning related routes +router.include_router( + integration_sap_siem_provision_scheduler_router, + prefix="/sap_siem", + tags=["sap_siem"], +) diff --git a/backend/app/schedulers/models/scheduler.py b/backend/app/schedulers/models/scheduler.py index fad84330..851df863 100644 --- a/backend/app/schedulers/models/scheduler.py +++ b/backend/app/schedulers/models/scheduler.py @@ -12,6 +12,7 @@ class JobMetadata(SQLModel, table=True): job_id: str = Field(index=True) # Corresponds to the APScheduler job ID last_success: Optional[datetime] = None time_interval: int # The frequency of the job in minutes + extra_data: Optional[str] = None # Extra data for the job enabled: bool # Indicates if the job is active or not diff --git a/backend/app/schedulers/routes/scheduler.py b/backend/app/schedulers/routes/scheduler.py index ed06ef61..0654b1f7 100644 --- a/backend/app/schedulers/routes/scheduler.py +++ b/backend/app/schedulers/routes/scheduler.py @@ -1,3 +1,5 @@ +from typing import Optional + from fastapi import APIRouter from fastapi import Depends from loguru import logger @@ -147,6 +149,7 @@ async def pause_job(job_id: str): async def update_job( job_id: str, time_interval: int, + extra_data: Optional[str] = None, session: AsyncSession = Depends(get_db), ): """ @@ -175,6 +178,7 @@ async def update_job( job_id, "update", time_interval=time_interval, + extra_data=extra_data, ) logger.info(f"Job {job_id} updated successfully") return {"success": True, "message": "Job updated successfully"} diff --git a/backend/app/schedulers/scheduler.py b/backend/app/schedulers/scheduler.py index a58c3f6f..f8754a54 100644 --- a/backend/app/schedulers/scheduler.py 
diff --git a/backend/app/schedulers/scheduler.py b/backend/app/schedulers/scheduler.py
index a58c3f6f..f8754a54 100644
--- a/backend/app/schedulers/scheduler.py
+++ b/backend/app/schedulers/scheduler.py
@@ -9,6 +9,13 @@ from app.schedulers.services.agent_sync import agent_sync
 from app.schedulers.services.invoke_mimecast import invoke_mimecast_integration
 from app.schedulers.services.invoke_mimecast import invoke_mimecast_integration_ttp
+from app.schedulers.services.invoke_sap_siem import invoke_sap_siem_integration_collect
+from app.schedulers.services.invoke_sap_siem import (
+    invoke_sap_siem_integration_multiple_logins_same_ip_analysis,
+)
+from app.schedulers.services.invoke_sap_siem import (
+    invoke_sap_siem_integration_suspicious_logins_analysis,
+)
 from app.schedulers.services.monitoring_alert import invoke_suricata_monitoring_alert
 from app.schedulers.services.monitoring_alert import invoke_wazuh_monitoring_alert
@@ -87,6 +94,9 @@ def get_function_by_name(function_name: str):
         "invoke_mimecast_integration_ttp": invoke_mimecast_integration_ttp,
         "invoke_wazuh_monitoring_alert": invoke_wazuh_monitoring_alert,
         "invoke_suricata_monitoring_alert": invoke_suricata_monitoring_alert,
+        "invoke_sap_siem_integration_collection": invoke_sap_siem_integration_collect,
+        "invoke_sap_siem_integration_suspicious_logins_analysis": invoke_sap_siem_integration_suspicious_logins_analysis,
+        "invoke_sap_siem_integration_multiple_logins_same_ip_analysis": invoke_sap_siem_integration_multiple_logins_same_ip_analysis,
         # Add other function mappings here
     }
     return function_map.get(
diff --git a/backend/app/schedulers/services/invoke_sap_siem.py b/backend/app/schedulers/services/invoke_sap_siem.py
new file mode 100644
index 00000000..05068f55
--- /dev/null
+++ b/backend/app/schedulers/services/invoke_sap_siem.py
@@ -0,0 +1,134 @@
+from datetime import datetime
+
+from dotenv import load_dotenv
+from loguru import logger
+from sqlalchemy import select
+
+from app.db.db_session import get_db_session
+from app.db.db_session import get_sync_db_session
+from app.integrations.models.customer_integration_settings import CustomerIntegrations
+from app.integrations.monitoring_alert.routes.monitoring_alert import (
+    run_sap_siem_multiple_logins_same_ip_analysis,
+)
+from app.integrations.monitoring_alert.routes.monitoring_alert import (
+    run_sap_siem_suspicious_logins_analysis,
+)
+from app.integrations.sap_siem.routes.sap_siem import collect_sap_siem_route
+from app.integrations.sap_siem.schema.sap_siem import InvokeSapSiemRequest
+from app.integrations.sap_siem.schema.sap_siem import InvokeSAPSiemResponse
+from app.schedulers.models.scheduler import JobMetadata
+from app.schedulers.utils.universal import get_scheduled_job_metadata
+
+load_dotenv()
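The coroutines defined below are registered in the `function_map` above under string keys; that is how scheduler job rows, which store only the registered name, get resolved back to callables. A short illustration (inside an async context):

```python
fn = get_function_by_name("invoke_sap_siem_integration_collection")
if fn is not None:
    await fn()  # runs invoke_sap_siem_integration_collect()
```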
+ """ + logger.info("Invoking SAP SIEM integration scheduled job.") + customer_codes = [] + async with get_db_session() as session: + stmt = select(CustomerIntegrations).where( + CustomerIntegrations.integration_service_name == "SAP SIEM", + ) + result = await session.execute(stmt) + customer_codes = [row.customer_code for row in result.scalars()] + logger.info(f"customer_codes: {customer_codes}") + for customer_code in customer_codes: + await collect_sap_siem_route( + InvokeSapSiemRequest( + customer_code=customer_code, + integration_name="SAP SIEM", + time_range=f"{(await get_scheduled_job_metadata('invoke_sap_siem_integration_collection')).time_interval}m", + ), + session, + ) + # Close the session + await session.close() + with get_sync_db_session() as session: + # Synchronous ORM operations + job_metadata = session.query(JobMetadata).filter_by(job_id="invoke_sap_siem_integration_collection").one_or_none() + if job_metadata: + job_metadata.last_success = datetime.utcnow() + session.add(job_metadata) + session.commit() + else: + # Handle the case where job_metadata does not exist + print("JobMetadata for 'invoke_mimecast_integration' not found.") + + return InvokeSAPSiemResponse(success=True, message="SAP SIEM integration invoked.") + + +async def invoke_sap_siem_integration_suspicious_logins_analysis() -> InvokeSAPSiemResponse: + """ + Invokes the SAP SIEM integration for suspicious logins analysis. + """ + logger.info("Invoking SAP SIEM integration for suspicious logins analysis scheduled job.") + customer_codes = [] + async with get_db_session() as session: + stmt = select(CustomerIntegrations).where( + CustomerIntegrations.integration_service_name == "SAP SIEM", + ) + result = await session.execute(stmt) + customer_codes = [row.customer_code for row in result.scalars()] + logger.info(f"customer_codes: {customer_codes}") + for customer_code in customer_codes: + extra_data = (await get_scheduled_job_metadata("invoke_sap_siem_integration_suspicious_logins_analysis")).extra_data + threshold = int(extra_data) if extra_data is not None else 3 + await run_sap_siem_suspicious_logins_analysis( + threshold=threshold, + session=session, + ) + # Close the session + await session.close() + with get_sync_db_session() as session: + # Synchronous ORM operations + job_metadata = session.query(JobMetadata).filter_by(job_id="invoke_sap_siem_integration_suspicious_logins_analysis").one_or_none() + if job_metadata: + job_metadata.last_success = datetime.utcnow() + session.add(job_metadata) + session.commit() + else: + # Handle the case where job_metadata does not exist + print("JobMetadata for 'invoke_sap_siem_integration_suspicious_logins_analysis' not found.") + + return InvokeSAPSiemResponse(success=True, message="SAP SIEM integration invoked for suspicious logins analysis.") + + +async def invoke_sap_siem_integration_multiple_logins_same_ip_analysis() -> InvokeSAPSiemResponse: + """ + Invokes the SAP SIEM integration for multiple logins from the same IP analysis. 
+ """ + logger.info("Invoking SAP SIEM integration for multiple logins from the same IP analysis scheduled job.") + customer_codes = [] + async with get_db_session() as session: + stmt = select(CustomerIntegrations).where( + CustomerIntegrations.integration_service_name == "SAP SIEM", + ) + result = await session.execute(stmt) + customer_codes = [row.customer_code for row in result.scalars()] + logger.info(f"customer_codes: {customer_codes}") + for customer_code in customer_codes: + extra_data = (await get_scheduled_job_metadata("invoke_sap_siem_integration_multiple_logins_same_ip_analysis")).extra_data + threshold = int(extra_data) if extra_data is not None else 1 + await run_sap_siem_multiple_logins_same_ip_analysis( + threshold=threshold, + session=session, + ) + # Close the session + await session.close() + with get_sync_db_session() as session: + # Synchronous ORM operations + job_metadata = ( + session.query(JobMetadata).filter_by(job_id="invoke_sap_siem_integration_multiple_logins_same_ip_analysis").one_or_none() + ) + if job_metadata: + job_metadata.last_success = datetime.utcnow() + session.add(job_metadata) + session.commit() + else: + # Handle the case where job_metadata does not exist + print("JobMetadata for 'invoke_sap_siem_integration_multiple_logins_same_ip_analysis' not found.") + + return InvokeSAPSiemResponse(success=True, message="SAP SIEM integration invoked for multiple logins from the same IP analysis.") diff --git a/backend/app/schedulers/utils/universal.py b/backend/app/schedulers/utils/universal.py index ef9dd4b9..7c2c2b2c 100644 --- a/backend/app/schedulers/utils/universal.py +++ b/backend/app/schedulers/utils/universal.py @@ -2,8 +2,12 @@ import requests from dotenv import load_dotenv +from loguru import logger +from sqlalchemy import select from app.auth.services.universal import get_scheduler_password +from app.db.db_session import get_db_session +from app.schedulers.models.scheduler import JobMetadata load_dotenv() @@ -43,3 +47,25 @@ def scheduler_login(): else: print("Failed to retrieve token") return None + + +async def get_scheduled_job_metadata(job_id: str) -> JobMetadata: + """ + Retrieves the metadata for a scheduled job. + + Args: + job_id (str): The ID of the scheduled job. + + Returns: + dict: The metadata for the scheduled job. + Returns None if the metadata retrieval fails. 
+ """ + async with get_db_session() as session: + stmt = select(JobMetadata).where(JobMetadata.job_id == job_id) + result = await session.execute(stmt) + job_metadata = result.scalars().first() + if job_metadata: + return job_metadata + else: + logger.info(f"JobMetadata for {job_id} not found.") + return None diff --git a/backend/copilot.py b/backend/copilot.py index f3f66a84..27b219e8 100644 --- a/backend/copilot.py +++ b/backend/copilot.py @@ -17,6 +17,7 @@ from app.db.db_setup import ensure_admin_user from app.db.db_setup import ensure_scheduler_user from app.db.db_setup import ensure_scheduler_user_removed +from app.db.db_setup import update_tables from app.middleware.exception_handlers import custom_http_exception_handler from app.middleware.exception_handlers import validation_exception_handler from app.middleware.exception_handlers import value_error_handler @@ -41,6 +42,7 @@ from app.routers import mimecast from app.routers import monitoring_alert from app.routers import office365 +from app.routers import sap_siem from app.routers import scheduler from app.routers import shuffle from app.routers import smtp @@ -114,6 +116,7 @@ api_router.include_router(mimecast.router) api_router.include_router(scheduler.router) api_router.include_router(monitoring_alert.router) +api_router.include_router(sap_siem.router) # Include the APIRouter in the FastAPI app app.include_router(api_router) @@ -123,6 +126,7 @@ async def init_db(): # create_tables(engine) await create_tables(async_engine) + await update_tables(async_engine) await create_roles(async_engine) await create_available_integrations(async_engine) await ensure_admin_user(async_engine) diff --git a/frontend/vite.config.mts b/frontend/vite.config.mts index 706e15ea..27f04c22 100644 --- a/frontend/vite.config.mts +++ b/frontend/vite.config.mts @@ -39,12 +39,12 @@ export default defineConfig({ ? { key: fs.readFileSync("/certs/key.pem"), cert: fs.readFileSync("/certs/cert.pem") - } + } : false, proxy: { "/api": { - target: "http://copilot-backend:5000", - //target: "http://127.0.0.1:5000", // for local development + //target: "http://copilot-backend:5000", + target: "http://127.0.0.1:5000", // for local development changeOrigin: true } }