diff --git a/conf/synapse.sample.conf b/conf/synapse.sample.conf
index 2b19fd5..7605c39 100755
--- a/conf/synapse.sample.conf
+++ b/conf/synapse.sample.conf
@@ -29,7 +29,7 @@ Automation:
   internal_contact:
   debug_contact:
   #The regexes that are used to find tags within The Hive that are used for automation purposes
-  automation_regexes:
+  automation_regexes:
     - 'uc-\w+-\d{3}\w{0,2}'
   #Logs the received webhooks in a different file. Can be useful for developping, troubleshooting and/or auditing
   log_webhooks: false
@@ -73,7 +73,7 @@ TheHive:
     sender_name: Notifications

 Cortex:
-  : false
+  enabled: false
   #Url to contact Cortex
   url: http://127.0.0.1:9002
   #The user from The Hive created for Synapse
@@ -157,10 +157,10 @@ QRadar:
   #Enable/disable the automation identifiers (such as Rule Id)
   extract_automation_identifiers: true
   #Fields to parse automation identifiers from. Which will be turned into tags
-  automation_fields:
+  automation_fields:
     - rules
   #The regex used for the parsing
-  tag_regexes:
+  tag_regexes:
     - 'uc-\w+-\d{3}\w{0,2}'
   # In case there you have a kb where the rule name can be appended to the url like so:
   # https://kb.url/use_cases/
@@ -173,13 +173,13 @@ QRadar:
   #Optional configuration to configure timezone for the received timestamps. Default is local (of the server running synapse)
   #timezone:
   observables_in_offense_type:
-    Hostname (custom):
+    Hostname (custom):
       - regex: '(([\w\d-]+)\.?)((([\w\d]+)\.?)*\.([\w\d]+))'
        match_groups:
          0: fqdn
          3: domain
     MD5 Hash (custom): hash
-
+

 Splunk:
   #Enable/disable the integration part of the module
@@ -214,7 +214,7 @@ MISP:
   enabled: false
   automation_enabled: false
   #A list of data types that are checked for when the feature is enable to automatically search for observables in QRadar
-  supported_datatypes:
+  supported_datatypes:
     - "ip"
     - "domain"
     - "fqdn"
@@ -225,8 +225,12 @@ MISP:
 AzureSentinel:
   #Enable/disable the integration part of the module
   enabled: false
+  #Enable/disable the automation part of the module
+  automation_enabled: false
   #Endpoint used in the integration url
   synapse_endpoint: sentinel
+  #Import incident labels as TheHive tags?
+  import_tags: false
   #Your Office 365 tenant id where the service account/sentinel lives
   tenant_id:
   #The id of the service account
@@ -241,6 +245,12 @@ AzureSentinel:
   workspace:
   #A case template to assign on alerts created for an incident
   case_template: Sentinel Incident
+  #Optional configuration to import Sentinel incident labels as TheHive tags? Default is false
+  # import_tags: true
+  #Optional configuration to configure timezone for the received timestamps. Default is local (of the server running synapse)
+  # timezone: Australia/Sydney
+  #Optional how far to look back for updated incidents. Takes units in days (d) and hours (h). E.g. 1d, 20d, 5h
+  # lookback: 24h

 Lexsi:
   #Enable/disable the integration part of the module
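The new `AzureSentinel` keys above (`automation_enabled`, `import_tags`, `timezone`, `lookback`) are plain optional settings. A minimal sketch of reading them with their documented defaults, not part of the patch and assuming the sample file parses as YAML (PyYAML is already in the requirements):

```python
# Illustrative only: load the sample config and read the new AzureSentinel options.
import yaml

with open('conf/synapse.sample.conf') as f:
    cfg = yaml.safe_load(f)

sentinel = cfg.get('AzureSentinel', {})
automation_enabled = sentinel.get('automation_enabled', False)  # automation off by default
import_tags = sentinel.get('import_tags', False)                # documented default: false
tz_name = sentinel.get('timezone')                              # None -> server-local time
lookback = sentinel.get('lookback')                             # e.g. '24h' or '1d'; None -> no filter
```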
diff --git a/core/webhookidentifier.py b/core/webhookidentifier.py
index 1659b19..487ee04 100755
--- a/core/webhookidentifier.py
+++ b/core/webhookidentifier.py
@@ -115,7 +115,7 @@ def isNew(self):

         self.logger.debug('%s.isNew starts', __name__)

-        if self.data['operation'] == 'Creation':
+        if self.data['operation'].lower() in ['creation', 'create']:
             return True
         else:
             return False
@@ -130,7 +130,7 @@ def isUpdate(self):

         self.logger.debug('%s.isUpdate starts', __name__)

-        if self.data['operation'] == 'Update':
+        if self.data['operation'].lower() == 'update':
             return True
         else:
             return False
@@ -151,6 +151,9 @@ def isMarkedAsRead(self):
             else:
                 return False
         except KeyError:
+            # Difference in TH4
+            if self.data['details'].get('read', False):
+                return True
             # when the alert is ignored (ignore new updates), the webhook does
             # not have the status key, this exception handles that
             return False
@@ -191,7 +194,7 @@ def isDeleted(self):

         self.logger.debug('%s.isDeleted starts', __name__)

-        if self.data['operation'] == 'Delete':
+        if self.data['operation'].lower() == 'delete':
             return True
         else:
             return False
@@ -562,7 +565,8 @@ def isAzureSentinelAlertImported(self):
         """
         self.logger.debug('%s.isAzureSentinelAlertImported starts', __name__)
-
+        if (self.isNewCase() and self.isAzureSentinel()):
+            return True
         if (self.isImportedAlert() and self.isAzureSentinel()):
             return True
         else:
             return False
@@ -580,12 +584,13 @@ def fromAzureSentinel(self, esCaseId):
             :rtype: bool
         """

-        query = dict()
-        query['case'] = esCaseId
+        self.logger.debug('%s.fromAzureSentinel starts', __name__)
+
+        query = {'case': esCaseId}
         results = self.theHiveConnector.findAlert(query)

-        if len(results) == 1:
-            # should only have one hit
+        if len(results) > 0:
+            # should only have one or more hits
             if results[0]['source'] == 'Azure_Sentinel_incidents':
                 # case opened from incident
                 # and from AzureSentinel
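The operation checks above become case-insensitive and `isMarkedAsRead` gains a fallback on `details.read`, so both TheHive 3 and TheHive 4 webhook payloads are recognised. A standalone illustration with simplified, hypothetical payloads (the `'Ignored'` status comparison is only a stand-in for the existing TH3 check):

```python
# Illustrative only: simplified webhook payloads; field values are hypothetical.
th3_hook = {'operation': 'Creation', 'details': {'status': 'Ignored'}}
th4_hook = {'operation': 'create', 'details': {'read': True}}

def is_new(hook):
    # Case-insensitive match accepts 'Creation' (TH3) and 'create' (TH4)
    return hook['operation'].lower() in ['creation', 'create']

def is_marked_as_read(hook):
    try:
        # stand-in for the existing TH3 status check
        return hook['details']['status'] == 'Ignored'
    except KeyError:
        # TH4 payloads may carry a boolean 'read' flag instead of a status key
        return hook['details'].get('read', False)

print(is_new(th3_hook), is_new(th4_hook))   # True True
print(is_marked_as_read(th4_hook))          # True
```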
diff --git a/modules/AzureSentinel/automation.py b/modules/AzureSentinel/automation.py
index e3cf3c2..165ce81 100644
--- a/modules/AzureSentinel/automation.py
+++ b/modules/AzureSentinel/automation.py
@@ -29,24 +29,38 @@ def __init__(self, webhook, cfg):
         self.report_action = report_action

     def parse_hooks(self):
+        logger.debug(f'Azure Sentinel webhook parsing starts')
         # Update incident status to active when imported as Alert
         if self.webhook.isAzureSentinelAlertImported():
-            self.incidentId = self.webhook.data['object']['sourceRef']
-            logger.info('Incident {} needs to be updated to status Active'.format(self.incidentId))
-            self.AzureSentinelConnector.updateIncidentStatusToActive(self.incidentId)
-            self.report_action = 'updateIncident'
+            # We need to get the alert sourceRefs from the opened case for TH4
+            if self.webhook.data['objectType'] == "case":
+                alert_query = {'case': self.webhook.data['objectId']}
+                alerts = self.TheHiveConnector.findAlert(alert_query)
+                for alert in alerts:
+                    if 'sourceRef' in alert and alert['source'] == 'Azure_Sentinel_incidents':
+                        logger.info(f"Sentinel incident '{alert['title']}' needs to be updated to status Active")
+                        self.AzureSentinelConnector.updateIncidentStatusToActive(alert['sourceRef'])
+            else:
+                self.incidentId = self.webhook.data['object']['sourceRef']
+                logger.info(f'Incident {self.incidentId} needs to be updated to status Active')
+                self.AzureSentinelConnector.updateIncidentStatusToActive(self.incidentId)
+            self.report_action = 'updateIncident'

         # Close incidents in Azure Sentinel
         if self.webhook.isClosedAzureSentinelCase() or self.webhook.isDeletedAzureSentinelCase() or self.webhook.isAzureSentinelAlertMarkedAsRead():
-            if self.webhook.data['operation'] == 'Delete':
+            if self.webhook.data['operation'].lower() == 'delete':
+                logger.debug(f'Azure Sentinel: Adding Delete comment')
                 self.case_id = self.webhook.data['objectId']
                 self.classification = "Undetermined"
                 self.classification_comment = "Closed by Synapse with summary: Deleted within The Hive"
             elif self.webhook.isAzureSentinelAlertMarkedAsRead():
+                logger.debug(f'Azure Sentinel: Adding Undetermined comment')
+                self.case_id = self.webhook.data['objectId']
                 self.classification = "Undetermined"
                 self.classification_comment = "Closed by Synapse with summary: Marked as Read within The Hive"
             else:
+                logger.debug(f'Azure Sentinel: Adding Case Closure comment')
                 self.case_id = self.webhook.data['object']['id']

                 # Translation table for case statusses
@@ -60,7 +74,12 @@ def parse_hooks(self):
                 self.classification_comment = "Closed by Synapse with summary: {}".format(self.webhook.data['details']['summary'])

             logger.info('Incident {} needs to be be marked as Closed'.format(self.case_id))
-            self.AzureSentinelConnector.closeIncident(self.webhook.incidentId, self.classification, self.classification_comment)
+            alert_query = {'case': self.webhook.data['objectId']}
+            alerts = self.TheHiveConnector.findAlert(alert_query)
+            for alert in alerts:
+                if 'sourceRef' in alert and alert['source'] == 'Azure_Sentinel_incidents':
+                    logger.info(f"Closing Sentinel incident {alert['title']}")
+                    self.AzureSentinelConnector.closeIncident(alert['sourceRef'], self.classification, self.classification_comment)
             self.report_action = 'closeIncident'

         return self.report_action
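The closure path above now resolves the Sentinel incident IDs through the alerts linked to the case and passes a classification plus comment to `closeIncident`. The "translation table for case statuses" referenced in the hunk sits outside this diff; a hypothetical version of such a mapping (names chosen to match Sentinel's closing classifications, not taken from the patch) might look like this:

```python
# Hypothetical example only: map TheHive resolution statuses to Sentinel classifications.
classification_map = {
    'Indeterminate': 'Undetermined',
    'TruePositive': 'TruePositive',
    'FalsePositive': 'FalsePositive',
    'Other': 'BenignPositive',
}

resolution_status = 'FalsePositive'  # e.g. taken from the webhook details (assumed field)
classification = classification_map.get(resolution_status, 'Undetermined')
print(classification)  # FalsePositive
```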
diff --git a/modules/AzureSentinel/connector.py b/modules/AzureSentinel/connector.py
index 4e0025f..c238730 100644
--- a/modules/AzureSentinel/connector.py
+++ b/modules/AzureSentinel/connector.py
@@ -2,7 +2,7 @@
 import requests
 import json
 import time
-from datetime import datetime, timezone
+from datetime import datetime, timedelta, timezone
 from dateutil import tz

 class AzureSentinelConnector:
@@ -27,10 +27,27 @@ def __init__(self, cfg):
         self.tenant_id = self.cfg.get('AzureSentinel', 'tenant_id')
         self.client_id = self.cfg.get('AzureSentinel', 'client_id')
         self.client_secret = self.cfg.get('AzureSentinel', 'client_secret')
-        self.base_url = 'https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights'.format(self.subscription_id, self.resource_group, self.workspace)
+        self.lookback = self.cfg.get('AzureSentinel', 'lookback', fallback=False)
+        self.base_url = f'https://management.azure.com/subscriptions/{self.subscription_id}/resourceGroups/{self.resource_group}/providers/Microsoft.OperationalInsights/workspaces/{self.workspace}/providers/Microsoft.SecurityInsights'
         self.bearer_token = self.getBearerToken()

+        if self.lookback:
+            # try:
+            units = self.lookback[-1]
+            if units in "hH":
+                self.lookback = timedelta(hours=int(self.lookback[:-1]))
+                self.logger.debug(f"Azure Sentinel lookback set to {self.lookback.days} days and {int(self.lookback.seconds/60/60)} hours")
+            elif units in "dD":
+                self.lookback = timedelta(days=int(self.lookback[:-1]))
+                self.logger.debug(f"Azure Sentinel lookback set to {self.lookback.days} days")
+            else:
+                raise ValueError(f"Could not parse lookback value '{self.lookback}': unsupported unit '{units}'")
+            # except Exception as e:
+            #     self.logger.error(f"Could not calculate Sentinel timedelta from lookback: {self.lookback}. Ignoring lookback...", exc_info=True)
+            #     self.lookback = False
+
+
     def getBearerToken(self):
         self.url = 'https://login.microsoftonline.com/{}/oauth2/token'.format(self.tenant_id)
         self.data = 'grant_type=client_credentials&client_id={}&client_secret={}&resource=https%3A%2F%2Fmanagement.azure.com&undefined='.format(self.client_id, self.client_secret)
@@ -50,7 +67,7 @@ def formatDate(self, target, sentinelTimeStamp):
         #Example: 2020-10-22T12:55:27.9576603Z << can also be six milliseconds. Cropping the timestamp therefore...
         #Define timezones
         current_timezone = tz.gettz('UTC')
-
+
         #Retrieve timezone from config or use local time (None)
         configured_timezone = self.cfg.get('AzureSentinel', 'timezone', fallback=None)
         new_timezone = tz.gettz(configured_timezone)
@@ -64,7 +81,6 @@ def formatDate(self, target, sentinelTimeStamp):
         ntz_formatted_time = formatted_time.astimezone(new_timezone)

         if target == "description":
-
             #Create a string from time object
             string_formatted_time = ntz_formatted_time.strftime('%Y-%m-%d %H:%M:%S')
@@ -154,7 +170,7 @@ def updateIncidentStatusToActive(self, incidentId):
         except Exception as e:
             self.logger.error('Failed to update incident %s', incidentId, exc_info=True)
             raise
-
+
     def closeIncident(self, incidentId, classification, classification_comment):
         # Variable required for handling regeneration of the Bearer token
         self.bearer_token_regenerated = False
@@ -215,7 +231,15 @@ def closeIncident(self, incidentId, classification, classification_comment):

         # Empty array for incidents
         self.incidents = []
-        self.url = self.base_url + '/incidents?api-version=2020-01-01&%24filter=(properties%2Fstatus%20eq%20\'New\'%20or%20properties%2Fstatus%20eq%20\'Active\')&%24orderby=properties%2FcreatedTimeUtc%20desc'
+        # Query only for recently updated incidents if lookback is set
+        query_filter = "(properties/status eq 'New' or properties/status eq 'Active')"
+        if self.lookback:
+            lookback_date = datetime.utcnow() - self.lookback
+            query_filter += f" and (properties/lastModifiedTimeUtc ge {lookback_date.isoformat()}Z)"
+
+        order_by = "properties/createdTimeUtc desc"
+
+        self.url = f'{self.base_url}/incidents?api-version=2020-01-01&$filter={requests.utils.quote(query_filter)}&$orderby={requests.utils.quote(order_by)}'

         # Adding empty header as parameters are being sent in payload
         self.headers = {
@@ -249,13 +273,14 @@ def getIncidents(self):
             self.logger.error('Failed to retrieve incidents', exc_info=True)
             raise

+        self.logger.debug(f'{len(self.incidents)} AzureSentinel incidents retrieved')
         return self.incidents

     def getRule(self, uri):
         #Variable required for handling regeneration of the Bearer token
         self.bearer_token_regenerated = False

-        self.url = 'https://management.azure.com{}?api-version={}'.format(uri, "2020-01-01")
+        self.url = f'https://management.azure.com{uri}?api-version=2020-01-01'

         # Adding empty header as parameters are being sent in payload
         self.headers = {
@@ -265,12 +290,40 @@ def getRule(self, uri):
         try:
             self.response = requests.get(self.url, headers=self.headers)
             return self.response.json()
-        except Exception as e:
+        except ConnectionRefusedError as e:
             #Supporting regeneration of the token automatically. Will try once and will fail after
             if self.response.status_code == 401 and not self.bearer_token_regenerated:
                 self.logger.info("Bearer token expired. Generating a new one")
                 self.bearer_token = self.getBearerToken()
                 self.bearer_token_regenerated = True
-                self.getIncidents()
+                self.getRule(uri)
+            else:
+                self.logger.error("Could not retrieve rule information from Azure Sentinel:", exc_info=True)
+        except Exception as e:
+            self.logger.error(f'Failed to retrieve rule', exc_info=True)
+
+    def getEntities(self, uri):
+        #Variable required for handling regeneration of the Bearer token
+        self.bearer_token_regenerated = False

-                self.logger.error("Could not retrieve rule information from Azure Sentinel: {}".format(e))
+        self.url = f'https://management.azure.com{uri}?api-version=2019-01-01-preview'
+
+        # Adding empty header as parameters are being sent in payload
+        self.headers = {
+            "Authorization": "Bearer " + self.bearer_token,
+            "cache-control": "no-cache",
+        }
+        try:
+            self.response = requests.post(self.url, headers=self.headers)
+            return self.response.json()
+        except ConnectionRefusedError as e:
+            #Supporting regeneration of the token automatically. Will try once and will fail after
+            if self.response.status_code == 401 and not self.bearer_token_regenerated:
+                self.logger.info("Bearer token expired. Generating a new one")
+                self.bearer_token = self.getBearerToken()
+                self.bearer_token_regenerated = True
+                self.getEntities(uri)
+            else:
+                self.logger.error("Could not retrieve entity information from Azure Sentinel:", exc_info=True)
+        except Exception as e:
+            self.logger.error(f'Failed to retrieve entities:', exc_info=True)
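The new `lookback` handling parses a value such as `'2d'` or `'24h'` into a `timedelta` and narrows the incident query with an extra OData clause on `lastModifiedTimeUtc`. A self-contained sketch of the resulting `$filter`, with illustrative values and the resource path replaced by a placeholder:

```python
# Illustrative sketch of the lookback -> $filter behaviour added in getIncidents().
from datetime import datetime, timedelta
import requests

lookback_raw = '2d'
amount, unit = int(lookback_raw[:-1]), lookback_raw[-1]
lookback = timedelta(days=amount) if unit in 'dD' else timedelta(hours=amount)

query_filter = "(properties/status eq 'New' or properties/status eq 'Active')"
lookback_date = datetime.utcnow() - lookback
query_filter += f" and (properties/lastModifiedTimeUtc ge {lookback_date.isoformat()}Z)"

# '<base_url>' stands in for the Microsoft.SecurityInsights resource path built in __init__
url = f"<base_url>/incidents?api-version=2020-01-01&$filter={requests.utils.quote(query_filter)}"
print(url)
```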
Generating a new one") self.bearer_token = self.getBearerToken() self.bearer_token_regenerated = True - self.getIncidents() + self.getRule(uri) + else: + self.logger.error("Could not retrieve rule information from Azure Sentinel:", exc_info=True) + except Exception as e: + self.logger.error(f'Failed to retrieve rule', exc_info=True) + + def getEntities(self, uri): + #Variable required for handling regeneration of the Bearer token + self.bearer_token_regenerated = False - self.logger.error("Could not retrieve rule information from Azure Sentinel: {}".format(e)) + self.url = f'https://management.azure.com{uri}?api-version=2019-01-01-preview' + + # Adding empty header as parameters are being sent in payload + self.headers = { + "Authorization": "Bearer " + self.bearer_token, + "cache-control": "no-cache", + } + try: + self.response = requests.post(self.url, headers=self.headers) + return self.response.json() + except ConnectionRefusedError as e: + #Supporting regeneration of the token automatically. Will try once and will fail after + if self.response.status_code == 401 and not self.bearer_token_regenerated: + self.logger.info("Bearer token expired. Generating a new one") + self.bearer_token = self.getBearerToken() + self.bearer_token_regenerated = True + self.getEntities(uri) + else: + self.logger.error("Could not retrieve entity information from Azure Sentinel:", exc_info=True) + except Exception as e: + self.logger.error(f'Failed to retrieve entities:', exc_info=True) diff --git a/modules/AzureSentinel/integration.py b/modules/AzureSentinel/integration.py index ea1ca2b..55411c1 100644 --- a/modules/AzureSentinel/integration.py +++ b/modules/AzureSentinel/integration.py @@ -10,6 +10,8 @@ def __init__(self): super().__init__() self.azureSentinelConnector = AzureSentinelConnector(self.cfg) self.theHiveConnector = TheHiveConnector(self.cfg) + # Add incident labels as tags + self.importTags = self.cfg.get('AzureSentinel', 'import_tags', fallback=True) def craftAlertDescription(self, incident): """ @@ -22,7 +24,7 @@ def craftAlertDescription(self, incident): self.description = "" # Add url to incident - self.url = ('[%s](%s)' % (str(incident['properties']['incidentNumber']), str(incident['properties']['incidentUrl']))) + self.url = ('[View incident %s](%s) in Sentinel' % (str(incident['properties']['incidentNumber']), str(incident['properties']['incidentUrl']))) self.description += '#### Incident: \n - ' + self.url + '\n\n' # Format associated rules @@ -66,15 +68,73 @@ def craftAlertDescription(self, incident): '| | |\n' + '| ----------------------- | ------------- |\n' + '| **Start Time** | ' + str(self.azureSentinelConnector.formatDate("description", incident['properties']['createdTimeUtc'])) + ' |\n' + - '| **incident ID** | ' + str(incident['properties']['incidentNumber']) + ' |\n' + + '| **incident ID** | ' + str(incident['properties']['incidentNumber']) + ' |\n' + '| **Description** | ' + str(incident['properties']['description'].replace('\n', '')) + ' |\n' + - '| **incident Type** | ' + str(incident['type']) + ' |\n' + - '| **incident Source** | ' + str(incident['properties']['additionalData']['alertProductNames']) + ' |\n' + - '| **incident Status** | ' + str(incident['properties']['status']) + ' |\n' + + '| **incident Type** | ' + str(incident['type']) + ' |\n' + + '| **incident Source** | ' + ', '.join(incident['properties']['additionalData'].get('alertProductNames', [])) + ' |\n' + + '| **incident Status** | ' + str(incident['properties']['status']) + ' |\n' + + '| **Tactics** | ' + 
@@ -96,26 +156,33 @@ def getHiveSeverity(incident):

         # Setup Tags
         self.tags = ['AzureSentinel', 'incident', 'Synapse']
-        # Skip for now
+        if self.importTags:
+            for tag in incident["properties"].get("labels", []):
+                self.tags.append(tag["labelName"])
+
+        # # Skip for now
         self.artifacts = []
+        # # Extract entities
+        for artifact in self.extractEntities(incident):
+            self.artifacts.append(self.theHiveConnector.craftAlertArtifact(**artifact))

         # Retrieve the configured case_template
         self.sentinelCaseTemplate = self.cfg.get('AzureSentinel', 'case_template')

         # Build TheHive alert
         self.alert = self.theHiveConnector.craftAlert(
-            "{}, {}".format(incident['properties']['incidentNumber'], incident['properties']['title']),
-            self.craftAlertDescription(incident),
-            getHiveSeverity(incident),
-            self.azureSentinelConnector.formatDate("alert_timestamp", incident['properties']['createdTimeUtc']),
-            self.tags,
-            2,
-            'New',
-            'internal',
-            'Azure_Sentinel_incidents',
-            str(incident['name']),
-            self.artifacts,
-            self.sentinelCaseTemplate)
+            title="{}: {}".format(incident['properties']['incidentNumber'], incident['properties']['title']),
+            description=self.craftAlertDescription(incident),
+            severity=getHiveSeverity(incident),
+            date=self.azureSentinelConnector.formatDate("alert_timestamp", incident['properties']['createdTimeUtc']),
+            tags=self.tags,
+            tlp=2,
+            status='New',
+            type='internal',
+            source='Azure_Sentinel_incidents',
+            sourceRef=str(incident['name']),
+            artifacts=self.artifacts,
+            caseTemplate=self.sentinelCaseTemplate)

         return self.alert
diff --git a/requirements.txt b/requirements.txt
index 9d9ea2a..50a835f 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -32,4 +32,5 @@ tzlocal==1.5.1
 urllib3==1.23
 werkzeug==0.14.1
 PyYAML==5.1.1
-redis==3.3.8
\ No newline at end of file
+redis==3.3.8
+msal
\ No newline at end of file
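`requirements.txt` now pulls in `msal`, although the connector shown above still obtains its bearer token with a raw `requests.post` against the OAuth2 endpoint. For reference, a minimal msal-based equivalent (tenant and client values are placeholders, not part of the patch):

```python
# Sketch only: acquire the same management-API token via msal instead of a raw OAuth2 POST.
import msal

app = msal.ConfidentialClientApplication(
    client_id='<client_id>',
    client_credential='<client_secret>',
    authority='https://login.microsoftonline.com/<tenant_id>',
)
token = app.acquire_token_for_client(scopes=['https://management.azure.com/.default'])
bearer_token = token.get('access_token')
```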