24 changes: 17 additions & 7 deletions conf/synapse.sample.conf
@@ -29,7 +29,7 @@ Automation:
internal_contact: <customer_id>
debug_contact: <customer_id>
#The regexes used to find tags within The Hive for automation purposes
automation_regexes:
automation_regexes:
- 'uc-\w+-\d{3}\w{0,2}'
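#For example, a tag such as 'uc-bruteforce-004a' would match the regex above (illustrative value)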
#Logs the received webhooks in a different file. Can be useful for developing, troubleshooting and/or auditing
log_webhooks: false
@@ -73,7 +73,7 @@ TheHive:
sender_name: Notifications

Cortex:
enabled: false
enabled: false
#Url to contact Cortex
url: http://127.0.0.1:9002
#The user from The Hive created for Synapse
@@ -157,10 +157,10 @@ QRadar:
#Enable/disable the automation identifiers (such as Rule Id)
extract_automation_identifiers: true
#Fields to parse automation identifiers from. Which will be turned into tags
automation_fields:
automation_fields:
- rules
#The regex used for the parsing
tag_regexes:
tag_regexes:
- 'uc-\w+-\d{3}\w{0,2}'
# In case you have a kb where the rule name can be appended to the url like so:
# https://kb.url/use_cases/<uc_kb_name>
@@ -173,13 +173,13 @@ QRadar:
#Optional setting to configure the timezone for the received timestamps. Default is local (of the server running synapse)
#timezone: <timezone>
observables_in_offense_type:
Hostname (custom):
Hostname (custom):
- regex: '(([\w\d-]+)\.?)((([\w\d]+)\.?)*\.([\w\d]+))'
match_groups:
0: fqdn
3: domain
MD5 Hash (custom): hash
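A minimal sketch of how such a mapping can be applied, assuming the key 0 refers to the whole regex match and the other keys to numbered capture groups; the offense type value and variable names are illustrative:

import re

# Mapping as declared above: a regex plus match-group-to-datatype assignments
hostname_rule = {
    'regex': r'(([\w\d-]+)\.?)((([\w\d]+)\.?)*\.([\w\d]+))',
    'match_groups': {0: 'fqdn', 3: 'domain'},
}

offense_type = 'srv01.corp.example.com'  # illustrative offense type string
match = re.search(hostname_rule['regex'], offense_type)
if match:
    # group 0 is the full match; higher numbers address capture groups
    observables = {datatype: match.group(group)
                   for group, datatype in hostname_rule['match_groups'].items()}
    # observables == {'fqdn': 'srv01.corp.example.com', 'domain': 'corp.example.com'}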


Splunk:
#Enable/disable the integration part of the module
@@ -214,7 +214,7 @@ MISP:
enabled: false
automation_enabled: false
#A list of data types that are checked when the feature is enabled, to automatically search for observables in QRadar
supported_datatypes:
supported_datatypes:
- "ip"
- "domain"
- "fqdn"
@@ -225,8 +225,12 @@ MISP:
AzureSentinel:
#Enable/disable the integration part of the module
enabled: false
#Enable/disable the automation part of the module
automation_enabled: false
#Endpoint used in the integration url
synapse_endpoint: sentinel
#Import incident labels as TheHive tags?
import_tags: false
#Your Office 365 tenant id where the service account/sentinel lives
tenant_id: <tenant_id>
#The id of the service account
@@ -241,6 +245,12 @@ AzureSentinel:
workspace: <workspace>
#A case template to assign on alerts created for an incident
case_template: Sentinel Incident
#Optional setting to import Sentinel incident labels as TheHive tags. Default is false
# import_tags: true
#Optional setting to configure the timezone for the received timestamps. Default is local (of the server running synapse)
# timezone: Australia/Sydney
#Optional: how far to look back for updated incidents. Takes units in days (d) and hours (h). E.g. 1d, 20d, 5h
# lookback: 24h
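A minimal sketch of how a lookback value like the one above can be turned into a cutoff time, mirroring the connector change further down in this diff (variable names are illustrative):

from datetime import datetime, timedelta

lookback = '24h'  # value as configured above; a 'd' suffix would mean days
amount, unit = int(lookback[:-1]), lookback[-1].lower()
delta = timedelta(hours=amount) if unit == 'h' else timedelta(days=amount)
cutoff = datetime.utcnow() - delta  # only incidents modified after this point are queried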

Lexsi:
#Enable/disable the integration part of the module
21 changes: 13 additions & 8 deletions core/webhookidentifier.py
@@ -115,7 +115,7 @@ def isNew(self):

self.logger.debug('%s.isNew starts', __name__)

if self.data['operation'] == 'Creation':
if self.data['operation'].lower() in ['creation', 'create']:
return True
else:
return False
@@ -130,7 +130,7 @@ def isUpdate(self):

self.logger.debug('%s.isUpdate starts', __name__)

if self.data['operation'] == 'Update':
if self.data['operation'].lower() == 'update':
return True
else:
return False
@@ -151,6 +151,9 @@ def isMarkedAsRead(self):
else:
return False
except KeyError:
# TheHive 4 difference: the read state is carried in the 'details' object
if self.data['details'].get('read', False):
return True
# when the alert is ignored (ignore new updates), the webhook does
# not have the status key; this exception handles that
return False
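For illustration, a minimal webhook payload of the shape this fallback handles: a TheHive 4 style alert update that carries a read flag in details instead of a status field (values are illustrative):

th4_webhook = {
    'operation': 'update',      # matched case-insensitively by isUpdate above
    'objectType': 'alert',
    'details': {'read': True},  # no status key, so the 'read' fallback above applies
}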
@@ -191,7 +194,7 @@ def isDeleted(self):

self.logger.debug('%s.isDeleted starts', __name__)

if self.data['operation'] == 'Delete':
if self.data['operation'].lower() == 'delete':
return True
else:
return False
@@ -562,7 +565,8 @@ def isAzureSentinelAlertImported(self):
"""

self.logger.debug('%s.isAzureSentinelAlertImported starts', __name__)

if (self.isNewCase() and self.isAzureSentinel()):
return True
if (self.isImportedAlert() and self.isAzureSentinel()):
return True
else:
@@ -580,12 +584,13 @@ def fromAzureSentinel(self, esCaseId):
:rtype: bool
"""

query = dict()
query['case'] = esCaseId
self.logger.debug('%s.fromAzureSentinel starts', __name__)

query = {'case': esCaseId}
results = self.theHiveConnector.findAlert(query)

if len(results) == 1:
# should only have one hit
if len(results) > 0:
# there should be at least one hit
if results[0]['source'] == 'Azure_Sentinel_incidents':
# case opened from incident
# and from AzureSentinel
31 changes: 25 additions & 6 deletions modules/AzureSentinel/automation.py
@@ -29,24 +29,38 @@ def __init__(self, webhook, cfg):
self.report_action = report_action

def parse_hooks(self):
logger.debug(f'Azure Sentinel webhook parsing starts')
# Update incident status to active when imported as Alert
if self.webhook.isAzureSentinelAlertImported():
self.incidentId = self.webhook.data['object']['sourceRef']
logger.info('Incident {} needs to be updated to status Active'.format(self.incidentId))
self.AzureSentinelConnector.updateIncidentStatusToActive(self.incidentId)
self.report_action = 'updateIncident'
# We need to get the alert sourceRefs from the opened case for TH4
if self.webhook.data['objectType'] == "case":
alert_query = {'case': self.webhook.data['objectId']}
alerts = self.TheHiveConnector.findAlert(alert_query)
for alert in alerts:
if 'sourceRef' in alert and alert['source'] == 'Azure_Sentinel_incidents':
logger.info(f"Sentinel incident '{alert['title']}' needs to be updated to status Active")
self.AzureSentinelConnector.updateIncidentStatusToActive(alert['sourceRef'])
else:
self.incidentId = self.webhook.data['object']['sourceRef']
logger.info(f'Incident {self.incidentId} needs to be updated to status Active')
self.AzureSentinelConnector.updateIncidentStatusToActive(self.incidentId)
self.report_action = 'updateIncident'

# Close incidents in Azure Sentinel
if self.webhook.isClosedAzureSentinelCase() or self.webhook.isDeletedAzureSentinelCase() or self.webhook.isAzureSentinelAlertMarkedAsRead():
if self.webhook.data['operation'] == 'Delete':
if self.webhook.data['operation'].lower() == 'delete':
logger.debug(f'Azure Sentinel: Adding Delete comment')
self.case_id = self.webhook.data['objectId']
self.classification = "Undetermined"
self.classification_comment = "Closed by Synapse with summary: Deleted within The Hive"

elif self.webhook.isAzureSentinelAlertMarkedAsRead():
logger.debug(f'Azure Sentinel: Adding Undetermined comment')
self.case_id = self.webhook.data['objectId']
self.classification = "Undetermined"
self.classification_comment = "Closed by Synapse with summary: Marked as Read within The Hive"
else:
logger.debug(f'Azure Sentinel: Adding Case Closure comment')
self.case_id = self.webhook.data['object']['id']

# Translation table for case statuses
@@ -60,7 +74,12 @@ def parse_hooks(self):
self.classification_comment = "Closed by Synapse with summary: {}".format(self.webhook.data['details']['summary'])

logger.info('Incident {} needs to be marked as Closed'.format(self.case_id))
self.AzureSentinelConnector.closeIncident(self.webhook.incidentId, self.classification, self.classification_comment)
alert_query = {'case': self.webhook.data['objectId']}
alerts = self.TheHiveConnector.findAlert(alert_query)
for alert in alerts:
if 'sourceRef' in alert and alert['source'] == 'Azure_Sentinel_incidents':
logger.info(f"Closing Sentinel incident {alert['title']}")
self.AzureSentinelConnector.closeIncident(alert['sourceRef'], self.classification, self.classification_comment)
self.report_action = 'closeIncident'

return self.report_action
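For reference, the loops above only rely on a few fields of each alert returned by findAlert; a hypothetical minimal record looks like this (values are illustrative):

alert = {
    'title': 'Azure Sentinel incident 1234',   # used for logging
    'source': 'Azure_Sentinel_incidents',      # filter: only Sentinel-backed alerts are acted on
    'sourceRef': '<sentinel_incident_id>',     # handed to updateIncidentStatusToActive / closeIncident
}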
73 changes: 63 additions & 10 deletions modules/AzureSentinel/connector.py
@@ -2,7 +2,7 @@
import requests
import json
import time
from datetime import datetime, timezone
from datetime import datetime, timedelta, timezone
from dateutil import tz

class AzureSentinelConnector:
@@ -27,10 +27,27 @@ def __init__(self, cfg):
self.tenant_id = self.cfg.get('AzureSentinel', 'tenant_id')
self.client_id = self.cfg.get('AzureSentinel', 'client_id')
self.client_secret = self.cfg.get('AzureSentinel', 'client_secret')
self.base_url = 'https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights'.format(self.subscription_id, self.resource_group, self.workspace)
self.lookback = self.cfg.get('AzureSentinel', 'lookback', fallback=False)
self.base_url = f'https://management.azure.com/subscriptions/{self.subscription_id}/resourceGroups/{self.resource_group}/providers/Microsoft.OperationalInsights/workspaces/{self.workspace}/providers/Microsoft.SecurityInsights'

self.bearer_token = self.getBearerToken()

if self.lookback:
try:
# Convert the configured lookback string (e.g. '5h' or '20d') into a timedelta
units = self.lookback[-1]
if units in "hH":
self.lookback = timedelta(hours=int(self.lookback[:-1]))
self.logger.debug(f"Azure Sentinel lookback set to {self.lookback.days} days and {int(self.lookback.seconds/60/60)} hours")
elif units in "dD":
self.lookback = timedelta(days=int(self.lookback[:-1]))
self.logger.debug(f"Azure Sentinel lookback set to {self.lookback.days} days")
else:
raise ValueError(f"Unsupported lookback unit '{units}', expected 'd' or 'h'")
except Exception as e:
self.logger.error(f"Could not calculate Sentinel timedelta from lookback: {self.lookback}. Ignoring lookback...", exc_info=True)
self.lookback = False


def getBearerToken(self):
self.url = 'https://login.microsoftonline.com/{}/oauth2/token'.format(self.tenant_id)
self.data = 'grant_type=client_credentials&client_id={}&client_secret={}&resource=https%3A%2F%2Fmanagement.azure.com&undefined='.format(self.client_id, self.client_secret)
@@ -50,7 +67,7 @@ def formatDate(self, target, sentinelTimeStamp):
#Example: 2020-10-22T12:55:27.9576603Z << the fractional part can also be six digits. Cropping the timestamp therefore...
#Define timezones
current_timezone = tz.gettz('UTC')

#Retrieve timezone from config or use local time (None)
configured_timezone = self.cfg.get('AzureSentinel', 'timezone', fallback=None)
new_timezone = tz.gettz(configured_timezone)
@@ -64,7 +81,6 @@ def formatDate(self, target, sentinelTimeStamp):
ntz_formatted_time = formatted_time.astimezone(new_timezone)

if target == "description":

#Create a string from time object
string_formatted_time = ntz_formatted_time.strftime('%Y-%m-%d %H:%M:%S')

@@ -154,7 +170,7 @@ def updateIncidentStatusToActive(self, incidentId):
except Exception as e:
self.logger.error('Failed to update incident %s', incidentId, exc_info=True)
raise

def closeIncident(self, incidentId, classification, classification_comment):
# Variable required for handling regeneration of the Bearer token
self.bearer_token_regenerated = False
@@ -215,7 +231,15 @@ def getIncidents(self):
# Empty array for incidents
self.incidents = []

self.url = self.base_url + '/incidents?api-version=2020-01-01&%24filter=(properties%2Fstatus%20eq%20\'New\'%20or%20properties%2Fstatus%20eq%20\'Active\')&%24orderby=properties%2FcreatedTimeUtc%20desc'
# Query only for recently updated incidents if lookback is set
query_filter = "(properties/status eq 'New' or properties/status eq 'Active')"
if self.lookback:
lookback_date = datetime.utcnow() - self.lookback
query_filter += f" and (properties/lastModifiedTimeUtc ge {lookback_date.isoformat()}Z)"
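# e.g. with a 24h lookback the filter becomes (timestamp illustrative):
# (properties/status eq 'New' or properties/status eq 'Active') and (properties/lastModifiedTimeUtc ge 2021-06-01T09:30:00.123456Z)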

order_by = "properties/createdTimeUtc desc"

self.url = f'{self.base_url}/incidents?api-version=2020-01-01&$filter={requests.utils.quote(query_filter)}&$orderby={requests.utils.quote(order_by)}'

# Adding empty header as parameters are being sent in payload
self.headers = {
@@ -249,13 +273,14 @@ def getIncidents(self):
self.logger.error('Failed to retrieve incidents', exc_info=True)
raise

self.logger.debug(f'{len(self.incidents)} AzureSentinel incidents retrieved')
return self.incidents

def getRule(self, uri):
#Variable required for handling regeneration of the Bearer token
self.bearer_token_regenerated = False

self.url = 'https://management.azure.com{}?api-version={}'.format(uri, "2020-01-01")
self.url = f'https://management.azure.com{uri}?api-version=2020-01-01'

# Adding empty header as parameters are being sent in payload
self.headers = {
@@ -265,12 +290,40 @@
try:
self.response = requests.get(self.url, headers=self.headers)
return self.response.json()
except Exception as e:
except ConnectionRefusedError as e:
#Supporting regeneration of the token automatically. Will try once and will fail after
if self.response.status_code == 401 and not self.bearer_token_regenerated:
self.logger.info("Bearer token expired. Generating a new one")
self.bearer_token = self.getBearerToken()
self.bearer_token_regenerated = True
return self.getRule(uri)
else:
self.logger.error("Could not retrieve rule information from Azure Sentinel:", exc_info=True)
except Exception as e:
self.logger.error(f'Failed to retrieve rule', exc_info=True)

def getEntities(self, uri):
#Variable required for handling regeneration of the Bearer token
self.bearer_token_regenerated = False

self.logger.error("Could not retrieve rule information from Azure Sentinel: {}".format(e))
self.url = f'https://management.azure.com{uri}?api-version=2019-01-01-preview'

# Adding empty header as parameters are being sent in payload
self.headers = {
"Authorization": "Bearer " + self.bearer_token,
"cache-control": "no-cache",
}
try:
self.response = requests.post(self.url, headers=self.headers)
return self.response.json()
except ConnectionRefusedError as e:
#Supporting regeneration of the token automatically. Will try once and will fail after
if self.response.status_code == 401 and not self.bearer_token_regenerated:
self.logger.info("Bearer token expired. Generating a new one")
self.bearer_token = self.getBearerToken()
self.bearer_token_regenerated = True
return self.getEntities(uri)
else:
self.logger.error("Could not retrieve entity information from Azure Sentinel:", exc_info=True)
except Exception as e:
self.logger.error(f'Failed to retrieve entities:', exc_info=True)
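A hedged usage sketch for the new getEntities helper, assuming the caller passes the incident's ARM resource path (the id field of an incident returned by getIncidents) with '/entities' appended; the actual call site is not part of this diff:

connector = AzureSentinelConnector(cfg)  # cfg: parsed Synapse configuration
for incident in connector.getIncidents():
    # assumption: incident['id'] holds the full ARM path of the incident
    entities = connector.getEntities(incident['id'] + '/entities')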