Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for SDS resources #284

Merged
merged 5 commits into from
Sep 23, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -229,6 +229,9 @@ When running against multiple destination organizations, a separate working dire
| powerpacks | Sync Datadog powerpacks. |
| restriction_policies | Sync Datadog restriction policies. |
| roles | Sync Datadog roles. |
| sensitive_data_scanner_groups | Sync SDS groups. |
| sensitive_data_scanner_groups_order | Sync SDS groups order. |
| sensitive_data_scanner_rules | Sync SDS rules. |
| service_level_objectives | Sync Datadog SLOs. |
| slo_corrections | Sync Datadog SLO corrections. |
| spans_metrics | Sync Datadog spans metrics. |
Expand Down Expand Up @@ -271,6 +274,9 @@ See [Supported resources](#supported-resources) section below for potential reso
| powerpacks | monitors, service_level_objectives |
| restriction_policies | dashboards, service_level_objectives, notebooks, users, roles |
| roles | - |
| sensitive_data_scanner_groups | - |
| sensitive_data_scanner_groups_order | sensitive_data_scanner_groups |
| sensitive_data_scanner_rules | sensitive_data_scanner_groups |
| service_level_objectives | monitors, synthetics_tests |
| slo_corrections | service_level_objectives |
| spans_metrics | - |
Expand Down
80 changes: 80 additions & 0 deletions datadog_sync/model/sensitive_data_scanner_groups.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the 3-clause BSD style license (see LICENSE).
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.
from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, Tuple

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig

if TYPE_CHECKING:
from datadog_sync.utils.custom_client import CustomClient


class SensitiveDataScannerGroups(BaseResource):
    """Sync Datadog Sensitive Data Scanner (SDS) groups between organizations."""

    resource_type = "sensitive_data_scanner_groups"
    resource_config = ResourceConfig(
        non_nullable_attr=[],
        base_path="/api/v2/sensitive-data-scanner/config",
        excluded_attributes=[
            "id",
            "relationships",
        ],
        # The SDS config is a single shared, versioned document; serialize writes.
        concurrent=False,
    )
    # Additional SensitiveDataScannerGroups specific attributes

    async def get_resources(self, client: CustomClient) -> List[Dict]:
        """Return all SDS groups in the org.

        Groups have no dedicated list endpoint; they are embedded in the
        ``included`` section of the scanner configuration response.
        """
        resp = await client.get(self.resource_config.base_path)

        return [r for r in resp.get("included", []) if r["type"] == "sensitive_data_scanner_group"]

    async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]:
        """Import a single group, fetched from the source org when ``_id`` is given.

        Raises:
            Exception: if ``_id`` is given but no group with that id exists.
        """
        if _id:
            source_client = self.config.source_client
            # No per-group GET endpoint exists, so scan the full listing.
            groups = await self.get_resources(source_client)
            resource = next((group for group in groups if group["id"] == _id), None)
            if resource is None:
                raise Exception(f"Group with id {_id} not found")

        return resource["id"], resource

    async def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        pass

    async def pre_apply_hook(self) -> None:
        pass

    async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Create the group in the destination org and return its payload."""
        destination_client = self.config.destination_client
        payload = {"data": resource, "meta": {}}
        resp = await destination_client.post(self.resource_config.base_path + "/groups", payload)

        return _id, resp["data"]

    async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Patch the destination group mapped to source id ``_id``."""
        destination_client = self.config.destination_client
        destination_id = self.config.state.destination[self.resource_type][_id]["id"]
        resource["id"] = destination_id
        payload = {"data": resource, "meta": {}}
        await destination_client.patch(
            self.resource_config.base_path + f"/groups/{destination_id}",
            payload,
        )

        return _id, resource

    async def delete_resource(self, _id: str) -> None:
        """Delete the destination group mapped to source id ``_id``."""
        destination_client = self.config.destination_client
        payload = {"meta": {}}
        await destination_client.delete(
            self.resource_config.base_path + f"/groups/{self.config.state.destination[self.resource_type][_id]['id']}",
            body=payload,
        )

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        # Groups have no outgoing resource connections.
        pass
129 changes: 129 additions & 0 deletions datadog_sync/model/sensitive_data_scanner_groups_order.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the 3-clause BSD style license (see LICENSE).
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, Tuple
from copy import deepcopy

from aiohttp import ClientResponseError
from deepdiff.operator import BaseOperator

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig

if TYPE_CHECKING:
from datadog_sync.utils.custom_client import CustomClient


class SensitiveDataScannerGroupsOrderIdsComparator(BaseOperator):
    """deepdiff operator that diffs group orderings while ignoring ids present
    only on the ``t1`` side (per the original intent: extra groups that exist
    only in the destination org should not produce a spurious diff)."""

    def match(self, level) -> bool:
        """Return True (and normalize the compared values) when both sides carry a "groups" list."""
        if "groups" in level.t1 and "groups" in level.t2:
            # Make copies so we do not mutate the original resources.
            level.t1 = deepcopy(level.t1)
            level.t2 = deepcopy(level.t2)

            # Drop ids that appear only in t1 so they are excluded from the diff.
            t1_ids = set(level.t1["groups"])
            t2_ids = set(level.t2["groups"])
            t1_only = t1_ids - t2_ids

            level.t1["groups"] = [_id for _id in level.t1["groups"] if _id not in t1_only]
            return True
        # Explicit False: the original fell through and returned None implicitly.
        return False

    def give_up_diffing(self, level, diff_instance) -> bool:
        # Never short-circuit; let deepdiff diff the normalized values.
        return False


class SensitiveDataScannerGroupsOrder(BaseResource):
    """Sync the ordering of Sensitive Data Scanner groups between organizations."""

    resource_type = "sensitive_data_scanner_groups_order"
    resource_config = ResourceConfig(
        # The order is a single shared, versioned document; serialize writes.
        concurrent=False,
        base_path="/api/v2/sensitive-data-scanner/config",
        resource_connections={
            "sensitive_data_scanner_groups": ["groups"],
        },
        deep_diff_config={
            "ignore_order": False,
            "custom_operators": [SensitiveDataScannerGroupsOrderIdsComparator()],
        },
        excluded_attributes=[
            "id",
        ],
    )
    # Additional SensitiveDataScannerGroupsOrder specific attributes
    # Destination org's current order, cached by pre_resource_action_hook.
    destination_sensitive_data_scanner_group_order: Dict[str, Dict] = dict()
    # Static state key: there is exactly one order resource per org.
    default_id: str = "sensitive-data-scanner-group-order"

    async def get_resources(self, client: CustomClient) -> List[Dict]:
        """Return the org's group order as a single synthetic resource."""
        resp = await client.get(self.resource_config.base_path)

        order = {
            "id": resp["data"]["id"],
            "groups": [r["id"] for r in resp.get("included", []) if r["type"] == "sensitive_data_scanner_group"],
        }

        return [order]

    async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]:
        # There is only one order per org, so always key it under the static id.
        return self.default_id, resource

    async def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        """Cache the destination order so create/update can merge against it."""
        destination_client = self.config.destination_client
        self.destination_sensitive_data_scanner_group_order = (await self.get_resources(destination_client))[0]

    async def pre_apply_hook(self) -> None:
        pass

    async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Seed state from the destination order and delegate to update.

        The order always exists in the destination org, so "create" is an update.
        """
        if not self.destination_sensitive_data_scanner_group_order:
            raise Exception("Failed to retrieve destination orgs sensitive data scanner group order")

        self.config.state.destination[self.resource_type][_id] = self.destination_sensitive_data_scanner_group_order
        return await self.update_resource(_id, resource)

    async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Apply the source ordering to the destination, preserving destination-only groups."""
        destination_resources = (
            self.destination_sensitive_data_scanner_group_order
            or self.config.state.destination[self.resource_type][_id]
        )

        # Groups that exist only in the source cannot be ordered in the destination.
        ids_to_omit = set(resource["groups"]) - set(destination_resources["groups"])
        # Destination-only groups are appended after the synced ordering.
        extra_ids_to_include = [gid for gid in destination_resources["groups"] if gid not in resource["groups"]]
        resource["groups"] = [gid for gid in resource["groups"] if gid not in ids_to_omit]
        resource["groups"] = resource["groups"] + extra_ids_to_include
        groups = [{"id": gid, "type": "sensitive_data_scanner_group"} for gid in resource["groups"]]

        payload = {
            "data": {
                "type": "sensitive_data_scanner_configuration",
                "id": destination_resources["id"],
                "relationships": {"groups": {"data": groups}},
            },
            "meta": {},
        }
        resource["id"] = destination_resources["id"]

        destination_client = self.config.destination_client
        # The config document is versioned; retry on concurrent-update conflicts.
        retry_count = 0
        while retry_count < 3:
            try:
                await destination_client.patch(self.resource_config.base_path, payload)
                break
            except ClientResponseError as e:
                if e.status == 400 and "specified version is out of date" in e.message:
                    retry_count += 1
                    continue
                raise e
        else:
            # Previously the loop fell through silently after exhausting retries,
            # reporting success even though the order was never applied.
            raise Exception("Failed to update sensitive data scanner group order: version conflict after 3 retries")

        return _id, resource

    async def delete_resource(self, _id: str) -> None:
        self.config.logger.warning(
            "sensitive_data_scanner_groups_order cannot be deleted. Removing resource from config only."
        )

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        return super(SensitiveDataScannerGroupsOrder, self).connect_id(key, r_obj, resource_to_connect)
104 changes: 104 additions & 0 deletions datadog_sync/model/sensitive_data_scanner_rules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the 3-clause BSD style license (see LICENSE).
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.
from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, Tuple

from datadog_sync.utils.base_resource import BaseResource, ResourceConfig

if TYPE_CHECKING:
from datadog_sync.utils.custom_client import CustomClient


class SensitiveDataScannerRules(BaseResource):
    """Sync Sensitive Data Scanner rules between organizations.

    Standard-pattern ids differ between orgs, so they are translated
    id -> name on import and name -> id before applying to the destination.
    """

    resource_type = "sensitive_data_scanner_rules"
    resource_config = ResourceConfig(
        base_path="/api/v2/sensitive-data-scanner/config",
        excluded_attributes=[
            "id",
        ],
        resource_connections={"sensitive_data_scanner_groups": ["relationships.group.data.id"]},
        # The SDS config is a single shared, versioned document; serialize writes.
        concurrent=False,
    )
    # Additional SensitiveDataScannerRules specific attributes
    # Standard-patterns listing endpoint (lives under /config, unlike the
    # previously unused constant which pointed one level too high).
    standard_pattern_path = "/api/v2/sensitive-data-scanner/config/standard-patterns"
    source_standard_pattern_mapping: Dict = {}  # pattern_id -> pattern_name
    destination_standard_pattern_mapping: Dict = {}  # pattern_name -> pattern_id

    async def get_resources(self, client: CustomClient) -> List[Dict]:
        """Return all SDS rules embedded in the scanner configuration response."""
        resp = await client.get(self.resource_config.base_path)

        return [r for r in resp.get("included", []) if r["type"] == "sensitive_data_scanner_rule"]

    async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]:
        """Import a rule, translating its standard-pattern id to a portable name."""
        source_client = self.config.source_client
        if not self.source_standard_pattern_mapping:
            # Populate the standard pattern mapping. Best effort: on failure,
            # rules keep the raw (org-specific) pattern id.
            try:
                std_patterns = (await source_client.get(self.standard_pattern_path))["data"]
                for pattern in std_patterns:
                    self.source_standard_pattern_mapping[pattern["id"]] = pattern["attributes"]["name"]
            except Exception as e:
                self.config.logger.warning("error retrieving standard patterns: %s", e)

        if _id:
            # NOTE(review): sibling SDS resources unwrap ["data"] from API
            # responses; confirm this endpoint returns the rule object directly.
            resource = await source_client.get(self.resource_config.base_path + f"/rules/{_id}")

        if _std_id := resource.get("relationships", {}).get("standard_pattern", {}).get("data", {}).get("id"):
            # Store the pattern *name* so it can be re-resolved in the destination org.
            resource["relationships"]["standard_pattern"]["data"]["id"] = self.source_standard_pattern_mapping.get(
                _std_id, _std_id
            )

        return resource["id"], resource

    async def pre_resource_action_hook(self, _id, resource: Dict) -> None:
        """Translate the stored standard-pattern name back to a destination id."""
        if name := resource.get("relationships", {}).get("standard_pattern", {}).get("data", {}).get("id"):
            resource["relationships"]["standard_pattern"]["data"]["id"] = self.destination_standard_pattern_mapping.get(
                name, name
            )

    async def pre_apply_hook(self) -> None:
        destination_client = self.config.destination_client
        if not self.destination_standard_pattern_mapping:
            mapping = {}
            # Populate the standard pattern mapping. Best effort: on failure,
            # pattern names pass through untranslated.
            try:
                std_patterns = (await destination_client.get(self.standard_pattern_path))["data"]
                for pattern in std_patterns:
                    mapping[pattern["attributes"]["name"]] = pattern["id"]
                self.destination_standard_pattern_mapping = mapping
            except Exception as e:
                self.config.logger.warning("error retrieving standard patterns: %s", e)

    async def create_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Create the rule in the destination org and return its payload."""
        destination_client = self.config.destination_client

        payload = {"data": resource, "meta": {}}
        resp = await destination_client.post(self.resource_config.base_path + "/rules", payload)

        return _id, resp["data"]

    async def update_resource(self, _id: str, resource: Dict) -> Tuple[str, Dict]:
        """Patch the destination rule mapped to source id ``_id``."""
        destination_client = self.config.destination_client
        destination_id = self.config.state.destination[self.resource_type][_id]["id"]
        resource["id"] = destination_id
        payload = {"data": resource, "meta": {}}
        await destination_client.patch(
            self.resource_config.base_path + f"/rules/{destination_id}",
            payload,
        )

        return _id, resource

    async def delete_resource(self, _id: str) -> None:
        """Delete the destination rule mapped to source id ``_id``."""
        destination_client = self.config.destination_client
        payload = {"meta": {}}
        await destination_client.delete(
            self.resource_config.base_path + f"/rules/{self.config.state.destination[self.resource_type][_id]['id']}",
            body=payload,
        )

    def connect_id(self, key: str, r_obj: Dict, resource_to_connect: str) -> Optional[List[str]]:
        return super(SensitiveDataScannerRules, self).connect_id(key, r_obj, resource_to_connect)
3 changes: 3 additions & 0 deletions datadog_sync/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@
from datadog_sync.model.powerpacks import Powerpacks
from datadog_sync.model.restriction_policies import RestrictionPolicies
from datadog_sync.model.roles import Roles
from datadog_sync.model.sensitive_data_scanner_groups import SensitiveDataScannerGroups
from datadog_sync.model.sensitive_data_scanner_groups_order import SensitiveDataScannerGroupsOrder
from datadog_sync.model.sensitive_data_scanner_rules import SensitiveDataScannerRules
from datadog_sync.model.service_level_objectives import ServiceLevelObjectives
from datadog_sync.model.slo_corrections import SLOCorrections
from datadog_sync.model.spans_metrics import SpansMetrics
Expand Down
8 changes: 8 additions & 0 deletions scripts/cleanup_org.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ def __init__(self):

# Delete all supported resources
self.cleanup_authn_mappings()
self.cleanup_sensitive_data_scanner_groups()
self.cleanup_service_level_objectives()
self.cleanup_slo_corrections()
self.cleanup_synthetics_tests()
Expand Down Expand Up @@ -183,6 +184,13 @@ def cleanup_synthetics_tests(
except requests.exceptions.HTTPError as e:
print("Error deleting resource: %s", e)

def cleanup_sensitive_data_scanner_groups(self):
    """Delete every sensitive data scanner group in the org."""
    config_path = "/api/v2/sensitive-data-scanner/config"
    # Groups are embedded in the scanner config response under "included".
    groups = [
        item
        for item in self.get_resources(config_path)["included"]
        if item["type"] == "sensitive_data_scanner_group"
    ]
    delete_body = json.dumps({"meta": {}})
    for group in groups:
        self.delete_resource(group["id"], config_path + "/groups", data=delete_body)

def cleanup_service_level_objectives(self):
path = "/api/v1/slo"
res = self.get_resources(path)
Expand Down
2 changes: 2 additions & 0 deletions tests/integration/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,8 @@ def test_resource_update_sync(self, runner, caplog):
value.append("updated")
if isinstance(value, str):
value = value + "updated"
if isinstance(value, bool):
value = not value

path_update(resource, self.field_to_update, value)
except Exception as e:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-09-16T15:11:03.200194-04:00
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2024-09-16T15:11:03.218649-04:00
Loading
Loading