diff --git a/README.md b/README.md index 946c79c5..84cb3ae2 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,8 @@ Then, you can run the `sync` command which will use the stored files from previo The `migrate` command will run an `import` followed immediately by a `sync`. +The `reset` command will delete resources at the destination; however, by default it backs up those resources first and fails if it cannot. You can (but probably shouldn't) skip the backup by using the `--do-not-backup` flag. + *Note*: The tool uses the `resources` directory as the source of truth for determining what resources need to be created and modified. Hence, this directory should not be removed or corrupted. **Example Usage** diff --git a/datadog_sync/commands/__init__.py b/datadog_sync/commands/__init__.py index 438bce28..c77b2981 100644 --- a/datadog_sync/commands/__init__.py +++ b/datadog_sync/commands/__init__.py @@ -7,6 +7,7 @@ from datadog_sync.commands._import import _import from datadog_sync.commands.diffs import diffs from datadog_sync.commands.migrate import migrate +from datadog_sync.commands.reset import reset ALL_COMMANDS = [ @@ -14,4 +15,5 @@ _import, diffs, migrate, + reset, ] diff --git a/datadog_sync/commands/migrate.py b/datadog_sync/commands/migrate.py index 37f23181..01190659 100644 --- a/datadog_sync/commands/migrate.py +++ b/datadog_sync/commands/migrate.py @@ -23,5 +23,5 @@ @diffs_common_options @sync_common_options def migrate(**kwargs): - """Migrate Datadog resources from one datqaacenter to another.""" + """Migrate Datadog resources from one datacenter to another.""" run_cmd(Command.MIGRATE, **kwargs) diff --git a/datadog_sync/commands/reset.py b/datadog_sync/commands/reset.py new file mode 100644 index 00000000..d2c54dce --- /dev/null +++ b/datadog_sync/commands/reset.py @@ -0,0 +1,30 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the 3-clause BSD style license (see LICENSE). 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +from click import command, option + +from datadog_sync.commands.shared.options import ( + CustomOptionClass, + common_options, + destination_auth_options, +) +from datadog_sync.commands.shared.utils import run_cmd +from datadog_sync.constants import Command + + +@command(Command.RESET.value, short_help="WARNING: Reset Datadog resources by deleting them.") +@destination_auth_options +@common_options +@option( + "--do-not-backup", + required=False, + is_flag=True, + default=False, + help="Skip backing up the destination you are about to reset. Not recommended.", + cls=CustomOptionClass, +) +def reset(**kwargs): + """WARNING: Reset Datadog resources by deleting them.""" + run_cmd(Command.RESET, **kwargs) diff --git a/datadog_sync/commands/shared/utils.py b/datadog_sync/commands/shared/utils.py index 36f7f2fb..67dc2ed1 100644 --- a/datadog_sync/commands/shared/utils.py +++ b/datadog_sync/commands/shared/utils.py @@ -17,7 +17,7 @@ def run_cmd(cmd: Command, **kwargs): asyncio.run(run_cmd_async(cfg, handler, cmd)) except KeyboardInterrupt: cfg.logger.error("Process interrupted by user") - if cmd in [Command.SYNC, Command.MIGRATE]: + if cmd in [Command.SYNC, Command.MIGRATE, Command.RESET]: cfg.logger.info("Writing synced resources to disk before exit...") cfg.state.dump_state() exit(0) @@ -44,6 +44,8 @@ async def run_cmd_async(cfg: Configuration, handler: ResourcesHandler, cmd: Comm elif cmd == Command.MIGRATE: await handler.import_resources() await handler.apply_resources() + elif cmd == Command.RESET: + await handler.reset() else: cfg.logger.error(f"Command {cmd.value} not found") return diff --git a/datadog_sync/constants.py b/datadog_sync/constants.py index 345ee7d4..35457fb4 100644 --- a/datadog_sync/constants.py +++ b/datadog_sync/constants.py @@ -48,6 +48,7 @@ class Command(Enum): SYNC = "sync" DIFFS = "diffs" MIGRATE = "migrate" + RESET = "reset" 
# Origin diff --git a/datadog_sync/utils/configuration.py b/datadog_sync/utils/configuration.py index 55dcd814..8733640b 100644 --- a/datadog_sync/utils/configuration.py +++ b/datadog_sync/utils/configuration.py @@ -4,20 +4,12 @@ # Copyright 2019 Datadog, Inc. from __future__ import annotations +from dataclasses import dataclass, field import logging import sys -from dataclasses import dataclass, field +import time from typing import Any, Optional, Union, Dict, List -from datadog_sync import models -from datadog_sync.model.logs_pipelines import LogsPipelines -from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines -from datadog_sync.model.downtimes import Downtimes -from datadog_sync.model.downtime_schedules import DowntimeSchedules -from datadog_sync.utils.custom_client import CustomClient -from datadog_sync.utils.base_resource import BaseResource -from datadog_sync.utils.log import Log -from datadog_sync.utils.filter import Filter, process_filters from datadog_sync.constants import ( Command, DESTINATION_PATH_DEFAULT, @@ -31,6 +23,15 @@ VALIDATE_ENDPOINT, VALID_DDR_STATES, ) +from datadog_sync import models +from datadog_sync.model.logs_pipelines import LogsPipelines +from datadog_sync.model.logs_custom_pipelines import LogsCustomPipelines +from datadog_sync.model.downtimes import Downtimes +from datadog_sync.model.downtime_schedules import DowntimeSchedules +from datadog_sync.utils.custom_client import CustomClient +from datadog_sync.utils.base_resource import BaseResource +from datadog_sync.utils.log import Log +from datadog_sync.utils.filter import Filter, process_filters from datadog_sync.utils.resource_utils import CustomClientHTTPError from datadog_sync.utils.state import State @@ -51,6 +52,7 @@ class Configuration(object): send_metrics: bool state: State verify_ddr_status: bool + backup_before_reset: bool resources: Dict[str, BaseResource] = field(default_factory=dict) resources_arg: List[str] = field(default_factory=list) @@ -60,15 +62,14 
@@ async def init_async(self, cmd: Command): for resource in self.resources.values(): await resource.init_async() - # Validate the clients. For import we only validate the source client - # For sync/diffs we validate the destination client. + # Validate the clients. if self.validate: - if cmd in [Command.SYNC, Command.DIFFS, Command.MIGRATE]: + if cmd in [Command.SYNC, Command.DIFFS, Command.MIGRATE, Command.RESET]: try: await _validate_client(self.destination_client) except Exception: sys.exit(1) - if cmd in [Command.IMPORT, Command.MIGRATE]: + if cmd in [Command.IMPORT, Command.MIGRATE, Command.RESET]: try: await _validate_client(self.source_client) except Exception: @@ -140,6 +141,7 @@ def build_config(cmd: Command, **kwargs: Optional[Any]) -> Configuration: create_global_downtime = kwargs.get("create_global_downtime") validate = kwargs.get("validate") verify_ddr_status = kwargs.get("verify_ddr_status") + backup_before_reset = not kwargs.get("do_not_backup") cleanup = kwargs.get("cleanup") if cleanup: @@ -149,9 +151,19 @@ def build_config(cmd: Command, **kwargs: Optional[Any]) -> Configuration: "force": FORCE, }[cleanup.lower()] - # Initialize state + # Set resource paths source_resources_path = kwargs.get(SOURCE_PATH_PARAM, SOURCE_PATH_DEFAULT) destination_resources_path = kwargs.get(DESTINATION_PATH_PARAM, DESTINATION_PATH_DEFAULT) + + # Confusing, but the source for the import needs to be the destination of the reset + # If a destination is going to be reset then a backup needs to be performed. A backup + # is just an import, the source of that import is the destination of the reset. 
+ if cmd == Command.RESET: + cleanup = TRUE + source_client = CustomClient(destination_api_url, destination_auth, retry_timeout, timeout, send_metrics) + source_resources_path = f"{destination_resources_path}/.backup/{str(time.time())}" + + # Initialize state state = State(source_resources_path=source_resources_path, destination_resources_path=destination_resources_path) # Initialize Configuration @@ -170,6 +182,7 @@ def build_config(cmd: Command, **kwargs: Optional[Any]) -> Configuration: send_metrics=send_metrics, state=state, verify_ddr_status=verify_ddr_status, + backup_before_reset=backup_before_reset, ) # Initialize resource classes diff --git a/datadog_sync/utils/resources_handler.py b/datadog_sync/utils/resources_handler.py index 3201b925..3fa1db39 100644 --- a/datadog_sync/utils/resources_handler.py +++ b/datadog_sync/utils/resources_handler.py @@ -7,6 +7,7 @@ import asyncio from collections import defaultdict from copy import deepcopy +from time import sleep from typing import Dict, TYPE_CHECKING, List, Optional, Set, Tuple from click import confirm @@ -41,6 +42,33 @@ def __init__(self, config: Configuration) -> None: async def init_async(self) -> None: self.worker: Workers = Workers(self.config) + async def reset(self) -> None: + if self.config.backup_before_reset: + await self.import_resources() + else: + # make the warning red and give the user time to hit ctrl-c + self.config.logger.warning("\n\033[91m\nABOUT TO RESET WITHOUT BACKUP\033[00m\n") + sleep(5) + await self.import_resources_without_saving() + + # move the import data from source to destination + self.config.state._data.destination = self.config.state._data.source + + for resource_type in self.config.resources_arg: + resources = {} + for _id, resource in self.config.state._data.destination[resource_type].items(): + resources[(resource_type, _id)] = resource + + if resources: + delete = _cleanup_prompt(self.config, resources) + if delete: + self.config.logger.info("deleting resources...") + 
await self.worker.init_workers(self._cleanup_worker, None, None) + for resource in resources: + self.worker.work_queue.put_nowait(resource) + await self.worker.schedule_workers() + self.config.logger.info("finished deleting resources") + async def apply_resources(self) -> Tuple[int, int]: # Build dependency graph and missing resources self._dependency_graph, missing = self.get_dependency_graph() @@ -210,6 +238,10 @@ async def _diffs_worker_cb(self, q_item: List) -> None: ) async def import_resources(self) -> None: + await self.import_resources_without_saving() + self.config.state.dump_state(Origin.SOURCE) + + async def import_resources_without_saving(self) -> None: # Get all resources for each resource type tmp_storage = defaultdict(list) await self.worker.init_workers(self._import_get_resources_cb, None, len(self.config.resources_arg), tmp_storage) @@ -229,9 +261,6 @@ async def import_resources(self) -> None: await self.worker.schedule_workers_with_pbar(total=total) self.config.logger.info(f"finished importing individual resource items: {self.worker.counter}.") - # Dump resources - self.config.state.dump_state(Origin.SOURCE) - async def _import_get_resources_cb(self, resource_type: str, tmp_storage) -> None: self.config.logger.info("getting resources", resource_type=resource_type) @@ -396,6 +425,6 @@ def _cleanup_prompt( _id=_id, ) - return confirm("Delete above resources from destination org?") + return confirm(f"Delete above {len(resources_to_cleanup)} resources from destination org?") else: return False diff --git a/tests/conftest.py b/tests/conftest.py index d3e8d83d..a0c6c0cd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -140,6 +140,7 @@ def config(): state=State(), verify_ddr_status=True, send_metrics=True, + backup_before_reset=True, ) resources = init_resources(cfg)