Commit ea8beee

analyse missing debug output
psy0rz committed Sep 26, 2023
1 parent defbc2d commit ea8beee
Showing 1 changed file with 10 additions and 6 deletions.
zfs_autobackup/ZfsAutobackup.py (16 changes: 10 additions & 6 deletions)
@@ -144,14 +144,18 @@ def get_parser(self):
 
     # NOTE: this method also uses self.args. args that need extra processing are passed as function parameters:
     def thin_missing_targets(self, target_dataset, used_target_datasets):
-        """thin target datasets that are missing on the source."""
+        """thin target datasets that are missing on the source.
+        :type used_target_datasets: list[ZfsDataset]
+        :type target_dataset: ZfsDataset
+        """
 
         self.debug("Thinning obsolete datasets")
         missing_datasets = [dataset for dataset in target_dataset.recursive_datasets if
                             dataset not in used_target_datasets]
 
         count = 0
         for dataset in missing_datasets:
+            self.debug("analyse missing {}".format(dataset))
 
             count = count + 1
             if self.args.progress:
@@ -169,7 +173,11 @@ def thin_missing_targets(self, target_dataset, used_target_datasets):
 
     # NOTE: this method also uses self.args. args that need extra processing are passed as function parameters:
     def destroy_missing_targets(self, target_dataset, used_target_datasets):
-        """destroy target datasets that are missing on the source and that meet the requirements"""
+        """destroy target datasets that are missing on the source and that meet the requirements
+        :type used_target_datasets: list[ZfsDataset]
+        :type target_dataset: ZfsDataset
+        """
 
         self.debug("Destroying obsolete datasets")
 
@@ -364,17 +372,13 @@ def sync_datasets(self, source_node, source_datasets, target_node):
                                               decrypt=self.args.decrypt, encrypt=self.args.encrypt,
                                               zfs_compressed=self.args.zfs_compressed, force=self.args.force, guid_check=not self.args.no_guid_check)
             except Exception as e:
-                # if self.args.progress:
-                # self.clear_progress()
 
                 fail_count = fail_count + 1
                 source_dataset.error("FAILED: " + str(e))
                 if self.args.debug:
                     self.verbose("Debug mode, aborting on first error")
                     raise
 
-            # if self.args.progress:
-            # self.clear_progress()
 
         target_path_dataset = target_node.get_dataset(self.args.target_path)
         if not self.args.no_thinning:
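For context, a minimal standalone sketch of the thinning loop this commit instruments: it collects target datasets that no longer exist on the source and, with the new line, emits a debug message for each one before it is analysed. This is illustrative only; the debug helper and the plain-string dataset names are hypothetical stand-ins for zfs_autobackup's ZfsDataset objects and ZfsAutobackup.debug().

# Minimal sketch of the instrumented thinning pass (illustrative only).

def debug(msg):
    # stand-in for ZfsAutobackup.debug(); in the real tool this output only
    # appears when --debug is given on the command line
    print("# {}".format(msg))

def thin_missing_targets_sketch(recursive_datasets, used_target_datasets):
    """Report target datasets that the source no longer has."""
    debug("Thinning obsolete datasets")
    missing_datasets = [dataset for dataset in recursive_datasets
                        if dataset not in used_target_datasets]

    count = 0
    for dataset in missing_datasets:
        debug("analyse missing {}".format(dataset))  # the line added by this commit
        count = count + 1
        # the real method goes on to show progress and thin the dataset's snapshots
    return count

if __name__ == "__main__":
    thin_missing_targets_sketch(
        ["tank/backup/data", "tank/backup/old-vm"],  # present on the target
        ["tank/backup/data"],                        # still used by the source
    )
    # prints:
    # # Thinning obsolete datasets
    # # analyse missing tank/backup/old-vm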
