Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#!/usr/bin/env python

Check failure on line 1 in src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.py

View workflow job for this annotation

GitHub Actions / Python Code Quality Checks (ubuntu-22.04)

[pylint] src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.py E:232, 5: unsupported operand type(s) for | (unsupported-binary-operation) E:276,12: unsupported operand type(s) for | (unsupported-binary-operation) E:277,12: unsupported operand type(s) for | (unsupported-binary-operation) E:372,15: unsupported operand type(s) for | (unsupported-binary-operation) E:373,14: unsupported operand type(s) for | (unsupported-binary-operation) E:374,17: unsupported operand type(s) for | (unsupported-binary-operation) E:375,12: unsupported operand type(s) for | (unsupported-binary-operation) E:376,15: unsupported operand type(s) for | (unsupported-binary-operation) E:377,16: unsupported operand type(s) for | (unsupported-binary-operation) E:378,16: unsupported operand type(s) for | (unsupported-binary-operation) E:379,21: unsupported operand type(s) for | (unsupported-binary-operation) E:380,25: unsupported operand type(s) for | (unsupported-binary-operation) E:381,24: unsupported operand type(s) for | (unsupported-binary-operation) E:382,12: unsupported operand type(s) for | (unsupported-binary-operation) E:615,13: unsupported operand type(s) for | (unsupported-binary-operation) E:616,11: unsupported operand type(s) for | (unsupported-binary-operation) E:617,14: unsupported operand type(s) for | (unsupported-binary-operation) E:618,20: unsupported operand type(s) for | (unsupported-binary-operation) E:619,22: unsupported operand type(s) for | (unsupported-binary-operation) E:620,10: unsupported operand type(s) for | (unsupported-binary-operation)

"""MODULE: t.rast.aggregate.condition
AUTHOR(S): Stefan Blumentrath
Expand Down Expand Up @@ -202,7 +202,6 @@
from grass.temporal.datetime_math import (
create_numeric_suffix,
create_suffix_from_datetime,
create_time_suffix,
)
from grass.temporal.open_stds import open_old_stds
from grass.temporal.space_time_datasets import RasterDataset
Expand Down Expand Up @@ -407,7 +406,8 @@
:param aggregate_condition: string of the r.mapcalc method used for
aggregating the condition maps (default is nmax)
:param aggregation_labels: Semantic labels that represent maps to be aggregated
:param nprocs: The number of processes used for parallel computation (only used with )
:param nprocs: The number of processes used for parallel computation
(only used with CPU)
:param dbif: The database interface to be used
:return: A list of RasterDataset objects that contain the new map names
and the temporal extent as well as semantic_labels for map registration.
Expand Down Expand Up @@ -459,13 +459,14 @@
res_dict_template = {
"condition_labels": [], # Condition label
"mask_labels": [], # Mask label
"mask_map_labels": [], # Mask map labels
}
for aggregation_label in aggregation_labels:
res_dict_template[aggregation_label] = []

for count, granule in enumerate(granularity_list):
granule_temporal_extent = granule.get_temporal_extent()

for aggregation_label in aggregation_labels:
res_dict_template[aggregation_label] = []

res_dict = deepcopy(res_dict_template)

# Loop over maps with matching temporal topology
Expand Down Expand Up @@ -494,7 +495,7 @@
[
dt.isoformat()
for dt in matching_object.get_absolute_time()
]
],
),
labels=(
",".join(semantic_labels)
Expand All @@ -508,25 +509,39 @@
# Create mask expression for condition map
mask_map = map_ids[semantic_labels.index(mask_label)]
condition_map = map_ids[semantic_labels.index(condition_label)]
mask_list = f"if({mask_map}=={mask_value},{condition_map},null())"
mask_check = f"{mask_map}=={mask_value}"
mask_list = f"if({mask_check},{condition_map},null())"

# Create mask expression for aggregation map
for aggregation_label in aggregation_labels:
res_dict[aggregation_label].append(
f"if({mask_map}=={mask_value},if({{output_condition_map}}=={condition_map},{map_ids[semantic_labels.index(aggregation_label)]},null()),null())",
f"if({mask_check},if({{output_condition_map}}=={condition_map},{map_ids[semantic_labels.index(aggregation_label)]},null()),null())",
)
res_dict["mask_labels"].append(mask_list)
# For creating an aggregated mask, invert the check
res_dict["mask_map_labels"].append(mask_check.replace("==", "!="))

# Check if any maps are temporally related to the granule
if res_dict != res_dict_template:
if granule.is_time_absolute() is True and time_suffix == "gran":
suffix = create_suffix_from_datetime(
granule.temporal_extent.get_start_time(),
granularity,
)
elif granule.is_time_absolute() is True and time_suffix == "time":
suffix = create_time_suffix(granule)
).replace("_", "")
suffix += "_"
suffix += create_suffix_from_datetime(
granule.temporal_extent.get_end_time(),
granularity,
).replace("_", "")

elif granule.is_time_absolute() is True and time_suffix == "time":
suffix = granule.temporal_extent.get_start_time().strftime(
"%Y%m%d%H%M%S",
)
suffix += "_"
suffix += granule.temporal_extent.get_end_time().strftime(
"%Y%m%d%H%M%S",
)
else:
suffix = create_numeric_suffix(
"",
Expand All @@ -536,7 +551,8 @@
output_name = f"{basename}_{suffix}"

# Compile expressions
expression = f"{output_name}_{condition_label}_{aggregate_condition}={aggregate_condition}({','.join(res_dict['mask_labels'])})\n"
maps = ",".join(res_dict["mask_labels"])
expression = f"{output_name}_{condition_label}_{aggregate_condition}={aggregate_condition}({maps})\n"
map_layer = initialize_raster_layer(
f"{output_name}_{condition_label}_{aggregate_condition}@{current_mapset}",
granule_temporal_extent,
Expand All @@ -547,16 +563,25 @@
condition_module.inputs.expression = expression
expression = ""
for aggregation_label in aggregation_labels:
expression += f"{output_name}_{aggregation_label}=nmedian({','.join(list(res_dict[aggregation_label]))})"
maps = ",".join(list(res_dict[aggregation_label]))
expression += f"{output_name}_{aggregation_label}=nmedian({maps})\n"
map_layer = initialize_raster_layer(
f"{output_name}_{aggregation_label}@{current_mapset}",
granule_temporal_extent,
aggregation_label,
)
output_list.append(map_layer)
maps = ",".join(res_dict["mask_map_labels"])
expression += f"{output_name}_{mask_label}=nmin({maps})"
expression = expression.format(
output_condition_map=f"{output_name}_{condition_label}_{aggregate_condition}",
)
map_layer = initialize_raster_layer(
f"{output_name}_{mask_label}@{current_mapset}",
granule_temporal_extent,
mask_label,
)
output_list.append(map_layer)

mc_module = deepcopy(agg_module)
mc_module.inputs.expression = expression.format(
Expand Down Expand Up @@ -644,7 +669,7 @@
group_columns
+ ", "
+ ", ".join(
[f"group_concat({column},',') AS {column}s" for column in columns]
[f"group_concat({column},',') AS {column}s" for column in columns],
)
)

Expand Down Expand Up @@ -729,7 +754,7 @@
),
)
semantic_labels = ",".join(
[f"'{semantic_label}'" for semantic_label in semantic_labels]
[f"'{semantic_label}'" for semantic_label in semantic_labels],
)

if where:
Expand Down Expand Up @@ -798,7 +823,8 @@
f"{options['output']}@{get_current_mapset()}",
)

# Check if target STRDS exists and create it if not or abort if overwriting is not allowed
# Check if target STRDS exists and create it if not
# or abort if overwriting is not allowed
if output_strds.is_in_db() and not gs.overwrite():
gs.fatal(
_(
Expand Down
Loading