diff --git a/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.html b/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.html index 73ca6de5..a6cf9e9d 100644 --- a/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.html +++ b/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.html @@ -9,7 +9,22 @@

DESCRIPTION

Which and how many maps are aggregated can be controled using the -granularity and sampling options. +granularity, temporal_buffer, temporal_offset, +and sampling options.

The temporal extent of the resulting +raster maps is calculated as follows: +new_start_time = adjust_datetime_to_granularity(start_time) + +temporal_offset - temporal_buffer
+new_end_time = adjust_datetime_to_granularity(start_time) + +granularity + temporal_offset + temporal_buffer + +

+For example, if granularity is set to "1 day" and the +temporal_buffer is set to "1 day", and the temporal_offset is +not given or 0, the granule for a raster map with the start_time +"2024-12-12 13:45" will have a start_time of "2024-12-11 00:00" +and an end_time of "2024-12-14 00:00". If the temporal_offset is +set to "-1 day", the start_time will be "2024-12-10 00:00" and the end_time +will be "2024-12-13 00:00".

t.rast.aggregate.condition applies a mask (given in mask_label) while @@ -22,8 +37,8 @@

DESCRIPTION

The module assumes and requires that the input STRDS is equipped with semantic_labels. semantic_labels are used to compile -mapcalculator expressions for each ganule and temporal extent selected -for the given granule. +mapcalculator expressions for each granule (temporal extent) selected +for the given granularity.

Per granule, one raster map is produced for the condition_label, @@ -44,9 +59,19 @@

DESCRIPTION

Both input and output of this module is a single space time raster dataset. A subset of the input space time raster dataset can be selected using the -where option. In addition, input maps can be filtered spatially using +where option. In addition, input maps can be filtered spatially using the region_relation option. +

+If the i-flag is set, only granules that are fully within the +(selected) temporal extent of the input +STRDS are processed. Granules that start before the temporal extent of +the input maps or end after the temporal extent of the input maps are +being skipped. If temporal_buffer or temporal_offset or a larger +granularity are given, the user should make sure that the temporal +extent of the STRDS or the raster maps selected by the where-clause +or spatial filter cover at least one granule. +

The resulting raster maps can also be registered into an existing Space Time Raster Dataset if the e-flag and --overwrite flags @@ -54,11 +79,11 @@

DESCRIPTION

EXAMPLE

-The example uses the North Carolina extra time series of MODIS Land Surface Temperature -maps (download). -(The mapset has to be unzip in one of the North Carolina locations.) -

-Patching the MODIS Land Surface Temperature for 2016 (filling missing pixels by subsequent maps in the time series): + +

Daily mosaic for Sentinel-3 Fractional Snow Cover

+Patching the Sentinel-3 Fractional Snow Cover (FSC) to daily mosaics, filling missing +pixels from other maps within the same granule (day), and selecting pixels with the lowest +solar zenith angle if more FSC maps contain valid values for the same pixel:
 t.rast.aggregate.condition input=Sentinel_3_SLSTR_FSC output=Sentinel_3_SLSTR_FSC_daily \
   --overwrite --verbose -e mask_label=S3_SLSTR_cloud_mask mask_value=0 \
@@ -73,6 +98,31 @@ 

EXAMPLE

t.info Sentinel_3_SLSTR_FSC
+

5-day moving window mosaic for Sentinel-3 Fractional Snow Cover

+Patching the Sentinel-3 Fractional Snow Cover (FSC) to 5-day mosaics, filling missing +pixels from other maps within the same granule, and selecting pixels with the lowest +solar zenith angle if more FSC maps contain valid values for the same pixel. The +granule is a 5-day period (granularity + 2 * temporal_buffer), placed as a moving +window in relation to each time step (granularity) in the SpaceTimeRasterDataset. +End time in the example is at the end of each time step (day) where the +>granule end time = start of current day + granularity + temporal_buffer + +temporal_offset. Start time in the example is 4 days before the start of each +time step (day) where the >granule start time = start of current day - +temporal_buffer + temporal_offset. +
+t.rast.aggregate.condition input=Sentinel_3_SLSTR_FSC output=Sentinel_3_SLSTR_FSC_5days_running \
+  --overwrite --verbose -e -i mask_label=S3_SLSTR_cloud_mask mask_value=0 \
+  condition_label=S3_solar_zenith aggregate_condition=nmin \
+  aggregation_labels=S3_SLSTR_fractional_snow_cover \
+  granularity="1 day" temporal_buffer="2 days" temporal_offset="-2 days" \
+  basename=S3_SLSTR_FSC title="Sentinel-3 SLSTR FSC 5-day moving window" \
+  description="5-day moving window Fractional Snow Cover mosaics from Sentinel-3 SLSTR instrument" \
+  region_relation=contains \
+  where="start_time >= '2023-01' and start_time <= '2023-12'" \
+  nprocs=8
+t.info Sentinel_3_SLSTR_FSC_5days_running
+
+

SEE ALSO

diff --git a/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.py b/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.py index c496fa92..58a63f86 100755 --- a/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.py +++ b/src/temporal/t.rast.aggregate.condition/t.rast.aggregate.condition.py @@ -1,11 +1,12 @@ #!/usr/bin/env python -""" -MODULE: t.rast.aggregate.condition +"""MODULE: t.rast.aggregate.condition AUTHOR(S): Stefan Blumentrath -PURPOSE: Aggregates rasters maps in space and time, applying a condition for valid data using r.mapcalc -COPYRIGHT: (C) 2024 by Stefan Blumentrath, Norwegian Water and Energy Directorate and the GRASS Development Team +PURPOSE: Aggregates rasters maps in space and time, applying a condition for valid + data using r.mapcalc +COPYRIGHT: (C) 2024-2025 by Stefan Blumentrath, Norwegian Water and Energy Directorate + and the GRASS Development Team This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by @@ -115,6 +116,22 @@ # % multiple: no # %end +# %option +# % key: temporal_buffer +# % type: string +# % description: Temporal buffer around the granule of the aggregation, format absolute time "x years, x months, x weeks, x days, x hours, x minutes, x seconds" or an integer value for relative time +# % required: no +# % multiple: no +# %end + +# %option +# % key: temporal_offset +# % type: string +# % description: Temporal offset applied to the aggregation granularity, format absolute time "x years, x months, x weeks, x days, x hours, x minutes, x seconds" or an integer value for relative time +# % required: no +# % multiple: no +# %end + # %option G_OPT_T_SAMPLE # % options: equal,overlaps,overlapped,starts,started,finishes,finished,during,contains # % answer: contains @@ -147,6 +164,11 @@ # % guisection: Settings # %end +# %flag +# % key: i +# % description: Do not process granules that are incomplete (not 
completely within the temporal extend of the (selected) maps of the input STRDS) +# %end + # %flag # % key: n # % description: Register Null maps @@ -158,11 +180,7 @@ # % required: -e,title,description # %end -# ToDo: -# - Support granules from moving windows (even if leading to invalid temporal topology) with: -# m-flag for moving temporal window + d-flag for decrementing granule (default is increment) - -# - implement n-flag +# TODO: # - Create a TemporalExtentTuple class based on # https://grass.osgeo.org/grass84/manuals/libpython/_modules/grass/temporal/temporal_extent.html # That would improve the performance if no more advanced temporal objects are needed @@ -172,13 +190,13 @@ import sys from copy import deepcopy +from datetime import datetime import grass.pygrass.modules as pymod import grass.script as gs import grass.temporal as tgis from grass.temporal.core import ( get_current_mapset, - get_tgis_message_interface, init_dbif, ) from grass.temporal.datetime_math import ( @@ -191,74 +209,181 @@ from grass.temporal.spatio_temporal_relationships import SpatioTemporalTopologyBuilder -def create_ganule_list(map_list, granularity, relative_time_unit=None): - """Create a list of empty RasterDataset with the given temporal - granularity from a list of input maps +def initialize_raster_layer( + map_id: str, + temporal_extent: tuple[datetime, datetime], + semantic_label: str, +) -> RasterDataset: + """Initialize the raster layer. 
+ + :param id: The id of the raster layer + :param temporal_extent: The temporal extent of the raster layer + :param semantic_label: The semantic label of the raster layer + :return: The raster layer + """ + map_layer = RasterDataset(map_id) + map_layer.set_temporal_extent(temporal_extent) + map_layer.set_semantic_label(semantic_label) + return map_layer + + +def check_absolute_granularity_string( + input_option: str, + granularity: str, +) -> str | None: + """Check if the granularity string is a valid absolute time granularity string or None. + + :param input_option: string representing the key of the input option + :param granularity: string to represent the granularity, temporal offset or temporal buffer + :return: The granularity string if the string is valid or None if granularity is None or + empty. The function throws a fatal error if the granularity string is not valid. + """ + # Check if the granularity string is valid + if not granularity: + return None + if not tgis.check_granularity_string(granularity, "absolute"): + gs.fatal( + _( + "Invalid granularity string <{gran}> absolute temporal type in {opt} option.", + ).format(gran=granularity, opt=input_option), + ) + return granularity + + +def check_relative_granularity_string(input_option: str, granularity: str) -> int: + """Check if the granularity string empty / None or a valid integer value. + + :param input_option: string representing the key of the input option + :param granularity: string to represent the granularity, temporal offset or temporal buffer + :return: The granularity string as integer or 0. 
The function throws a fatal error if + the string is not a valid integer + """ + # Check if the granularity string is valid and can be converted to integer + if not granularity: + return 0 + try: + return int(granularity) + except ValueError: + gs.fatal( + _( + "Invalid granularity string <{gran}> for relative temporal type in: {opt}", + ).format(gran=granularity, opt=input_option), + ) + + +def create_granule_list( + map_list: list[dict], + granularity: str, + buffer: str | int, + offset: str | int, + *, + relative_time_unit: bool = False, + process_incomplete_granules: bool = False, +) -> list[RasterDataset]: + """Create a list of empty RasterDataset with the requested temporal extent. + + The function creates a list of empty RasterDatasets with the requested + granularity from a list of input maps. The temporal extent of the output + is determined by the first and last map in the input list as well as a + temporal buffer and offset if provided. + :param map_list: List of database rows (SQLite or PostgreSQL) :param granularity: string describing the granularity of the output list, is expected to be validated beforehand :relative_time_unit: string with the relative time unit of the input STRDS, None means absolute time - :return granularity_list: a list of RasterDataset with temporal extent""" - start_time = map_list[0]["start_time"] + :process_incomplete_granules: Process granules that are not comletely + within the temporal extent of the map list + :return granularity_list: a list of initialized RasterDataset with temporal extent + """ + # Get the start time of the series + series_start_time = map_list[0]["start_time"] if not relative_time_unit: - start_time = tgis.adjust_datetime_to_granularity(start_time, granularity) + series_start_time = tgis.adjust_datetime_to_granularity( + series_start_time, + granularity, + ) + start_time = tgis.increment_datetime_by_string( + tgis.decrement_datetime_by_string( + series_start_time, + buffer, + ), + offset, + ) + else: + 
start_time = series_start_time - buffer + offset - # We use the end time first - end_time = map_list[-1]["end_time"] + # Get the end time of the series + # Try using the last map's end time + series_end_time = map_list[-1]["end_time"] has_end_time = True - # In case no end time is available, then we use the start time of the last map layer - if end_time is None: - end_time = map_list[-1]["start_time"] + # In case no end time is available for the series, the start time of the last map layer is used + if series_end_time is None: + series_end_time = map_list[-1]["start_time"] has_end_time = False granularity_list = [] # Build the granularity list while True: - if has_end_time is True: - if start_time >= end_time: - break - else: - if start_time > end_time: - break - + if ( + has_end_time and start_time >= series_end_time + ) or start_time > series_end_time: + break + if not process_incomplete_granules and start_time < series_start_time: + start_time = tgis.increment_datetime_by_string(start_time, granularity) + continue + # Initialize granule granule = tgis.RasterDataset(None) + # Set start start = start_time + # Set end if relative_time_unit: # For input STRDS with relative time - end = start_time + int(granularity) + end = start + granularity + 2 * buffer + offset granule.set_relative_time(start, end, relative_time_unit) else: # For input STRDS with absolute time - end = tgis.increment_datetime_by_string(start_time, granularity) + end = tgis.increment_datetime_by_string( + tgis.increment_datetime_by_string( + tgis.increment_datetime_by_string(start, granularity), + buffer, + ), + buffer, + ) granule.set_absolute_time(start, end) - start_time = end + # Check if granule is covered by series if required + if not process_incomplete_granules and end > series_end_time: + break + # Compute new start time for next granule + start_time = tgis.increment_datetime_by_string(start_time, granularity) granularity_list.append(granule) + return granularity_list def 
aggregate_with_condition( - granularity_list, - granularity, - map_list, - time_unit=None, - basename=None, - time_suffix="gran", - offset=0, - topo_list=None, - mask_label=None, - mask_value=0, - condition_label=None, - aggregate_condition="nmax", - aggregation_labels=None, - nprocs=1, - dbif=None, -): - """Aggregate a list of raster input maps with r.mapcalc + granularity_list: list, + granularity: str, + map_list: list[RasterDataset], + *, + time_unit: str | None = None, + basename: str | None = None, + time_suffix: str | None = "gran", + offset: int | None = 0, + topo_list: list | None = None, + mask_label: str | None = None, + mask_value: int | None = 0, + condition_label: str | None = None, + aggregate_condition: str | None = "nmax", + aggregation_labels: list | None = None, + nprocs: int | None = 1, + dbif: object = None, +) -> list[RasterDataset] | None: + """Aggregate a list of raster input maps with r.mapcalc. :param granularity_list: A list of AbstractMapDataset objects. The temporal extents of the objects are used @@ -285,17 +410,15 @@ def aggregate_with_condition( :param nprocs: The number of processes used for parallel computation (only used with ) :param dbif: The database interface to be used :return: A list of RasterDataset objects that contain the new map names - and the temporal extent as well as semantic_labels for map registration + and the temporal extent as well as semantic_labels for map registration. + Returns None if map_list is None. 
""" - if not map_list: return None if not topo_list: topo_list = ["contains"] - msgr = get_tgis_message_interface() - dbif, connection_state_changed = init_dbif(dbif) agg_module = pymod.Module( @@ -303,26 +426,26 @@ def aggregate_with_condition( overwrite=gs.overwrite(), quiet=True, run_=False, - # finish_=False, ) - count = 0 output_list = [] current_mapset = get_current_mapset() # The module queue for parallel execution process_queue = pymod.ParallelModuleQueue(nprocs) - map_dict = {} for raster_maps in map_list: raster_map = tgis.RasterDataset(None) if time_unit: raster_map.set_relative_time( - raster_maps["start_time"], raster_maps["end_time"], time_unit + raster_maps["start_time"], + raster_maps["end_time"], + time_unit, ) else: raster_map.set_absolute_time( - raster_maps["start_time"], raster_maps["end_time"] + raster_maps["start_time"], + raster_maps["end_time"], ) map_dict[raster_map] = { @@ -337,10 +460,7 @@ def aggregate_with_condition( "condition_labels": [], # Condition label "mask_labels": [], # Mask label } - for granule in granularity_list: - msgr.percent(count, len(granularity_list), 1) - count += 1 - + for count, granule in enumerate(granularity_list): granule_temporal_extent = granule.get_temporal_extent() for aggregation_label in aggregation_labels: @@ -356,17 +476,19 @@ def aggregate_with_condition( for matching_object in matching_objects: map_ids = map_dict[matching_object]["id"].split(",") semantic_labels = map_dict[matching_object]["semantic_label"].split( - "," + ",", ) if len(map_ids) != len(semantic_labels): gs.warning("Missing maps") continue - if not set( - [mask_label, condition_label, *aggregation_labels] - ).issubset(set(semantic_labels)): + if not { + mask_label, + condition_label, + *aggregation_labels, + }.issubset(set(semantic_labels)): gs.warning( _( - "Missing input some raster maps for {extent}. Found only the following semantic_labels: {labels}" + "Missing input some raster maps for {extent}. 
Found only the following semantic_labels: {labels}", ).format( extent=" - ".join( [ @@ -379,7 +501,7 @@ def aggregate_with_condition( if semantic_labels else None ), - ) + ), ) continue @@ -391,7 +513,7 @@ def aggregate_with_condition( # Create mask expression for aggregation map for aggregation_label in aggregation_labels: res_dict[aggregation_label].append( - f"if({mask_map}=={mask_value},if({{output_condition_map}}=={condition_map},{map_ids[semantic_labels.index(aggregation_label)]},null()),null())" + f"if({mask_map}=={mask_value},if({{output_condition_map}}=={condition_map},{map_ids[semantic_labels.index(aggregation_label)]},null()),null())", ) res_dict["mask_labels"].append(mask_list) @@ -399,66 +521,79 @@ def aggregate_with_condition( if res_dict != res_dict_template: if granule.is_time_absolute() is True and time_suffix == "gran": suffix = create_suffix_from_datetime( - granule.temporal_extent.get_start_time(), granularity + granule.temporal_extent.get_start_time(), + granularity, ) elif granule.is_time_absolute() is True and time_suffix == "time": suffix = create_time_suffix(granule) else: suffix = create_numeric_suffix( - "", count + offset, time_suffix + "", + count + offset, + time_suffix, ).removeprefix("_") output_name = f"{basename}_{suffix}" # Compile expressions expression = f"{output_name}_{condition_label}_{aggregate_condition}={aggregate_condition}({','.join(res_dict['mask_labels'])})\n" - map_layer = RasterDataset( - f"{output_name}_{condition_label}_{aggregate_condition}@{current_mapset}" + map_layer = initialize_raster_layer( + f"{output_name}_{condition_label}_{aggregate_condition}@{current_mapset}", + granule_temporal_extent, + f"{condition_label}_{aggregate_condition}", ) - map_layer.set_temporal_extent(granule_temporal_extent) - map_layer.set_semantic_label(f"{condition_label}_{aggregate_condition}") output_list.append(map_layer) condition_module = deepcopy(agg_module) condition_module.inputs.expression = expression expression = "" for 
aggregation_label in aggregation_labels: - expression += f"{output_name}_{aggregation_label}=nmedian({','.join([eval_expression for eval_expression in res_dict[aggregation_label]])})" - map_layer = RasterDataset( - f"{output_name}_{aggregation_label}@{current_mapset}" + expression += f"{output_name}_{aggregation_label}=nmedian({','.join(list(res_dict[aggregation_label]))})" + map_layer = initialize_raster_layer( + f"{output_name}_{aggregation_label}@{current_mapset}", + granule_temporal_extent, + aggregation_label, ) - map_layer.set_temporal_extent(granule_temporal_extent) - map_layer.set_semantic_label(aggregation_label) output_list.append(map_layer) expression = expression.format( - output_condition_map=f"{output_name}_{condition_label}_{aggregate_condition}" + output_condition_map=f"{output_name}_{condition_label}_{aggregate_condition}", ) mc_module = deepcopy(agg_module) mc_module.inputs.expression = expression.format( - output_condition_map=f"{output_name}_{condition_label}_{aggregate_condition}" + output_condition_map=f"{output_name}_{condition_label}_{aggregate_condition}", ) - - # Add modules to process queue process_queue.put(pymod.MultiModule([condition_module, mc_module])) + + if not process_queue.get_num_run_procs() > 0: + gs.info(_("No enough maps found for aggregation")) + return [] + gs.verbose( + _("Aggregating a total of %s time steps within %s granules") + % ( + len(map_dict), + process_queue.get_num_run_procs(), + ), + ) + # Add modules to process queue + process_queue.wait() if connection_state_changed: dbif.close() - msgr.percent(1, 1, 1) - return output_list def get_registered_maps_grouped( - stds, - columns=None, - where=None, - group_by=None, - spatial_extent=None, - spatial_relation=None, - dbif=None, -): + stds: tgis.SpaceTimeRasterDataset, + *, + columns: list[str] | None = None, + where: str | None = None, + group_by: str | None = None, + spatial_extent: tuple[datetime, datetime] | None = None, + spatial_relation: str | None = None, + 
dbif: object | None = None, +) -> list[dict]: """Return SQL rows of all registered maps. In case columns are not specified, each row includes all columns @@ -487,7 +622,6 @@ def get_registered_maps_grouped( :return: SQL rows of all registered maps, In case nothing found None is returned """ - dbif, connection_state_changed = init_dbif(dbif) if not columns: @@ -514,21 +648,21 @@ def get_registered_maps_grouped( ) ) - # filter by spatial extent + # Filter by spatial extent if spatial_extent and spatial_relation: where = stds._update_where_statement_by_spatial_extent( - where, spatial_extent, spatial_relation + where, + spatial_extent, + spatial_relation, ) - sql = "SELECT %s FROM %s WHERE %s.id IN (SELECT id FROM %s)" % ( - columns, - map_view, - map_view, - stds.get_map_register(), + sql = ( + f"SELECT {columns} FROM {map_view} " + f"WHERE {map_view}.id IN (SELECT id FROM {stds.get_map_register()})" ) - if where is not None and where != "": - sql += " AND (%s)" % (where.split(";")[0]) + if where: + sql += f" AND ({where.split(';')[0]})" sql += f" GROUP BY {group_columns};" try: dbif.execute(sql, mapset=stds.base.mapset) @@ -538,8 +672,8 @@ def get_registered_maps_grouped( dbif.close() stds.msgr.error( _("Unable to get map ids from register table <{}>").format( - stds.get_map_register() - ) + stds.get_map_register(), + ), ) raise @@ -549,12 +683,9 @@ def get_registered_maps_grouped( return rows -def main(): - """Main function""" - # lazy imports - overwrite = gs.overwrite() - - # Get the options +def main() -> None: + """Do the main work.""" + # Get where option where = options["where"] # Make sure the temporal database exists @@ -562,7 +693,6 @@ def main(): dbif = tgis.SQLDatabaseInterfaceConnection() dbif.connect() - current_mapset = get_current_mapset() spatial_extent = None if options["region_relation"]: @@ -570,9 +700,18 @@ def main(): input_strds = open_old_stds(options["input"], "strds") # We will create the strds later, but need to check here - 
tgis.check_new_stds(options["output"], "strds", dbif, overwrite) + tgis.check_new_stds(options["output"], "strds", dbif, gs.overwrite()) relative_time_unit = input_strds.get_relative_time_unit() + # Validate the granularity options input + validate_granularity = ( + check_relative_granularity_string + if relative_time_unit + else check_absolute_granularity_string + ) + for opt in ("granularity", "temporal_buffer", "temporal_offset"): + options[opt] = validate_granularity(opt, options[opt]) + # Get and check semantic labels semantic_labels = [ options["condition_label"], @@ -580,13 +719,14 @@ def main(): *options["aggregation_labels"].split(","), ] missing_labels = set(semantic_labels).difference( - input_strds.metadata.semantic_labels.split(",") + input_strds.metadata.semantic_labels.split(","), ) if missing_labels: gs.fatal( _("Semantic labels <{labels}> are missing from STRDS <{strds}>").format( - strds=input_strds.get_id(), labels=", ".join(missing_labels) - ) + strds=input_strds.get_id(), + labels=", ".join(missing_labels), + ), ) semantic_labels = ",".join( [f"'{semantic_label}'" for semantic_label in semantic_labels] @@ -609,15 +749,20 @@ def main(): if not map_list: gs.warning( _("No maps found to process in Space time raster dataset <{}>.").format( - options["input"] - ) + options["input"], + ), ) dbif.close() sys.exit(0) # Create granule list from map list - granularity_list = create_ganule_list( - map_list, options["granularity"], relative_time_unit=relative_time_unit + granularity_list = create_granule_list( + map_list, + options["granularity"], + options["temporal_buffer"], + options["temporal_offset"], + relative_time_unit=relative_time_unit, + process_incomplete_granules=not flags["i"], ) output_list = aggregate_with_condition( @@ -649,20 +794,21 @@ def main(): description = options["description"] or description # Initialize SpaceTimeRasterDataset (STRDS) using tgis - strds_long_name = f"{options['output']}@{current_mapset}" - output_strds = 
tgis.SpaceTimeRasterDataset(strds_long_name) + output_strds = tgis.SpaceTimeRasterDataset( + f"{options['output']}@{get_current_mapset()}", + ) # Check if target STRDS exists and create it if not or abort if overwriting is not allowed - if output_strds.is_in_db() and not overwrite: + if output_strds.is_in_db() and not gs.overwrite(): gs.fatal( _( "Output STRDS <{}> exists." - "Use --overwrite together with -e to modify the existing STRDS." - ).format(options["output"]) + "Use --overwrite together with -e to modify the existing STRDS.", + ).format(options["output"]), ) # Create STRDS if needed - if not output_strds.is_in_db() or (overwrite and not flags["e"]): + if not output_strds.is_in_db() or (gs.overwrite() and not flags["e"]): output_strds = tgis.open_new_stds( options["output"], "strds", @@ -671,7 +817,7 @@ def main(): description, semantic_type, dbif, - overwrite, + gs.overwrite(), ) else: output_strds = open_old_stds(options["output"], "strds") @@ -684,9 +830,6 @@ def main(): relative_time_unit, dbif, ) - - # Update the raster metadata table entries with aggregation type - # output_strds.set_aggregation_type(method) output_strds.metadata.update(dbif) dbif.close()