diff --git a/avaframe/com4FlowPy/__init__.py b/avaframe/com4FlowPy/__init__.py index e69de29bb..c47489f31 100644 --- a/avaframe/com4FlowPy/__init__.py +++ b/avaframe/com4FlowPy/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +'''Flow-Py model: statistical approach for gravitational mass flows''' diff --git a/avaframe/com4FlowPy/com4FlowPy.py b/avaframe/com4FlowPy/com4FlowPy.py index b3812f0ee..237a47cd8 100755 --- a/avaframe/com4FlowPy/com4FlowPy.py +++ b/avaframe/com4FlowPy/com4FlowPy.py @@ -33,7 +33,7 @@ def com4FlowPyMain(cfgPath, cfgSetup): - """com4FlowPy main function - handles: + """com4FlowPy main function performs the model run and writes results to disk: * reading of input data and model Parameters * calculation * writing of result files @@ -42,13 +42,12 @@ def com4FlowPyMain(cfgPath, cfgSetup): already been created (workDir, tempDir) Parameters - ---------- - cfgPath: dictionary with paths from .ini file - cfgSetup: configparser.SectionProxy Object with "GENERAL" model configs from .ini file + ----------- + cfgPath: configparser.SectionProxy Object + contains paths to input data (from .ini file) + cfgSetup: configparser.SectionProxy Object + "GENERAL" model configs (from .ini file) - Returns - ---------- - nothing - performs model run and writes results to disk """ _startTime = datetime.now().replace(microsecond=0) # used for timing model runtime @@ -63,11 +62,16 @@ def com4FlowPyMain(cfgPath, cfgSetup): modelParameters["infraBool"] = cfgSetup.getboolean("infra") modelParameters["forestBool"] = cfgSetup.getboolean("forest") modelParameters["forestInteraction"] = cfgSetup.getboolean("forestInteraction") + # modelParameters["infra"] = cfgSetup["infra"] + # modelParameters["forest"] = cfgSetup["forest"] + + # Flags for use of dynamic input parameters modelParameters["varUmaxBool"] = cfgSetup.getboolean("variableUmaxLim") modelParameters["varAlphaBool"] = cfgSetup.getboolean("variableAlpha") modelParameters["varExponentBool"] = 
cfgSetup.getboolean("variableExponent") - # modelParameters["infra"] = cfgSetup["infra"] - # modelParameters["forest"] = cfgSetup["forest"] + + # Flag for use of old flux distribution version + modelParameters["fluxDistOldVersionBool"] = cfgSetup.getboolean("fluxDistOldVersion") # Tiling Parameters used for calculation of large model-domains tilingParameters = {} @@ -163,6 +167,9 @@ def com4FlowPyMain(cfgPath, cfgSetup): # check if input layers have same x,y dimensions checkInputLayerDimensions(modelParameters, modelPaths) + # check if input parameters are within physically sensible ranges + checkInputParameterValues(modelParameters, modelPaths) + # get information on cellsize and nodata value from demHeader rasterAttributes = {} @@ -200,9 +207,18 @@ def com4FlowPyMain(cfgPath, cfgSetup): def startLogging(modelParameters, forestParams, modelPaths, MPOptions): - """ - just used to move this chunk of code out of the main function - only performs logging at the start of the simulation + """performs logging at the start of the simulation + + Parameters + ----------- + modelParameters: dict + model input parameters (from .ini - file) + forestParams: dict + input parameters regarding forest interaction (from .ini - file) + modelPaths: dict + paths to input files, workDir, resDir, outputFileFormat, etc. (from .ini - file) + MPOptions: dict + contains parameters for multiprocessing (from .ini - file) """ # Start of Calculation (logging...) log.info("==================================") @@ -243,6 +259,9 @@ def startLogging(modelParameters, forestParams, modelPaths, MPOptions): log.info("calculation with Infrastructure") log.info(f"{'INFRA LAYER:' : <14}{'%s'%modelPaths['infraPath'] : <5}") log.info("------------------------") + if modelParameters["fluxDistOldVersionBool"]: + log.info("Calculation using old (BUGGY!!) 
version of flux distribution!") + log.info("------------------------") for param, value in MPOptions.items(): log.info(f"{'%s:'%param : <20}{value : <5}") # log.info("{}:\t{}".format(param,value)) @@ -255,9 +274,15 @@ def startLogging(modelParameters, forestParams, modelPaths, MPOptions): def checkInputLayerDimensions(modelParameters, modelPaths): - """ - check if all layers have the same size + """check if all input layers have the same size and can be read from the provided paths + + Parameters + ----------- + modelParameters: dict + model input parameters (from .ini - file) + modelPaths: dict + contains paths to input files """ # Check if Layers have same size!!! try: @@ -316,9 +341,9 @@ def checkInputLayerDimensions(modelParameters, modelPaths): if modelParameters["varExponentBool"]: _varExponentHeader = io.read_header(modelPaths["varExponentPath"]) if _demHeader["ncols"] == _varExponentHeader["ncols"] and _demHeader["nrows"] == _varExponentHeader["nrows"]: - log.info("variable Alpha Layer ok!") + log.info("variable exponent Layer ok!") else: - log.error("Error: variable Alpha Layer doesn't match DEM!") + log.error("Error: variable exponent Layer doesn't match DEM!") sys.exit(1) log.info("========================") @@ -330,8 +355,90 @@ def checkInputLayerDimensions(modelParameters, modelPaths): sys.exit(1) +def checkInputParameterValues(modelParameters, modelPaths): + """check if the input parameters alpha, uMaxLimit/ zDeltaMaxLimit, exponent + are within a physically sensible range + + Parameters + ----------- + modelParameters: dict + model input parameters (from .ini - file) + modelPaths: dict + contains paths to input files + """ + alpha = modelParameters['alpha'] + if (alpha < 0 or alpha > 90): + log.error("Error: Alpha value is not within a physically sensible range ([0,90]).") + sys.exit(1) + + zDelta = modelParameters['max_z'] + if (zDelta < 0 or zDelta > 8848): + log.error("Error: zDeltaMaxLimit value is not within a physically sensible range 
([0,8848]).") + sys.exit(1) + + exp = modelParameters['exp'] + if exp < 0: + log.error("Error: Exponent value is not within a physically sensible range (> 0).") + sys.exit(1) + + _checkVarParams = True + + if modelParameters["varAlphaBool"]: + rasterValues, header = io.read_raster(modelPaths["varAlphaPath"]) + rasterValues[rasterValues < 0] = np.nan # handle different noData values + if np.any(rasterValues > 90, where=~np.isnan(rasterValues)): + log.error("Error: Not all Alpha-raster values are within a physically sensible range ([0,90]),\ + in respective startcells the general alpha angle is used.") + _checkVarParams = False + + if modelParameters["varUmaxBool"]: + rasterValues, header = io.read_raster(modelPaths["varUmaxPath"]) + rasterValues[rasterValues < 0] = np.nan + if modelParameters["varUmaxType"].lower() == 'umax': + _maxVal = 1500 # ~sqrt(8848*2*9.81) + else: + _maxVal = 8848 + if np.any(rasterValues > _maxVal, where=~np.isnan(rasterValues)): + log.error("Error: Not all zDeltaMaxLimit-raster values are within a physically sensible range \ + ([0, 8848 m] or [0, 1500 m/s]), in respective startcells the general zDeltaMax value is used.") + _checkVarParams = False + + if _checkVarParams: + log.info("All input parameters are within a physically sensible range.") + log.info("========================") + else: + log.info("NOT ALL variable input parameter rasters are within physically sensible ranges.") + log.info("!!PLEASE RE-CHECK Input Rasters and handle Results with Care!!") + log.info("========================") + + def tileInputLayers(modelParameters, modelPaths, rasterAttributes, tilingParameters): + """computes the number of tiles (_tileCols, _tileRows) and tileOverlap (_U) + based on input layer dimensions and tilingParameters, + divides all used input layers into tiles + and saves the tiles to the temp folder + + the function is a wrapper around the code in splitAndMerge.py, + where the actual tiling is handled. 
+ + Parameters + ----------- + modelParameters: dict + model input parameters (from .ini - file) + modelPaths: dict + contains paths to input files + rasterAttributes: dict + contains (header) information about the rasters (that are the same for all rasters) + tilingParameters: dict + parameters relevant for tiling (from .ini - file) + Returns + ----------- + nTiles: tuple + nTiles[0]: maximum index of tiles along rows + nTiles[1]: maximum index of tiles along columns + actual number of tiles = (nTiles[0] + 1) * (nTiles[1] + 1) + """ _tileCOLS = int(tilingParameters["tileSize"] / rasterAttributes["cellsize"]) _tileROWS = int(tilingParameters["tileSize"] / rasterAttributes["cellsize"]) _U = int(tilingParameters["tileOverlap"] / rasterAttributes["cellsize"]) @@ -364,6 +471,21 @@ def performModelCalculation(nTiles, modelParameters, modelPaths, rasterAttribute """wrapper around fc.run() handles passing of model paths, configurations to fc.run() also responsible for processing input-data tiles in sequence + + Parameters + ----------- + nTiles: tuple + number of tiles + modelParameters: dict + model input parameters (from .ini - file) + modelPaths: dict + contains paths to input files + rasterAttributes: dict + contains (header) information about the rasters (that are the same for all rasters) + forestParams: dict + input parameters regarding forest interaction (from .ini - file) + MPOptions: dict + contains parameters for multiprocessing (from .ini - file) """ optList = [] @@ -388,6 +510,13 @@ def performModelCalculation(nTiles, modelParameters, modelPaths, rasterAttribute def mergeAndWriteResults(modelPaths, modelOptions): """function handles merging of results for all tiles inside the temp Folder and also writing result files to the resultDir + + Parameters + ----------- + modelPaths: dict + contains paths to input files + modelOptions: dict + contains model input parameters (from .ini - file) """ _uid = modelPaths["uid"] _outputs = set(modelPaths['outputFileList']) 
@@ -398,8 +527,10 @@ def mergeAndWriteResults(modelPaths, modelOptions): # Merge calculated tiles zDelta = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta") flux = SPAM.mergeRaster(modelPaths["tempDir"], "res_flux") - cellCounts = SPAM.mergeRaster(modelPaths["tempDir"], "res_count") - zDeltaSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta_sum") + cellCounts = SPAM.mergeRaster(modelPaths["tempDir"], "res_count", method='sum') + zDeltaSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta_sum", method='sum') + routFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_rout_flux_sum", method='sum') + depFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_dep_flux_sum", method='sum') fpTa = SPAM.mergeRaster(modelPaths["tempDir"], "res_fp") slTa = SPAM.mergeRaster(modelPaths["tempDir"], "res_sl") travelLength = SPAM.mergeRaster(modelPaths["tempDir"], "res_travel_length") @@ -429,6 +560,12 @@ def mergeAndWriteResults(modelPaths, modelOptions): if 'zDeltaSum' in _outputs: io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_zDeltaSum{}".format(_uid, _ts, _oF), zDeltaSum) + if 'routFluxSum' in _outputs: + io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_routFluxSum{}".format(_uid, _ts, _oF), + routFluxSum) + if 'depFluxSum' in _outputs: + io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_depFluxSum{}".format(_uid, _ts, _oF), + depFluxSum) if 'fpTravelAngle' in _outputs: io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_fpTravelAngle{}".format(_uid, _ts, _oF), fpTa) @@ -439,9 +576,6 @@ def mergeAndWriteResults(modelPaths, modelOptions): io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_travelLength{}".format(_uid, _ts, _oF), travelLength) - # TODO: List of result files, which are produced should be specified also in the .ini file!!!! 
- # NOTE: Probably good to have "default" output files (z_delta,FP_travel_angle,cell_counts) - # and only write other output files if set accordingly # NOTE: # if not modelOptions["infraBool"]: # if no infra # io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / ("cell_counts%s" %(output_format)),cell_counts) @@ -457,14 +591,15 @@ def mergeAndWriteResults(modelPaths, modelOptions): def checkConvertReleaseShp2Tif(modelPaths): """function checks if release area is a .shp file and tries to convert to tif in that case - Parameters: - --------------- - modelPaths: {} - dict with modelPaths - - Returns: - --------------- - modelPaths: {} - dict with added ["releasePathWork"] + Parameters + ----------- + modelPaths: dict + contains modelPaths + Returns + ----------- + modelPaths: dict + contains paths including ["releasePathWork"] """ # the release is a shp polygon, we need to convert it to a raster # releaseLine = shpConv.readLine(releasePath, 'releasePolygon', demDict) @@ -481,7 +616,7 @@ def checkConvertReleaseShp2Tif(modelPaths): #demHeader = io.read_header(modelPaths["demPath"]) _errMsg = "using release area in '.shp' format currently only supported in combination with '.asc' DEMs" log.error(_errMsg) - raise ValueError(_errMsg) + raise ValueError(_errMsg) else: _errMsg = f"file format '{ext}' for DEM is not supported, please provide '.tif' or '.asc'" log.error(_errMsg) @@ -507,10 +642,15 @@ def checkConvertReleaseShp2Tif(modelPaths): def deleteTempFolder(tempFolderPath): """delete tempFolder containing the pickled np.arrays of the input data and output data tiles. - - should be called after all merged model results are written to disk. + should be called after all merged model results are written to disk. performs a few checks to make sure the folder is indeed a com4FlowPy tempFolder, i.e. 
- does not contain subfolders - no other file-extensions than '.npy' and '' + + Parameters + ----------- + tempFolderPath: str + path to temp folder """ log.info("+++++++++++++++++++++++") diff --git a/avaframe/com4FlowPy/com4FlowPyCfg.ini b/avaframe/com4FlowPy/com4FlowPyCfg.ini index 89fdb18ee..3388b5244 100644 --- a/avaframe/com4FlowPy/com4FlowPyCfg.ini +++ b/avaframe/com4FlowPy/com4FlowPyCfg.ini @@ -144,6 +144,17 @@ forestFrictionLayerType = absolute # NOTE: this currently only works with 'forestFrictionLayer' module!! skipForestCells = 1 +#++++++++++++ Method to calculate flux distribution +# We fixed a bug in flowClass.py, which affects the distribution of the remaining flux, +# if a cell receives flux smaller than the provided flux_threshold. +# +# The default now (post Jan. 2025) is a calculation with the fixed bug! +# +# For backward compatibility the old version (prior to Jan. 2025 - with minor bug) can +# be switched on by setting "fluxDistOldVersion = True". + +fluxDistOldVersion = False + #++++++++++++ Parameters for Tiling # tileSize: size of tiles in x and y direction in meters (if total size of) x # or y of input DEM is larger than tileSize, then the input raster @@ -188,6 +199,8 @@ outputFileFormat = .tif # slTravelAngle # flux # zDeltaSum +# routFluxSum +# depFluxSum # if forestInteraction: forestInteraction is automatically added to outputs # if infra: backCalculation is automatically added to output outputFiles = zDelta|cellCounts|travelLength|fpTravelAngle diff --git a/avaframe/com4FlowPy/flowClass.py b/avaframe/com4FlowPy/flowClass.py index c326ebf34..fbf121a08 100644 --- a/avaframe/com4FlowPy/flowClass.py +++ b/avaframe/com4FlowPy/flowClass.py @@ -1,25 +1,25 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +""" + Functions for calculations at the 'cell level' (vgl. D'Amboise et al., 2022) +""" + import numpy as np import math class Cell: - """This is the com4FlowPy 'Cell ' class - This class handles the calculation at the 'cell level' (vgl. 
D'Amboise et al., 2022) - """ - def __init__( self, rowindex, colindex, dem_ng, cellsize, flux, z_delta, parent, alpha, exp, flux_threshold, max_z_delta, - startcell, + startcell, fluxDistOldVersionBool=False, FSI=None, forestParams=None, ): - """constructor for the Cell class + """constructor for the Cell class that describes a raster cell that is hit by the GMF. the constructor function is called every time a new instance of type 'Cell' is initialized. NOTE/TODO: parent can be of different data types still, maybe split into two separate variables @@ -39,6 +39,7 @@ def __init__( self.no_flow = np.ones_like(self.dem_ng) self.flux = flux + self.fluxDep = 0 self.z_delta = z_delta self.alpha = float(alpha) @@ -46,6 +47,8 @@ def __init__( self.max_z_delta = float(max_z_delta) self.flux_threshold = float(flux_threshold) + self.fluxDistOldVersionBool = fluxDistOldVersionBool + self.tanAlpha = np.tan(np.deg2rad(self.alpha)) # moved to constructor, so this doesn't have to be calculated on # every iteration of calc_z_delta(self) @@ -146,9 +149,26 @@ def __init__( self.forestIntCount += parent.forestIntCount def add_os(self, flux): + """ + Adds flux to 'existing' flux + + Parameters + ----------- + flux: float + added flux + """ self.flux += flux def add_parent(self, parent): + """ + Adds parent to parents list + and optionally the forest interaction value of the parent + + Parameters + ----------- + parent: class Cell + Cell object to add to the list of parent cells 'lOfParents' + """ self.lOfParents.append(parent) if self.forestInteraction: # check if new/ younger parent has a lower forest interaction number @@ -157,9 +177,9 @@ def add_parent(self, parent): self.forestIntCount = parent.forestIntCount + self.isForest def calc_fp_travelangle(self): - """function calculates the travel-angle along the shortest flow-path from the start-cell to the current cell - the trave-angle along the shortest flow-path is equivalent to the maximum travel angle along all paths from - the 
startcell to this cell. + """function calculates the travel-angle along the shortest flow-path from the start-cell + to the current cell. The travel-angle along the shortest flow-path is equivalent to the + maximum travel angle along all paths from the startcell to this cell. """ _ldistMin = [] # _dh = self.startcell.altitude - self.altitude # elevation difference from cell to start-cell @@ -171,6 +191,12 @@ def calc_fp_travelangle(self): self.max_gamma = np.rad2deg(np.arctan(_dh / self.min_distance)) def calc_sl_travelangle(self): + """function calculates the travel-angle between the start cell and the current cell + using the shortest connection or straight line (sl) between the two cells. + The travel angle calculated with this shortest horizontal distance 'sl_gamma' is always + larger or equal to the travel angle 'max_gamma', which is calculated along the + (potentially curved) flow path (fp). (vgl. Meißl, 1998) + """ _dx = abs(self.startcell.colindex - self.colindex) _dy = abs(self.startcell.rowindex - self.rowindex) _dh = self.startcell.altitude - self.altitude @@ -180,7 +206,7 @@ def calc_sl_travelangle(self): def calc_z_delta(self): """ - function calculates zDelta to the eligible neighbours + function calculates zDelta (velocity or energy line height) to the eligible neighbours NOTE: forestFriction related mechanics are implemented here! 
""" self.z_delta_neighbour = np.zeros((3, 3)) @@ -235,6 +261,8 @@ def calc_z_delta(self): self.z_delta_neighbour[self.z_delta_neighbour > self.max_z_delta] = self.max_z_delta def calc_tanbeta(self): + """calculates the normalized terrain based routing + """ _ds = np.array([[self._SQRT2, 1, self._SQRT2], [1, 1, 1], [self._SQRT2, 1, self._SQRT2]]) _distance = _ds * self.cellsize @@ -248,6 +276,9 @@ def calc_tanbeta(self): self.r_t = self.tan_beta**self.exp / np.sum(self.tan_beta**self.exp) def calc_persistence(self): + """ + calculates persistence-based routing + """ self.persistence = np.zeros_like(self.dem_ng) if self.is_start: self.persistence += 1 @@ -301,7 +332,14 @@ def calc_persistence(self): self.persistence[1, 0] += 0.707 * maxweight def calc_distribution(self): + """ + calculates flux and zdelta that is distributed to the adjacent cells + Returns + ----------- + tuple + list of row, col, flux, zdelta of adjacent cells that receive flux/zdelta + """ self.calc_z_delta() self.calc_persistence() self.persistence *= self.no_flow @@ -331,19 +369,35 @@ def calc_distribution(self): # still above threshold # NOTE: this only works if "0 < n < 8" AND "0 < m < 8", the case where # "0= threshold).sum() #this is the correct way to calculate count - # TODO: make this the default, but keep option to use "old" version with minor Bug for backward compatibility of - # model results + + if self.fluxDistOldVersionBool: + """ + legacy version of code (pre 01-2025) WITH BUG (is used if self.fluxDistOldVersionBool) + here 'count' returns the number of neighbor/child cells that receive flux > 0, but + below the flux_threshold. + """ + count = ((0 < self.dist) & (self.dist < threshold)).sum() + else: + """ + default/correct version with FIXED BUG (used if self.fluxDistOldVersionBool == False) + her 'count' returns the number of neighbor/child cells which receive + flux >= the flux_threshold. 
+ """ + count = (self.dist >= threshold).sum() + # massToDistribute ~ sum of flux of child cells below the flux_threshold mass_to_distribute = np.sum(self.dist[self.dist < threshold]) - """Checking if flux is distributed to a field that isn't taking in account, when then distribute it equally to - the other fields""" + if mass_to_distribute > 0 and count > 0: + # local flux redistribution to eligible child cells self.dist[self.dist > threshold] += mass_to_distribute / count self.dist[self.dist < threshold] = 0 if np.sum(self.dist) < self.flux and count > 0: + # correction/flux conservation for potential rounding losses self.dist[self.dist > threshold] += (self.flux - np.sum(self.dist)) / count - + if count == 0: + # if all child cells are below flux_threshold, the flux is deposited + # TODO: check alternatives (e.g. 'global' redistribution or within generations) + self.fluxDep = self.flux row_local, col_local = np.where(self.dist > threshold) return ( diff --git a/avaframe/com4FlowPy/flowCore.py b/avaframe/com4FlowPy/flowCore.py index 20cb19b76..244c202bf 100644 --- a/avaframe/com4FlowPy/flowCore.py +++ b/avaframe/com4FlowPy/flowCore.py @@ -1,6 +1,10 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +""" + Calculation functions (raster level) +""" + import sys import numpy as np import logging @@ -24,13 +28,19 @@ def get_start_idx(dem, release): """Sort Release Pixels by altitude and return the result as lists for the Rows and Columns, starting with the highest altitude - Input parameters: - dem Digital Elevation Model to gain information about altitude - release The release layer, release pixels need int value > 0 + Parameters + ----------- + dem: numpy array + Digital Elevation Model to gain information about altitude + release: numpy array + The release layer, release pixels need int value > 0 - Output parameters: - row_list Row index of release pixels sorted by altitude - col_list Column index of release pixels sorted by altitude + Returns + ----------- + 
row_list: list + Row indices of release pixels sorted by altitude + col_list: list + Column indices of release pixels sorted by altitude """ row_list, col_list = np.where(release > 0) # Gives back the indices of the release areas if len(row_list) > 0: @@ -60,12 +70,15 @@ def split_release(release, pieces): Parameters ----------- - release: np.array - assumes a binary 0|1 array with release pixels designated by '1' - pieces: int - number of chunck in which the release layer should be split + release: np.array + a binary 0|1 array with release pixels designated by '1' + pieces: int + number of chunks in which the release layer should be split Returns ----------- - release_list: A list with the tiles(arrays) in it [array0, array1, ..] + release_list: list + contains the tiles(arrays) [array0, array1, ..] """ # Flatten the array and compute the cumulative sum @@ -103,12 +116,15 @@ def back_calculation(back_cell): """Here the back calculation from a run out pixel that hits a infrastructure to the release pixel is performed. - Input parameters: - hit_cell_list All cells that hit a Infrastructure + Parameters + ----------- + back_cell: class-object cell + cell that hit an infrastructure - Output parameters: - Back_list List of pixels that are on the way to the start cell - Maybe change it to array like DEM? + Returns + ----------- + back_list: list + List of cells that are on the way to the start cell TODO: Maybe change it to array like DEM? 
""" back_list = [] @@ -126,23 +142,22 @@ def back_calculation(back_cell): def run(optTuple): """This is a wrapper around calculation() for performing model runs for a single tile of the model domain - using multiprocessing across multiple CPUs - - Parameters: - ---------- - optTuple: tuple with all necessary model information - - optTuple[0] - int - i index of the processed tile (for loading correct data for tiles) - - optTuple[1] - int - j index of the processed tile (for loading correct data for tiles) - - - optTuple[2] - {} - dict containing modelParameters - - optTuple[3] - {} - dict containing modelPaths - - optTuple[4] - {} - dict containing rasterAttributes - - optTuple[5] - {} - dict containing forestParameters - - optTuple[6] - {} - dict containing MPOptions - - Returns: - ---------- - Nothing --> saves results for processed tile to temp folder (modelPaths["tempFolder"]) + using multiprocessing across multiple CPUs and for saving results for processed tile to temp folder + (modelPaths["tempFolder"]) + + Parameters + ----------- + optTuple: tuple + with all necessary model information: + + - optTuple[0] (int) - i index of the processed tile (for loading correct data for tiles) + - optTuple[1] (int) - j index of the processed tile (for loading correct data for tiles) + - optTuple[2] (dict) - containing modelParameters + - optTuple[3] (dict) - containing modelPaths + - optTuple[4] (dict) - containing rasterAttributes + - optTuple[5] (dict) - containing forestParameters + - optTuple[6] (dict) - containing MPOptions + """ log = logging.getLogger(__name__) @@ -158,6 +173,7 @@ def run(optTuple): varUmaxBool = optTuple[2]["varUmaxBool"] varAlphaBool = optTuple[2]["varAlphaBool"] varExponentBool = optTuple[2]["varExponentBool"] + fluxDistOldVersionBool = optTuple[2]["fluxDistOldVersionBool"] # Temp-Dir (all input files are located here and results are written back in here) tempDir = optTuple[3]["tempDir"] @@ -191,9 +207,9 @@ def run(optTuple): if varUmaxBool: 
varUmaxArray = np.load(tempDir / ("varUmax_%s_%s.npy" % (optTuple[0], optTuple[1]))) if optTuple[2]["varUmaxType"].lower() == 'umax': - varUmaxArray[varUmaxArray>0] = varUmaxArray[varUmaxArray>0]**2 / 2 / 9.81 + varUmaxArray[varUmaxArray > 0] = varUmaxArray[varUmaxArray > 0] ** 2 / 2 / 9.81 elif optTuple[2]["varUmaxType"].lower() != 'zdeltalim': - log.error("PLease provide the type of the uMax Limit: 'uMax' (in m/s) or zDeltaMax (in m)!") + log.error("PLease provide the type of the uMax Limit: 'uMax' (in m/s) or zDeltaMax (in m)!") else: varUmaxArray = None @@ -207,6 +223,15 @@ def run(optTuple): else: varExponentArray = None + varParams = { + 'varUmaxBool': varUmaxBool, + 'varUmaxArray': varUmaxArray, + 'varAlphaBool': varAlphaBool, + 'varAlphaArray': varAlphaArray, + 'varExponentBool': varExponentBool, + 'varExponentArray': varExponentArray, + } + # convert release areas to binary (0: no release areas, 1: release areas) # every positive value >0 is interpreted as release area release[release < 0] = 0 @@ -230,14 +255,12 @@ def run(optTuple): results = pool.map( calculation, [ - [ + [ # TODO: write in dicts: dem, infra, release_sub, alpha, exp, flux_threshold, max_z_delta, nodata, cellsize, infraBool, forestBool, - varUmaxBool, varUmaxArray, - varAlphaBool, varAlphaArray, - varExponentBool, varExponentArray, + varParams, fluxDistOldVersionBool, forestArray, forestParams, ] for release_sub in release_list @@ -256,6 +279,8 @@ def run(optTuple): fluxArray = np.zeros_like(dem, dtype=np.float32) countArray = np.zeros_like(dem, dtype=np.int32) zDeltaSumArray = np.zeros_like(dem, dtype=np.float32) + routFluxSumArray = np.zeros_like(dem, dtype=np.float32) + depFluxSumArray = np.zeros_like(dem, dtype=np.float32) backcalc = np.zeros_like(dem, dtype=np.int32) fpTravelAngleArray = np.zeros_like(dem, dtype=np.float32) slTravelAngleArray = np.zeros_like(dem, dtype=np.float32) @@ -267,6 +292,8 @@ def run(optTuple): fluxList = [] ccList = [] zDeltaSumList = [] + routFluxSumList = 
[] + depFluxSumList = [] backcalcList = [] fpTravelAngleList = [] slTravelAngleList = [] @@ -285,8 +312,10 @@ def run(optTuple): fpTravelAngleList.append(res[5]) slTravelAngleList.append(res[6]) travelLengthList.append(res[7]) + routFluxSumList.append(res[8]) + depFluxSumList.append(res[9]) if forestInteraction: - forestIntList.append(res[8]) + forestIntList.append(res[10]) logging.info("Calculation finished, getting results.") for i in range(len(zDeltaList)): @@ -294,6 +323,8 @@ def run(optTuple): fluxArray = np.maximum(fluxArray, fluxList[i]) countArray += ccList[i] zDeltaSumArray += zDeltaSumList[i] + routFluxSumArray += routFluxSumList[i] + depFluxSumArray += depFluxSumList[i] backcalc = np.maximum(backcalc, backcalcList[i]) fpTravelAngleArray = np.maximum(fpTravelAngleArray, fpTravelAngleList[i]) slTravelAngleArray = np.maximum(slTravelAngleArray, slTravelAngleList[i]) @@ -306,6 +337,8 @@ def run(optTuple): # Save Calculated tiles np.save(tempDir / ("res_z_delta_%s_%s" % (optTuple[0], optTuple[1])), zDeltaArray) np.save(tempDir / ("res_z_delta_sum_%s_%s" % (optTuple[0], optTuple[1])), zDeltaSumArray) + np.save(tempDir / ("res_rout_flux_sum_%s_%s" % (optTuple[0], optTuple[1])), routFluxSumArray) + np.save(tempDir / ("res_dep_flux_sum_%s_%s" % (optTuple[0], optTuple[1])), depFluxSumArray) np.save(tempDir / ("res_flux_%s_%s" % (optTuple[0], optTuple[1])), fluxArray) np.save(tempDir / ("res_count_%s_%s" % (optTuple[0], optTuple[1])), countArray) np.save(tempDir / ("res_fp_%s_%s" % (optTuple[0], optTuple[1])), fpTravelAngleArray) @@ -321,23 +354,52 @@ def calculation(args): """This is the core function where all the data handling and calculation is done. - Input parameters: - dem The digital elevation model - header The header of the elevation model - infra The infra layer - release The list of release arrays - alpha - exp - flux_threshold - max_z_delta - - Output parameters: - z_delta Array like DEM with the max. 
kinetic Energy Height for every - pixel - fluxArray Array with max. concentration factor saved - countArray Array with the number of hits for every pixel - elh_sum Array with the sum of Energy Line Height - back_calc Array with back calculation, still to do!!! + Parameters + ----------- + args: list + contains the following model input data: + + - args[0] (np.array) - The digital elevation model + - args[1] (np.array) - The infrastructure layer + - args[2] (np.array) - cells with value 1 are PRAs + - args[3] (float) - alpha angle + - args[4] (float) - exponent + - args[5] (float) - threshold of minimum flux + - args[6] (float) - maximum of zDelta + - args[7] (float) - nodata values of rasters + - args[8] (float) - cellsize of rasters + - args[9] (bool) - flag for calculation with/without infrastructure + - args[10] (bool) - flag for calculation with/without forest + - args[11] (dict) - contains flags and numpy arrays for variable input parameters (Alpha, exp, uMax) + - args[12] (bool) - flag for computing flux distribution with old version + - args[13] (numpy array) - contains forest information (None if forestBool=False) + - args[14] (dict) - contains parameters for forest interaction models (None if forestBool=False) + + Returns + ----------- + zDeltaArray: numpy array + the maximum of kinetic velocity height (zDelta) in every raster cell + fluxArray: numpy array + the maximum of flux in every cell + countArray: numpy array + the number of hits (GMF paths) in every cell + zDeltaSumArray: numpy array + the sum of kinetic velocity height (zDelta) in every raster cell + backcalc: numpy array + Array with back calculation, still TODO!!! 
+ fpTravelAngleArray: numpy array + maximum of flow-path travel-angle in every cell + slTravelAngleArray: numpy array + maximum of sl travel-angle in every cell + travelLengthArray: numpy array + maximum of travel length in every cell + routFluxSumArray: numpy array + sum of routing flux in every cell + depFluxSumArray: numpy array + sum of deposition flux in every cell + forestIntArray: numpy array + minimum of the count a forested cell is hit (only returned if args[14]["forestInteraction"]==True) + """ # check if there's enough RAM available (default value set to 5%) @@ -357,16 +419,17 @@ def calculation(args): cellsize = args[8] infraBool = args[9] forestBool = args[10] - varUmaxBool = args[11] - varUmaxArray = args[12] - varAlphaBool = args[13] - varAlphaArray = args[14] - varExponentBool = args[15] - varExponentArray = args[16] + varUmaxBool = args[11]['varUmaxBool'] + varUmaxArray = args[11]['varUmaxArray'] + varAlphaBool = args[11]['varAlphaBool'] + varAlphaArray = args[11]['varAlphaArray'] + varExponentBool = args[11]['varExponentBool'] + varExponentArray = args[11]['varExponentArray'] + fluxDistOldVersionBool = args[12] if forestBool: - forestArray = args[17] - forestParams = args[18] + forestArray = args[13] + forestParams = args[14] forestInteraction = forestParams["forestInteraction"] else: forestInteraction = False @@ -375,11 +438,13 @@ def calculation(args): zDeltaArray = np.zeros_like(dem, dtype=np.float32) zDeltaSumArray = np.zeros_like(dem, dtype=np.float32) + routFluxSumArray = np.zeros_like(dem, dtype=np.float32) + depFluxSumArray = np.zeros_like(dem, dtype=np.float32) fluxArray = np.zeros_like(dem, dtype=np.float32) countArray = np.zeros_like(dem, dtype=np.int32) fpTravelAngleArray = np.zeros_like(dem, dtype=np.float32) # fp = Flow Path - slTravelAngleArray = np.zeros_like(dem, dtype=np.float32) * 90 # sl = Straight Line + slTravelAngleArray = np.zeros_like(dem, dtype=np.float32) # sl = Straight Line travelLengthArray = np.zeros_like(dem, 
dtype=np.float32) @@ -424,7 +489,7 @@ def calculation(args): dem_ng, cellsize, 1, 0, None, alpha, exp, flux_threshold, max_z_delta, - startcell=True, + startcell=True, fluxDistOldVersionBool=fluxDistOldVersionBool, FSI=forestArray[row_idx, col_idx] if isinstance(forestArray, np.ndarray) else None, forestParams=forestParams, ) @@ -482,13 +547,15 @@ def calculation(args): flux[k], z_delta[k], cell, alpha, exp, flux_threshold, max_z_delta, - startcell, + startcell, fluxDistOldVersionBool=fluxDistOldVersionBool, FSI=forestArray[row[k], col[k]] if isinstance(forestArray, np.ndarray) else None, forestParams=forestParams, )) zDeltaArray[cell.rowindex, cell.colindex] = max(zDeltaArray[cell.rowindex, cell.colindex], cell.z_delta) fluxArray[cell.rowindex, cell.colindex] = max(fluxArray[cell.rowindex, cell.colindex], cell.flux) + routFluxSumArray[cell.rowindex, cell.colindex] += cell.flux + depFluxSumArray[cell.rowindex, cell.colindex] += cell.fluxDep zDeltaSumArray[cell.rowindex, cell.colindex] += cell.z_delta fpTravelAngleArray[cell.rowindex, cell.colindex] = max(fpTravelAngleArray[cell.rowindex, cell.colindex], cell.max_gamma) @@ -531,10 +598,10 @@ def calculation(args): gc.collect() if forestInteraction: return zDeltaArray, fluxArray, countArray, zDeltaSumArray, backcalc, fpTravelAngleArray, slTravelAngleArray, \ - travelLengthArray, forestIntArray + travelLengthArray, routFluxSumArray, depFluxSumArray, forestIntArray else: return zDeltaArray, fluxArray, countArray, zDeltaSumArray, backcalc, fpTravelAngleArray, slTravelAngleArray, \ - travelLengthArray + travelLengthArray, routFluxSumArray, depFluxSumArray def enoughMemoryAvailable(limit=0.05): @@ -543,12 +610,14 @@ def enoughMemoryAvailable(limit=0.05): Parameters ----------- - limit: float (between 0 and 1) - default at 0.05 (i.e. 5%) + limit: float + available RAM memory limit (between 0 and 1) - default at 0.05 (i.e. 
5%) Returns ----------- - 'True' if more than the defined memory-limit is still available - 'False' if less than the defined memory-limit is available + bool + 'True' if more than the defined memory-limit is still available; + 'False' if less than the defined memory-limit is available """ log = logging.getLogger(__name__) @@ -573,28 +642,34 @@ def calculateMultiProcessingOptions(nRel, nCPU, procPerCPU=1, maxChunks=500, chu trouble with RAM issues ... NOTE: this is still a quick'n'dirty hack, it might make sense to have a more sophisticated approach for optimization - of CPU and RAM resource usage during multiprocessing depending on e.g.: - - size of the numpy arrays that are processed (depending on tileSize and rasterResolution) - - density of release areas in the tile - - total available RAM and CPUs on the machine - - (other com4FlowPy Parameterization) + of CPU and RAM resource usage during multiprocessing depending on e.g.: + - size of the numpy arrays that are processed (depending on tileSize and rasterResolution) + - density of release areas in the tile + - total available RAM and CPUs on the machine + - (other com4FlowPy Parameterization) Parameters ----------- - nRel: int - number of release Pixels inside the tile (i.e. 
all cells/pixels with values >=1 in 'release') - nCPU: int - number of available CPUs (as defined in the .ini files) - procPerCPU: int - number of processes to be spawned per CPU (default = 1) - might be set higher for increased - performance - - maxChunks: int - hard limit to the maximum number of chunks that is used --> a larger number of chunks will very - probably increase performance in terms of maximising CPU workload (especially with large numbers of - nCPU) but also cause higher RAM consumption (in the current multiprocessing implementation) - chunkSize: int - default number of release pixels per chunk in cases where the chunk-size is not constrained by - nCPU*procPerCPU or maxChunks + nRel: int + number of release Pixels inside the tile (i.e. all cells/pixels with values >=1 in 'release') + nCPU: int + number of available CPUs (as defined in the .ini files) + procPerCPU: int + number of processes to be spawned per CPU (default = 1) - might be set higher for increased performance + maxChunks: int + hard limit to the maximum number of chunks that is used --> a larger number of chunks will very + probably increase performance in terms of maximising CPU workload (especially with large numbers of + nCPU) but also cause higher RAM consumption (in the current multiprocessing implementation) + chunkSize: int + default number of release pixels per chunk in cases where the chunk-size is not constrained by + nCPU*procPerCPU or maxChunks + Returns ----------- - nChunks: int - the number of chunks into which the release layer/array is split for multiprocessing - nProcesses: int - the number of processes used in Pool.map() inside run() + nChunks: int + the number of chunks into which the release layer/array is split for multiprocessing + nProcesses: int + the number of processes used in Pool.map() inside run() """ nProcesses = int(nCPU * procPerCPU) # check if release is empty - if so, there's no reason to split @@ -619,15 +694,17 @@ def 
handleMemoryAvailability(recheckInterval=30): and handle the situation if not NOTE: currently only time.sleep() is called to delay the subprocess for a defined time and then re-check - memory availability. - other possible options: - - log message and abort model run? + memory availability. + other possible options: + - log message and abort model run? NOTE: The implementation with time.sleep() can cause an "infinite loop" in time-sleep if for some - reason memory is not freed after a sensible amount of time.altzone - Memory consumption is dependend on tile-sizes and number of Chunks/tile ... + reason memory is not freed after a sensible amount of time. + Memory consumption is dependent on tile-sizes and number of Chunks/tile ... + Parameters ----------- - recheckInterval: int - delay time for the process after which memory availability is re-checked + recheckInterval: int + delay time (in seconds) for the process after which memory availability is re-checked """ while not enoughMemoryAvailable(): time.sleep(recheckInterval) diff --git a/avaframe/com4FlowPy/rasterIo.py b/avaframe/com4FlowPy/rasterIo.py index 2c5d179f5..b567128ae 100755 --- a/avaframe/com4FlowPy/rasterIo.py +++ b/avaframe/com4FlowPy/rasterIo.py @@ -1,5 +1,9 @@ # -*- coding: utf-8 -*- +""" + Functions to handle raster files. +""" + import rasterio import sys import logging @@ -9,7 +13,20 @@ def read_header(input_file): - # Reads in the header of the raster file, input: filepath + """ + Reads the header of the raster file + raster file should be readable by rasterio (e.g.
.tif, .asc) + + Parameters + ----------- + input_file: str + path to raster file + + Returns + ----------- + header: dict + header of raster file in style of ASCII-Rasters + """ raster = rasterio.open(input_file) if raster is None: @@ -27,6 +44,21 @@ def read_raster(input_file): + """ + Reads in a raster file + + Parameters + ----------- + input_file: str + path to raster file + + Returns + ----------- + my_array: np.array + numpy array with values read in from the raster file + header: dict + header of raster file in style of ASCII-Rasters + """ header = read_header(input_file) raster = rasterio.open(input_file) @@ -35,15 +67,21 @@ return my_array, header -def output_raster(file, file_out, raster): - """Input is the original file, path to new file, raster_data +def output_raster(referenceFile, file_out, raster): + """ + Saves raster - Input parameters: - file the path to the file to reference on, mostly DEM on where - Calculations were done - file_out path for the outputfile, possible extends are .asc or .tif""" + Parameters + ----------- + referenceFile: str + path to raster file to reference on, mostly DEM + file_out: str + path for the outputfile, possible extensions are .asc or .tif + raster: np.array + raster (array) that is saved + """ - raster_trans = rasterio.open(file) + raster_trans = rasterio.open(referenceFile) try: crs = rasterio.crs.CRS.from_dict(raster_trans.crs.data) except: diff --git a/avaframe/com4FlowPy/splitAndMerge.py b/avaframe/com4FlowPy/splitAndMerge.py index 7d58863b1..e621d6241 100644 --- a/avaframe/com4FlowPy/splitAndMerge.py +++ b/avaframe/com4FlowPy/splitAndMerge.py @@ -1,6 +1,11 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +""" + Functions to handle the raster tiles. + Tiling is intended to manage processing of large(r) computational domains.
+""" + import logging import pickle import gc @@ -12,6 +17,26 @@ def tileRaster(fNameIn, fNameOut, dirName, xDim, yDim, U, isInit=False): + """ + divides a raster into tiles and saves the tiles + + Parameters + ----------- + fNameIn : str + path to raster that is tiled + fNameOut: str + name of saved raster file + dirName: str + path to folder, where tiled raster is saved (temp - folder) + xDim: int + size of one tile in x dimension (number of raster columns) + yDim: int + size of one tile in y dimension (number of raster rows) + U: int + size of tile overlapping (number of raster cells) + isInit: bool + if isInit is True, edges are assigned to -9999 (default: False) + """ # if not os.path.exists(dirName): # os.makedirs(dirName) @@ -165,6 +190,7 @@ def mergeRaster(inDirPath, fName, method='max'): method, how the tiles should be merged (default: max) method 'min' calculates the minimum of input raster tiles, if the minimum is < 0, then 0 is used + method 'sum' calculates the sum of the raster tiles Returns ------- @@ -180,7 +206,8 @@ def mergeRaster(inDirPath, fName, method='max'): mergedRas = np.zeros((extL[0], extL[1])) # create Raster with original size - mergedRas[:, :] = np.nan + if method != 'sum': + mergedRas[:, :] = np.nan for i in range(nTiles[0] + 1): for j in range(nTiles[1] + 1): @@ -198,6 +225,10 @@ def mergeRaster(inDirPath, fName, method='max'): np.where((mergedRas[pos[0][0]:pos[0][1], pos[1][0]:pos[1][1]] >= 0) & (smallRas >= 0), np.fmin(mergedRas[pos[0][0]:pos[0][1], pos[1][0]:pos[1][1]], smallRas), np.fmax(mergedRas[pos[0][0]:pos[0][1], pos[1][0]:pos[1][1]], smallRas)) + if method == 'sum': + mergedRas[pos[0][0]: pos[0][1], pos[1][0]: pos[1][1]] = np.add( + mergedRas[pos[0][0]: pos[0][1], pos[1][0]: pos[1][1]], smallRas + ) del smallRas log.info("appended result %s_%i_%i", fName, i, j) - return mergedRas \ No newline at end of file + return mergedRas diff --git a/avaframe/runCom4FlowPy.py b/avaframe/runCom4FlowPy.py index 5a632b565..4a7f3af7f 
100644 --- a/avaframe/runCom4FlowPy.py +++ b/avaframe/runCom4FlowPy.py @@ -174,10 +174,23 @@ def main(): def readFlowPyinputs(avalancheDir, cfgFlowPy, log): """function is used to read necessary flowPy Inputs function is only called from '../runCom4FlowPy.py', - which in turn creates the necessary - 'cfgPath' dictionary, that is passed to + which in turn creates the necessary 'cfgPath' dictionary, that is passed to the com4FlowPyMain() function in this file... TODO-20240418: check this function - this is still from MTo Flow-Py --> avaFrame port + + Parameters + ----------- + avalancheDir: str + directory to avalanche - folder + cfgFlowPy: configParser object + Configparser object containing FlowPy model parameters (from (local_)com4FlowPyCfg.ini) + log: logging object + initiated logger + + Returns + ----------- + cfgPath: dict + contains paths to input data """ cfgPath = {} @@ -343,21 +356,22 @@ def checkOutputFilesFormat(strOutputFiles): """check if outputFiles option is provided in proper format, else return default string in right format - Parameters: + Parameters --------------- - strOutputFiles: string - outputFiles string from (local_)com4FlowPy.ini + strOutputFiles: str + outputFiles string from (local_)com4FlowPy.ini - Returns: + Returns --------------- - strOutputFiles: string - returns the input if the format is ok, else default - value string is returned + strOutputFiles: str + returns the input if the format is ok, else default value string is returned """ try: setA = set(strOutputFiles.split('|')) setB = set(['zDelta', 'cellCounts', 'fpTravelAngle', 'travelLength', - 'slTravelAngle', 'flux', 'zDeltaSum']) - # if there is at least 1 correct ouputfile defined, we use the string provided in the .ini file + 'slTravelAngle', 'flux', 'zDeltaSum', 'routFluxSum', 'depFluxSum']) + # if there is at least 1 correct outputfile defined, we use the string provided in the .ini file if (setA & setB): return strOutputFiles else: @@ -372,15 +386,19 @@ def 
writeCfgJSON(cfg, uid, workDir): writes a JSON file containing all the input parameters from the configFile using the same uid as the simulation results - Parameters: + Parameters --------------- - cfg: configParser object - all the model configs are in here - uid: string - UID created based on the cfg object - workDir: string - workDirectory (place to write the .json file to) - - Returns: + cfg: configParser object + all the model configs are in here + uid: str + UID created based on the cfg object + workDir: str + workDirectory (place to write the .json file to) + + Returns --------------- - success: boolean/Exception - True if file is written successfully, else Exception + success: bool/Exception + True if file is written successfully, else Exception """ diff --git a/docs/moduleCom4FlowPy.rst b/docs/moduleCom4FlowPy.rst index 93b191eca..39ac8640c 100644 --- a/docs/moduleCom4FlowPy.rst +++ b/docs/moduleCom4FlowPy.rst @@ -233,15 +233,18 @@ deleted after completion of the model run (can be useful for calculation of larg Output ------- -All outputs are in the .tif raster format in the same resolution and extent as the input raster layers. 
- -- ``z_delta.tif``: the maximum z_delta of all paths for every raster cell (geometric measure of process magnitude, can be associated to kinetic energy/velocity) -- ``flux.tif``: The maximum routing flux of all paths for every raster cell -- ``z_delta_sum.tif``: z_delta summed up over all paths on every raster cell -- ``cell_counts.tif``: number of paths that route flux through a raster cell -- ``FP_travel_angle.tif``: the gamma angle along the flow path -- ``SL_travel_angle.tif``: Saves the gamma angle, while the distances are calculated via a straight line from the release cell to the current cell -- ``forestInteraction.tif``: only if ``forestInteraction = True``: minimum number of forested raster cells a path runs through +All outputs are in .tif or .asc raster format in the same resolution and extent as the input raster layers. + +- ``zDelta``: the maximum z_delta of all paths for every raster cell (geometric measure of process magnitude, can be associated to kinetic energy/velocity) +- ``flux``: The maximum routing flux of all paths for every raster cell +- ``zDeltaSum``: z_delta summed up over all paths on every raster cell +- ``cellCounts``: number of paths/release cells that route flux through a raster cell +- ``fpTravelAngle``: the gamma angle along the flow path +- ``slTravelAngle``: gamma angle calculated along a straight-line between release cell and current cell +- ``travelLength``: the travel length along the flow path +- ``routFluxSum``: routing flux summed up over all paths +- ``depFluxSum``: deposited flux summed up over all paths +- ``forestInteraction``: only if ``forestInteraction = True``: minimum number of forested raster cells a path runs through .. Note:: * **please interpret** ``cell_counts.tif`` **with caution, since absolute cell_count values do currently not reflect the number of release-cells which route flux through a cell - we are currently fixing the implementation of this feature**