Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions avaframe/com4FlowPy/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

'''Flow-Py model: statistical approach for gravitational mass flows'''
200 changes: 170 additions & 30 deletions avaframe/com4FlowPy/com4FlowPy.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@


def com4FlowPyMain(cfgPath, cfgSetup):
"""com4FlowPy main function - handles:
"""com4FlowPy main function performs the model run and writes results to disk:
* reading of input data and model Parameters
* calculation
* writing of result files
Expand All @@ -42,13 +42,12 @@ def com4FlowPyMain(cfgPath, cfgSetup):
already been created (workDir, tempDir)

Parameters
----------
cfgPath: dictionary with paths from .ini file
cfgSetup: configparser.SectionProxy Object with "GENERAL" model configs from .ini file
-----------
cfgPath: configparser.SectionProxy Object
contains paths to input data (from .ini file)
cfgSetup: configparser.SectionProxy Object
"GENERAL" model configs (from .ini file)

Returns
----------
nothing - performs model run and writes results to disk
"""
_startTime = datetime.now().replace(microsecond=0) # used for timing model runtime

Expand All @@ -63,11 +62,16 @@ def com4FlowPyMain(cfgPath, cfgSetup):
modelParameters["infraBool"] = cfgSetup.getboolean("infra")
modelParameters["forestBool"] = cfgSetup.getboolean("forest")
modelParameters["forestInteraction"] = cfgSetup.getboolean("forestInteraction")
# modelParameters["infra"] = cfgSetup["infra"]
# modelParameters["forest"] = cfgSetup["forest"]

# Flags for use of dynamic input parameters
modelParameters["varUmaxBool"] = cfgSetup.getboolean("variableUmaxLim")
modelParameters["varAlphaBool"] = cfgSetup.getboolean("variableAlpha")
modelParameters["varExponentBool"] = cfgSetup.getboolean("variableExponent")
# modelParameters["infra"] = cfgSetup["infra"]
# modelParameters["forest"] = cfgSetup["forest"]

# Flag for use of old flux distribution version
modelParameters["fluxDistOldVersionBool"] = cfgSetup.getboolean("fluxDistOldVersion")

# Tiling Parameters used for calculation of large model-domains
tilingParameters = {}
Expand Down Expand Up @@ -163,6 +167,9 @@ def com4FlowPyMain(cfgPath, cfgSetup):
# check if input layers have same x,y dimensions
checkInputLayerDimensions(modelParameters, modelPaths)

# check if input parameters are within physically sensible ranges
checkInputParameterValues(modelParameters, modelPaths)

# get information on cellsize and nodata value from demHeader
rasterAttributes = {}

Expand Down Expand Up @@ -200,9 +207,18 @@ def com4FlowPyMain(cfgPath, cfgSetup):


def startLogging(modelParameters, forestParams, modelPaths, MPOptions):
"""
just used to move this chunk of code out of the main function
only performs logging at the start of the simulation
"""performs logging at the start of the simulation

Parameters
-----------
modelParameters: dict
model input parameters (from .ini - file)
forestParams: dict
input parameters regarding forest interaction (from .ini - file)
modelPaths: dict
paths to input files, workDir, resDir, outputFileFormat, etc. (from .ini - file)
MPOptions: dict
contains parameters for multiprocessing (from .ini - file)
"""
# Start of Calculation (logging...)
log.info("==================================")
Expand Down Expand Up @@ -243,6 +259,9 @@ def startLogging(modelParameters, forestParams, modelPaths, MPOptions):
log.info("calculation with Infrastructure")
log.info(f"{'INFRA LAYER:' : <14}{'%s'%modelPaths['infraPath'] : <5}")
log.info("------------------------")
if modelParameters["fluxDistOldVersionBool"]:
log.info("Calculation using old (BUGGY!!) version of flux distribution!")
log.info("------------------------")
for param, value in MPOptions.items():
log.info(f"{'%s:'%param : <20}{value : <5}")
# log.info("{}:\t{}".format(param,value))
Expand All @@ -255,9 +274,15 @@ def startLogging(modelParameters, forestParams, modelPaths, MPOptions):


def checkInputLayerDimensions(modelParameters, modelPaths):
"""
check if all layers have the same size
"""check if all input layers have the same size
and can be read from the provided paths

Parameters
-----------
modelParameters: dict
model input parameters (from .ini - file)
modelPaths: dict
contains paths to input files
"""
# Check if Layers have same size!!!
try:
Expand Down Expand Up @@ -316,9 +341,9 @@ def checkInputLayerDimensions(modelParameters, modelPaths):
if modelParameters["varExponentBool"]:
_varExponentHeader = io.read_header(modelPaths["varExponentPath"])
if _demHeader["ncols"] == _varExponentHeader["ncols"] and _demHeader["nrows"] == _varExponentHeader["nrows"]:
log.info("variable Alpha Layer ok!")
log.info("variable exponent Layer ok!")
else:
log.error("Error: variable Alpha Layer doesn't match DEM!")
log.error("Error: variable exponent Layer doesn't match DEM!")
sys.exit(1)

log.info("========================")
Expand All @@ -330,8 +355,90 @@ def checkInputLayerDimensions(modelParameters, modelPaths):
sys.exit(1)


def checkInputParameterValues(modelParameters, modelPaths):
    """check if the input parameters alpha, uMaxLimit/ zDeltaMaxLimit, exponent
    are within a physically sensible range

    scalar parameters out of range abort the run with sys.exit(1);
    out-of-range values in the optional variable-parameter rasters only
    trigger an error-log message, since (per the logged messages) the model
    falls back to the general scalar value in the affected start cells.

    Parameters
    -----------
    modelParameters: dict
        model input parameters (from .ini - file)
    modelPaths: dict
        contains paths to input files
    """
    # runout angle must be a valid slope angle in degrees
    alpha = modelParameters['alpha']
    if (alpha < 0 or alpha > 90):
        log.error("Error: Alpha value is not within a physically sensible range ([0,90]).")
        sys.exit(1)

    # 8848 m (~height of Mt. Everest) serves as upper bound for the energy-line height
    zDelta = modelParameters['max_z']
    if (zDelta < 0 or zDelta > 8848):
        log.error("Error: zDeltaMaxLimit value is not within a physically sensible range ([0,8848]).")
        sys.exit(1)

    exp = modelParameters['exp']
    if exp < 0:
        log.error("Error: Exponent value is not within a physically sensible range (> 0).")
        sys.exit(1)

    # stays True only if all provided variable-parameter rasters pass their checks
    _checkVarParams = True

    if modelParameters["varAlphaBool"]:
        # NOTE(review): assumes io.read_raster returns (values, header) with numeric values
        rasterValues, header = io.read_raster(modelPaths["varAlphaPath"])
        rasterValues[rasterValues < 0] = np.nan  # handle different noData values
        # only compare cells that are not noData (NaN)
        if np.any(rasterValues > 90, where=~np.isnan(rasterValues)):
            log.error("Error: Not all Alpha-raster values are within a physically sensible range ([0,90]),\
                       in respective startcells the general alpha angle is used.")
            _checkVarParams = False

    if modelParameters["varUmaxBool"]:
        rasterValues, header = io.read_raster(modelPaths["varUmaxPath"])
        rasterValues[rasterValues < 0] = np.nan  # handle different noData values
        # upper bound depends on whether the raster encodes a velocity ('umax', m/s)
        # or an energy-line height (zDeltaMax, m)
        if modelParameters["varUmaxType"].lower() == 'umax':
            _maxVal = 1500  # ~sqrt(8848*2*9.81)
        else:
            _maxVal = 8848
        if np.any(rasterValues > _maxVal, where=~np.isnan(rasterValues)):
            log.error("Error: Not all zDeltaMaxLimit-raster values are within a physically sensible range \
                      ([0, 8848 m] or [0, 1500 m/s]), in respective startcells the general zDeltaMax value is used.")
            _checkVarParams = False

    if _checkVarParams:
        log.info("All input parameters are within a physically sensible range.")
        log.info("========================")
    else:
        log.info("NOT ALL variable input parameter rasters are within physically sensible ranges.")
        log.info("!!PLEASE RE-CHECK Input Rasters and handle Results with Care!!")
        log.info("========================")


def tileInputLayers(modelParameters, modelPaths, rasterAttributes, tilingParameters):
"""computes the number of tiles (_tileCols, _tileRows) and tileOverlap (_U)
based on input layer dimensions and tilingParameters,
divides all used input layers into tiles
and saves the tiles to the temp folder

the function is a wrapper around the code in splitAndMerge.py,
where the actual tiling is handled.

Parameters
-----------
modelParameters: dict
model input parameters (from .ini - file)
modelPaths: dict
contains paths to input files
rasterAttributes: dict
contains (header) information about the rasters (that are the same for all rasters)
tilingParameters: dict
parameters relevant for tiling (from .ini - file)

Returns
-----------
nTiles: tuple
nTiles[0]: maximum index of tiles along rows
nTiles[1]: maximum index of tiles along columns
actual number of tiles = (nTiles[0] + 1) * (nTiles[1] + 1)
"""
_tileCOLS = int(tilingParameters["tileSize"] / rasterAttributes["cellsize"])
_tileROWS = int(tilingParameters["tileSize"] / rasterAttributes["cellsize"])
_U = int(tilingParameters["tileOverlap"] / rasterAttributes["cellsize"])
Expand Down Expand Up @@ -364,6 +471,21 @@ def performModelCalculation(nTiles, modelParameters, modelPaths, rasterAttribute
"""wrapper around fc.run()
handles passing of model paths, configurations to fc.run()
also responsible for processing input-data tiles in sequence

Parameters
-----------
nTiles: tuple
number of tiles
modelParameters: dict
model input parameters (from .ini - file)
modelPaths: dict
contains paths to input files
rasterAttributes: dict
contains (header) information about the rasters (that are the same for all rasters)
forestParams: dict
input parameters regarding forest interaction (from .ini - file)
MPOptions: dict
contains parameters for multiprocessing (from .ini - file)
"""

optList = []
Expand All @@ -388,6 +510,13 @@ def performModelCalculation(nTiles, modelParameters, modelPaths, rasterAttribute
def mergeAndWriteResults(modelPaths, modelOptions):
"""function handles merging of results for all tiles inside the temp Folder
and also writing result files to the resultDir

Parameters
-----------
modelPaths: dict
contains paths to input files
modelOptions: dict
contains model input parameters (from .ini - file)
"""
_uid = modelPaths["uid"]
_outputs = set(modelPaths['outputFileList'])
Expand All @@ -398,8 +527,10 @@ def mergeAndWriteResults(modelPaths, modelOptions):
# Merge calculated tiles
zDelta = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta")
flux = SPAM.mergeRaster(modelPaths["tempDir"], "res_flux")
cellCounts = SPAM.mergeRaster(modelPaths["tempDir"], "res_count")
zDeltaSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta_sum")
cellCounts = SPAM.mergeRaster(modelPaths["tempDir"], "res_count", method='sum')
zDeltaSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta_sum", method='sum')
routFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_rout_flux_sum", method='sum')
depFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_dep_flux_sum", method='sum')
fpTa = SPAM.mergeRaster(modelPaths["tempDir"], "res_fp")
slTa = SPAM.mergeRaster(modelPaths["tempDir"], "res_sl")
travelLength = SPAM.mergeRaster(modelPaths["tempDir"], "res_travel_length")
Expand Down Expand Up @@ -429,6 +560,12 @@ def mergeAndWriteResults(modelPaths, modelOptions):
if 'zDeltaSum' in _outputs:
io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_zDeltaSum{}".format(_uid, _ts, _oF),
zDeltaSum)
if 'routFluxSum' in _outputs:
io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_routFluxSum{}".format(_uid, _ts, _oF),
routFluxSum)
if 'depFluxSum' in _outputs:
io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_depFluxSum{}".format(_uid, _ts, _oF),
depFluxSum)
if 'fpTravelAngle' in _outputs:
io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_fpTravelAngle{}".format(_uid,
_ts, _oF), fpTa)
Expand All @@ -439,9 +576,6 @@ def mergeAndWriteResults(modelPaths, modelOptions):
io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_travelLength{}".format(_uid,
_ts, _oF), travelLength)

# TODO: List of result files, which are produced should be specified also in the .ini file!!!!
# NOTE: Probably good to have "default" output files (z_delta,FP_travel_angle,cell_counts)
# and only write other output files if set accordingly
# NOTE:
# if not modelOptions["infraBool"]: # if no infra
# io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / ("cell_counts%s" %(output_format)),cell_counts)
Expand All @@ -457,14 +591,15 @@ def mergeAndWriteResults(modelPaths, modelOptions):
def checkConvertReleaseShp2Tif(modelPaths):
"""function checks if release area is a .shp file and tries to convert to tif in that case

Parameters:
---------------
modelPaths: {} - dict with modelPaths

Returns:
---------------
modelPaths: {} - dict with added ["releasePathWork"]
Parameters
-----------
modelPaths: dict
contains modelPaths

Returns
-----------
modelPaths: dict
contains paths including ["releasePathWork"]
"""
# the release is a shp polygon, we need to convert it to a raster
# releaseLine = shpConv.readLine(releasePath, 'releasePolygon', demDict)
Expand All @@ -481,7 +616,7 @@ def checkConvertReleaseShp2Tif(modelPaths):
#demHeader = io.read_header(modelPaths["demPath"])
_errMsg = "using release area in '.shp' format currently only supported in combination with '.asc' DEMs"
log.error(_errMsg)
raise ValueError(_errMsg)
raise ValueError(_errMsg)
else:
_errMsg = f"file format '{ext}' for DEM is not supported, please provide '.tif' or '.asc'"
log.error(_errMsg)
Expand All @@ -507,10 +642,15 @@ def checkConvertReleaseShp2Tif(modelPaths):

def deleteTempFolder(tempFolderPath):
"""delete tempFolder containing the pickled np.arrays of the input data and output data tiles.
- should be called after all merged model results are written to disk.
should be called after all merged model results are written to disk.
performs a few checks to make sure the folder is indeed a com4FlowPy tempFolder, i.e.
- does not contain subfolders
- no other file-extensions than '.npy' and ''

Parameters
-----------
tempFolderPath: str
path to temp folder
"""

log.info("+++++++++++++++++++++++")
Expand Down
13 changes: 13 additions & 0 deletions avaframe/com4FlowPy/com4FlowPyCfg.ini
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,17 @@ forestFrictionLayerType = absolute
# NOTE: this currently only works with 'forestFrictionLayer' module!!
skipForestCells = 1

#++++++++++++ Method to calculate flux distribution
# We fixed a bug in flowClass.py, which affects the distribution of the remaining flux,
# if a cell receives flux smaller than the provided flux_threshold.
#
# The default now (post Jan. 2025) is the calculation with the bug fixed!
#
# For backward compatibility the old version (prior to Jan. 2025 - with minor bug) can
# be switched on by setting "fluxDistOldVersion = True".

fluxDistOldVersion = False

#++++++++++++ Parameters for Tiling
# tileSize: size of tiles in x and y direction in meters (if total size of) x
# or y of input DEM is larger than tileSize, then the input raster
Expand Down Expand Up @@ -188,6 +199,8 @@ outputFileFormat = .tif
# slTravelAngle
# flux
# zDeltaSum
# routFluxSum
# depFluxSum
# if forestInteraction: forestInteraction is automatically added to outputs
# if infra: backCalculation is automatically added to output
outputFiles = zDelta|cellCounts|travelLength|fpTravelAngle
Expand Down
Loading
Loading