diff --git a/avaframe/com4FlowPy/com4FlowPy.py b/avaframe/com4FlowPy/com4FlowPy.py index 237a47cd8..daa8caa41 100755 --- a/avaframe/com4FlowPy/com4FlowPy.py +++ b/avaframe/com4FlowPy/com4FlowPy.py @@ -55,8 +55,8 @@ def com4FlowPyMain(cfgPath, cfgSetup): # Flow-Py parameters modelParameters["alpha"] = cfgSetup.getfloat("alpha") # float(cfgSetup["alpha"]) modelParameters["exp"] = cfgSetup.getfloat("exp") # float(cfgSetup["exp"]) - modelParameters["flux_threshold"] = cfgSetup.getfloat("flux_threshold") # float(cfgSetup["flux_threshold"]) - modelParameters["max_z"] = cfgSetup.getfloat("max_z") # float(cfgSetup["max_z"]) + modelParameters["fluxThreshold"] = cfgSetup.getfloat("fluxThreshold") # float(cfgSetup["fluxThreshold"]) + modelParameters["maxZ"] = cfgSetup.getfloat("maxZ") # float(cfgSetup["maxZ"]) # Flags for use of Forest and/or Infrastructure modelParameters["infraBool"] = cfgSetup.getboolean("infra") @@ -136,7 +136,7 @@ def com4FlowPyMain(cfgPath, cfgSetup): modelPaths["forestPath"] = "" modelParameters["forestInteraction"] = False - # check if with u_max limit + # check if with uMax limit if modelParameters["varUmaxBool"]: modelParameters["varUmaxType"] = cfgSetup.get("varUmaxParameter") modelPaths["varUmaxPath"] = cfgPath["varUmaxPath"] @@ -178,7 +178,7 @@ def com4FlowPyMain(cfgPath, cfgSetup): demHeader = IOf.readASCheader(modelPaths["demPath"]) rasterAttributes["nodata"] = demHeader["nodata_value"] elif demExt in ['.tif', '.tiff', '.TIF', '.TIFF']: - demHeader = io.read_header(modelPaths["demPath"]) + demHeader = io.readHeader(modelPaths["demPath"]) rasterAttributes["nodata"] = demHeader["noDataValue"] else: _errMsg = f"file format '{demExt}' for DEM is not supported, please provide '.tif' or '.asc'" @@ -226,8 +226,8 @@ def startLogging(modelParameters, forestParams, modelPaths, MPOptions): log.info("==================================") log.info(f"{'Alpha Angle:' : <20}{modelParameters['alpha'] : <5}") log.info(f"{'Exponent:' : <20}{modelParameters['exp'] 
: <5}") - log.info(f"{'Flux Threshold:' : <20}{modelParameters['flux_threshold'] : <5}") - log.info(f"{'Max Z_delta:' : <20}{modelParameters['max_z'] : <5}") + log.info(f"{'Flux Threshold:' : <20}{modelParameters['fluxThreshold'] : <5}") + log.info(f"{'Max zDelta:' : <20}{modelParameters['maxZ'] : <5}") log.info("------------------------") # Also log the used input-files log.info(f"{'DEM:' : <5}{'%s'%modelPaths['demPath'] : <5}") @@ -292,10 +292,10 @@ def checkInputLayerDimensions(modelParameters, modelPaths): if ext in ['.asc', '.ASC']: _demHeader = IOf.readASCheader(modelPaths["demPath"]) - _relHeader = io.read_header(modelPaths["releasePathWork"]) + _relHeader = io.readHeader(modelPaths["releasePathWork"]) elif ext in ['.tif', '.tiff', '.TIF', '.TIFF']: - _demHeader = io.read_header(modelPaths["demPath"]) - _relHeader = io.read_header(modelPaths["releasePathWork"]) + _demHeader = io.readHeader(modelPaths["demPath"]) + _relHeader = io.readHeader(modelPaths["releasePathWork"]) else: _errMsg = f"file format '{ext}' for DEM is not supported, please provide '.tif' or '.asc'" raise ValueError(_errMsg) @@ -307,7 +307,7 @@ def checkInputLayerDimensions(modelParameters, modelPaths): sys.exit(1) if modelParameters["infraBool"]: - _infraHeader = io.read_header(modelPaths["infraPath"]) + _infraHeader = io.readHeader(modelPaths["infraPath"]) if _demHeader["ncols"] == _infraHeader["ncols"] and _demHeader["nrows"] == _infraHeader["nrows"]: log.info("Infra Layer ok!") else: @@ -315,7 +315,7 @@ def checkInputLayerDimensions(modelParameters, modelPaths): sys.exit(1) if modelParameters["forestBool"]: - _forestHeader = io.read_header(modelPaths["forestPath"]) + _forestHeader = io.readHeader(modelPaths["forestPath"]) if _demHeader["ncols"] == _forestHeader["ncols"] and _demHeader["nrows"] == _forestHeader["nrows"]: log.info("Forest Layer ok!") else: @@ -323,7 +323,7 @@ def checkInputLayerDimensions(modelParameters, modelPaths): sys.exit(1) if modelParameters["varUmaxBool"]: -
_varUmaxHeader = io.read_header(modelPaths["varUmaxPath"]) + _varUmaxHeader = io.readHeader(modelPaths["varUmaxPath"]) if _demHeader["ncols"] == _varUmaxHeader["ncols"] and _demHeader["nrows"] == _varUmaxHeader["nrows"]: log.info("uMax Limit Layer ok!") else: @@ -331,7 +331,7 @@ def checkInputLayerDimensions(modelParameters, modelPaths): sys.exit(1) if modelParameters["varAlphaBool"]: - _varAlphaHeader = io.read_header(modelPaths["varAlphaPath"]) + _varAlphaHeader = io.readHeader(modelPaths["varAlphaPath"]) if _demHeader["ncols"] == _varAlphaHeader["ncols"] and _demHeader["nrows"] == _varAlphaHeader["nrows"]: log.info("variable Alpha Layer ok!") else: @@ -339,7 +339,7 @@ def checkInputLayerDimensions(modelParameters, modelPaths): sys.exit(1) if modelParameters["varExponentBool"]: - _varExponentHeader = io.read_header(modelPaths["varExponentPath"]) + _varExponentHeader = io.readHeader(modelPaths["varExponentPath"]) if _demHeader["ncols"] == _varExponentHeader["ncols"] and _demHeader["nrows"] == _varExponentHeader["nrows"]: log.info("variable exponent Layer ok!") else: @@ -371,7 +371,7 @@ def checkInputParameterValues(modelParameters, modelPaths): log.error("Error: Alpha value is not within a physically sensible range ([0,90]).") sys.exit(1) - zDelta = modelParameters['max_z'] + zDelta = modelParameters['maxZ'] if (zDelta < 0 or zDelta > 8848): log.error("Error: zDeltaMaxLimit value is not within a physically sensible range ([0,8848]).") sys.exit(1) @@ -384,7 +384,7 @@ def checkInputParameterValues(modelParameters, modelPaths): _checkVarParams = True if modelParameters["varAlphaBool"]: - rasterValues, header = io.read_raster(modelPaths["varAlphaPath"]) + rasterValues, header = io.readRaster(modelPaths["varAlphaPath"]) rasterValues[rasterValues < 0] = np.nan # handle different noData values if np.any(rasterValues > 90, where=~np.isnan(rasterValues)): log.error("Error: Not all Alpha-raster values are within a physically sensible range ([0,90]),\ @@ -392,7 +392,7 @@ def 
checkInputParameterValues(modelParameters, modelPaths): _checkVarParams = False if modelParameters["varUmaxBool"]: - rasterValues, header = io.read_raster(modelPaths["varUmaxPath"]) + rasterValues, header = io.readRaster(modelPaths["varUmaxPath"]) rasterValues[rasterValues < 0] = np.nan if modelParameters["varUmaxType"].lower() == 'umax': _maxVal = 1500 # ~sqrt(8848*2*9.81) @@ -525,15 +525,15 @@ def mergeAndWriteResults(modelPaths, modelOptions): log.info("-------------------------") # Merge calculated tiles - zDelta = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta") + zDelta = SPAM.mergeRaster(modelPaths["tempDir"], "res_zDelta") flux = SPAM.mergeRaster(modelPaths["tempDir"], "res_flux") cellCounts = SPAM.mergeRaster(modelPaths["tempDir"], "res_count", method='sum') - zDeltaSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_z_delta_sum", method='sum') - routFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_rout_flux_sum", method='sum') - depFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_dep_flux_sum", method='sum') + zDeltaSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_zDeltaSum", method='sum') + routFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_routFluxSum", method='sum') + depFluxSum = SPAM.mergeRaster(modelPaths["tempDir"], "res_depFluxSum", method='sum') fpTa = SPAM.mergeRaster(modelPaths["tempDir"], "res_fp") slTa = SPAM.mergeRaster(modelPaths["tempDir"], "res_sl") - travelLength = SPAM.mergeRaster(modelPaths["tempDir"], "res_travel_length") + travelLength = SPAM.mergeRaster(modelPaths["tempDir"], "res_travelLength") if modelOptions["infraBool"]: backcalc = SPAM.mergeRaster(modelPaths["tempDir"], "res_backcalc") @@ -549,42 +549,42 @@ def mergeAndWriteResults(modelPaths, modelOptions): _ts = modelPaths["timeString"] if 'flux' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_flux{}".format(_uid, _ts, _oF), + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / 
"com4_{}_{}_flux{}".format(_uid, _ts, _oF), flux) if 'zDelta' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_zdelta{}".format(_uid, _ts, _oF), + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_zdelta{}".format(_uid, _ts, _oF), zDelta) if 'cellCounts' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_cellCounts{}".format(_uid, _ts, _oF), + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_cellCounts{}".format(_uid, _ts, _oF), cellCounts) if 'zDeltaSum' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_zDeltaSum{}".format(_uid, _ts, _oF), + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_zDeltaSum{}".format(_uid, _ts, _oF), zDeltaSum) if 'routFluxSum' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_routFluxSum{}".format(_uid, _ts, _oF), + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_routFluxSum{}".format(_uid, _ts, _oF), routFluxSum) if 'depFluxSum' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_depFluxSum{}".format(_uid, _ts, _oF), + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_depFluxSum{}".format(_uid, _ts, _oF), depFluxSum) if 'fpTravelAngle' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_fpTravelAngle{}".format(_uid, + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_fpTravelAngle{}".format(_uid, _ts, _oF), fpTa) if 'slTravelAngle' in _outputs: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_slTravelAngle{}".format(_uid, + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_slTravelAngle{}".format(_uid, _ts, _oF), slTa) if 'travelLength' in _outputs: - io.output_raster(modelPaths["demPath"], 
modelPaths["resDir"] / "com4_{}_{}_travelLength{}".format(_uid, + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_travelLength{}".format(_uid, _ts, _oF), travelLength) # NOTE: # if not modelOptions["infraBool"]: # if no infra - # io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / ("cell_counts%s" %(output_format)),cell_counts) - # io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / ("z_delta_sum%s" %(output_format)),z_delta_sum) + # io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / ("cellCounts%s" %(output_format)),cellCounts) + # io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / ("zDeltaSum%s" %(output_format)),zDeltaSum) if modelOptions["infraBool"]: # if infra - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_backcalculation{}".format(_uid, + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_backcalculation{}".format(_uid, _ts, _oF), backcalc) if modelOptions["forestInteraction"]: - io.output_raster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_forestInteraction{}".format(_uid, + io.outputRaster(modelPaths["demPath"], modelPaths["resDir"] / "com4_{}_{}_forestInteraction{}".format(_uid, _ts, _oF), forestInteraction) @@ -612,8 +612,8 @@ def checkConvertReleaseShp2Tif(modelPaths): dem = IOf.readRaster(modelPaths["demPath"]) demHeader = IOf.readASCheader(modelPaths["demPath"]) elif ext in ['.tif', '.tiff', '.TIF', '.TIFF']: - #dem = io.read_raster(modelPaths["demPath"]) - #demHeader = io.read_header(modelPaths["demPath"]) + #dem = io.readRaster(modelPaths["demPath"]) + #demHeader = io.readHeader(modelPaths["demPath"]) _errMsg = "using release area in '.shp' format currently only supported in combination with '.asc' DEMs" log.error(_errMsg) raise ValueError(_errMsg) @@ -631,8 +631,8 @@ def checkConvertReleaseShp2Tif(modelPaths): releaseAreaHeader = demHeader releaseArea = np.flipud(releaseLine["rasterData"]) 
modelPaths["releasePathWork"] = modelPaths["workDir"] / "release.tif" - io.output_raster(modelPaths["demPath"], modelPaths["workDir"] / "release.asc", releaseArea) - io.output_raster(modelPaths["demPath"], modelPaths["workDir"] / "release.tif", releaseArea) + io.outputRaster(modelPaths["demPath"], modelPaths["workDir"] / "release.asc", releaseArea) + io.outputRaster(modelPaths["demPath"], modelPaths["workDir"] / "release.tif", releaseArea) del releaseLine else: modelPaths["releasePathWork"] = modelPaths["releasePath"] diff --git a/avaframe/com4FlowPy/com4FlowPyCfg.ini b/avaframe/com4FlowPy/com4FlowPyCfg.ini index 3388b5244..1813f7fe6 100644 --- a/avaframe/com4FlowPy/com4FlowPyCfg.ini +++ b/avaframe/com4FlowPy/com4FlowPyCfg.ini @@ -13,12 +13,12 @@ # exp: # Spreading Coefficient (influences lateral spreading) #--------------------- -# flux_threshold: +# fluxThreshold: # Flux threshold (influences lateral spreading) #--------------------- -# max_z: +# maxZ: # Energy-Line-Height Limit (can be interpreted as velocity limit) -# max_v = sqrt(max_z*19.62) +# maxV = sqrt(maxZ*19.62) # typical values: # - Avalanche: ~270 m ... ~72 m/s # - Rockfall: ~130 m ... ~50 m/s @@ -27,19 +27,19 @@ alpha = 25 exp = 8 -flux_threshold = 3.0e-4 -max_z = 8848 +fluxThreshold = 3.0e-4 +maxZ = 8848 #++++++++++++ Use Infrastructure infra = False -#++++++++++++ Use a dynamic u_max Limit +#++++++++++++ Use a dynamic uMax Limit # Requires an additional tif-file containing the uMax (in m/s) # or zDeltaMax (m) Limits # in every cell where a release cell is. # the paths for each release cells are calculated with these uMax values -# (computed to z_delta, similar to the max_z) or zDelta values. +# (computed to zDelta, similar to the maxZ) or zDelta values. # In varUmaxParameter the parameter (uMax in m/s or zDeltaMax in m) # provided is given. 
variableUmaxLim = False @@ -116,7 +116,7 @@ velThForFriction = 30 #++++++++++++ Forest Detrainment # These are the parameters for the "forestDetrainment"-approach - D'Amboise et al. (2022??) # The idea is to remove 'virtual mass' (aka 'flux' in Flow-Py) from the modeled flow/process -# on forested pixels in dependency of FSI and local z_delta +# on forested pixels in dependency of FSI and local zDelta # # maxDetrainmentFor [°]: default set to '0' (i.e. no detrainment) - foreste_detrainment val: # minDetrainmentFor [°]: default set to '0' (i.e. no detrainment) - foreste_detrainment val: @@ -146,7 +146,7 @@ skipForestCells = 1 #++++++++++++ Method to calculate flux distribution # We fixed a bug in flowClass.py, which affects the distribution of the remaining flux, -# if a cell receives flux smaller than the provided flux_threshold. +# if a cell receives flux smaller than the provided fluxThreshold. # # The default now (post Jan. 2025) is a calculation with the fixed bug! # diff --git a/avaframe/com4FlowPy/flowClass.py b/avaframe/com4FlowPy/flowClass.py index fbf121a08..3cd6afe77 100644 --- a/avaframe/com4FlowPy/flowClass.py +++ b/avaframe/com4FlowPy/flowClass.py @@ -13,9 +13,9 @@ class Cell: def __init__( self, rowindex, colindex, - dem_ng, cellsize, - flux, z_delta, parent, - alpha, exp, flux_threshold, max_z_delta, + demNeighbours, cellsize, + flux, zDelta, parent, + alpha, exp, fluxThreshold, maxZdelta, startcell, fluxDistOldVersionBool=False, FSI=None, forestParams=None, ): @@ -23,40 +23,40 @@ def __init__( the constructor function is called every time a new instance of type 'Cell' is initialized. NOTE/TODO: parent can be of different data types still, maybe split into two separate variables - * bool --> isStart + * bool --> is_start * Cell --> startCell """ self.rowindex = rowindex # index of the Cell in row-direction (i.e. local y-index in the calculation domain) self.colindex = colindex # index of the Cell in column-direction (i.e. 
local x-index in the calculation domain) - self.dem_ng = dem_ng # elevation values in the 3x3 neigbourhood around the Cell - self.altitude = dem_ng[1, 1] # elevation value of the cell (central cell of 3x3 neighbourhood) + self.demNeighbours = demNeighbours # elevation values in the 3x3 neigbourhood around the Cell + self.altitude = demNeighbours[1, 1] # elevation value of the cell (central cell of 3x3 neighbourhood) self.cellsize = cellsize # cellsize in meters - self.tan_beta = np.zeros_like(self.dem_ng) - self.dist = np.zeros_like(self.dem_ng) - self.persistence = np.zeros_like(self.dem_ng) - self.r_t = np.zeros_like(self.dem_ng) - self.no_flow = np.ones_like(self.dem_ng) + self.tanBeta = np.zeros_like(self.demNeighbours) + self.dist = np.zeros_like(self.demNeighbours) + self.persistence = np.zeros_like(self.demNeighbours) + self.rT = np.zeros_like(self.demNeighbours) + self.noFlow = np.ones_like(self.demNeighbours) self.flux = flux self.fluxDep = 0 - self.z_delta = z_delta + self.zDelta = zDelta self.alpha = float(alpha) self.exp = int(exp) - self.max_z_delta = float(max_z_delta) - self.flux_threshold = float(flux_threshold) + self.maxZdelta = float(maxZdelta) + self.fluxThreshold = float(fluxThreshold) self.fluxDistOldVersionBool = fluxDistOldVersionBool self.tanAlpha = np.tan(np.deg2rad(self.alpha)) # moved to constructor, so this doesn't have to be calculated on - # every iteration of calc_z_delta(self) + # every iteration of calc_zDelta(self) - self.min_distance = 0 # minimal distance to start-cell (i.e. along shortest path) min_distance >= - self.max_distance = 0 # NOTE: self.max_distance is never used - maybe remove!? - self.min_gamma = 0 # NOTE: self.min_gamma (assumingly minimal travel angle to cell) never used - maybe remove!? - self.max_gamma = 0 - self.sl_gamma = 0 + self.minDistance = 0 # minimal distance to start-cell (i.e. along shortest path) minDistance >= + self.maxDistance = 0 # NOTE: self.maxDistance is never used - maybe remove!? 
+ self.minGamma = 0 # NOTE: self.minGamma (assumingly minimal travel angle to cell) never used - maybe remove!? + self.maxGamma = 0 + self.slGamma = 0 self._SQRT2 = np.sqrt(2.0) self._RAD90 = np.deg2rad(90.0) @@ -108,7 +108,7 @@ def __init__( # NOTE: This is a quick hack to check if all values for Detrainment are set to 0 (as provided in the # .ini file) # if this is the case, then the self.forest_detrainment function does not have to be called inside - # self.calc_distribution + # self.calcDistribution # TO-DO: clean this up and handle it better if (self.forestModule == "forestFriction") or (self.forestModule == "forestFrictionLayer"): self.forestDetrainmentBool = False @@ -148,7 +148,7 @@ def __init__( if self.forestInteraction: self.forestIntCount += parent.forestIntCount - def add_os(self, flux): + def addFlux(self, flux): """ Adds flux to 'existing' flux @@ -159,7 +159,7 @@ def add_os(self, flux): """ self.flux += flux - def add_parent(self, parent): + def addParent(self, parent): """ Adds parent to parents list and optionally the forest interaction value of the parent @@ -176,7 +176,7 @@ def add_parent(self, parent): if parent.forestIntCount < (self.forestIntCount - self.isForest): self.forestIntCount = parent.forestIntCount + self.isForest - def calc_fp_travelangle(self): + def calc_fpTravelangle(self): """function calculates the travel-angle along the shortest flow-path from the start-cell to the current cell. The travel-angle along the shortest flow-path is equivalent to the maximum travel angle along all paths from the startcell to this cell. 
@@ -186,15 +186,15 @@ def calc_fp_travelangle(self): for parent in self.lOfParents: _dx = abs(parent.colindex - self.colindex) _dy = abs(parent.rowindex - self.rowindex) - _ldistMin.append(math.sqrt(_dx * _dx + _dy * _dy) * self.cellsize + parent.min_distance) - self.min_distance = np.amin(_ldistMin) - self.max_gamma = np.rad2deg(np.arctan(_dh / self.min_distance)) + _ldistMin.append(math.sqrt(_dx * _dx + _dy * _dy) * self.cellsize + parent.minDistance) + self.minDistance = np.amin(_ldistMin) + self.maxGamma = np.rad2deg(np.arctan(_dh / self.minDistance)) - def calc_sl_travelangle(self): + def calc_slTravelangle(self): """function calculates the travel-angle between the start cell and the current cell using the shortest connection or straight line (sl) between the two cells. - The travel angle calculated with this shortest horizontal distance 'sl_gamma' is always - larger or equal to the travel angle 'max_gamma', which is calculated along the + The travel angle calculated with this shortest horizontal distance 'slGamma' is always + larger or equal to the travel angle 'maxGamma', which is calculated along the (potentially curved) flow path (fp). (vgl. Meißl, 1998) """ _dx = abs(self.startcell.colindex - self.colindex) @@ -202,15 +202,15 @@ def calc_sl_travelangle(self): _dh = self.startcell.altitude - self.altitude _ds = math.sqrt(_dx * _dx + _dy * _dy) * self.cellsize - self.sl_gamma = np.rad2deg(np.arctan(_dh / _ds)) + self.slGamma = np.rad2deg(np.arctan(_dh / _ds)) - def calc_z_delta(self): + def calc_zDelta(self): """ function calculates zDelta (velocity or energy line height) to the eligible neighbours NOTE: forestFriction related mechanics are implemented here! 
""" - self.z_delta_neighbour = np.zeros((3, 3)) - self.z_gamma = self.altitude - self.dem_ng + self.zDeltaNeighbour = np.zeros((3, 3)) + self.zGamma = self.altitude - self.demNeighbours ds = np.array([[self._SQRT2, 1, self._SQRT2], [1, 0, 1], [self._SQRT2, 1, self._SQRT2]]) if self.forestBool: @@ -234,13 +234,13 @@ def calc_z_delta(self): # ideally be handled by a separate release-area algorithm in the pre-processing # NOTE-TODO: The rest of this implementation is also just copy+pasted from 'foreste_detraiment' # branch and not yet fully tested!! - if self.z_delta < self.noFricitonEffectZdelta: + if self.zDelta < self.noFricitonEffectZdelta: # friction at rest v=0 would be applied to start cells _rest = self.maxAddedFrictionForest * self.FSI # rise over run _slope = (_rest - self.minAddedFrictionForest) / (0 - self.noFricitonEffectZdelta) - # y = mx + b, shere z_delta is the x - friction = max(self.minAddedFrictionForest, _slope * self.z_delta + _rest) + # y = mx + b, where zDelta is the x + friction = max(self.minAddedFrictionForest, _slope * self.zDelta + _rest) _alpha_calc = self.alpha + max(0, friction) # NOTE: not sure what this does, seems redundant!
else: @@ -255,31 +255,31 @@ def calc_z_delta(self): # else simply use tanAlpha _tanAlpha = self.tanAlpha - self.z_alpha = ds * self.cellsize * _tanAlpha - self.z_delta_neighbour = self.z_delta + self.z_gamma - self.z_alpha - self.z_delta_neighbour[self.z_delta_neighbour < 0] = 0 - self.z_delta_neighbour[self.z_delta_neighbour > self.max_z_delta] = self.max_z_delta + self.zAlpha = ds * self.cellsize * _tanAlpha + self.zDeltaNeighbour = self.zDelta + self.zGamma - self.zAlpha + self.zDeltaNeighbour[self.zDeltaNeighbour < 0] = 0 + self.zDeltaNeighbour[self.zDeltaNeighbour > self.maxZdelta] = self.maxZdelta - def calc_tanbeta(self): + def calc_tanBeta(self): """calculates the normalized terrain based routing """ _ds = np.array([[self._SQRT2, 1, self._SQRT2], [1, 1, 1], [self._SQRT2, 1, self._SQRT2]]) _distance = _ds * self.cellsize - _beta = np.arctan((self.altitude - self.dem_ng) / _distance) + self._RAD90 - self.tan_beta = np.tan(_beta / 2) + _beta = np.arctan((self.altitude - self.demNeighbours) / _distance) + self._RAD90 + self.tanBeta = np.tan(_beta / 2) - self.tan_beta[self.z_delta_neighbour <= 0] = 0 - self.tan_beta[self.persistence <= 0] = 0 - self.tan_beta[1, 1] = 0 - if abs(np.sum(self.tan_beta)) > 0: - self.r_t = self.tan_beta**self.exp / np.sum(self.tan_beta**self.exp) + self.tanBeta[self.zDeltaNeighbour <= 0] = 0 + self.tanBeta[self.persistence <= 0] = 0 + self.tanBeta[1, 1] = 0 + if abs(np.sum(self.tanBeta)) > 0: + self.rT = self.tanBeta**self.exp / np.sum(self.tanBeta**self.exp) def calc_persistence(self): """ calculates persistence-based routing """ - self.persistence = np.zeros_like(self.dem_ng) + self.persistence = np.zeros_like(self.demNeighbours) if self.is_start: self.persistence += 1 elif self.lOfParents[0].is_start: @@ -289,9 +289,9 @@ def calc_persistence(self): dx = parent.colindex - self.colindex dy = parent.rowindex - self.rowindex - self.no_flow[dy + 1, dx + 1] = 0 # 3x3 Matrix of ones, every parent gets a 0, no flow to a parent field + 
self.noFlow[dy + 1, dx + 1] = 0 # 3x3 Matrix of ones, every parent gets a 0, no flow to a parent field - maxweight = parent.z_delta + maxweight = parent.zDelta # Old Calculation if dx == -1: if dy == -1: @@ -331,7 +331,7 @@ def calc_persistence(self): self.persistence[0, 1] += 0.707 * maxweight self.persistence[1, 0] += 0.707 * maxweight - def calc_distribution(self): + def calcDistribution(self): """ calculates flux and zdelta that is distributed to the adjacent cells @@ -340,10 +340,10 @@ def calc_distribution(self): tuple list of row, col, flux, zdelta of adjacent cells that receive flux/zdelta """ - self.calc_z_delta() + self.calc_zDelta() self.calc_persistence() - self.persistence *= self.no_flow - self.calc_tanbeta() + self.persistence *= self.noFlow + self.calc_tanBeta() # print(self.persistence) if not self.is_start: @@ -351,19 +351,19 @@ def calc_distribution(self): if self.forestBool and self.forestDetrainmentBool: self.forest_detrainment() - self.calc_fp_travelangle() - self.calc_sl_travelangle() + self.calc_fpTravelangle() + self.calc_slTravelangle() # FOREST-Detrainment # here we subtract the detrainment from the flux before moving flux to new cells. if self.forestBool and self.forestDetrainmentBool: # NOTE-TODO: check/test what the hard-coded 0.0003 does here or if this should be - # substituted by self.flux_threshold???? + # substituted by self.fluxThreshold???? 
self.flux = max(0.0003, self.flux - self.detrainment) - threshold = self.flux_threshold - if np.sum(self.r_t) > 0: - self.dist = (self.persistence * self.r_t) / np.sum(self.persistence * self.r_t) * self.flux + threshold = self.fluxThreshold + if np.sum(self.rT) > 0: + self.dist = (self.persistence * self.rT) / np.sum(self.persistence * self.rT) * self.flux # This handles (local) flux re-distribution if n cells are below threshold, but lager 0 and m cells are # still above threshold @@ -374,37 +374,37 @@ def calc_distribution(self): """ legacy version of code (pre 01-2025) WITH BUG (is used if self.fluxDistOldVersionBool) here 'count' returns the number of neighbor/child cells that receive flux > 0, but - below the flux_threshold. + below the fluxThreshold. """ count = ((0 < self.dist) & (self.dist < threshold)).sum() else: """ default/correct version with FIXED BUG (used if self.fluxDistOldVersionBool == False) her 'count' returns the number of neighbor/child cells which receive - flux >= the flux_threshold. + flux >= the fluxThreshold. 
""" count = (self.dist >= threshold).sum() - # massToDistribute ~ sum of flux of child cells below the flux_threshold - mass_to_distribute = np.sum(self.dist[self.dist < threshold]) + # massToDistribute ~ sum of flux of child cells below the fluxThreshold + massToDistribute = np.sum(self.dist[self.dist < threshold]) - if mass_to_distribute > 0 and count > 0: + if massToDistribute > 0 and count > 0: # local flux redistribution to eligible child cells - self.dist[self.dist > threshold] += mass_to_distribute / count + self.dist[self.dist > threshold] += massToDistribute / count self.dist[self.dist < threshold] = 0 if np.sum(self.dist) < self.flux and count > 0: # correction/flux conservation for potential rounding losses self.dist[self.dist > threshold] += (self.flux - np.sum(self.dist)) / count if count == 0: - # if all child cells are below flux_threshold, the flux is deposited + # if all child cells are below fluxThreshold, the flux is deposited # TODO: check alternatives (e.g. 'global' redistribution or within generations) self.fluxDep = self.flux - row_local, col_local = np.where(self.dist > threshold) + rowLocal, colLocal = np.where(self.dist > threshold) return ( - self.rowindex - 1 + row_local, - self.colindex - 1 + col_local, - self.dist[row_local, col_local], - self.z_delta_neighbour[row_local, col_local], + self.rowindex - 1 + rowLocal, + self.colindex - 1 + colLocal, + self.dist[rowLocal, colLocal], + self.zDeltaNeighbour[rowLocal, colLocal], ) def forest_detrainment(self): @@ -421,4 +421,4 @@ def forest_detrainment(self): # rise over run (should be negative slope) slope = (_rest - self.minAddedDetrainmentForest) / (0 - _noDetrainmentEffectZdelta) # y=mx+b, where zDelta is x - self.detrainment = max(self.minAddedDetrainmentForest, slope * self.z_delta + _rest) + self.detrainment = max(self.minAddedDetrainmentForest, slope * self.zDelta + _rest) diff --git a/avaframe/com4FlowPy/flowCore.py b/avaframe/com4FlowPy/flowCore.py index 244c202bf..74f234d25 100644 
--- a/avaframe/com4FlowPy/flowCore.py +++ b/avaframe/com4FlowPy/flowCore.py @@ -24,7 +24,7 @@ from avaframe.com4FlowPy.flowClass import Cell -def get_start_idx(dem, release): +def getStartIdx(dem, release): """Sort Release Pixels by altitude and return the result as lists for the Rows and Columns, starting with the highest altitude @@ -37,22 +37,22 @@ def get_start_idx(dem, release): Returns ----------- - row_list: list + rowList: list Row indices of release pixels sorted by altitude - col_list: list + colList: list Column indices of release pixels sorted by altitude """ - row_list, col_list = np.where(release > 0) # Gives back the indices of the release areas - if len(row_list) > 0: - altitude_list = [] - for i in range(len(row_list)): - altitude_list.append(dem[row_list[i], col_list[i]]) - altitude_list, row_list, col_list = list(zip(*sorted(zip(altitude_list, row_list, col_list), reverse=True))) + rowList, colList = np.where(release > 0) # Gives back the indices of the release areas + if len(rowList) > 0: + altitudeList = [] + for i in range(len(rowList)): + altitudeList.append(dem[rowList[i], colList[i]]) + altitudeList, rowList, colList = list(zip(*sorted(zip(altitudeList, rowList, colList), reverse=True))) # Sort this lists by altitude - return row_list, col_list + return rowList, colList -def split_release(release, pieces): +def splitRelease(release, pieces): """Split the release layer in several tiles. The area is determined by the number of release pixels in it, so that every tile has the same amount of release pixels in it. @@ -77,67 +77,67 @@ def split_release(release, pieces): Returns ----------- - release_list: list + releaseList: list contains the tiles(arrays) [array0, array1, ..] 
""" # Flatten the array and compute the cumulative sum - flat_release = release.flatten() - cumulative_sum = np.cumsum(flat_release) + _flatRelease = release.flatten() + _cumulativeSum = np.cumsum(_flatRelease) - total_sum = cumulative_sum[-1] - sum_per_split = total_sum / pieces + _totalSum = _cumulativeSum[-1] + _sumPerSplit = _totalSum / pieces - release_list = [] - start_index = 0 + releaseList = [] + startIndex = 0 for i in range(1, pieces): # Find the split point in the flattened array - split_index = np.searchsorted(cumulative_sum, sum_per_split * i) + _splitIndex = np.searchsorted(_cumulativeSum, _sumPerSplit * i) # Create a new array for this split - split_flat = np.zeros_like(flat_release) - split_flat[start_index:split_index] = flat_release[start_index:split_index] + _splitFlat = np.zeros_like(_flatRelease) + _splitFlat[startIndex:_splitIndex] = _flatRelease[startIndex:_splitIndex] # Reshape the flat array back to 2D and add to the list - release_list.append(split_flat.reshape(release.shape)) + releaseList.append(_splitFlat.reshape(release.shape)) - start_index = split_index + startIndex = _splitIndex # Handle the last piece - split_flat = np.zeros_like(flat_release) - split_flat[start_index:] = flat_release[start_index:] - release_list.append(split_flat.reshape(release.shape)) + _splitFlat = np.zeros_like(_flatRelease) + _splitFlat[startIndex:] = _flatRelease[startIndex:] + releaseList.append(_splitFlat.reshape(release.shape)) - return release_list + return releaseList -def back_calculation(back_cell): +def backCalculation(backCell): """Here the back calculation from a run out pixel that hits a infrastructure to the release pixel is performed. Parameters ----------- - back_cell: class-object cell + backCell: class-object cell cell that hit a Infrastructure Returns ----------- - back_list: list + backList: list List of cells that are on the way to the start cell TODO:Maybe change it to array like DEM? 
""" - back_list = [] - for parent in back_cell.lOfParents: - if parent not in back_list: - back_list.append(parent) - for cell in back_list: + backList = [] + for parent in backCell.lOfParents: + if parent not in backList: + backList.append(parent) + for cell in backList: for parent in cell.lOfParents: # Check if parent already in list - if parent not in back_list: - back_list.append(parent) + if parent not in backList: + backList.append(parent) - return back_list + return backList def run(optTuple): @@ -165,8 +165,8 @@ def run(optTuple): # Flow-Py parameters alpha = float(optTuple[2]["alpha"]) exp = float(optTuple[2]["exp"]) - flux_threshold = float(optTuple[2]["flux_threshold"]) - max_z_delta = float(optTuple[2]["max_z"]) + fluxThreshold = float(optTuple[2]["fluxThreshold"]) + maxZdelta = float(optTuple[2]["maxZ"]) infraBool = optTuple[2]["infraBool"] forestBool = optTuple[2]["forestBool"] forestInteraction = optTuple[2]["forestInteraction"] @@ -248,7 +248,7 @@ def run(optTuple): chunkSize=MPOptions["chunkSize"], ) - release_list = split_release(release, nChunks) + releaseList = splitRelease(release, nChunks) log.info("Multiprocessing starts, used Cores/Processes/Chunks: %i/%i/%i" % (MPOptions["nCPU"], nProcesses, nChunks)) with Pool(processes=nProcesses) as pool: @@ -256,14 +256,14 @@ def run(optTuple): calculation, [ [ # TODO: write in dicts: - dem, infra, release_sub, - alpha, exp, flux_threshold, max_z_delta, + dem, infra, releaseSub, + alpha, exp, fluxThreshold, maxZdelta, nodata, cellsize, infraBool, forestBool, varParams, fluxDistOldVersionBool, forestArray, forestParams, ] - for release_sub in release_list + for releaseSub in releaseList ], ) pool.close() @@ -335,15 +335,15 @@ def run(optTuple): np.maximum(forestIntArray, forestIntList[i])) # Save Calculated tiles - np.save(tempDir / ("res_z_delta_%s_%s" % (optTuple[0], optTuple[1])), zDeltaArray) - np.save(tempDir / ("res_z_delta_sum_%s_%s" % (optTuple[0], optTuple[1])), zDeltaSumArray) - np.save(tempDir 
/ ("res_rout_flux_sum_%s_%s" % (optTuple[0], optTuple[1])), routFluxSumArray) - np.save(tempDir / ("res_dep_flux_sum_%s_%s" % (optTuple[0], optTuple[1])), depFluxSumArray) + np.save(tempDir / ("res_zDelta_%s_%s" % (optTuple[0], optTuple[1])), zDeltaArray) + np.save(tempDir / ("res_zDeltaSum_%s_%s" % (optTuple[0], optTuple[1])), zDeltaSumArray) + np.save(tempDir / ("res_routFluxSum_%s_%s" % (optTuple[0], optTuple[1])), routFluxSumArray) + np.save(tempDir / ("res_depFluxSum_%s_%s" % (optTuple[0], optTuple[1])), depFluxSumArray) np.save(tempDir / ("res_flux_%s_%s" % (optTuple[0], optTuple[1])), fluxArray) np.save(tempDir / ("res_count_%s_%s" % (optTuple[0], optTuple[1])), countArray) np.save(tempDir / ("res_fp_%s_%s" % (optTuple[0], optTuple[1])), fpTravelAngleArray) np.save(tempDir / ("res_sl_%s_%s" % (optTuple[0], optTuple[1])), slTravelAngleArray) - np.save(tempDir / ("res_travel_length_%s_%s" % (optTuple[0], optTuple[1])), travelLengthArray) + np.save(tempDir / ("res_travelLength_%s_%s" % (optTuple[0], optTuple[1])), travelLengthArray) if infraBool: np.save(tempDir / ("res_backcalc_%s_%s" % (optTuple[0], optTuple[1])), backcalc) if forestInteraction: @@ -413,8 +413,8 @@ def calculation(args): release = args[2] alpha = args[3] exp = args[4] - flux_threshold = args[5] - max_z_delta = args[6] + fluxThreshold = args[5] + maxZdelta = args[6] nodata = args[7] cellsize = args[8] infraBool = args[9] @@ -452,85 +452,85 @@ def calculation(args): backcalc = np.zeros_like(dem, dtype=np.int32) if infraBool: - back_list = [] + backList = [] if forestInteraction: forestIntArray = np.ones_like(dem, dtype=np.float32) * -9999 # Core # start = datetime.now().replace(microsecond=0) - # NOTE-TODO: row_list, col_list are tuples - rethink variable naming - row_list, col_list = get_start_idx(dem, release) + # NOTE-TODO: rowList, colList are tuples - rethink variable naming + rowList, colList = getStartIdx(dem, release) - startcell_idx = 0 - while startcell_idx < len(row_list): + 
startcellIdx = 0 + while startcellIdx < len(rowList): processedCells = {} # dictionary of cells that have been processed already - cell_list = [] - row_idx = row_list[startcell_idx] - col_idx = col_list[startcell_idx] - dem_ng = dem[row_idx - 1: row_idx + 2, col_idx - 1: col_idx + 2] # neighbourhood DEM + cellList = [] + rowIdx = rowList[startcellIdx] + colIdx = colList[startcellIdx] + demNeighbours = dem[rowIdx - 1: rowIdx + 2, colIdx - 1: colIdx + 2] # neighbourhood DEM if varUmaxBool and varUmaxArray is not None: - if varUmaxArray[row_idx, col_idx] > 0 and varUmaxArray[row_idx, col_idx] <= 8848: - max_z_delta = varUmaxArray[row_idx, col_idx] + if varUmaxArray[rowIdx, colIdx] > 0 and varUmaxArray[rowIdx, colIdx] <= 8848: + maxZdelta = varUmaxArray[rowIdx, colIdx] if varAlphaBool and varAlphaArray is not None: - if varAlphaArray[row_idx, col_idx] > 0 and varAlphaArray[row_idx, col_idx] <= 90: - alpha = varAlphaArray[row_idx, col_idx] + if varAlphaArray[rowIdx, colIdx] > 0 and varAlphaArray[rowIdx, colIdx] <= 90: + alpha = varAlphaArray[rowIdx, colIdx] if varExponentBool and varExponentArray is not None: - if varExponentArray[row_idx, col_idx] > 0: - exp = varExponentArray[row_idx, col_idx] + if varExponentArray[rowIdx, colIdx] > 0: + exp = varExponentArray[rowIdx, colIdx] - if (nodata in dem_ng) or np.size(dem_ng) < 9: - startcell_idx += 1 + if (nodata in demNeighbours) or np.size(demNeighbours) < 9: + startcellIdx += 1 continue startcell = Cell( - row_idx, col_idx, - dem_ng, cellsize, + rowIdx, colIdx, + demNeighbours, cellsize, 1, 0, None, - alpha, exp, flux_threshold, max_z_delta, + alpha, exp, fluxThreshold, maxZdelta, startcell=True, fluxDistOldVersionBool=fluxDistOldVersionBool, - FSI=forestArray[row_idx, col_idx] if isinstance(forestArray, np.ndarray) else None, + FSI=forestArray[rowIdx, colIdx] if isinstance(forestArray, np.ndarray) else None, forestParams=forestParams, ) # dictionary of all the cells that have been processed and the number of times the 
cell has been visited processedCells[(startcell.rowindex, startcell.colindex)] = 1 # list of flowClass.Cell() Objects that is contains the "path" for each release-cell - cell_list.append(startcell) + cellList.append(startcell) - for idx, cell in enumerate(cell_list): + for idx, cell in enumerate(cellList): - row, col, flux, z_delta = cell.calc_distribution() + row, col, flux, zDelta = cell.calcDistribution() if len(flux) > 0: # mass, row, col = list(zip(*sorted(zip( mass, row, col), reverse=False))) - z_delta, flux, row, col = list(zip(*sorted(zip(z_delta, flux, row, col), reverse=False))) + zDelta, flux, row, col = list(zip(*sorted(zip(zDelta, flux, row, col), reverse=False))) # Sort this lists by elh, to start with the highest cell # check if cell already exists - for i in range(idx, len(cell_list)): # Check if Cell already exists + for i in range(idx, len(cellList)): # Check if Cell already exists k = 0 while k < len(row): - if row[k] == cell_list[i].rowindex and col[k] == cell_list[i].colindex: - cell_list[i].add_os(flux[k]) - cell_list[i].add_parent(cell) - if z_delta[k] > cell_list[i].z_delta: - cell_list[i].z_delta = z_delta[k] + if row[k] == cellList[i].rowindex and col[k] == cellList[i].colindex: + cellList[i].addFlux(flux[k]) + cellList[i].addParent(cell) + if zDelta[k] > cellList[i].zDelta: + cellList[i].zDelta = zDelta[k] row = np.delete(row, k) col = np.delete(col, k) flux = np.delete(flux, k) - z_delta = np.delete(z_delta, k) + zDelta = np.delete(zDelta, k) else: k += 1 for k in range(len(row)): - dem_ng = dem[row[k] - 1: row[k] + 2, col[k] - 1: col[k] + 2] # neighbourhood DEM + demNeighbours = dem[row[k] - 1: row[k] + 2, col[k] - 1: col[k] + 2] # neighbourhood DEM # This bit handles edge cases and noData-values in the DEM!! this is an important piece of code, since # no-data handling is expected (by some users/applications) to behave like here: # i.e. 
if nodata in the 3x3 neighbourhood --> no calculation - if (nodata in dem_ng) or np.size(dem_ng) < 9: + if (nodata in demNeighbours) or np.size(demNeighbours) < 9: continue # if the current child cell is already in processedCells @@ -541,28 +541,28 @@ def calculation(args): else: processedCells[(row[k], col[k])] = 1 - cell_list.append(Cell( + cellList.append(Cell( row[k], col[k], - dem_ng, cellsize, - flux[k], z_delta[k], + demNeighbours, cellsize, + flux[k], zDelta[k], cell, - alpha, exp, flux_threshold, max_z_delta, + alpha, exp, fluxThreshold, maxZdelta, startcell, fluxDistOldVersionBool=fluxDistOldVersionBool, FSI=forestArray[row[k], col[k]] if isinstance(forestArray, np.ndarray) else None, forestParams=forestParams, )) - zDeltaArray[cell.rowindex, cell.colindex] = max(zDeltaArray[cell.rowindex, cell.colindex], cell.z_delta) + zDeltaArray[cell.rowindex, cell.colindex] = max(zDeltaArray[cell.rowindex, cell.colindex], cell.zDelta) fluxArray[cell.rowindex, cell.colindex] = max(fluxArray[cell.rowindex, cell.colindex], cell.flux) routFluxSumArray[cell.rowindex, cell.colindex] += cell.flux depFluxSumArray[cell.rowindex, cell.colindex] += cell.fluxDep - zDeltaSumArray[cell.rowindex, cell.colindex] += cell.z_delta + zDeltaSumArray[cell.rowindex, cell.colindex] += cell.zDelta fpTravelAngleArray[cell.rowindex, cell.colindex] = max(fpTravelAngleArray[cell.rowindex, cell.colindex], - cell.max_gamma) + cell.maxGamma) slTravelAngleArray[cell.rowindex, cell.colindex] = max(slTravelAngleArray[cell.rowindex, cell.colindex], - cell.sl_gamma) + cell.slGamma) travelLengthArray[cell.rowindex, cell.colindex] = max(travelLengthArray[cell.rowindex, cell.colindex], - cell.min_distance) + cell.minDistance) if processedCells[(cell.rowindex, cell.colindex)] == 1: countArray[cell.rowindex, cell.colindex] += int(1) @@ -573,7 +573,7 @@ def calculation(args): # do backcalculation after the path calculation is finished if infra[cell.rowindex, cell.colindex] > 0: # backlist = [] - backList = 
back_calculation(cell) + backList = backCalculation(cell) for bCell in backList: backcalc[bCell.rowindex, bCell.colindex] = max(backcalc[bCell.rowindex, bCell.colindex], @@ -589,11 +589,11 @@ def calculation(args): if infraBool: release[zDeltaArray > 0] = 0 # Check if i hit a release Cell, if so set it to zero and get again the indexes of release cells - row_list, col_list = get_start_idx(dem, release) + rowList, colList = getStartIdx(dem, release) - del cell_list, processedCells + del cellList, processedCells - startcell_idx += 1 + startcellIdx += 1 # end = datetime.now().replace(microsecond=0) gc.collect() if forestInteraction: @@ -637,7 +637,7 @@ def enoughMemoryAvailable(limit=0.05): def calculateMultiProcessingOptions(nRel, nCPU, procPerCPU=1, maxChunks=500, chunkSize=50): """compute required options for multiprocessing of calulation() function inside run() and accompanied splitting of - release cells into chunks in split_release().append + release cells into chunks in splitRelease().append The general idea is to make good use of available CPU resources to speed up calculations while not getting into trouble with RAM issues ... diff --git a/avaframe/com4FlowPy/rasterIo.py b/avaframe/com4FlowPy/rasterIo.py index b567128ae..0c7711cb8 100755 --- a/avaframe/com4FlowPy/rasterIo.py +++ b/avaframe/com4FlowPy/rasterIo.py @@ -12,14 +12,14 @@ log = logging.getLogger(__name__) -def read_header(input_file): +def readHeader(inputFile): """ Reads the header of the raster file raster file should be readable by rasterio (e.g. 
.tif, .asc) Parameters ----------- - input_file: str + inputFile: str path to raster file Returns @@ -28,9 +28,9 @@ def read_header(input_file): header of raster file in style of ASCII-Rasters """ - raster = rasterio.open(input_file) + raster = rasterio.open(inputFile) if raster is None: - print("Unable to open {}".format(input_file)) + print("Unable to open {}".format(inputFile)) sys.exit(1) header = {} @@ -43,31 +43,31 @@ def read_header(input_file): return header -def read_raster(input_file): +def readRaster(inputFile): """ Reads in a raster file Parameters ----------- - input_file: str + inputFile: str path to raster file Returns ----------- - my_array: np.array + readArray: np.array numpy array with values read in from the raster file header: dict header of raster file in style of ASCII-Rasters """ - header = read_header(input_file) - raster = rasterio.open(input_file) - my_array = raster.read(1) + header = readHeader(inputFile) + raster = rasterio.open(inputFile) + readArray = raster.read(1) - return my_array, header + return readArray, header -def output_raster(referenceFile, file_out, raster): +def outputRaster(referenceFile, fileOut, raster): """ Saves raster @@ -75,29 +75,29 @@ def output_raster(referenceFile, file_out, raster): ----------- referenceFile: str path to raster file to reference on, mostly DEM - file_out: str + fileOut: str path for the outputfile, possible extends are .asc or .tif raster: np.array raster (array) that is saved """ - raster_trans = rasterio.open(referenceFile) + rasterTrans = rasterio.open(referenceFile) try: - crs = rasterio.crs.CRS.from_dict(raster_trans.crs.data) + crs = rasterio.crs.CRS.from_dict(rasterTrans.crs.data) except: # crs = rasterio.crs.CRS.from_epsg(4326) crs = None _success = True - if file_out.suffix == ".asc": + if fileOut.suffix == ".asc": _driver = "AAIGrid" - elif file_out.suffix == ".tif": + elif fileOut.suffix == ".tif": _driver = "GTiff" try: with rasterio.open( - file_out, + fileOut, "w", 
driver=_driver, height=raster.shape[0], @@ -105,18 +105,18 @@ def output_raster(referenceFile, file_out, raster): count=1, dtype=raster.dtype, crs=crs, - transform=raster_trans.transform, + transform=rasterTrans.transform, nodata=-9999, - ) as new_dataset: - new_dataset.write(raster, 1) + ) as newDataset: + newDataset.write(raster, 1) except: _success = False - log.error("could not write {} to {}".format(raster, file_out)) + log.error("could not write {} to {}".format(raster, fileOut)) try: if _success is True: - log.info("wrote file: {}".format(file_out)) + log.info("wrote file: {}".format(fileOut)) else: - log.info("failed to write file: {}".format(file_out)) + log.info("failed to write file: {}".format(fileOut)) except: pass diff --git a/avaframe/com4FlowPy/splitAndMerge.py b/avaframe/com4FlowPy/splitAndMerge.py index e621d6241..d0c02f45e 100644 --- a/avaframe/com4FlowPy/splitAndMerge.py +++ b/avaframe/com4FlowPy/splitAndMerge.py @@ -42,7 +42,7 @@ def tileRaster(fNameIn, fNameOut, dirName, xDim, yDim, U, isInit=False): # os.makedirs(dirName) # largeRaster, largeHeader = iof.f_readASC(fNameIn, dType='float') - largeRaster, largeHeader = io.read_raster(fNameIn) + largeRaster, largeHeader = io.readRaster(fNameIn) # einlesen des Rasters und der Header i, j, imax, jmax = 0, 0, 0, 0 diff --git a/avaframe/runCom4FlowPy.py b/avaframe/runCom4FlowPy.py index 4a7f3af7f..3298641e9 100644 --- a/avaframe/runCom4FlowPy.py +++ b/avaframe/runCom4FlowPy.py @@ -119,18 +119,18 @@ def main(): timeString = datetime.now().strftime("%Y%m%d_%H%M%S") try: os.makedirs(workDir / "res_{}".format(uid)) # (time_string)) - res_dir = workDir / "res_{}".format(uid) # (time_string) + resDir = workDir / "res_{}".format(uid) # (time_string) except FileExistsError: log.info("folder with same name already exists - aborting") log.info("simulation results folder with same .ini parameters already exists: simulation {}".format(uid)) sys.exit(1) try: - os.makedirs(workDir / res_dir / "temp") - temp_dir = 
workDir / res_dir / "temp" + os.makedirs(workDir / resDir / "temp") + tempDir = workDir / resDir / "temp" except FileExistsError: log.info("temp folder for simualtion {} already exists - aborting".format(uid)) sys.exit(1) - log = logUtils.initiateLogger(res_dir, logName) + log = logUtils.initiateLogger(resDir, logName) # writing config to .json file successToJSON = writeCfgJSON(cfg, uid, workDir) @@ -142,9 +142,9 @@ def main(): log.error("Exception occurred: %s", str(successToJSON), exc_info=True) cfgPath["workDir"] = pathlib.Path(workDir) - cfgPath["outDir"] = pathlib.Path(res_dir) + cfgPath["outDir"] = pathlib.Path(resDir) cfgPath["resDir"] = cfgPath["outDir"] - cfgPath["tempDir"] = pathlib.Path(temp_dir) + cfgPath["tempDir"] = pathlib.Path(tempDir) cfgPath["demPath"] = pathlib.Path(cfgCustomPaths["demPath"]) cfgPath["releasePath"] = pathlib.Path(cfgCustomPaths["releasePath"]) cfgPath["infraPath"] = pathlib.Path(cfgCustomPaths["infraPath"])