From d0a81e904433b6971cee853bc6a989f9dc29ca97 Mon Sep 17 00:00:00 2001 From: Felix Oesterle Date: Mon, 22 Dec 2025 23:30:15 +0100 Subject: [PATCH 1/5] refactor(fileHandlerUtils): streamline filename parsing using `parseSimName` - Integrated `cfgUtils.parseSimName` to simplify extraction of simulation components across old and new formats. - Unified reconstruction logic for `simName` using parsed components and removed redundant conditions. - Updated corresponding logic in dataframe population to use parsed results for consistency. - Added extensive unit tests in `cfgUtils` to validate `parseSimName` functionality across different naming conventions and edge cases. - Minor refactors in test cases and `com1DFA` to align with new parsing logic. refactor(cfgUtils): streamline `parseSimName` to extract short module names - Updated `parseSimName` to extract short module names (e.g., "com1" instead of "com1DFA"). - Refactored multiple files to consistently use parsed components, replacing manual string parsing. - Modified tests to cover enhanced `parseSimName` behavior, removing dependency on legacy module patterns. 
--- avaframe/ana1Tests/rotationTest.py | 5 +- avaframe/ana3AIMEC/dfa2Aimec.py | 16 +- avaframe/ana5Utils/DFAPathGeneration.py | 2 +- avaframe/com1DFA/com1DFA.py | 8 +- avaframe/in3Utils/cfgUtils.py | 126 ++++++++++- avaframe/in3Utils/fileHandlerUtils.py | 148 ++++++------- avaframe/log2Report/generateReport.py | 12 +- avaframe/tests/test_cfgUtils.py | 278 +++++++++++++++++++++++- avaframe/tests/test_com1DFA.py | 4 +- 9 files changed, 489 insertions(+), 110 deletions(-) diff --git a/avaframe/ana1Tests/rotationTest.py b/avaframe/ana1Tests/rotationTest.py index 7eaa1be27..32812257a 100644 --- a/avaframe/ana1Tests/rotationTest.py +++ b/avaframe/ana1Tests/rotationTest.py @@ -11,6 +11,7 @@ # Local imports # import config and init tools from avaframe.in3Utils import fileHandlerUtils as fU +from avaframe.in3Utils import cfgUtils from avaframe.log2Report import generateReport as gR from avaframe.in3Utils import geoTrans as gT import avaframe.in2Trans.rasterUtils as IOf @@ -63,7 +64,7 @@ def mainRotationTest(avalancheDir, energyLineTestCfg, com1DFACfg, dem, simDF, re fU.makeADir(outDir) # get reference angle simName = simDF.loc[refSimRowHash, 'simName'] - relName = (simName.split('_'))[0] + relName = cfgUtils.parseSimName(simName)["releaseName"] thetaRef = float(relName[3:]) for rowSimHash in simDF.index: # rotate results to be able to proceed to the aimec analysis @@ -119,7 +120,7 @@ def rotateDFAResults(avalancheDir, simDF, rowSimHash, resTypeList, thetaRef, com """ log.debug('Rotating simulation: %s' % rowSimHash) simName = simDF.loc[rowSimHash, 'simName'] - relName = (simName.split('_'))[0] + relName = cfgUtils.parseSimName(simName)["releaseName"] theta = float(relName[3:]) simDF.loc[rowSimHash, 'relAngle'] = theta thetaRot = theta - thetaRef diff --git a/avaframe/ana3AIMEC/dfa2Aimec.py b/avaframe/ana3AIMEC/dfa2Aimec.py index 45131a047..473fcb4f5 100644 --- a/avaframe/ana3AIMEC/dfa2Aimec.py +++ b/avaframe/ana3AIMEC/dfa2Aimec.py @@ -9,6 +9,7 @@ # local modules from 
avaframe.in3Utils import fileHandlerUtils as fU +from avaframe.in3Utils import cfgUtils # create local logger # change log level in calling module to DEBUG to see log messages @@ -52,12 +53,19 @@ def getMBInfo(avaDir, inputsDF, comMod, simName=''): message = 'No mass log file found in directory %s' % (str(dir)) log.error(message) raise FileNotFoundError(message) - mbNames = sorted(set(mbFiles), key=lambda s: (str(s).split("_")[1], str(s).split("_")[2], str(s).split("_")[4])) + # Sort mass balance files by simName components using parseSimName + def sortKey(filepath): + """Extract sort key from mass balance filename""" + # Remove 'mass_' prefix from stem to get simName + simName = filepath.stem[5:] # 'mass_' is 5 characters + parsed = cfgUtils.parseSimName(simName) + return (parsed["simHash"], parsed["modName"], parsed["simType"]) + + mbNames = sorted(set(mbFiles), key=sortKey) for mFile in mbNames: - name = mFile.stem - nameParts = name.split('_') - simName = ('_'.join(nameParts[1:])) + # Extract simName from filename (remove 'mass_' prefix) + simName = mFile.stem[5:] simRowHash = inputsDF[inputsDF['simName'] == simName].index[0] inputsDF.loc[simRowHash, 'massBal'] = mFile log.debug('Added to inputsDF[massBal] %s' % (mFile)) diff --git a/avaframe/ana5Utils/DFAPathGeneration.py b/avaframe/ana5Utils/DFAPathGeneration.py index 4b38b6c47..25d46ec24 100644 --- a/avaframe/ana5Utils/DFAPathGeneration.py +++ b/avaframe/ana5Utils/DFAPathGeneration.py @@ -690,7 +690,7 @@ def saveSplitAndPath(avalancheDir, simDFrow, splitPoint, avaProfileMass, dem): avaProfileMass['y'] = avaProfileMass['y'] + dem['originalHeader']['yllcenter'] # get projection from release shp layer simName = simDFrow['simName'] - relName = simName.split('_')[0] + relName = cfgUtils.parseSimName(simName)["releaseName"] inProjection = pathlib.Path(avalancheDir, 'Inputs', 'REL', relName + '.prj') # save profile in Inputs pathAB = pathlib.Path(avalancheDir, 'Outputs', 'ana5Utils', 'DFAPath', 
'massAvgPath_%s_AB_aimec' % simName) diff --git a/avaframe/com1DFA/com1DFA.py b/avaframe/com1DFA/com1DFA.py index c0eda26d4..574d84b4b 100644 --- a/avaframe/com1DFA/com1DFA.py +++ b/avaframe/com1DFA/com1DFA.py @@ -9,6 +9,7 @@ import pathlib import pickle import platform +import re import time from datetime import datetime from functools import partial @@ -3070,8 +3071,10 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting simType and contains full configuration configparser object for simulation run """ - # extract the name of the module - modName = module.__name__.split(".")[-1] + # extract the full module name and short form (e.g., "com1DFA" -> "com1") + modName = module.__name__.split(".")[-1] # Full name: com1DFA, com8MoTPSA, etc. + shortModMatch = re.match(r"^(com\d+)", modName) + modNameShort = shortModMatch.group(1) if shortModMatch else modName # Short name: com1, com8, etc. # get list of simulation types that are desired if "simTypeList" in variationDict: @@ -3304,6 +3307,7 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting [ relNameSim, simHash, + modNameShort, defID, frictIndi or volIndi, row._asdict()["simTypeList"], diff --git a/avaframe/in3Utils/cfgUtils.py b/avaframe/in3Utils/cfgUtils.py index 69cd0a5e5..667f89a79 100644 --- a/avaframe/in3Utils/cfgUtils.py +++ b/avaframe/in3Utils/cfgUtils.py @@ -408,6 +408,122 @@ def readCfgFile(avaDir, module="", fileName=""): return cfg +def parseSimName(name): + """Parse simulation name handling both old and new formats. 
+ + Auto-detects: + - Old format: relName_simHash_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] + - New format: relName_simHash_modName_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] + + Parameters + ---------- + name : str + Simulation name or full filename to parse + + Returns + ------- + dict + Dictionary with keys: + - releaseName: str (required) + - simHash: str (required) + - modName: str (required, "NA" for old format) + - defID: str (required, defaults to "C") + - frictIndi: str | None (optional, values: "S", "M", "L") + - simType: str (required) + - modelType: str (required) + - resType: str | None (optional, only in filenames) + - timeStep: str | None (optional, only in filenames) + + Raises + ------ + ValueError + If required components are missing or format is invalid + + Examples + -------- + >>> parseSimName("release1_a1b2c3_C_S_ent_dfa") + {'releaseName': 'release1', 'simHash': 'a1b2c3', 'modName': 'NA', 'defID': 'C', + 'frictIndi': 'S', 'simType': 'ent', 'modelType': 'dfa', 'resType': None, 'timeStep': None} + + >>> parseSimName("release1_a1b2c3_com1_C_S_ent_dfa") + {'releaseName': 'release1', 'simHash': 'a1b2c3', 'modName': 'com1', 'defID': 'C', + 'frictIndi': 'S', 'simType': 'ent', 'modelType': 'dfa', 'resType': None, 'timeStep': None} + """ + + # Step 1: Handle _AF_ separator + if "_AF_" in name: + nameParts = name.split("_AF_") + releaseName = nameParts[0] + infoParts = nameParts[1].split("_") + else: + nameParts = name.split("_") + releaseName = nameParts[0] + infoParts = nameParts[1:] + + # Step 2: Extract simHash (always first in infoParts) + if len(infoParts) < 1: + raise ValueError(f"Invalid simName format: no simHash found in '{name}'") + simHash = infoParts[0] + + # Step 3: Detect format via module name pattern (com\d+ with optional letters) + # Matches both "com1" and "com1DFA", but extracts only short form (e.g., "com1") + modulePattern = re.compile(r"^com\d+[A-Za-z]*$") + shortModPattern = 
re.compile(r"^(com\d+)") + + if len(infoParts) > 1 and modulePattern.match(infoParts[1]): + # NEW FORMAT - extract short module name (e.g., "com1" from "com1DFA" or "com1") + match = shortModPattern.match(infoParts[1]) + modName = match.group(1) if match else infoParts[1] + remainingParts = infoParts[2:] # Start after modName + else: + # OLD FORMAT + modName = "NA" + remainingParts = infoParts[1:] # Start after simHash + + # Step 4: Detect optional indicators + defID = "C" # Default + frictIndi = None + offset = 0 + + if len(remainingParts) > 0 and remainingParts[0] in ["C", "D"]: + defID = remainingParts[0] + offset = 1 + + if len(remainingParts) > offset and remainingParts[offset] in ["S", "M", "L"]: + frictIndi = remainingParts[offset] + offset += 1 + + # Step 5: Extract required components (simType, modelType) + if len(remainingParts) < offset + 2: + raise ValueError(f"Invalid simName format: missing required components in '{name}'") + + simType = remainingParts[offset] + modelType = remainingParts[offset + 1] + + # Step 6: Extract optional file components (resType, timeStep) + resType = None + timeStep = None + + if len(remainingParts) > offset + 2: + resType = remainingParts[offset + 2] + + if len(remainingParts) > offset + 3: + timeStep = remainingParts[offset + 3] + + # Step 7: Return structured dictionary + return { + "releaseName": releaseName, + "simHash": simHash, + "modName": modName, + "defID": defID, + "frictIndi": frictIndi, + "simType": simType, + "modelType": modelType, + "resType": resType, + "timeStep": timeStep, + } + + def cfgHash(cfg, typeDict=False): """UID hash of a config. 
Given a configParser object cfg, or a dictionary - then typeDict=True, returns a uid hash @@ -530,14 +646,8 @@ def createConfigurationInfo( for cFile in configFiles: if "sourceConfiguration" not in str(cFile): simName = pathlib.Path(cFile).stem - if "_AF_" in simName: - nameParts = simName.split("_AF_") - infoParts = nameParts[1].split("_") - - else: - nameParts = simName.split("_") - infoParts = nameParts[1:] - simHash = infoParts[0] + # Extract simHash using parseSimName + simHash = parseSimName(simName)["simHash"] cfgObject = readCfgFile(avaDir, fileName=cFile) simDF = appendCgf2DF(simHash, simName, cfgObject, simDF) diff --git a/avaframe/in3Utils/fileHandlerUtils.py b/avaframe/in3Utils/fileHandlerUtils.py index 1c1b76b71..6a4c11e85 100644 --- a/avaframe/in3Utils/fileHandlerUtils.py +++ b/avaframe/in3Utils/fileHandlerUtils.py @@ -13,6 +13,7 @@ # Local imports import avaframe.in2Trans.rasterUtils as IOf +import avaframe.in3Utils.cfgUtils as cfgUtils # create local logger # change log level in calling module to DEBUG to see log messages @@ -600,61 +601,41 @@ def makeSimDF(inputDir, avaDir="", simID="simID"): data["files"].append(datafiles[m]) name = datafiles[m].stem data["names"].append(name) - if "_AF_" in name: - nameParts = name.split("_AF_") - fNamePart = nameParts[0] + "_AF" - relNameSim = nameParts[0] - infoParts = nameParts[1].split("_") + # Parse the filename to extract all components + parsed = cfgUtils.parseSimName(name) + + # Populate data dictionary from parsed result + data["releaseArea"].append(parsed["releaseName"]) + data[simID].append(parsed["simHash"]) + data["isDefault"].append(parsed["defID"] if parsed["defID"] != "_C_" else None) + data["frictCalib"].append(parsed["frictIndi"]) + data["simType"].append(parsed["simType"]) + data["modelType"].append(parsed["modelType"]) + data["resType"].append(parsed["resType"] if parsed["resType"] else "") + data["timeStep"].append(parsed["timeStep"] if parsed["timeStep"] else "") + + # Reconstruct simName 
(without resType and timeStep) + # Preserve _AF_ separator if present in original name + if "_AF_" in name: + simNameBase = parsed["releaseName"] + "_AF_" + parsed["simHash"] else: - nameParts = name.split("_") - fNamePart = nameParts[0] - relNameSim = nameParts[0] - infoParts = nameParts[1:] - - data["releaseArea"].append(relNameSim) - data[simID].append(infoParts[0]) - - indiStr = ["_C_", "_D_"] - if any(x in name for x in indiStr): - data["isDefault"].append(infoParts[1]) - # now check for friction calibration info - frictIndi = ["_S_", "_M_", "_L_"] - if any(x in name for x in frictIndi): - data["frictCalib"].append(infoParts[2]) - j = 1 # j indicates whether there's an additional info - else: - data["frictCalib"].append(None) - j = 0 - - data["simType"].append(infoParts[2 + j]) - data["modelType"].append(infoParts[3 + j]) - data["resType"].append(infoParts[4 + j]) - data["simName"].append(fNamePart + "_" + ("_".join(infoParts[0 : (4 + j)]))) - - header = IOf.readRasterHeader(datafiles[m]) - data["cellSize"].append(header["cellsize"]) - if len(infoParts) == (6 + j): - data["timeStep"].append(infoParts[5 + j]) - else: - data["timeStep"].append("") - - # If it still is an 'old' simname - # This can be removed at one point - else: - data["isDefault"].append(None) - data["frictCalib"].append(None) - data["simType"].append(infoParts[1]) - data["modelType"].append(infoParts[2]) - data["resType"].append(infoParts[3]) - data["simName"].append(fNamePart + "_" + ("_".join(infoParts[0:3]))) - - header = IOf.readRasterHeader(datafiles[m]) - data["cellSize"].append(header["cellsize"]) - if len(infoParts) == 5: - data["timeStep"].append(infoParts[4]) - else: - data["timeStep"].append("") + simNameBase = parsed["releaseName"] + "_" + parsed["simHash"] + + parts = [simNameBase] + if parsed["modName"] != "NA": + parts.append(parsed["modName"]) + # Only add defID if it was explicitly in the original filename + if "_C_" in name or "_D_" in name: + parts.append(parsed["defID"]) + 
# Only add frictIndi if it was in the original + if parsed["frictIndi"]: + parts.append(parsed["frictIndi"]) + parts.extend([parsed["simType"], parsed["modelType"]]) + data["simName"].append("_".join(parts)) + + header = IOf.readRasterHeader(datafiles[m]) + data["cellSize"].append(header["cellsize"]) # Set name of avalanche if avaDir is given if avaDir != "": @@ -730,43 +711,44 @@ def makeSimFromResDF(avaDir, comModule, inputDir="", simName=""): for file in datafiles: name = file.stem - if "_AF_" in name: - nameParts = name.split("_AF_") - fNamePart = nameParts[0] + "_AF" - relNameSim = nameParts[0] - infoParts = nameParts[1].split("_") - resType = infoParts[-1] + # Parse the filename to extract components + parsed = cfgUtils.parseSimName(name) + + # Extract simName (without resType/timeStep) and resType + resType = parsed["resType"] if parsed["resType"] else name.split("_")[-1] + # Reconstruct simName without resType and timeStep + # Preserve _AF_ separator if present in original name + if "_AF_" in name: + simNameBase = parsed["releaseName"] + "_AF_" + parsed["simHash"] else: - nameParts = name.split("_") - fNamePart = nameParts[0] - relNameSim = nameParts[0] - infoParts = nameParts[1:] - resType = infoParts[-1] - simName = fNamePart + "_" + ("_".join(infoParts[0:-1])) + simNameBase = parsed["releaseName"] + "_" + parsed["simHash"] + + parts = [simNameBase] + if parsed["modName"] != "NA": + parts.append(parsed["modName"]) + # Only add defID if it was explicitly in the original filename + if "_C_" in name or "_D_" in name: + parts.append(parsed["defID"]) + # Only add frictIndi if it was in the original + if parsed["frictIndi"]: + parts.append(parsed["frictIndi"]) + parts.extend([parsed["simType"], parsed["modelType"]]) + simName = "_".join(parts) + # add line in the DF if the simulation does not exist yet if simName not in dataDF.simName.values: newLine = pd.DataFrame([[simName]], columns=["simName"], index=[simName]) dataDF = pd.concat([dataDF, newLine], 
ignore_index=False) - dataDF.loc[simName, "releaseArea"] = relNameSim - dataDF.loc[simName, "simHash"] = infoParts[0] - # TODO: remove once all simNames are updated to include C or D as simModified - if len(infoParts) == 6: # this is the _C_M_ etc variant - dataDF.loc[simName, "simModified"] = infoParts[1] - dataDF.loc[simName, "simType"] = infoParts[3] - dataDF.loc[simName, "modelType"] = infoParts[4] - elif len(infoParts) == 5: - dataDF.loc[simName, "simModified"] = infoParts[1] - dataDF.loc[simName, "simType"] = infoParts[2] - dataDF.loc[simName, "modelType"] = infoParts[3] - elif len(infoParts) == 4: - dataDF.loc[simName, "simModified"] = "not specified" - dataDF.loc[simName, "simType"] = infoParts[1] - dataDF.loc[simName, "modelType"] = infoParts[2] + dataDF.loc[simName, "releaseArea"] = parsed["releaseName"] + dataDF.loc[simName, "simHash"] = parsed["simHash"] + # Only set simModified if defID was explicitly in filename + if "_C_" in name or "_D_" in name: + dataDF.loc[simName, "simModified"] = parsed["defID"] else: - message = "simName format not recognized for simName: %s" % simName - log.error(message) - raise AssertionError(message) + dataDF.loc[simName, "simModified"] = "not specified" + dataDF.loc[simName, "simType"] = parsed["simType"] + dataDF.loc[simName, "modelType"] = parsed["modelType"] # add info about the cell size header = IOf.readRasterHeader(file) diff --git a/avaframe/log2Report/generateReport.py b/avaframe/log2Report/generateReport.py index 00ea810fd..7ec6e0b6e 100644 --- a/avaframe/log2Report/generateReport.py +++ b/avaframe/log2Report/generateReport.py @@ -11,6 +11,9 @@ from tabulate import tabulate from datetime import datetime +# Local imports +from avaframe.in3Utils import cfgUtils + # create local logger # change log level in calling module to DEBUG to see log messages log = logging.getLogger(__name__) @@ -252,13 +255,8 @@ def checkAndCleanReportDictOnWinIssue872(reportDictList): for k, listItem in enumerate(reportDictList): simName = 
listItem['simName']['name'] - if '_AF_' in simName: - nameParts = simName.split('_AF_') - else: - nameParts = simName.split('_') - - # This is the proper simName - simNameClean = nameParts[0] + # Extract release name using parseSimName + simNameClean = cfgUtils.parseSimName(simName)["releaseName"] if listItem['Simulation Parameters']['Release Area Scenario'] != simNameClean: reportDictList[k]['Simulation Parameters']['Release Area Scenario'] = simNameClean diff --git a/avaframe/tests/test_cfgUtils.py b/avaframe/tests/test_cfgUtils.py index 96920697d..0dfef3056 100644 --- a/avaframe/tests/test_cfgUtils.py +++ b/avaframe/tests/test_cfgUtils.py @@ -768,4 +768,280 @@ def test_setStrnanToNan_case_sensitivity(): # Verify all variations of 'nan' were converted for i in range(5): - assert pd.isna(resultDF.at[i, 'column1']) \ No newline at end of file + assert pd.isna(resultDF.at[i, 'column1']) + + +# Tests for parseSimName function + + +def test_parseSimName_oldFormat_basic(): + """Test parsing old format without modName""" + name = "release1_a1b2c3_C_S_ent_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "NA" + assert result["defID"] == "C" + assert result["frictIndi"] == "S" + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + assert result["resType"] is None + assert result["timeStep"] is None + + +def test_parseSimName_oldFormat_minimal(): + """Test parsing minimal old format (no indicators)""" + name = "release1_a1b2c3_ent_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "NA" + assert result["defID"] == "C" # Default + assert result["frictIndi"] is None + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + assert result["resType"] is None + assert result["timeStep"] is None + + +def 
test_parseSimName_oldFormat_defID_only(): + """Test parsing old format with defID but no frictIndi""" + name = "release1_a1b2c3_D_ent_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "NA" + assert result["defID"] == "D" + assert result["frictIndi"] is None + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + + +def test_parseSimName_newFormat_basic(): + """Test parsing new format with modName""" + name = "release1_a1b2c3_com1_C_S_ent_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "com1" + assert result["defID"] == "C" + assert result["frictIndi"] == "S" + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + assert result["resType"] is None + assert result["timeStep"] is None + + +def test_parseSimName_newFormat_com8(): + """Test parsing new format with com8 module""" + name = "release1_a1b2c3_com8_C_M_ent_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "com8" + assert result["defID"] == "C" + assert result["frictIndi"] == "M" + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + + +def test_parseSimName_newFormat_com9(): + """Test parsing new format with com9 module""" + name = "release1_a1b2c3_com9_D_L_null_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "com9" + assert result["defID"] == "D" + assert result["frictIndi"] == "L" + assert result["simType"] == "null" + assert result["modelType"] == "dfa" + + +def test_parseSimName_newFormat_minimal(): + """Test parsing new format without optional indicators""" + name = "release1_a1b2c3_com1_ent_dfa" + result = 
cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "com1" + assert result["defID"] == "C" # Default + assert result["frictIndi"] is None + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + + +def test_parseSimName_withFile_oldFormat(): + """Test parsing full filename with resType and timeStep (old format)""" + name = "release1_a1b2c3_C_S_ent_dfa_ppr_100.5" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "NA" + assert result["defID"] == "C" + assert result["frictIndi"] == "S" + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + assert result["resType"] == "ppr" + assert result["timeStep"] == "100.5" + + +def test_parseSimName_withFile_newFormat(): + """Test parsing full filename with resType and timeStep (new format)""" + name = "release1_a1b2c3_com1_C_S_ent_dfa_pft_50.2" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "com1" + assert result["defID"] == "C" + assert result["frictIndi"] == "S" + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + assert result["resType"] == "pft" + assert result["timeStep"] == "50.2" + + +def test_parseSimName_withFile_resTypeOnly(): + """Test parsing filename with resType but no timeStep""" + name = "release1_a1b2c3_com1_C_ent_dfa_pfv" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "com1" + assert result["defID"] == "C" + assert result["frictIndi"] is None + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + assert result["resType"] == "pfv" + assert result["timeStep"] is None + + +def test_parseSimName_AF_separator_oldFormat(): + 
"""Test parsing with _AF_ separator (old format)""" + name = "release1_AF_a1b2c3_C_S_ent_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "NA" + assert result["defID"] == "C" + assert result["frictIndi"] == "S" + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + + +def test_parseSimName_AF_separator_newFormat(): + """Test parsing with _AF_ separator (new format)""" + name = "release1_AF_a1b2c3_com1_D_ent_dfa" + result = cfgUtils.parseSimName(name) + + assert result["releaseName"] == "release1" + assert result["simHash"] == "a1b2c3" + assert result["modName"] == "com1" + assert result["defID"] == "D" + assert result["frictIndi"] is None + assert result["simType"] == "ent" + assert result["modelType"] == "dfa" + + +def test_parseSimName_all_defID_options(): + """Test all defID options (_C_ and _D_)""" + name_C = "release1_a1b2c3_com1_C_ent_dfa" + result_C = cfgUtils.parseSimName(name_C) + assert result_C["defID"] == "C" + + name_D = "release1_a1b2c3_com1_D_ent_dfa" + result_D = cfgUtils.parseSimName(name_D) + assert result_D["defID"] == "D" + + +def test_parseSimName_all_frictIndi_options(): + """Test all frictIndi options (_S_, _M_, _L_)""" + name_S = "release1_a1b2c3_com1_C_S_ent_dfa" + result_S = cfgUtils.parseSimName(name_S) + assert result_S["frictIndi"] == "S" + + name_M = "release1_a1b2c3_com1_C_M_ent_dfa" + result_M = cfgUtils.parseSimName(name_M) + assert result_M["frictIndi"] == "M" + + name_L = "release1_a1b2c3_com1_C_L_ent_dfa" + result_L = cfgUtils.parseSimName(name_L) + assert result_L["frictIndi"] == "L" + + +def test_parseSimName_complex_releaseName(): + """Test parsing with complex release names""" + name = "myRelease_Test_AF_a1b2c3_com1_C_ent_dfa" + result = cfgUtils.parseSimName(name) + assert result["releaseName"] == "myRelease_Test" + + name2 = "rel123_a1b2c3_com1_ent_dfa" + result2 = cfgUtils.parseSimName(name2) 
+ assert result2["releaseName"] == "rel123" + + +def test_parseSimName_invalid_noSimHash(): + """Test error handling when simHash is missing""" + name = "release1" + with pytest.raises(ValueError, match="Invalid simName format: no simHash found"): + cfgUtils.parseSimName(name) + + +def test_parseSimName_invalid_missingComponents(): + """Test error handling when required components are missing""" + name = "release1_a1b2c3_ent" # Missing modelType + with pytest.raises(ValueError, match="Invalid simName format: missing required components"): + cfgUtils.parseSimName(name) + + +def test_parseSimName_invalid_onlyHash(): + """Test error handling when only hash exists""" + name = "release1_a1b2c3" + with pytest.raises(ValueError, match="Invalid simName format: missing required components"): + cfgUtils.parseSimName(name) + + +def test_parseSimName_realWorld_examples(): + """Test with realistic simulation names from codebase""" + # Typical old format from existing simulations + name1 = "release_9ae8f6_null_dfa" + result1 = cfgUtils.parseSimName(name1) + assert result1["releaseName"] == "release" + assert result1["simHash"] == "9ae8f6" + assert result1["modName"] == "NA" + assert result1["simType"] == "null" + assert result1["modelType"] == "dfa" + + # Old format with all indicators + name2 = "avalanche_abc123_C_S_ent_dfa_ppr" + result2 = cfgUtils.parseSimName(name2) + assert result2["releaseName"] == "avalanche" + assert result2["simHash"] == "abc123" + assert result2["modName"] == "NA" + assert result2["defID"] == "C" + assert result2["frictIndi"] == "S" + assert result2["simType"] == "ent" + assert result2["modelType"] == "dfa" + assert result2["resType"] == "ppr" + + # New format example + name3 = "testRel_xyz789_com1_D_M_ent_dfa" + result3 = cfgUtils.parseSimName(name3) + assert result3["releaseName"] == "testRel" + assert result3["simHash"] == "xyz789" + assert result3["modName"] == "com1" + assert result3["defID"] == "D" + assert result3["frictIndi"] == "M" + assert 
result3["simType"] == "ent" + assert result3["modelType"] == "dfa" \ No newline at end of file diff --git a/avaframe/tests/test_com1DFA.py b/avaframe/tests/test_com1DFA.py index 33374be59..3bfd76dab 100644 --- a/avaframe/tests/test_com1DFA.py +++ b/avaframe/tests/test_com1DFA.py @@ -2085,7 +2085,7 @@ def test_prepareVarSimDict(tmp_path, caplog): testCfg["GENERAL"]["avalancheDir"] = str(avaDir) simHash = cfgUtils.cfgHash(testCfg) - simName1 = "relAlr_" + simHash + "_C_L_entres_dfa" + simName1 = "relAlr_" + simHash + "_com1_C_L_entres_dfa" testDict = { simName1: { "simHash": simHash, @@ -2193,7 +2193,7 @@ def test_prepareVarSimDict(tmp_path, caplog): testCfg2["INPUT"]["resistanceScenario"] = str(pathlib.Path("RES", "entAlr.shp")) testCfg2["GENERAL"]["avalancheDir"] = str(avaDir) simHash2 = cfgUtils.cfgHash(testCfg2) - simName2 = "relAlr_" + simHash2 + "_C_L_entres_dfa" + simName2 = "relAlr_" + simHash2 + "_com1_C_L_entres_dfa" testDict2 = { simName2: { "simHash": simHash2, From 0cb682f5b70b5570c1ca3cb3870c4248ec6e1b48 Mon Sep 17 00:00:00 2001 From: Felix Oesterle Date: Tue, 23 Dec 2025 09:58:14 +0100 Subject: [PATCH 2/5] docs(cfgUtils, com1DFA): document simulation naming conventions and module name addition - Updated `cfgUtils` docstring with details about new and old simulation name formats, including components and structure. - Revised `com1DFA` documentation to reflect the addition of the short module name (`com1`) to simulation names - Noted backward compatibility with older naming conventions. 
refactor(fileHandlerUtils): replace `parsed` with `simNameParts` - Renamed variable `parsed` to `simNameParts` refactor(dfa2Aimec): simplify mass balance file sorting with lambda function --- avaframe/ana3AIMEC/dfa2Aimec.py | 14 +++--- avaframe/in3Utils/cfgUtils.py | 34 +++++++++++++- avaframe/in3Utils/fileHandlerUtils.py | 64 +++++++++++++-------------- docs/moduleCom1DFA.rst | 7 ++- 4 files changed, 77 insertions(+), 42 deletions(-) diff --git a/avaframe/ana3AIMEC/dfa2Aimec.py b/avaframe/ana3AIMEC/dfa2Aimec.py index 473fcb4f5..115ac43f3 100644 --- a/avaframe/ana3AIMEC/dfa2Aimec.py +++ b/avaframe/ana3AIMEC/dfa2Aimec.py @@ -54,14 +54,12 @@ def getMBInfo(avaDir, inputsDF, comMod, simName=''): log.error(message) raise FileNotFoundError(message) # Sort mass balance files by simName components using parseSimName - def sortKey(filepath): - """Extract sort key from mass balance filename""" - # Remove 'mass_' prefix from stem to get simName - simName = filepath.stem[5:] # 'mass_' is 5 characters - parsed = cfgUtils.parseSimName(simName) - return (parsed["simHash"], parsed["modName"], parsed["simType"]) - - mbNames = sorted(set(mbFiles), key=sortKey) + mbNames = sorted(set(mbFiles), key=lambda f: ( + # Extract simName by removing 'mass_' prefix (5 characters) and parse components + cfgUtils.parseSimName(f.stem[5:])["simHash"], + cfgUtils.parseSimName(f.stem[5:])["modName"], + cfgUtils.parseSimName(f.stem[5:])["simType"] + )) for mFile in mbNames: # Extract simName from filename (remove 'mass_' prefix) diff --git a/avaframe/in3Utils/cfgUtils.py b/avaframe/in3Utils/cfgUtils.py index 667f89a79..332c71604 100644 --- a/avaframe/in3Utils/cfgUtils.py +++ b/avaframe/in3Utils/cfgUtils.py @@ -1,5 +1,37 @@ """ -Utilities for handling configuration files +Utilities for handling configuration files and simulation naming + +This module provides functions for: +- Configuration file reading, writing, and merging +- Configuration comparison and hashing +- Simulation name parsing and 
construction + +Simulation Name Format +---------------------- +AvaFrame uses structured simulation names with two supported formats: + +**New format (with module name):** + relName_simHash_modName_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] + +**Old format (without module name):** + relName_simHash_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] + +Where: + - relName: Release area scenario name (required) + - simHash: Configuration hash (required, 10 characters) + - modName: Short module name - "com1", "com2", etc. (new format only) + - defID: Default indicator - "C" or "D" (optional, defaults to "C") + - frictIndi: Friction calibration - "S", "M", or "L" (optional) + - simType: Simulation type - "null", "ent", "res", "entres" (required) + - modelType: Model type - "dfa", etc. (required) + - resType: Result type - "ppr", "pft", "pfv", etc. (filename only) + - timeStep: Time step value (filename only) + +The module name in the new format uses SHORT form only (e.g., "com1" not "com1DFA"). +This was implemented in 2025-12 to support better organization and filtering of simulations. + +Use `parseSimName()` to extract components from any simulation name. +Backward compatibility is maintained - old format names are still supported. 
""" diff --git a/avaframe/in3Utils/fileHandlerUtils.py b/avaframe/in3Utils/fileHandlerUtils.py index 6a4c11e85..91f42ea75 100644 --- a/avaframe/in3Utils/fileHandlerUtils.py +++ b/avaframe/in3Utils/fileHandlerUtils.py @@ -603,35 +603,35 @@ def makeSimDF(inputDir, avaDir="", simID="simID"): data["names"].append(name) # Parse the filename to extract all components - parsed = cfgUtils.parseSimName(name) + simNameParts = cfgUtils.parseSimName(name) # Populate data dictionary from parsed result - data["releaseArea"].append(parsed["releaseName"]) - data[simID].append(parsed["simHash"]) - data["isDefault"].append(parsed["defID"] if parsed["defID"] != "_C_" else None) - data["frictCalib"].append(parsed["frictIndi"]) - data["simType"].append(parsed["simType"]) - data["modelType"].append(parsed["modelType"]) - data["resType"].append(parsed["resType"] if parsed["resType"] else "") - data["timeStep"].append(parsed["timeStep"] if parsed["timeStep"] else "") + data["releaseArea"].append(simNameParts["releaseName"]) + data[simID].append(simNameParts["simHash"]) + data["isDefault"].append(simNameParts["defID"] if simNameParts["defID"] != "_C_" else None) + data["frictCalib"].append(simNameParts["frictIndi"]) + data["simType"].append(simNameParts["simType"]) + data["modelType"].append(simNameParts["modelType"]) + data["resType"].append(simNameParts["resType"] if simNameParts["resType"] else "") + data["timeStep"].append(simNameParts["timeStep"] if simNameParts["timeStep"] else "") # Reconstruct simName (without resType and timeStep) # Preserve _AF_ separator if present in original name if "_AF_" in name: - simNameBase = parsed["releaseName"] + "_AF_" + parsed["simHash"] + simNameBase = simNameParts["releaseName"] + "_AF_" + simNameParts["simHash"] else: - simNameBase = parsed["releaseName"] + "_" + parsed["simHash"] + simNameBase = simNameParts["releaseName"] + "_" + simNameParts["simHash"] parts = [simNameBase] - if parsed["modName"] != "NA": - parts.append(parsed["modName"]) + if 
simNameParts["modName"] != "NA": + parts.append(simNameParts["modName"]) # Only add defID if it was explicitly in the original filename if "_C_" in name or "_D_" in name: - parts.append(parsed["defID"]) + parts.append(simNameParts["defID"]) # Only add frictIndi if it was in the original - if parsed["frictIndi"]: - parts.append(parsed["frictIndi"]) - parts.extend([parsed["simType"], parsed["modelType"]]) + if simNameParts["frictIndi"]: + parts.append(simNameParts["frictIndi"]) + parts.extend([simNameParts["simType"], simNameParts["modelType"]]) data["simName"].append("_".join(parts)) header = IOf.readRasterHeader(datafiles[m]) @@ -712,43 +712,43 @@ def makeSimFromResDF(avaDir, comModule, inputDir="", simName=""): for file in datafiles: name = file.stem # Parse the filename to extract components - parsed = cfgUtils.parseSimName(name) + simNameParts = cfgUtils.parseSimName(name) # Extract simName (without resType/timeStep) and resType - resType = parsed["resType"] if parsed["resType"] else name.split("_")[-1] + resType = simNameParts["resType"] if simNameParts["resType"] else name.split("_")[-1] # Reconstruct simName without resType and timeStep # Preserve _AF_ separator if present in original name if "_AF_" in name: - simNameBase = parsed["releaseName"] + "_AF_" + parsed["simHash"] + simNameBase = simNameParts["releaseName"] + "_AF_" + simNameParts["simHash"] else: - simNameBase = parsed["releaseName"] + "_" + parsed["simHash"] + simNameBase = simNameParts["releaseName"] + "_" + simNameParts["simHash"] parts = [simNameBase] - if parsed["modName"] != "NA": - parts.append(parsed["modName"]) + if simNameParts["modName"] != "NA": + parts.append(simNameParts["modName"]) # Only add defID if it was explicitly in the original filename if "_C_" in name or "_D_" in name: - parts.append(parsed["defID"]) + parts.append(simNameParts["defID"]) # Only add frictIndi if it was in the original - if parsed["frictIndi"]: - parts.append(parsed["frictIndi"]) - 
parts.extend([parsed["simType"], parsed["modelType"]]) + if simNameParts["frictIndi"]: + parts.append(simNameParts["frictIndi"]) + parts.extend([simNameParts["simType"], simNameParts["modelType"]]) simName = "_".join(parts) # add line in the DF if the simulation does not exist yet if simName not in dataDF.simName.values: newLine = pd.DataFrame([[simName]], columns=["simName"], index=[simName]) dataDF = pd.concat([dataDF, newLine], ignore_index=False) - dataDF.loc[simName, "releaseArea"] = parsed["releaseName"] - dataDF.loc[simName, "simHash"] = parsed["simHash"] + dataDF.loc[simName, "releaseArea"] = simNameParts["releaseName"] + dataDF.loc[simName, "simHash"] = simNameParts["simHash"] # Only set simModified if defID was explicitly in filename if "_C_" in name or "_D_" in name: - dataDF.loc[simName, "simModified"] = parsed["defID"] + dataDF.loc[simName, "simModified"] = simNameParts["defID"] else: dataDF.loc[simName, "simModified"] = "not specified" - dataDF.loc[simName, "simType"] = parsed["simType"] - dataDF.loc[simName, "modelType"] = parsed["modelType"] + dataDF.loc[simName, "simType"] = simNameParts["simType"] + dataDF.loc[simName, "modelType"] = simNameParts["modelType"] # add info about the cell size header = IOf.readRasterHeader(file) diff --git a/docs/moduleCom1DFA.rst b/docs/moduleCom1DFA.rst index 594d48db2..7dc6e9f5f 100644 --- a/docs/moduleCom1DFA.rst +++ b/docs/moduleCom1DFA.rst @@ -271,11 +271,13 @@ Using the default configuration, the simulation results are saved to: *Outputs/c to be resumed without re-running simulations that have already been performed. For this, just restart the run. The naming of the output files has the following structure, shown with the example of -*relAlr_ff5f9b78c6_C_L_null_dfa_ppr*: +*relAlr_ff5f9b78c6_com1_C_L_null_dfa_ppr*: * *relAlr* - release area name, usually the name of the shapefile * *ff5f9b78c6* - individual hash of the configuration file used for the simulation. 
All files related to this simulation have the same hash in their name. This allows to identify which files belong to which simulation. +* *com1* - short module name (com1 for com1DFA, com2 for com2AB, etc.). This component was added in 2025-12 + to support better filtering and organization of simulations from different computational modules. * *C* - indicator of the setup used: D for default setup, C for custom setup, i.e. something was changed in the configuration file * *L* - indicator of the size category used for the friction model: L for large, M for medium, S for small @@ -283,6 +285,9 @@ The naming of the output files has the following structure, shown with the examp * *dfa* - indicator of the simulation type: dfa for dense flow avalanche * *ppr* - indicator of the result type: ppr for peak pressure, pfv for peak flow velocity, pft for peak flow thickness, etc +**Note:** Older simulations may not have the module name component (*com1*). The system automatically detects +and handles both formats for backward compatibility. + Optional outputs From 4bde17d07d43c9a3f7929fbd79b2b3eef0a0f9c9 Mon Sep 17 00:00:00 2001 From: Felix Oesterle Date: Tue, 23 Dec 2025 18:05:14 +0100 Subject: [PATCH 3/5] fix(cfgUtils): correct simulation name format in documentation - Adjusted simulation name structure in docstrings and parsing logic - Updated examples for old and new formats to remove redundant brackets around `defID`. test(ana1Tests): add unit tests for `rotationTest` initialization - Added tests for `initializeRotationTestReport`, verifying report structure and columns. - Included test cases for mass-related columns when `flagMass` is enabled or disabled. test(DFAPathGeneration): add unit tests for split point and mass-averaged path generation - Added `test_getSplitPoint_noPointFound` to validate behavior when no split point meets criteria. - Added `test_getMassAvgPathFromFields` to test mass-averaged path computation, including velocity and energy fields. 
- Added `test_getMassAvgPathFromFields_noVelocity` to cover cases without velocity data. test(DFAPathGeneration): replace `== True` with `is True` in test assertion --- avaframe/in3Utils/cfgUtils.py | 9 +- avaframe/tests/test_DFAPathGeneration.py | 136 +++++++++++++++++++++++ avaframe/tests/test_ana1Tests.py | 63 +++++++++++ 3 files changed, 204 insertions(+), 4 deletions(-) diff --git a/avaframe/in3Utils/cfgUtils.py b/avaframe/in3Utils/cfgUtils.py index 332c71604..5d963de27 100644 --- a/avaframe/in3Utils/cfgUtils.py +++ b/avaframe/in3Utils/cfgUtils.py @@ -11,7 +11,7 @@ AvaFrame uses structured simulation names with two supported formats: **New format (with module name):** - relName_simHash_modName_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] + relName_simHash_modName_defID_[frictIndi]_simType_modelType[_resType][_timeStep] **Old format (without module name):** relName_simHash_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] @@ -20,7 +20,7 @@ - relName: Release area scenario name (required) - simHash: Configuration hash (required, 10 characters) - modName: Short module name - "com1", "com2", etc. (new format only) - - defID: Default indicator - "C" or "D" (optional, defaults to "C") + - defID: Default indicator - "C" or "D" (defaults to "C") - frictIndi: Friction calibration - "S", "M", or "L" (optional) - simType: Simulation type - "null", "ent", "res", "entres" (required) - modelType: Model type - "dfa", etc. (required) @@ -444,8 +444,9 @@ def parseSimName(name): """Parse simulation name handling both old and new formats. 
Auto-detects: - - Old format: relName_simHash_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] - - New format: relName_simHash_modName_[defID]_[frictIndi]_simType_modelType[_resType][_timeStep] + - Old format: relName_simHash_defID_[frictIndi]_simType_modelType[_resType][_timeStep] + - New format: relName_simHash_modName_defID_[frictIndi]_simType_modelType[_resType][_timeStep] + [ ] denotes optional items Parameters ---------- diff --git a/avaframe/tests/test_DFAPathGeneration.py b/avaframe/tests/test_DFAPathGeneration.py index 720eba791..c47b2452a 100644 --- a/avaframe/tests/test_DFAPathGeneration.py +++ b/avaframe/tests/test_DFAPathGeneration.py @@ -203,3 +203,139 @@ def test_getParabolicFit(): # print(splitPoint) # print(angle) assert splitPoint['s'] == 50 + + +def test_getSplitPoint_noPointFound(): + """Test getSplitPoint when no point meets slope criteria - should return top point""" + cfg = configparser.ConfigParser() + cfg['PATH'] = {'slopeSplitPoint': '5', 'dsMin': '5'} # Very low slope requirement + + # Create profile with steep slope everywhere + avaProfile = { + 'x': np.array([0, 10, 20, 30]), + 'y': np.array([0, 10, 20, 30]), + 'z': np.array([50, 30, 10, 0]), # Steep slope throughout + 's': np.array([0, 14.14, 28.28, 42.43]), + 'indStartMassAverage': 0, + 'indEndMassAverage': 3 + } + # Parabolic fit with steep slope at bottom + parabolicFit = {'a': 0.01, 'b': -2, 'c': 50} + + splitPoint = DFAPathGeneration.getSplitPoint(cfg['PATH'], avaProfile, parabolicFit) + + # Should return top point when no split point found + assert splitPoint.get('isTopSplitPoint', False) is True + assert splitPoint['x'] == avaProfile['x'][0] + assert splitPoint['y'] == avaProfile['y'][0] + assert splitPoint['z'] == avaProfile['z'][0] + + +def test_getMassAvgPathFromFields(): + """Test computing mass-averaged path from field data""" + # Create simple 5x5 field with flow in the middle + fieldsList = [{ + 'FT': np.array([[0, 0, 0, 0, 0], + [0, 1, 2, 1, 0], + [0, 2, 3, 2, 
0], + [0, 1, 2, 1, 0], + [0, 0, 0, 0, 0]]), # Flow thickness + 'FM': np.array([[0, 0, 0, 0, 0], + [0, 5, 10, 5, 0], + [0, 10, 15, 10, 0], + [0, 5, 10, 5, 0], + [0, 0, 0, 0, 0]]), # Flow mass + 'FV': np.array([[0, 0, 0, 0, 0], + [0, 2, 3, 2, 0], + [0, 3, 4, 3, 0], + [0, 2, 3, 2, 0], + [0, 0, 0, 0, 0]]) # Flow velocity + }] + + fieldHeader = { + 'ncols': 5, + 'nrows': 5, + 'xllcenter': 100, + 'yllcenter': 200, + 'cellsize': 5 + } + + dem = { + 'rasterData': np.array([[50, 50, 50, 50, 50], + [40, 40, 40, 40, 40], + [30, 30, 30, 30, 30], + [20, 20, 20, 20, 20], + [10, 10, 10, 10, 10]]) + } + + result = DFAPathGeneration.getMassAvgPathFromFields(fieldsList, fieldHeader, dem) + + # Verify structure + assert 'x' in result + assert 'y' in result + assert 'z' in result + assert 's' in result + assert 'xstd' in result + assert 'ystd' in result + assert 'zstd' in result + + # Verify velocity info is included + assert 'u2' in result + assert 'ekin' in result + assert 'u2std' in result + assert 'ekinstd' in result + assert 'totEKin' in result + + # Should have one time step + assert len(result['x']) == 1 + assert len(result['y']) == 1 + assert len(result['z']) == 1 + + # Coordinates should be relative to origin (xllcenter and yllcenter subtracted) + # Mass-weighted average should be close to center + assert result['x'][0] > 0 # Relative to xllcenter + assert result['y'][0] > 0 # Relative to yllcenter + + +def test_getMassAvgPathFromFields_noVelocity(): + """Test getMassAvgPathFromFields without velocity data""" + # Create simple field without velocity info + fieldsList = [{ + 'FT': np.array([[0, 1, 0], + [0, 2, 0], + [0, 0, 0]]), + 'FM': np.array([[0, 5, 0], + [0, 10, 0], + [0, 0, 0]]) + # No FV field + }] + + fieldHeader = { + 'ncols': 3, + 'nrows': 3, + 'xllcenter': 0, + 'yllcenter': 0, + 'cellsize': 10 + } + + dem = { + 'rasterData': np.array([[30, 30, 30], + [20, 20, 20], + [10, 10, 10]]) + } + + result = DFAPathGeneration.getMassAvgPathFromFields(fieldsList, fieldHeader, 
dem) + + # Verify basic structure + assert 'x' in result + assert 'y' in result + assert 'z' in result + assert 's' in result + + # Velocity info should NOT be present + assert 'u2' not in result + assert 'ekin' not in result + assert 'totEKin' not in result + + # Should have one time step + assert len(result['x']) == 1 diff --git a/avaframe/tests/test_ana1Tests.py b/avaframe/tests/test_ana1Tests.py index 17dd3b75b..03f4c24ab 100644 --- a/avaframe/tests/test_ana1Tests.py +++ b/avaframe/tests/test_ana1Tests.py @@ -8,6 +8,7 @@ import avaframe.ana1Tests.analysisTools as anaTools import avaframe.ana1Tests.energyLineTest as energyLineTest +import avaframe.ana1Tests.rotationTest as rotationTest import avaframe.com1DFA.com1DFA as com1DFA import avaframe.in3Utils.fileHandlerUtils as fU from avaframe.in3Utils import cfgHandling @@ -229,3 +230,65 @@ def test_mainEnergyLineTest(tmp_path): assert abs(resultEnergyTest["runOutZError"]) < 0.02 assert abs(resultEnergyTest["rmseVelocityElevation"]) < 0.02 assert abs(resultEnergyTest["runOutAngleError"]) < 0.0003 + + +# ############# Test rotation test ########## +# ############################################ +def test_initializeRotationTestReport(tmp_path): + """Test report initialization creates correct structure""" + avalancheDir = tmp_path / "testAva" + resTypeList = ["ppr", "pfv", "pft"] + comModule = "com1DFA" + refSimName = "testSim" + flagMass = False + + report = rotationTest.initializeRotationTestReport( + avalancheDir, resTypeList, comModule, refSimName, flagMass + ) + + # Check report structure + assert "headerLine" in report + assert report["headerLine"]["type"] == "title" + assert report["headerLine"]["title"] == "Rotation Test for DFA Simulation" + + assert "avaName" in report + assert report["avaName"]["type"] == "avaName" + + assert "time" in report + assert report["time"]["type"] == "time" + + assert "Simulation Parameters" in report + assert report["Simulation Parameters"]["type"] == "list" + assert 
report["Simulation Parameters"]["DFA module"] == comModule + assert report["Simulation Parameters"]["Reference simulation"] == refSimName + + # Check column names for result tables + assert "Rotation test input simulations" in report + assert "simName" in report["Rotation test input simulations"]["column names"] + + assert "Rotation test Energy line result table" in report + assert "sDiff" in report["Rotation test Energy line result table"]["column names"] + + assert "Rotation test AIMEC result table" in report + assert "sRunout" in report["Rotation test AIMEC result table"]["column names"] + + # Mass columns should not be present when flagMass is False + assert "relMass" not in report["Rotation test AIMEC result table"]["column names"] + + +def test_initializeRotationTestReport_withMass(tmp_path): + """Test report initialization with mass analysis enabled""" + avalancheDir = tmp_path / "testAva" + resTypeList = ["ppr", "pfv", "pft"] + comModule = "com1DFA" + refSimName = "testSim" + flagMass = True + + report = rotationTest.initializeRotationTestReport( + avalancheDir, resTypeList, comModule, refSimName, flagMass + ) + + # Mass columns should be present when flagMass is True + assert "relMass" in report["Rotation test AIMEC result table"]["column names"] + assert "finalMass" in report["Rotation test AIMEC result table"]["column names"] + assert "entMass" in report["Rotation test AIMEC result table"]["column names"] From e559a50e56d5d157dd95983f4976f6eb785a41c1 Mon Sep 17 00:00:00 2001 From: Felix Oesterle <6945681+fso42@users.noreply.github.com> Date: Wed, 7 Jan 2026 14:28:37 +0100 Subject: [PATCH 4/5] feat(com1DFA): add `getModuleNames` utility for dynamic module name extraction - Introduced `getModuleNames` function to dynamically extract full and shortened module names based on the call stack. 
chore(pyproject): add `build`, `clean`, and `rebuild` tasks - Added custom `build`, `clean`, and `rebuild` tasks feat(out1Peak, com1DFA, ana4Stats): extend module support and refactor for readability - Added support for `com5SnowSlide` and `com6RockAvalanche` modules across several workflows, including DEM handling, filtering simulations, and post-processing tasks. add(tests) add pytests for getModuleNames fix(com1DFA): handle `com7Regional` as a special case for module name extraction - Added a condition to treat `com7Regional` as `com1DFA` during module name extraction --- avaframe/ana4Stats/probAna.py | 149 ++++-- avaframe/com1DFA/com1DFA.py | 519 ++++++++++++++++----- avaframe/com1DFA/com1DFATools.py | 77 ++- avaframe/out1Peak/outPlotAllPeak.py | 28 +- avaframe/runScripts/runPlotAreaRefDiffs.py | 88 ++-- avaframe/tests/test_com1DFA.py | 427 ++++++++++++++--- avaframe/tests/test_scarp.py | 143 ++++-- pyproject.toml | 5 + 8 files changed, 1120 insertions(+), 316 deletions(-) diff --git a/avaframe/ana4Stats/probAna.py b/avaframe/ana4Stats/probAna.py index 176435977..27ac3a431 100644 --- a/avaframe/ana4Stats/probAna.py +++ b/avaframe/ana4Stats/probAna.py @@ -58,7 +58,9 @@ def createComModConfig(cfgProb, avaDir, modName): variationsDict = makeDictFromVars(cfgProb["PROBRUN"]) if cfgProb["PROBRUN"].getint("samplingStrategy") == 2: - log.info("Probability run performed by varying one parameter at a time - local approach.") + log.info( + "Probability run performed by varying one parameter at a time - local approach." 
+ ) cfgFiles = cfgFilesLocalApproach(variationsDict, cfgProb, modName, outDir) else: log.info("Probability run perfromed drawing parameter set from full sample.") @@ -103,7 +105,9 @@ def cfgFilesGlobalApproach(avaDir, cfgProb, modName, outDir): if len(paramValuesD["varParNamesInitial"]) == 2: sP.plotThSampleFromVals(paramValuesD, plotDir) else: - log.debug("More or less than two parameters have been varied - no plot of sample available") + log.debug( + "More or less than two parameters have been varied - no plot of sample available" + ) # write cfg files one for each parameter set drawn from full sample cfgFiles = createCfgFiles(paramValuesDList, modName, cfgProb, cfgPath=outDir) @@ -191,9 +195,13 @@ def updateCfgRange(cfg, cfgProb, varName, varDict): fileOverride="", modInfo=False, toPrint=False, - onlyDefault=cfgProb["in1Data_computeFromDistribution_override"].getboolean("defaultConfig"), + onlyDefault=cfgProb["in1Data_computeFromDistribution_override"].getboolean( + "defaultConfig" + ), + ) + cfgDist, cfgProb = cfgHandling.applyCfgOverride( + cfgDist, cfgProb, cP, addModValues=False ) - cfgDist, cfgProb = cfgHandling.applyCfgOverride(cfgDist, cfgProb, cP, addModValues=False) # set variation in configuration if varName in ["relTh", "entTh", "secondaryRelTh"]: @@ -250,7 +258,9 @@ def updateCfgRange(cfg, cfgProb, varName, varDict): cfgDist = { "sampleSize": valSteps, "mean": valVal, - "buildType": cfgProb["in1Data_computeFromDistribution_override"]["buildType"], + "buildType": cfgProb["in1Data_computeFromDistribution_override"][ + "buildType" + ], "buildValue": valVariation, "minMaxInterval": cfgDist["GENERAL"]["minMaxInterval"], "support": cfgDist["GENERAL"]["support"], @@ -364,7 +374,10 @@ def checkIfParameterInConfig(cfg, varPar): # Check for duplicates if len(exactKeyMatch) != 1: - message = "Parameter '%s' does not uniquely match a single configuration parameter." 
% varPar + message = ( + "Parameter '%s' does not uniquely match a single configuration parameter." + % varPar + ) log.error(message) raise AssertionError(message) @@ -413,17 +426,25 @@ def checkForNumberOfReferenceValues(cfgGen, varPar): thRCiV = varPar + "RangeFromCiVariation" # check if variation is set - if cfgGen[thPV] != "" or cfgGen[thRV] != "" or cfgGen[thDV] != "" or cfgGen[thRCiV] != "": - message = "Only one reference value is allowed for %s: but %s %s, %s %s, %s %s, %s %s is given" % ( - varPar, - thPV, - cfgGen[thPV], - thRV, - cfgGen[thRV], - thDV, - cfgGen[thDV], - thRCiV, - cfgGen[thRCiV], + if ( + cfgGen[thPV] != "" + or cfgGen[thRV] != "" + or cfgGen[thDV] != "" + or cfgGen[thRCiV] != "" + ): + message = ( + "Only one reference value is allowed for %s: but %s %s, %s %s, %s %s, %s %s is given" + % ( + varPar, + thPV, + cfgGen[thPV], + thRV, + cfgGen[thRV], + thDV, + cfgGen[thDV], + thRCiV, + cfgGen[thRCiV], + ) ) log.error(message) raise AssertionError(message) @@ -431,7 +452,9 @@ def checkForNumberOfReferenceValues(cfgGen, varPar): return True -def probAnalysis(avaDir, cfg, modName, parametersDict="", inputDir="", probConf="", simDFActual=""): +def probAnalysis( + avaDir, cfg, modName, parametersDict="", inputDir="", probConf="", simDFActual="" +): """Compute probability map of a given set of simulation result exceeding a particular threshold and save to outDir Parameters @@ -461,8 +484,10 @@ def probAnalysis(avaDir, cfg, modName, parametersDict="", inputDir="", probConf= fU.makeADir(outDir) # fetch all result files and filter simulations according to parametersDict - if modName.lower() == "com1dfa": - simNameList = cfgHandling.filterSims(avaDir, parametersDict, specDir=inputDir, simDF=simDFActual) + if modName.lower() in ["com1dfa", "com5snowslide", "com6rockavalanche"]: + simNameList = cfgHandling.filterSims( + avaDir, parametersDict, specDir=inputDir, simDF=simDFActual + ) filtering = True else: simNameList = [] @@ -514,7 +539,10 @@ def 
probAnalysis(avaDir, cfg, modName, parametersDict="", inputDir="", probConf= # check if extent is the same as first loaded dataset # if not - remesh and print warning - if fileData["header"]["nrows"] != nRows or fileData["header"]["ncols"] != nCols: + if ( + fileData["header"]["nrows"] != nRows + or fileData["header"]["ncols"] != nCols + ): log.warning( "datasets used to create probMap do not match in extent - remeshing: %s to cellSize %s" % (fileName, header["cellsize"]) @@ -534,7 +562,10 @@ def probAnalysis(avaDir, cfg, modName, parametersDict="", inputDir="", probConf= ) contourDict[fileName.stem] = contourDictXY - log.info("File Name: %s , simulation parameter %s " % (fileName, cfg["GENERAL"]["peakVar"])) + log.info( + "File Name: %s , simulation parameter %s " + % (fileName, cfg["GENERAL"]["peakVar"]) + ) # Check if peak values exceed desired threshold dataLim[data > float(cfg["GENERAL"]["peakLim"])] = 1.0 @@ -546,7 +577,8 @@ def probAnalysis(avaDir, cfg, modName, parametersDict="", inputDir="", probConf= unit = pU.cfgPlotUtils["unit%s" % cfg["GENERAL"]["peakVar"]] log.info( "probability analysis performed for peak parameter: %s and a peak value " - "threshold of: %s %s" % (cfg["GENERAL"]["peakVar"], cfg["GENERAL"]["peakLim"], unit) + "threshold of: %s %s" + % (cfg["GENERAL"]["peakVar"], cfg["GENERAL"]["peakLim"], unit) ) log.info("%s peak fields added to analysis" % count) @@ -597,7 +629,12 @@ def makeDictFromVars(cfg): log.error(message) raise AssertionError(message) - if (len(varParList) == len(varValues) == len(cfg[lengthsPar].split("|")) == len(varTypes)) is False: + if ( + len(varParList) + == len(varValues) + == len(cfg[lengthsPar].split("|")) + == len(varTypes) + ) is False: message = ( "For every parameter in varParList a variationValue, %s and variationType needs to be provided" % lengthsPar @@ -688,7 +725,12 @@ def createSampleFromConfig(avaDir, cfgProb, comMod): _, thReadFromShp = checkParameterSettings(cfgStart, varParList) modNameString = 
str(pathlib.Path(comMod.__file__).stem) - if modNameString.lower() in ["com1dfa", "com8motpsa"]: + if modNameString.lower() in [ + "com1dfa", + "com5snowslide", + "com6rockavalanche", + "com8motpsa", + ]: # check if thickness parameters are actually read from shp file _, thReadFromShp = checkParameterSettings(cfgStart, varParList) else: @@ -720,7 +762,9 @@ def createSampleFromConfig(avaDir, cfgProb, comMod): return paramValuesDList -def createSampleWithVariationStandardParameters(cfgProb, cfgStart, varParList, valVariationValue, varType): +def createSampleWithVariationStandardParameters( + cfgProb, cfgStart, varParList, valVariationValue, varType +): """create a sample for a parameter variation using latin hypercube sampling Parameters @@ -849,7 +893,9 @@ def createSampleWithVariationForThParameters( ) # add to list all the parameter names fullListOfParameters = fullListOfParameters + thFeatureNames - parentParameterId = parentParameterId + [varParList.index(varPar)] * len(thFeatureNames) + parentParameterId = parentParameterId + [ + varParList.index(varPar) + ] * len(thFeatureNames) thValues = np.append(thValues, thV) ciValues = np.append(ciValues, ciV) else: @@ -873,7 +919,9 @@ def createSampleWithVariationForThParameters( ) fullValVar = np.asarray( [ - float(valVariationValue[i]) if valVariationValue[i] != "ci95" else np.nan + float(valVariationValue[i]) + if valVariationValue[i] != "ci95" + else np.nan for i in parentParameterId ] ) @@ -882,12 +930,16 @@ def createSampleWithVariationForThParameters( upperBounds = np.asarray([None] * len(fullListOfParameters)) # set lower and upper bounds depending on varType (percent, range, rangefromci) - lowerBounds[fullVarType == "percent"] = varValList[fullVarType == "percent"] - varValList[ + lowerBounds[fullVarType == "percent"] = varValList[ fullVarType == "percent" - ] * (fullValVar[fullVarType == "percent"] / 100.0) - upperBounds[fullVarType == "percent"] = varValList[fullVarType == "percent"] + varValList[ + ] - 
varValList[fullVarType == "percent"] * ( + fullValVar[fullVarType == "percent"] / 100.0 + ) + upperBounds[fullVarType == "percent"] = varValList[ fullVarType == "percent" - ] * (fullValVar[fullVarType == "percent"] / 100.0) + ] + varValList[fullVarType == "percent"] * ( + fullValVar[fullVarType == "percent"] / 100.0 + ) lowerBounds[fullVarType == "range"] = ( varValList[fullVarType == "range"] - fullValVar[fullVarType == "range"] @@ -897,10 +949,12 @@ def createSampleWithVariationForThParameters( ) lowerBounds[fullVarType == "rangefromci"] = ( - varValList[fullVarType == "rangefromci"] - ciValues[fullVarType == "rangefromci"] + varValList[fullVarType == "rangefromci"] + - ciValues[fullVarType == "rangefromci"] ) upperBounds[fullVarType == "rangefromci"] = ( - varValList[fullVarType == "rangefromci"] + ciValues[fullVarType == "rangefromci"] + varValList[fullVarType == "rangefromci"] + + ciValues[fullVarType == "rangefromci"] ) # create a sample of parameter values using scipy latin hypercube or morris sampling @@ -917,7 +971,9 @@ def createSampleWithVariationForThParameters( ) ) else: - fullSample = np.zeros((int(cfgProb["PROBRUN"]["nSample"]), len(fullListOfParameters))) + fullSample = np.zeros( + (int(cfgProb["PROBRUN"]["nSample"]), len(fullListOfParameters)) + ) for idx, varPar in enumerate(fullListOfParameters): lB = [0] * len(varParList) @@ -928,7 +984,9 @@ def createSampleWithVariationForThParameters( fullSample[:, idx] = parSample[:, parentParameterId[idx]] # create dictionary with all the info - thFromIni = cfgUtils.convertToCfgList(list(set(varParList).symmetric_difference(set(staParameter)))) + thFromIni = cfgUtils.convertToCfgList( + list(set(varParList).symmetric_difference(set(staParameter))) + ) paramValuesD = { "names": fullListOfParameters, "values": fullSample, @@ -985,7 +1043,10 @@ def createSample(cfgProb, varParList): sample = sampler.random(n=int(cfgProb["PROBRUN"]["nSample"])) log.info("Parameter sample created using latin hypercube sampling") 
else: - message = "Sampling method: %s not a valid option" % cfgProb["PROBRUN"]["sampleMethod"] + message = ( + "Sampling method: %s not a valid option" + % cfgProb["PROBRUN"]["sampleMethod"] + ) log.error(message) raise AssertionError(message) @@ -1079,11 +1140,13 @@ def createCfgFiles(paramValuesDList, comMod, cfg, cfgPath=""): cfgStart[section][par] = str(pVal[index]) else: cfgStart["GENERAL"][par] = str(pVal[index]) - if modName.lower() == "com1dfa": + if modName.lower() in ["com1dfa", "com5snowslide", "com6rockavalanche"]: cfgStart["VISUALISATION"]["scenario"] = str(count1) cfgStart["INPUT"]["thFromIni"] = paramValuesD["thFromIni"] if "releaseScenario" in paramValuesD.keys(): - cfgStart["INPUT"]["releaseScenario"] = paramValuesD["releaseScenario"] + cfgStart["INPUT"]["releaseScenario"] = paramValuesD[ + "releaseScenario" + ] cfgF = pathlib.Path(cfgPath, ("%d_%sCfg.ini" % (countS, modName))) with open(cfgF, "w") as configfile: cfgStart.write(configfile) @@ -1119,11 +1182,15 @@ def fetchStartCfg(comMod, cfgProb): comMod, fileOverride="", toPrint=False, - onlyDefault=cfgProb["%s_%s_override" % (modP, modName)].getboolean("defaultConfig"), + onlyDefault=cfgProb["%s_%s_override" % (modP, modName)].getboolean( + "defaultConfig" + ), ) # override with parameters set in in the cfgProb comMod_override section - cfgStart, cfgProb = cfgHandling.applyCfgOverride(cfgStart, cfgProb, comMod, addModValues=False) + cfgStart, cfgProb = cfgHandling.applyCfgOverride( + cfgStart, cfgProb, comMod, addModValues=False + ) return cfgStart diff --git a/avaframe/com1DFA/com1DFA.py b/avaframe/com1DFA/com1DFA.py index 574d84b4b..2370267a1 100644 --- a/avaframe/com1DFA/com1DFA.py +++ b/avaframe/com1DFA/com1DFA.py @@ -3,6 +3,7 @@ """ import copy +import inspect import logging import math import os @@ -103,12 +104,16 @@ def com1DFAPreprocess(cfgMain, typeCfgInfo, cfgInfo, module=com1DFA): cfgStart = cfgInfo # fetch input data and create work and output directories - inputSimFilesAll, outDir, 
simDFExisting, simNameExisting = com1DFATools.initializeInputs( - avalancheDir, cfgStart["GENERAL"].getboolean("cleanRemeshedRasters"), module + inputSimFilesAll, outDir, simDFExisting, simNameExisting = ( + com1DFATools.initializeInputs( + avalancheDir, cfgStart["GENERAL"].getboolean("cleanRemeshedRasters"), module + ) ) # create dictionary with one key for each simulation that shall be performed - simDict = dP.createSimDict(avalancheDir, module, cfgStart, inputSimFilesAll, simNameExisting) + simDict = dP.createSimDict( + avalancheDir, module, cfgStart, inputSimFilesAll, simNameExisting + ) return simDict, outDir, inputSimFilesAll, simDFExisting @@ -145,10 +150,14 @@ def com1DFAMain(cfgMain, cfgInfo=""): typeCfgInfo = com1DFATools.checkCfgInfoType(cfgInfo) if typeCfgInfo == "cfgFromDir": # preprocessing to create configuration objects for all simulations to run by reading multiple cfg files - simDict, inputSimFiles, simDFExisting, outDir = com1DFATools.createSimDictFromCfgs(cfgMain, cfgInfo) + simDict, inputSimFiles, simDFExisting, outDir = ( + com1DFATools.createSimDictFromCfgs(cfgMain, cfgInfo) + ) else: # preprocessing to create configuration objects for all simulations to run - simDict, outDir, inputSimFiles, simDFExisting = com1DFAPreprocess(cfgMain, typeCfgInfo, cfgInfo) + simDict, outDir, inputSimFiles, simDFExisting = com1DFAPreprocess( + cfgMain, typeCfgInfo, cfgInfo + ) log.info("The following simulations will be performed") for key in simDict: @@ -173,7 +182,9 @@ def com1DFAMain(cfgMain, cfgInfo=""): nCPU = cfgUtils.getNumberOfProcesses(cfgMain, len(simDict)) # Supply compute task with inputs - com1DFACoreTaskWithInput = partial(com1DFACoreTask, simDict, inputSimFiles, avalancheDir, outDir) + com1DFACoreTaskWithInput = partial( + com1DFACoreTask, simDict, inputSimFiles, avalancheDir, outDir + ) # Create parallel pool and run # with multiprocessing.Pool(processes=nCPU) as pool: @@ -232,7 +243,10 @@ def com1DFACoreTask(simDict, inputSimFiles, 
avalancheDir, outDir, cuSim): # fetch simHash for current sim simHash = simDict[cuSim]["simHash"] - log.info("%s runs as process: %s, %s" % (cuSim, os.getpid(), threading.current_thread().ident)) + log.info( + "%s runs as process: %s, %s" + % (cuSim, os.getpid(), threading.current_thread().ident) + ) # append configuration to dataframe simDF = cfgUtils.appendCgf2DF(simHash, cuSim, cfg, simDF) @@ -247,7 +261,9 @@ def com1DFACoreTask(simDict, inputSimFiles, avalancheDir, outDir, cuSim): cfgFinal, tCPU, nPartInitial, - ) = com1DFA.com1DFACore(cfg, avalancheDir, cuSim, inputSimFiles, outDir, simHash=simHash) + ) = com1DFA.com1DFACore( + cfg, avalancheDir, cuSim, inputSimFiles, outDir, simHash=simHash + ) simDF.at[simHash, "nPart"] = str(int(nPartInitial)) @@ -257,7 +273,9 @@ def com1DFACoreTask(simDict, inputSimFiles, avalancheDir, outDir, cuSim): # create hash to check if configuration didn't change simHashFinal = cfgUtils.cfgHash(cfgFinal) if simHashFinal != simHash: - cfgUtils.writeCfgFile(avalancheDir, com1DFA, cfg, fileName="%s_butModified" % simHash) + cfgUtils.writeCfgFile( + avalancheDir, com1DFA, cfg, fileName="%s_butModified" % simHash + ) message = "Simulation configuration has been changed since start" log.error(message) raise AssertionError(message) @@ -266,7 +284,9 @@ def com1DFACoreTask(simDict, inputSimFiles, avalancheDir, outDir, cuSim): return simDF, tCPUDF, dem, reportDict -def com1DFAPostprocess(simDF, tCPUDF, simDFExisting, cfgMain, dem, reportDictList, exportData): +def com1DFAPostprocess( + simDF, tCPUDF, simDFExisting, cfgMain, dem, reportDictList, exportData +): """postprocessing of simulation results: save configuration to csv, create plots and report Parameters @@ -311,7 +331,9 @@ def com1DFAPostprocess(simDF, tCPUDF, simDFExisting, cfgMain, dem, reportDictLis # write the actually simulated sims to a separate csv file, # this is used for the qgis connector - cfgUtils.writeAllConfigurationInfo(avalancheDir, simDF, specDir="", 
csvName="latestSims.csv") + cfgUtils.writeAllConfigurationInfo( + avalancheDir, simDF, specDir="", csvName="latestSims.csv" + ) # append new simulations configuration to old ones (if they exist), # return total dataFrame and write it to csv @@ -332,7 +354,9 @@ def com1DFAPostprocess(simDF, tCPUDF, simDFExisting, cfgMain, dem, reportDictLis else: plotDict = "" # create contour line plot - reportDictList, _ = outCom1DFA.createContourPlot(reportDictList, avalancheDir, simDF) + reportDictList, _ = outCom1DFA.createContourPlot( + reportDictList, avalancheDir, simDF + ) if cfgMain["FLAGS"].getboolean("createReport"): # write report @@ -427,7 +451,9 @@ def com1DFACore(cfg, avaDir, cuSimName, inputSimFiles, outDir, simHash=""): log.info(("cpu time DFA = %s s" % (tCPUDFA))) # write report dictionary - reportDict = createReportDict(avaDir, cuSimName, relName, inputSimLines, cfg, reportAreaInfo) + reportDict = createReportDict( + avaDir, cuSimName, relName, inputSimLines, cfg, reportAreaInfo + ) # add time and mass info to report reportDict = reportAddTimeMassInfo(reportDict, tCPUDFA, infoDict) @@ -437,7 +463,9 @@ def com1DFACore(cfg, avaDir, cuSimName, inputSimFiles, outDir, simHash=""): # write text file to Outputs/com1DFA/configurationFilesDone to indicate that this simulation has been performed configFileName = "%s.ini" % cuSimName for saveDir in ["configurationFilesDone", "configurationFilesLatest"]: - configDir = pathlib.Path(avaDir, "Outputs", "com1DFA", "configurationFiles", saveDir) + configDir = pathlib.Path( + avaDir, "Outputs", "com1DFA", "configurationFiles", saveDir + ) with open((configDir / configFileName), "w") as fi: fi.write("see directory configurationFiles for info on config") fi.close() @@ -486,7 +514,9 @@ def prepareReleaseEntrainment(cfg, rel, inputSimLines): if cfg["GENERAL"].getboolean("iniStep"): # set release thickness for buffer - releaseLineBuffer = setThickness(cfg, inputSimLines["releaseLineBuffer"], "relTh") + releaseLineBuffer = setThickness( 
+ cfg, inputSimLines["releaseLineBuffer"], "relTh" + ) inputSimLines["releaseLineBuffer"] = releaseLineBuffer if ( @@ -494,14 +524,19 @@ def prepareReleaseEntrainment(cfg, rel, inputSimLines): and inputSimLines["entResInfo"]["flagSecondaryRelease"] == "Yes" ): if cfg["INPUT"]["secondaryRelThFile"] == "": - secondaryReleaseLine = setThickness(cfg, inputSimLines["secondaryReleaseLine"], "secondaryRelTh") + secondaryReleaseLine = setThickness( + cfg, inputSimLines["secondaryReleaseLine"], "secondaryRelTh" + ) inputSimLines["secondaryReleaseLine"] = secondaryReleaseLine else: inputSimLines["entResInfo"]["flagSecondaryRelease"] = "No" secondaryReleaseLine = None inputSimLines["secondaryReleaseLine"] = secondaryReleaseLine - if cfg["GENERAL"]["simTypeActual"] in ["ent", "entres"] and cfg["INPUT"]["entThFile"] == "": + if ( + cfg["GENERAL"]["simTypeActual"] in ["ent", "entres"] + and cfg["INPUT"]["entThFile"] == "" + ): # set entrainment thickness entLine = setThickness(cfg, inputSimLines["entLine"], "entTh") inputSimLines["entLine"] = entLine @@ -601,7 +636,9 @@ def prepareInputData(inputSimFiles, cfg): relFile = inputSimFiles["releaseScenario"] # get dem dictionary - already read DEM with correct mesh cell size - demOri = gI.initializeDEM(cfg["GENERAL"]["avalancheDir"], demPath=cfg["INPUT"]["DEM"]) + demOri = gI.initializeDEM( + cfg["GENERAL"]["avalancheDir"], demPath=cfg["INPUT"]["DEM"] + ) dOHeader = demOri["header"] # read data from relThFile if needed, already with correct mesh cell size @@ -620,7 +657,9 @@ def prepareInputData(inputSimFiles, cfg): ) relThFieldData = "" else: - relRasterPath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["relThFile"]) + relRasterPath = pathlib.Path( + cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["relThFile"] + ) relRasterDict = IOf.readRaster(relRasterPath) relThFieldData = relRasterDict["rasterData"] releaseLine = { @@ -638,7 +677,9 @@ def prepareInputData(inputSimFiles, cfg): if 
entResInfo["flagSecondaryRelease"] == "Yes": if cfg["INPUT"]["secondaryRelThFile"] == "": secondaryReleaseFile = inputSimFiles["secondaryRelFile"] - secondaryReleaseLine = shpConv.readLine(secondaryReleaseFile, "", demOri) + secondaryReleaseLine = shpConv.readLine( + secondaryReleaseFile, "", demOri + ) secondaryReleaseLine["fileName"] = secondaryReleaseFile secondaryReleaseLine["type"] = "Secondary release" secondaryReleaseLine["initializedFrom"] = "shapefile" @@ -652,7 +693,9 @@ def prepareInputData(inputSimFiles, cfg): ) else: secRelRasterPath = pathlib.Path( - cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["secondaryRelThFile"] + cfg["GENERAL"]["avalancheDir"], + "Inputs", + cfg["INPUT"]["secondaryRelThFile"], ) secrelRasterDict = IOf.readRaster(secRelRasterPath) secondaryReleaseLine = { @@ -688,7 +731,9 @@ def prepareInputData(inputSimFiles, cfg): cfg["GENERAL"]["avalancheDir"], entLine, "com1DFA", type="entrainment" ) else: - entRasterPath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["entThFile"]) + entRasterPath = pathlib.Path( + cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["entThFile"] + ) entLineDict = IOf.readRaster(entRasterPath) entLine = { "rasterData": entLineDict["rasterData"], @@ -717,7 +762,9 @@ def prepareInputData(inputSimFiles, cfg): cfg["GENERAL"]["avalancheDir"], resLine, "com1DFA", type="resistance" ) else: - resRasterPath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["resFile"]) + resRasterPath = pathlib.Path( + cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["resFile"] + ) resLineDict = IOf.readRaster(resRasterPath) resLine = { "rasterData": resLineDict["rasterData"], @@ -978,8 +1025,12 @@ def reportAddTimeMassInfo(reportDict, tCPUDFA, infoDict): """Add time and mass info to report""" # add mass info - reportDict["Simulation Parameters"].update({"Initial mass [kg]": ("%.2f" % infoDict["initial mass"])}) - reportDict["Simulation Parameters"].update({"Final mass [kg]": 
("%.2f" % infoDict["final mass"])}) + reportDict["Simulation Parameters"].update( + {"Initial mass [kg]": ("%.2f" % infoDict["initial mass"])} + ) + reportDict["Simulation Parameters"].update( + {"Final mass [kg]": ("%.2f" % infoDict["final mass"])} + ) reportDict["Simulation Parameters"].update( {"Entrained mass [kg]": ("%.2f" % infoDict["entrained mass"])} ) @@ -1122,7 +1173,9 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): releaseLine = inputSimLines["releaseLineBuffer"] releaseLineReal = inputSimLines["releaseLine"] # check if release features overlap between features - geoTrans.prepareArea(releaseLineReal, dem, thresholdPointInPoly, combine=True, checkOverlap=True) + geoTrans.prepareArea( + releaseLineReal, dem, thresholdPointInPoly, combine=True, checkOverlap=True + ) buffer1 = ( cfg["GENERAL"].getfloat("sphKernelRadius") * cfg["GENERAL"].getfloat("additionallyFixedFactor") @@ -1150,7 +1203,9 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): # create release area raster if not read from file if inputSimLines["releaseLine"]["initializedFrom"] == "shapefile": # check if release features overlap between features - geoTrans.prepareArea(releaseLine, dem, thresholdPointInPoly, combine=True, checkOverlap=True) + geoTrans.prepareArea( + releaseLine, dem, thresholdPointInPoly, combine=True, checkOverlap=True + ) # if no release thickness field or function - set release according to shapefile or ini file # this is a list of release rasters that we want to combine @@ -1173,8 +1228,8 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): # for area computation use smaller threshold to identify raster cells that lie within release line # as for creating particles a bigger radius is chosen as particles that lie outside are removed afterwards releaseInfoDict = copy.deepcopy(releaseLine) - relAreaActualList, relAreaProjectedList, releaseInfoDict = gI.computeAreasFromRasterAndLine( - releaseInfoDict, dem + 
relAreaActualList, relAreaProjectedList, releaseInfoDict = ( + gI.computeAreasFromRasterAndLine(releaseInfoDict, dem) ) relAreaProjected = np.sum(relAreaProjectedList) relAreaActual = np.sum(relAreaActualList) @@ -1237,7 +1292,9 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): # perform initialisation step for redistributing particles if cfg["GENERAL"].getboolean("iniStep"): startTimeIni = time.time() - particles, fields = pI.getIniPosition(cfg, particles, dem, fields, inputSimLines, relThField) + particles, fields = pI.getIniPosition( + cfg, particles, dem, fields, inputSimLines, relThField + ) tIni = time.time() - startTimeIni log.info( "Ini step for initialising particles finalized, total mass: %.2f, number of particles: %d" @@ -1264,8 +1321,12 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): ) # check if entrainment and release overlap - entrMassRaster = geoTrans.checkOverlap(entrMassRaster, relRaster, "Entrainment", "Release", crop=True) - entrEnthRaster = geoTrans.checkOverlap(entrEnthRaster, relRaster, "Entrainment", "Release", crop=True) + entrMassRaster = geoTrans.checkOverlap( + entrMassRaster, relRaster, "Entrainment", "Release", crop=True + ) + entrEnthRaster = geoTrans.checkOverlap( + entrEnthRaster, relRaster, "Entrainment", "Release", crop=True + ) # check for overlap with the secondary release area if secondaryReleaseInfo["flagSecondaryRelease"] == "Yes": for secIndex, secRelRaster in enumerate(secondaryReleaseInfo["rasterData"]): @@ -1285,7 +1346,9 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): ) # export secondary release raster used for computations (after cutting potential overlap with release) if cfg["EXPORTS"].getboolean("exportRasters"): - outDir = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters") + outDir = pathlib.Path( + cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters" + ) useCompression = 
cfg["EXPORTS"].getboolean("useCompression") IOf.writeResultToRaster( dem["originalHeader"], @@ -1303,7 +1366,9 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): ) # export entrainment raster used for computations (after cutting potential overlap with release or secondary release) if cfg["EXPORTS"].getboolean("exportRasters"): - outDir = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters") + outDir = pathlib.Path( + cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters" + ) useCompression = cfg["EXPORTS"].getboolean("useCompression") IOf.writeResultToRaster( dem["originalHeader"], @@ -1314,7 +1379,10 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): ) log.info( "Entrainment area raster derived from %s saved to %s" - % (inputSimLines["entResInfo"]["entThFileType"], str(outDir / "entrainmentRaster")) + % ( + inputSimLines["entResInfo"]["entThFileType"], + str(outDir / "entrainmentRaster"), + ) ) # surfacic entrainment mass available (unit kg/m²) @@ -1367,7 +1435,9 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): return particles, fields, dem, reportAreaInfo -def initializeParticles(cfg, releaseLine, dem, inputSimLines="", logName="", relThField="", thName="rel"): +def initializeParticles( + cfg, releaseLine, dem, inputSimLines="", logName="", relThField="", thName="rel" +): """Initialize DFA simulation Create particles and fields dictionary according to config parameters @@ -1429,7 +1499,9 @@ def initializeParticles(cfg, releaseLine, dem, inputSimLines="", logName="", rel # find all non empty cells (meaning release area) indRelY, indRelX = np.nonzero(relRasterMask) if inputSimLines != "": - indRelYReal, indRelXReal = np.nonzero(inputSimLines["releaseLine"]["rasterData"]) + indRelYReal, indRelXReal = np.nonzero( + inputSimLines["releaseLine"]["rasterData"] + ) else: indRelYReal, indRelXReal = np.nonzero(relRaster) iReal = list(zip(indRelYReal, indRelXReal)) @@ -1442,9 
+1514,7 @@ def initializeParticles(cfg, releaseLine, dem, inputSimLines="", logName="", rel # make option available to read initial particle distribution from file if cfg.getboolean("initialiseParticlesFromFile"): if cfg.getboolean("iniStep"): - message = ( - "If initialiseParticlesFromFile is used, iniStep cannot be performed - chose only one option" - ) + message = "If initialiseParticlesFromFile is used, iniStep cannot be performed - chose only one option" log.error(message) raise AssertionError(message) particles, hPartArray = particleTools.initialiseParticlesFromFile( @@ -1464,7 +1534,10 @@ def initializeParticles(cfg, releaseLine, dem, inputSimLines="", logName="", rel if len(relThField) != 0 and cfg.getboolean("iniStep"): # set release thickness to a constant value for initialisation relRaster = np.where(relRaster > 0.0, cfg.getfloat("%sTh" % thName), 0.0) - log.warning("%sThField!= 0, but relRaster set to %sTh value (from ini)" % (thName, thName)) + log.warning( + "%sThField!= 0, but relRaster set to %sTh value (from ini)" + % (thName, thName) + ) # loop on non empty cells for indRelx, indRely in zip(indRelX, indRelY): # compute number of particles for this cell @@ -1528,7 +1601,11 @@ def initializeParticles(cfg, releaseLine, dem, inputSimLines="", logName="", rel particles["trajectoryAngle"] = np.zeros(np.shape(hPartArray)) particles["stoppCriteria"] = False mPartArray = particles["m"] - kineticEne = np.sum(0.5 * mPartArray * DFAtls.norm2(particles["ux"], particles["uy"], particles["uz"])) + kineticEne = np.sum( + 0.5 + * mPartArray + * DFAtls.norm2(particles["ux"], particles["uy"], particles["uz"]) + ) particles["kineticEne"] = kineticEne particles["potentialEne"] = np.sum(gravAcc * mPartArray * particles["z"]) particles["peakKinEne"] = kineticEne @@ -1678,12 +1755,16 @@ def initializeFields(cfg, dem, particles, releaseLine): fields["Vx"] = np.zeros((nrows, ncols)) # velocity in x direction [m/s] fields["Vy"] = np.zeros((nrows, ncols)) # velocity in y 
direction [m/s] fields["Vz"] = np.zeros((nrows, ncols)) # velocity in z direction [m/s] - fields["dmDet"] = np.zeros((nrows, ncols)) # flowing mass change due to detrainment [kg] + fields["dmDet"] = np.zeros( + (nrows, ncols) + ) # flowing mass change due to detrainment [kg] fields["FTStop"] = np.zeros((nrows, ncols)) # flow thickness that is stopped [m] fields["FTDet"] = np.zeros((nrows, ncols)) # flow thickness that is detrained [m] fields["FTEnt"] = np.zeros((nrows, ncols)) # flow thickness that is entrained [m] fields["sfcChange"] = np.zeros((nrows, ncols)) # depth that changes the surface [m] - fields["sfcChangeTotal"] = np.zeros((nrows, ncols)) # total depth that changed the surface [m] + fields["sfcChangeTotal"] = np.zeros( + (nrows, ncols) + ) # total depth that changed the surface [m] fields["demAdapted"] = np.zeros((nrows, ncols)) # adapted topography [m] fields["timeInfo"] = np.zeros((nrows, ncols)) # first time # for optional fields, initialize with dummys (minimum size array). 
The cython functions then need something @@ -1731,9 +1812,12 @@ def initializeFields(cfg, dem, particles, releaseLine): # Cleaning up the triangles (remove unwanted bonds) # masking triangles exiting the release (plan small buffer to be sure to keep all inner edges) # this happends on non-convex release areas - outline = sPolygon(zip(xOutline, yOutline)).buffer(cfgGen.getfloat("thresholdPointInPoly")) + outline = sPolygon(zip(xOutline, yOutline)).buffer( + cfgGen.getfloat("thresholdPointInPoly") + ) mask = [ - not outline.contains(sPolygon(zip(x[tri], y[tri]))) for tri in triangles.get_masked_triangles() + not outline.contains(sPolygon(zip(x[tri], y[tri]))) + for tri in triangles.get_masked_triangles() ] triangles.set_mask(mask) # masking triangles with sidelength bigger than some threshold @@ -1792,7 +1876,9 @@ def initializeSecRelease(inputSimLines, dem, relRaster, reportAreaInfo): """ if inputSimLines["entResInfo"]["flagSecondaryRelease"] == "Yes": secondaryReleaseInfo = inputSimLines["secondaryReleaseLine"] - log.info("Initializing secondary release area: %s" % secondaryReleaseInfo["fileName"]) + log.info( + "Initializing secondary release area: %s" % secondaryReleaseInfo["fileName"] + ) log.info("Secondary release area features: %s" % (secondaryReleaseInfo["Name"])) secondaryReleaseInfo["header"] = dem["originalHeader"] @@ -1854,7 +1940,9 @@ def initializeSecRelease(inputSimLines, dem, relRaster, reportAreaInfo): return secondaryReleaseInfo, reportAreaInfo -def initializeMassEnt(dem, simTypeActual, entLine, reportAreaInfo, thresholdPointInPoly, cfg): +def initializeMassEnt( + dem, simTypeActual, entLine, reportAreaInfo, thresholdPointInPoly, cfg +): """Initialize mass for entrainment Parameters @@ -1889,7 +1977,9 @@ def initializeMassEnt(dem, simTypeActual, entLine, reportAreaInfo, thresholdPoin log.info("Initializing entrainment area: %s" % (entrainmentArea)) log.info("Entrainment area features: %s" % (entLine["Name"])) if entLine["initializedFrom"] == 
"shapefile": - entLine = geoTrans.prepareArea(entLine, dem, thresholdPointInPoly, thList=entLine["thickness"]) + entLine = geoTrans.prepareArea( + entLine, dem, thresholdPointInPoly, thList=entLine["thickness"] + ) entrMassRaster = entLine["rasterData"] # ToDo: not used in samos but implemented # tempRaster = cfg['GENERAL'].getfloat('entTempRef') + (dem['rasterData'] - cfg['GENERAL'].getfloat('entMinZ')) @@ -1912,7 +2002,9 @@ def initializeMassEnt(dem, simTypeActual, entLine, reportAreaInfo, thresholdPoin return entrMassRaster, entrEnthRaster, reportAreaInfo -def initializeResistance(cfg, dem, simTypeActual, resLine, reportAreaInfo, thresholdPointInPoly): +def initializeResistance( + cfg, dem, simTypeActual, resLine, reportAreaInfo, thresholdPointInPoly +): """Initialize resistance matrix Parameters @@ -1972,7 +2064,9 @@ def initializeResistance(cfg, dem, simTypeActual, resLine, reportAreaInfo, thres reportAreaInfo["resistance"] = "Yes" if detrainment: - log.info("Initializing detrainment (resistance) area: %s" % (resistanceArea)) + log.info( + "Initializing detrainment (resistance) area: %s" % (resistanceArea) + ) log.info("Detrainment (Resistance) area features: %s" % (resLine["Name"])) detRaster = K * mask reportAreaInfo["detrainment"] = "Yes" @@ -1988,7 +2082,9 @@ def initializeResistance(cfg, dem, simTypeActual, resLine, reportAreaInfo, thres return cResRaster, detRaster, reportAreaInfo -def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, simHash=""): +def DFAIterate( + cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, simHash="" +): """Perform time loop for DFA simulation Save results at desired intervals @@ -2065,7 +2161,9 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si ] frictModel = cfgGen["frictModel"].lower() frictType = frictModelsList.index(frictModel) + 1 - log.debug("Friction Model used: %s, %s" % (frictModelsList[frictType - 1], frictType)) + log.debug( + "Friction Model 
used: %s, %s" % (frictModelsList[frictType - 1], frictType) + ) # turn resistance model into integer ResModel = cfgGen["ResistanceModel"].lower() @@ -2073,7 +2171,10 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si "default", ] resistanceType = ResModelsList.index(ResModel) + 1 - log.debug("Resistance Model used: %s, %s" % (ResModelsList[resistanceType - 1], resistanceType)) + log.debug( + "Resistance Model used: %s, %s" + % (ResModelsList[resistanceType - 1], resistanceType) + ) # Initialise Lists to save fields and add initial time step contourDictXY = None @@ -2085,7 +2186,9 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si pfvTimeMax = [] # setup a result fields info data frame to save max values of fields and avalanche front - resultsDF = setupresultsDF(resTypes, cfg["VISUALISATION"].getboolean("createRangeTimeDiagram")) + resultsDF = setupresultsDF( + resTypes, cfg["VISUALISATION"].getboolean("createRangeTimeDiagram") + ) # Add different time stepping options here log.debug("Use standard time stepping") @@ -2119,7 +2222,6 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si # Update dtSave to remove the initial timestep we just saved dtSave = updateSavingTimeStep(dtSaveOriginal, cfgGen, t) - # export particles properties for visulation if cfg["VISUALISATION"].getboolean("writePartToCSV"): particleTools.savePartToCsv( @@ -2138,7 +2240,9 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si # check if range-time diagram should be performed, if yes - initialize if cfg["VISUALISATION"].getboolean("createRangeTimeDiagram"): demRT = dtAna.setDemOrigin(dem) - mtiInfo, dtRangeTime, cfgRangeTime = dtAna.initializeRangeTime(dtAna, cfg, demRT, simHash) + mtiInfo, dtRangeTime, cfgRangeTime = dtAna.initializeRangeTime( + dtAna, cfg, demRT, simHash + ) # fetch initial time step too mtiInfo, dtRangeTime = dtAna.fetchRangeTimeInfo( cfgRangeTime, 
cfg, dtRangeTime, t, demRT["header"], fields, mtiInfo @@ -2187,7 +2291,9 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si rangeValue = mtiInfo["rangeList"][-1] else: rangeValue = "" - resultsDF = addMaxValuesToDF(resultsDF, fields, t, resTypes, rangeValue=rangeValue) + resultsDF = addMaxValuesToDF( + resultsDF, fields, t, resTypes, rangeValue=rangeValue + ) tCPU["nSave"] = nSave particles["t"] = t @@ -2204,13 +2310,18 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si # create range time diagram # determine avalanche front and flow characteristics in respective coodrinate system - if cfg["VISUALISATION"].getboolean("createRangeTimeDiagram") and t >= dtRangeTime[0]: + if ( + cfg["VISUALISATION"].getboolean("createRangeTimeDiagram") + and t >= dtRangeTime[0] + ): mtiInfo, dtRangeTime = dtAna.fetchRangeTimeInfo( cfgRangeTime, cfg, dtRangeTime, t, demRT["header"], fields, mtiInfo ) # create plots for tt diagram animation - if cfgRangeTime["PLOTS"].getboolean("animate") and cfg["VISUALISATION"].getboolean("TTdiagram"): + if cfgRangeTime["PLOTS"].getboolean("animate") and cfg[ + "VISUALISATION" + ].getboolean("TTdiagram"): TTResType = cfgRangeTime["GENERAL"]["rangeTimeResType"] dtAnaPlots.animationPlot( demRT, @@ -2226,7 +2337,9 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si if t >= (dtSave[0] - 1.0e-8): Tsave.append(t) log.debug("Saving results for time step t = %f s", t) - log.debug("MTot = %f kg, %s particles" % (particles["mTot"], particles["nPart"])) + log.debug( + "MTot = %f kg, %s particles" % (particles["mTot"], particles["nPart"]) + ) log.debug(("cpu time Force = %s s" % (tCPU["timeForce"] / nIter))) log.debug(("cpu time ForceSPH = %s s" % (tCPU["timeForceSPH"] / nIter))) log.debug(("cpu time Position = %s s" % (tCPU["timePos"] / nIter))) @@ -2235,7 +2348,9 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si # Result parameters 
to be exported if cfg["EXPORTS"].getboolean("exportData"): - exportFields(cfg, t, fields, dem, outDir, cuSimName, TSave="intermediate") + exportFields( + cfg, t, fields, dem, outDir, cuSimName, TSave="intermediate" + ) # export particles dictionaries of saving time steps if "particles" in resTypes: @@ -2360,7 +2475,9 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si dtAna.exportData(mtiInfo, cfgRangeTime, "com1DFA") # save resultsDF to file - resultsDFPath = pathlib.Path(cfgGen["avalancheDir"], "Outputs", "com1DFA", "resultsDF_%s.csv" % simHash) + resultsDFPath = pathlib.Path( + cfgGen["avalancheDir"], "Outputs", "com1DFA", "resultsDF_%s.csv" % simHash + ) resultsDF.to_csv(resultsDFPath) if cfg["EXPORTS"].getboolean("exportData"): @@ -2570,7 +2687,15 @@ def writeMBFile(infoDict, avaDir, logName): def computeEulerTimeStep( - cfg, particles, fields, zPartArray0, dem, tCPU, frictType, resistanceType, reportAreaInfo + cfg, + particles, + fields, + zPartArray0, + dem, + tCPU, + frictType, + resistanceType, + reportAreaInfo, ): """compute next time step using an euler forward scheme @@ -2616,13 +2741,17 @@ def computeEulerTimeStep( # update resistance area fields using thresholds fields = com1DFATools.updateResCoeffFields(fields, cfg) if debugPlot: - outCom1DFA.plotResFields(fields, cfg, particles["tPlot"], dem, particles["mTot"]) + outCom1DFA.plotResFields( + fields, cfg, particles["tPlot"], dem, particles["mTot"] + ) # get forces startTime = time.time() # loop version of the compute force log.debug("Compute Force C") - particles, force, fields = DFAfunC.computeForceC(cfg, particles, fields, dem, frictType, resistanceType) + particles, force, fields = DFAfunC.computeForceC( + cfg, particles, fields, dem, frictType, resistanceType + ) tCPUForce = time.time() - startTime tCPU["timeForce"] = tCPU["timeForce"] + tCPUForce @@ -2697,9 +2826,15 @@ def computeEulerTimeStep( # adapt DEM considering erosion and deposition # only adapt DEM when in 
one grid cell the changing height > threshold thresholdAdaptSfc = cfg.getfloat("thresholdAdaptSfc") - adaptStop = cfg.getboolean("adaptSfcStopped") and np.any(abs(fields["FTStop"]) > thresholdAdaptSfc) - adaptDet = cfg.getboolean("adaptSfcDetrainment") and np.any(abs(fields["FTDet"]) > thresholdAdaptSfc) - adaptEnt = cfg.getboolean("adaptSfcEntrainment") and np.any(abs(fields["FTEnt"]) > thresholdAdaptSfc) + adaptStop = cfg.getboolean("adaptSfcStopped") and np.any( + abs(fields["FTStop"]) > thresholdAdaptSfc + ) + adaptDet = cfg.getboolean("adaptSfcDetrainment") and np.any( + abs(fields["FTDet"]) > thresholdAdaptSfc + ) + adaptEnt = cfg.getboolean("adaptSfcEntrainment") and np.any( + abs(fields["FTEnt"]) > thresholdAdaptSfc + ) if particles["t"] > 0: if adaptStop or adaptDet or adaptEnt: dem, fields = adaptDEM(dem, fields, cfg) @@ -2728,19 +2863,28 @@ def releaseSecRelArea(cfg, particles, fields, dem, zPartArray0, reportAreaInfo): mask = (secRelRaster > 0) & (flowThicknessField > 0) if mask.any(): # create secondary release area particles - log.info("Initializing secondary release area feature %s" % secRelRasterName) + log.info( + "Initializing secondary release area feature %s" % secRelRasterName + ) if secondaryReleaseInfo["initializedFrom"] == "shapefile": secRelInfo = shpConv.extractFeature(secondaryReleaseInfo, count) secRelInfo["rasterData"] = secRelRaster - secRelParticles = initializeParticles(cfg, secRelInfo, dem, thName="secondaryRel") + secRelParticles = initializeParticles( + cfg, secRelInfo, dem, thName="secondaryRel" + ) else: secondaryReleaseInfo["rasterData"] = secRelRaster secRelParticles = initializeParticles( - cfg, secondaryReleaseInfo, dem, relThField=secRelRaster, thName="secondaryRel" + cfg, + secondaryReleaseInfo, + dem, + relThField=secRelRaster, + thName="secondaryRel", ) # release secondary release area by just appending the particles log.info( - "Releasing secondary release area %s at t = %.2f s" % (secRelRasterName, particles["t"]) + 
"Releasing secondary release area %s at t = %.2f s" + % (secRelRasterName, particles["t"]) ) particles = particleTools.mergeParticleDict(particles, secRelParticles) # save index of secRel feature @@ -2789,11 +2933,15 @@ def savePartToPickle(dictList, outDir, logName): if isinstance(dictList, list): for dict in dictList: - fi = open(outDir / ("particles_%s_%09.4f.pickle" % (logName, dict["t"])), "wb") + fi = open( + outDir / ("particles_%s_%09.4f.pickle" % (logName, dict["t"])), "wb" + ) pickle.dump(dict, fi) fi.close() else: - fi = open(outDir / ("particles_%s_%09.4f.pickle" % (logName, dictList["t"])), "wb") + fi = open( + outDir / ("particles_%s_%09.4f.pickle" % (logName, dictList["t"])), "wb" + ) pickle.dump(dictList, fi) fi.close() @@ -2837,7 +2985,9 @@ def trackParticles(cfgTrackPart, dem, particlesList): if particleProperties == "": particleProperties = ["x", "y", "z", "ux", "uy", "uz", "m", "h"] else: - particleProperties = set(["x", "y", "z", "ux", "uy", "uz", "m", "h"] + particleProperties.split("|")) + particleProperties = set( + ["x", "y", "z", "ux", "uy", "uz", "m", "h"] + particleProperties.split("|") + ) # read location of particle to be tracked radius = cfgTrackPart.getfloat("radius") centerList = cfgTrackPart["centerTrackPartPoint"] @@ -2846,8 +2996,12 @@ def trackParticles(cfgTrackPart, dem, particlesList): "x": np.array([float(centerList[0])]), "y": np.array([float(centerList[1])]), } - centerTrackPartPoint["x"] = centerTrackPartPoint["x"] - dem["originalHeader"]["xllcenter"] - centerTrackPartPoint["y"] = centerTrackPartPoint["y"] - dem["originalHeader"]["yllcenter"] + centerTrackPartPoint["x"] = ( + centerTrackPartPoint["x"] - dem["originalHeader"]["xllcenter"] + ) + centerTrackPartPoint["y"] = ( + centerTrackPartPoint["y"] - dem["originalHeader"]["yllcenter"] + ) # start by finding the particles to be tracked particles2Track, track = particleTools.findParticles2Track( @@ -2855,7 +3009,9 @@ def trackParticles(cfgTrackPart, dem, particlesList): ) 
if track: # find those same particles and their children in the particlesList - particlesList, nPartTracked = particleTools.getTrackedParticles(particlesList, particles2Track) + particlesList, nPartTracked = particleTools.getTrackedParticles( + particlesList, particles2Track + ) # extract the wanted properties for the tracked particles trackedPartProp = particleTools.getTrackedParticlesProperties( @@ -2925,7 +3081,9 @@ def readFields( else: name = "*_" + r + "*.*" FieldsNameList = list(inDir.glob(name)) - timeListTemp = [float(element.stem.split("_t")[-1]) for element in FieldsNameList] + timeListTemp = [ + float(element.stem.split("_t")[-1]) for element in FieldsNameList + ] FieldsNameList = [x for _, x in sorted(zip(timeListTemp, FieldsNameList))] count = 0 for fieldsName in FieldsNameList: @@ -3024,7 +3182,11 @@ def exportFields( outFile = outDirPeak / dataName useCompression = cfg["EXPORTS"].getboolean("useCompression") IOf.writeResultToRaster( - dem["originalHeader"], resField, outFile, flip=True, useCompression=useCompression + dem["originalHeader"], + resField, + outFile, + flip=True, + useCompression=useCompression, ) log.debug( "Results parameter: %s has been exported to Outputs/peakFiles for time step: %.2f " @@ -3043,11 +3205,69 @@ def exportFields( outFile = outDirPeakAll / dataName useCompression = cfg["EXPORTS"].getboolean("useCompression") IOf.writeResultToRaster( - dem["originalHeader"], resField, outFile, flip=True, useCompression=useCompression + dem["originalHeader"], + resField, + outFile, + flip=True, + useCompression=useCompression, ) -def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting="", module=com1DFA): +def getModuleNames(module): + """Extract module name and short form by checking the call stack for wrapper modules. + + This function checks if we're being called from a wrapper module (e.g., com5SnowSlide, + com6RockAvalanche) by inspecting the call stack. If found, it uses the wrapper's name. 
+ Otherwise, it falls back to the passed module parameter. + + Parameters + ---------- + module: module + The module object to extract names from (fallback if no wrapper is found) + + Returns + ------- + tuple + (modName, modNameShort) where modName is the full name (e.g., "com1DFA") + and modNameShort is the short form (e.g., "com1") + """ + # First check if we're being called from a wrapper module (com5SnowSlide, com6RockAvalanche, etc.) + callerModName = None + for frameInfo in inspect.stack(): + frameModule = frameInfo.frame.f_globals.get("__name__", "") + # Look for modules matching comN{Name}.comN{Name} pattern (e.g., avaframe.com6RockAvalanche.com6RockAvalanche) + # but not com1DFA.com1DFA itself, unless nothing else found + if frameModule.startswith("avaframe.com"): + # Extract the last component (the actual module name) + moduleName = frameModule.split(".")[-1] + # Check if it matches the comN pattern (starts with "com" followed by a digit) + if re.match(r"^com\d+", moduleName) and not frameModule.endswith( + "com1DFA.com1DFA" + ): + callerModName = moduleName + break + + # Use caller module name if found, otherwise fall back to the passed module parameter + if callerModName: + modName = callerModName + else: + modName = module.__name__.split(".")[-1] # Full name: com1DFA, com8MoTPSA, etc. + + # Special case: com7Regional should be treated as com1DFA + if modName == "com7Regional": + modName = "com1DFA" + + shortModMatch = re.match(r"^(com\d+)", modName) + modNameShort = ( + shortModMatch.group(1) if shortModMatch else modName + ) # Short name: com1, com8, etc. 
+ + return modName, modNameShort + + +def prepareVarSimDict( + standardCfg, inputSimFiles, variationDict, simNameExisting="", module=com1DFA +): """Prepare a dictionary with simulations that shall be run with varying parameters following the variation dict Parameters @@ -3072,9 +3292,7 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting """ # extract the full module name and short form (e.g., "com1DFA" -> "com1") - modName = module.__name__.split(".")[-1] # Full name: com1DFA, com8MoTPSA, etc. - shortModMatch = re.match(r"^(com\d+)", modName) - modNameShort = shortModMatch.group(1) if shortModMatch else modName # Short name: com1, com8, etc. + modName, modNameShort = getModuleNames(module) # get list of simulation types that are desired if "simTypeList" in variationDict: @@ -3088,7 +3306,9 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting # set simTypeList (that has been checked if available) as parameter in variationDict variationDict["simTypeList"] = simTypeList # create a dataFrame with all possible combinations of the variationDict values - variationDF = pd.DataFrame(product(*variationDict.values()), columns=variationDict.keys()) + variationDF = pd.DataFrame( + product(*variationDict.values()), columns=variationDict.keys() + ) # generate a dictionary of full simulation info for all simulations to be performed # simulation info must contain: simName, releaseScenario, relFile, configuration as dictionary @@ -3159,12 +3379,16 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting # check if DEM in Inputs has desired mesh size pathToDem = dP.checkRasterMeshSize(cfgSim, inputSimFiles["demFile"], "DEM") cfgSim["INPUT"]["DEM"] = pathToDem - dem = IOf.readRaster(pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem)) + dem = IOf.readRaster( + pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) + ) # check extent of inputs read from raster have 
correct extent and cellSize # first release area if inputSimFiles["entResInfo"]["relThFileType"] in [".asc", ".tif"]: - pathToRel, pathToRelFull, remeshedRel = dP.checkExtentAndCellSize(cfgSim, relThFile, dem, "rel") + pathToRel, pathToRelFull, remeshedRel = dP.checkExtentAndCellSize( + cfgSim, relThFile, dem, "rel" + ) cfgSim["INPUT"]["relThFile"] = pathToRel inputSimFiles["entResInfo"]["relRemeshed"] = remeshedRel @@ -3179,10 +3403,12 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting cfgSim["INPUT"]["secondaryRelThFile"] = pathToSecRel inputSimFiles["entResInfo"]["secondaryRelRemeshed"] = remeshedSecRel - if modName == "com1DFA": + if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: # check if spatialVoellmy is chosen that friction fields have correct extent if cfgSim["GENERAL"]["frictModel"].lower() == "spatialvoellmy": - dem = IOf.readRaster(pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem)) + dem = IOf.readRaster( + pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) + ) for fric in ["mu", "xi"]: if inputSimFiles["entResInfo"][fric] == "Yes": pathToFric, _, remeshedFric = dP.checkExtentAndCellSize( @@ -3199,12 +3425,19 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting raise FileNotFoundError(message) # add info about dam file path to the cfg - if cfgSim["GENERAL"]["dam"] == "True" and inputSimFiles["damFile"] is not None: - cfgSim["INPUT"]["DAM"] = str(pathlib.Path("DAM", inputSimFiles["damFile"].name)) + if ( + cfgSim["GENERAL"]["dam"] == "True" + and inputSimFiles["damFile"] is not None + ): + cfgSim["INPUT"]["DAM"] = str( + pathlib.Path("DAM", inputSimFiles["damFile"].name) + ) # if tauC, mu, k used in com8 and com9 check extent of cellSize if modName in ["com8MoTPSA", "com9MoTVoellmy"]: - dem = IOf.readRaster(pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem)) + dem = IOf.readRaster( + 
pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) + ) if inputSimFiles["entResInfo"]["tauC"] == "Yes": pathToFric, pathToFricFull, remeshedFric = dP.checkExtentAndCellSize( @@ -3224,43 +3457,60 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting if cfgSim["Physical_parameters"]["Parameters"] == "auto": for fric in ["mu", "k"]: if inputSimFiles["entResInfo"][fric] == "Yes": - pathToFric, pathToFricFull, remeshedFric = dP.checkExtentAndCellSize( - cfgSim, inputSimFiles["%sFile" % fric], dem, fric + pathToFric, pathToFricFull, remeshedFric = ( + dP.checkExtentAndCellSize( + cfgSim, inputSimFiles["%sFile" % fric], dem, fric + ) ) cfgSim["INPUT"]["%sFile" % fric] = pathToFric inputSimFiles["entResInfo"]["%sRemeshed" % fric] = remeshedFric # check if forest effects = auto is chosen that forest parameter fields have correct extent - if "res" in row._asdict()["simTypeList"] and inputSimFiles["resFile"] is not None: + if ( + "res" in row._asdict()["simTypeList"] + and inputSimFiles["resFile"] is not None + ): if ( cfgSim["FOREST_EFFECTS"]["Forest effects"] == "auto" and inputSimFiles["entResInfo"]["bhd"] == "Yes" ): - pathToForest, pathToForestFull, remeshedForest = dP.checkExtentAndCellSize( - cfgSim, inputSimFiles["%sFile" % "bhd"], dem, "bhd" + pathToForest, pathToForestFull, remeshedForest = ( + dP.checkExtentAndCellSize( + cfgSim, inputSimFiles["%sFile" % "bhd"], dem, "bhd" + ) ) cfgSim["INPUT"]["%sFile" % "bhd"] = pathToForest inputSimFiles["entResInfo"]["%sRemeshed" % "bhd"] = remeshedForest # add info about entrainment file path to the cfg - if "ent" in row._asdict()["simTypeList"] and inputSimFiles["entFile"] is not None: + if ( + "ent" in row._asdict()["simTypeList"] + and inputSimFiles["entFile"] is not None + ): if inputSimFiles["entResInfo"]["entThFileType"] != ".shp": pathToEnt, pathToEntFull, remeshedEnt = dP.checkExtentAndCellSize( cfgSim, inputSimFiles["entThFile"], dem, "ent" ) cfgSim["INPUT"]["entThFile"] 
= pathToEnt inputSimFiles["entResInfo"]["entRemeshed"] = remeshedEnt - cfgSim["INPUT"]["entrainmentScenario"] = str(pathlib.Path("ENT", inputSimFiles["entFile"].name)) + cfgSim["INPUT"]["entrainmentScenario"] = str( + pathlib.Path("ENT", inputSimFiles["entFile"].name) + ) # add info about resistance file path to the cfg - if "res" in row._asdict()["simTypeList"] and inputSimFiles["resFile"] is not None: + if ( + "res" in row._asdict()["simTypeList"] + and inputSimFiles["resFile"] is not None + ): if inputSimFiles["entResInfo"]["resFileType"] != ".shp": pathToRes, pathToResFull, remeshedRes = dP.checkExtentAndCellSize( cfgSim, inputSimFiles["resFile"], dem, "res" ) cfgSim["INPUT"]["resFile"] = pathToRes inputSimFiles["entResInfo"]["resRemeshed"] = remeshedRes - cfgSim["INPUT"]["resistanceScenario"] = str(pathlib.Path("RES", inputSimFiles["resFile"].name)) + cfgSim["INPUT"]["resistanceScenario"] = str( + pathlib.Path("RES", inputSimFiles["resFile"].name) + ) # add thickness values if read from shp and not varied cfgSim = dP.appendThicknessToCfg(cfgSim) @@ -3272,12 +3522,16 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting frictIndi = None volIndi = None - pathToDemFull = pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) + pathToDemFull = pathlib.Path( + cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem + ) - if modName == "com1DFA": + if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: # if frictModel is samosATAuto compute release vol if cfgSim["GENERAL"]["frictModel"].lower() == "samosatauto": - relVolume = fetchRelVolume(rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"]) + relVolume = fetchRelVolume( + rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"] + ) else: relVolume = "" @@ -3285,13 +3539,17 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting cfgSim = checkCfg.checkCellSizeKernelRadius(cfgSim) # only keep friction model parameters 
that are used - cfgSim = checkCfg.checkCfgFrictionModel(cfgSim, inputSimFiles, relVolume=relVolume) + cfgSim = checkCfg.checkCfgFrictionModel( + cfgSim, inputSimFiles, relVolume=relVolume + ) # set frictModelIndicator, this needs to happen AFTER checkCfgFrictModel frictIndi = com1DFATools.setFrictTypeIndicator(cfgSim) - elif modName == "com8MoTPSA": - relVolume = fetchRelVolume(rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"]) + elif modName in ["com8MoTPSA", "com9MoTVoellmy"]: + relVolume = fetchRelVolume( + rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"] + ) # set Volume class identificator volIndi = setVolumeIndicator(cfgSim, relVolume) @@ -3325,7 +3583,7 @@ def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting "relFile": rel, "cfgSim": cfgSimObject, } - if modName == "com1DFA": + if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: # write configuration file, dont need to write cfg file for com8MoTPSA (does this later when creating rcf file) cfgUtils.writeCfgFile( cfgSimObject["GENERAL"]["avalancheDir"], @@ -3372,7 +3630,9 @@ def getSimTypeList(standardCfg, simTypeList, inputSimFiles): validSimTypes = validSimTypesStr.split("|") validArray = [True if item in validSimTypes else False for item in simTypeList] if False in validArray: - message = "A non-valid entry found in simType, valid Types are %s" % validSimTypesStr + message = ( + "A non-valid entry found in simType, valid Types are %s" % validSimTypesStr + ) log.error(message) raise AssertionError(message) @@ -3406,12 +3666,17 @@ def getSimTypeList(standardCfg, simTypeList, inputSimFiles): if entResInfo["flagSecondaryRelease"] == "No": standardCfg["GENERAL"]["secRelArea"] = "False" else: - log.info("Using the secondary release area file: %s" % inputSimFiles["secondaryRelFile"]) + log.info( + "Using the secondary release area file: %s" + % inputSimFiles["secondaryRelFile"] + ) return standardCfg, simTypeList -def runOrLoadCom1DFA(avalancheDir, 
cfgMain, runDFAModule=True, cfgFile="", deleteOutput=True): +def runOrLoadCom1DFA( + avalancheDir, cfgMain, runDFAModule=True, cfgFile="", deleteOutput=True +): """Run or load DFA results depending on runDFAModule=True or False Parameters @@ -3449,11 +3714,16 @@ def runOrLoadCom1DFA(avalancheDir, cfgMain, runDFAModule=True, cfgFile="", delet # load DFA results simDF, _ = cfgUtils.readAllConfigurationInfo(avalancheDir) if simDF is None: - message = "Did not find any com1DFA simulations in %s/Outputs/com1DFA/" % avalancheDir + message = ( + "Did not find any com1DFA simulations in %s/Outputs/com1DFA/" + % avalancheDir + ) log.error(message) raise FileExistsError(message) - dataDF, resTypeList = fU.makeSimFromResDF(avalancheDir, "com1DFA", inputDir="", simName="") + dataDF, resTypeList = fU.makeSimFromResDF( + avalancheDir, "com1DFA", inputDir="", simName="" + ) simDF = simDF.reset_index().merge(dataDF, on="simName").set_index("index") return dem, simDF, resTypeList @@ -3496,7 +3766,9 @@ def fetchRelVolume(releaseFile, cfg, pathToDem, secondaryReleaseFile, radius=0.0 demVol = DFAtls.getAreaMesh(demVol, methodMeshNormal) # compute volume of release area - relVolume = initializeRelVol(cfg, demVol, releaseFile, radius, releaseType="primary") + relVolume = initializeRelVol( + cfg, demVol, releaseFile, radius, releaseType="primary" + ) if cfg["GENERAL"]["secRelArea"] == "True": # compute volume of secondary release area @@ -3513,7 +3785,8 @@ def fetchRelVolume(releaseFile, cfg, pathToDem, secondaryReleaseFile, radius=0.0 relVolume = relVolume + secondaryRelVolume else: log.info( - "%.2f meter grid based release volume is: %.2f m3" % (demVol["header"]["cellsize"], relVolume) + "%.2f meter grid based release volume is: %.2f m3" + % (demVol["header"]["cellsize"], relVolume) ) return relVolume @@ -3549,7 +3822,9 @@ def initializeRelVol(cfg, demVol, releaseFile, radius, releaseType="primary"): # check if release thickness provided as field or constant value if 
cfg["INPUT"][(typeTh + "File")] != "": # read relThField from file - relThFilePath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"][typeTh + "File"]) + relThFilePath = pathlib.Path( + cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"][typeTh + "File"] + ) relThFieldFull = IOf.readRaster(relThFilePath) relThField = relThFieldFull["rasterData"] @@ -3564,7 +3839,9 @@ def initializeRelVol(cfg, demVol, releaseFile, radius, releaseType="primary"): releaseLine = shpConv.readLine(releaseFile, "release1", demVol) # check if release features overlap between features thresholdPointInPoly = cfg["GENERAL"].getfloat("thresholdPointInPoly") - geoTrans.prepareArea(releaseLine, demVol, thresholdPointInPoly, combine=True, checkOverlap=True) + geoTrans.prepareArea( + releaseLine, demVol, thresholdPointInPoly, combine=True, checkOverlap=True + ) releaseLine["type"] = "Release" # set thickness values on releaseLine releaseLine = setThickness(cfg, releaseLine, typeTh) @@ -3656,7 +3933,9 @@ def adaptDEM(dem, fields, cfg): sfcChange = np.zeros_like(FTDet) ZDEMadapt = ZDEM - _, _, NzNormed = DFAtls.normalize(dem["Nx"].copy(), dem["Ny"].copy(), dem["Nz"].copy()) + _, _, NzNormed = DFAtls.normalize( + dem["Nx"].copy(), dem["Ny"].copy(), dem["Nz"].copy() + ) if cfg.getboolean("adaptSfcStopped"): # compute thickness to depth diff --git a/avaframe/com1DFA/com1DFATools.py b/avaframe/com1DFA/com1DFATools.py index a1b52b2b5..a1bdc0c8e 100644 --- a/avaframe/com1DFA/com1DFATools.py +++ b/avaframe/com1DFA/com1DFATools.py @@ -53,12 +53,17 @@ def getPartInitMethod(cfg, csz, relThForPart): # derive mass per particle to define number of particles per cell: if massPerParticleDeterminationMethod == "MPPDIR": massPerPart = cfg.getfloat("massPerPart") - log.debug("Number of particles defined by: mass per particle %s" % cfg["massPerPart"]) + log.debug( + "Number of particles defined by: mass per particle %s" % cfg["massPerPart"] + ) elif massPerParticleDeterminationMethod == "MPPDH": 
deltaTh = cfg.getfloat("deltaTh") ds = min(csz, cfg.getfloat("sphKernelRadius")) massPerPart = rho * ds * ds * deltaTh - log.debug("Number of particles defined by: release thickness per particle: %s" % cfg["deltaTh"]) + log.debug( + "Number of particles defined by: release thickness per particle: %s" + % cfg["deltaTh"] + ) log.debug("mass per particle is %.2f" % massPerPart) elif massPerParticleDeterminationMethod == "MPPKR": sphKernelRadius = cfg.getfloat("sphKernelRadius") @@ -140,14 +145,25 @@ def compareSimCfgToDefaultCfgCom1DFA(simCfg, module=com1DFA): # If entrainment is requested, and it is set in shapefile, check if it contains the default entrainment thickness # in ALL features of the shapefile - if simCfg["GENERAL"]["simTypeList"] == "ent" and simCfg["GENERAL"]["entThFromFile"] == "True": + if ( + simCfg["GENERAL"]["simTypeList"] == "ent" + and simCfg["GENERAL"]["entThFromFile"] == "True" + ): defaultEntTh = defCfg["GENERAL"]["entThIfMissingInShp"] # this try handles raster instead of shapefile try: - if not all([x == defaultEntTh for x in simCfg["INPUT"]["entThThickness"].split("|")]): + if not all( + [ + x == defaultEntTh + for x in simCfg["INPUT"]["entThThickness"].split("|") + ] + ): defaultIdentifierString = "C" - log.info("Non-default entrainment value(s) used: %s" % simCfg["INPUT"]["entThThickness"]) + log.info( + "Non-default entrainment value(s) used: %s" + % simCfg["INPUT"]["entThThickness"] + ) except KeyError: defaultIdentifierString = "D" @@ -161,16 +177,21 @@ def compareSimCfgToDefaultCfgCom1DFA(simCfg, module=com1DFA): # sphKernelSize is set during runtime, make sure it is not reported # as changed if default is set to meshCellSize - if modName == "com1DFA": + if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: if defCfg["GENERAL"]["sphKernelRadius"] == "meshCellSize": - if simCfg["GENERAL"]["sphKernelRadius"] == simCfg["GENERAL"]["meshCellSize"]: + if ( + simCfg["GENERAL"]["sphKernelRadius"] + == 
simCfg["GENERAL"]["meshCellSize"] + ): excludeItems.append("root['GENERAL']['sphKernelRadius']") # do the diff and analyse # this is the deepdiff > 8.0 version # TODO: remove this again in the future when deepdiff > 8.0 is wider established try: - diff = DeepDiff(defCfg, simCfg, exclude_paths=excludeItems, threshold_to_diff_deeper=0) + diff = DeepDiff( + defCfg, simCfg, exclude_paths=excludeItems, threshold_to_diff_deeper=0 + ) # for older deepdiff versions which don't know threshold_to_diff_deeper except ValueError: diff = DeepDiff(defCfg, simCfg, exclude_paths=excludeItems) @@ -278,7 +299,9 @@ def createSimDictFromCfgs(cfgMain, cfgPath, module=com1DFA): # fetch input data and create work and output directories # TODO: so for now remeshed dir is cleaned before a run - inputSimFilesAll, outDir, simDFExisting, simNameExisting = initializeInputs(avalancheDir, True, module) + inputSimFilesAll, outDir, simDFExisting, simNameExisting = initializeInputs( + avalancheDir, True, module + ) # save dem file path as it is deleted from input sim files dict once it is set in the config demFile = inputSimFilesAll["demFile"] @@ -287,7 +310,9 @@ def createSimDictFromCfgs(cfgMain, cfgPath, module=com1DFA): cfgDir = pathlib.Path(cfgPath) cfgFilesAll = list(cfgDir.glob("*.ini")) if len(cfgFilesAll) == 0: - message = "No configuration file found to create simulation runs in: %s" % str(cfgDir) + message = "No configuration file found to create simulation runs in: %s" % str( + cfgDir + ) log.error(message) raise FileNotFoundError(message) else: @@ -299,12 +324,16 @@ def createSimDictFromCfgs(cfgMain, cfgPath, module=com1DFA): # loop over all cfgFiles and create simDict for index, cfgFile in enumerate(cfgFilesAll): # read configuration - cfgFromFile = cfgUtils.getModuleConfig(module, fileOverride=cfgFile, toPrint=False) + cfgFromFile = cfgUtils.getModuleConfig( + module, fileOverride=cfgFile, toPrint=False + ) # create dictionary with one key for each simulation that shall be performed # 
NOTE: sims that are added don't need to be added to the simNameExisting list as # if new identical sims are added the simDict entry is just updated and not a duplicate one added - simDict = dP.createSimDict(avalancheDir, module, cfgFromFile, inputSimFilesAll, simNameExisting) + simDict = dP.createSimDict( + avalancheDir, module, cfgFromFile, inputSimFilesAll, simNameExisting + ) simDictAll.update(simDict) # reset dem file @@ -428,10 +457,18 @@ def updateResCoeffFields(fields, cfg): thMax = cfg.getfloat("forestThMax") # create new rasters using FV, FT and thresholds to mask - detRasterInt = np.where(((fields["FV"] <= vMin) | (fields["FT"] <= thMin)), detOrig, 0.0) - detRaster = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, detRasterInt) - cResRasterInt = np.where(((fields["FV"] > vMin) & (fields["FT"] > thMin)), cResOrig, 0.0) - cResRaster = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResRasterInt) + detRasterInt = np.where( + ((fields["FV"] <= vMin) | (fields["FT"] <= thMin)), detOrig, 0.0 + ) + detRaster = np.where( + ((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, detRasterInt + ) + cResRasterInt = np.where( + ((fields["FV"] > vMin) & (fields["FT"] > thMin)), cResOrig, 0.0 + ) + cResRaster = np.where( + ((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResRasterInt + ) if len(np.where((detRaster > 0) & (cResRaster > 0))[0]) > 0: message = "Detrainment and increased friction within same cell!" 
@@ -441,7 +478,9 @@ def updateResCoeffFields(fields, cfg): # if max thresholds are exceeded: forest destroyed remove forest lTh = len(np.where((fields["FV"] > vMax) | (fields["FT"] > thMax))[0]) if lTh > 0: - cResOrig = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResOrig) + cResOrig = np.where( + ((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResOrig + ) detOrig = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, detOrig) fields["cResRasterOrig"] = cResOrig fields["detRasterOrig"] = detOrig @@ -502,6 +541,8 @@ def updateTimeField(fields, timeStep): FT = fields["FT"] # set time step to previously not affected cells - fields["timeInfo"] = np.where(((fields["timeInfo"] == 0) & (FT != 0)), timeStep, fields["timeInfo"]) + fields["timeInfo"] = np.where( + ((fields["timeInfo"] == 0) & (FT != 0)), timeStep, fields["timeInfo"] + ) return fields diff --git a/avaframe/out1Peak/outPlotAllPeak.py b/avaframe/out1Peak/outPlotAllPeak.py index f3e392726..d800e0dbd 100644 --- a/avaframe/out1Peak/outPlotAllPeak.py +++ b/avaframe/out1Peak/outPlotAllPeak.py @@ -53,11 +53,16 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): inputDir = avaDir / "Outputs" / modName / "peakFiles" inDir = avaDir / "Inputs" peakFilesDF = fU.makeSimDF(inputDir, avaDir=avaDir) - if modName in ["com1DFA", "com9MoTVoellmy"] and demData == "": + if ( + modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche", "com9MoTVoellmy"] + and demData == "" + ): configurationDF = cfgUtils.createConfigurationInfo(avaDir, comModule=modName) configurationDF = configurationDF.rename(columns={"resType": "resTypeList"}) peakFilesDF = ( - peakFilesDF.reset_index().merge(configurationDF, on=["simName", "modelType"]).set_index("index") + peakFilesDF.reset_index() + .merge(configurationDF, on=["simName", "modelType"]) + .set_index("index") ) if demData == "": @@ -104,7 +109,12 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): # this enables to append simulations 
to an already existing output without regenerating all plots if not plotName.is_file(): # for comModules load DEM used for computation - if demData == "" and modName in ["com1DFA", "com9MoTVoellmy"]: + if demData == "" and modName in [ + "com1DFA", + "com5SnowSlide", + "com6RockAvalanche", + "com9MoTVoellmy", + ]: demFile = inDir / row["DEM"] demDataRaster = IOf.readRaster(demFile, noDataToNan=True) demDataField = demDataRaster["rasterData"] @@ -161,7 +171,9 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): elif fileSType in [".asc", ".tif"]: sarea = IOf.readRaster(sFile, noDataToNan=True) xGrid, yGrid, _, _ = gT.makeCoordGridFromHeader(sarea["header"]) - contourDictXY = pU.fetchContourCoords(xGrid, yGrid, sarea["rasterData"], 0.001) + contourDictXY = pU.fetchContourCoords( + xGrid, yGrid, sarea["rasterData"], 0.001 + ) for key in contourDictXY: ax.plot( contourDictXY[key]["x"], @@ -206,7 +218,9 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): return plotDict -def addConstrainedDataField(fileName, resType, demField, ax, cellSize, alpha=1.0, oneColor=""): +def addConstrainedDataField( + fileName, resType, demField, ax, cellSize, alpha=1.0, oneColor="" +): """find fileName data, constrain data and demField to where there is data, create colormap, define extent, add hillshade contours, add to axes and add colorbar @@ -391,7 +405,9 @@ def plotAllFields(avaDir, inputDir, outDir, unit="", constrainData=True): aspect=nx / ny, ) else: - extentCellCenters, extentCellCorners = pU.createExtentMinMax(data, header, originLLCenter=True) + extentCellCenters, extentCellCorners = pU.createExtentMinMax( + data, header, originLLCenter=True + ) im1 = ax.imshow( data, cmap=cmap, diff --git a/avaframe/runScripts/runPlotAreaRefDiffs.py b/avaframe/runScripts/runPlotAreaRefDiffs.py index 752dc5092..85f842fc2 100644 --- a/avaframe/runScripts/runPlotAreaRefDiffs.py +++ b/avaframe/runScripts/runPlotAreaRefDiffs.py @@ -1,6 +1,7 @@ """ - Run script for plotting a 
comparison of simulation result to reference polygon +Run script for plotting a comparison of simulation result to reference polygon """ + # Load modules # importing general python modules import pathlib @@ -21,13 +22,13 @@ ################USER Input############# resType = "ppr" thresholdValueSimulation = 0.9 -modName = 'com1DFA' +modName = "com1DFA" ############################################################ # Load avalanche directory from general configuration file cfgMain = cfgUtils.getGeneralConfig() avalancheDir = cfgMain["MAIN"]["avalancheDir"] -outDir = pathlib.Path(avalancheDir, 'Outputs', 'out1Peak') +outDir = pathlib.Path(avalancheDir, "Outputs", "out1Peak") fU.makeADir(outDir) # Start logging @@ -45,16 +46,18 @@ dem = gT.getNormalMesh(dem, num=1) # get real Area dem = DFAtls.getAreaMesh(dem, 1) -dem['originalHeader'] = dem['header'] +dem["originalHeader"] = dem["header"] # read reference data set -inDir = pathlib.Path(avalancheDir, 'Inputs') +inDir = pathlib.Path(avalancheDir, "Inputs") referenceFile, availableFile, _ = gI.getAndCheckInputFiles( inDir, "REFDATA", "POLY", fileExt="shp", fileSuffix="POLY" ) # convert polygon to raster with value 1 inside polygon and 0 outside the polygon referenceLine = shpConv.readLine(referenceFile, "reference", dem) -referenceLine= gT.prepareArea(referenceLine, dem, np.sqrt(2),combine=True, checkOverlap=False) +referenceLine = gT.prepareArea( + referenceLine, dem, np.sqrt(2), combine=True, checkOverlap=False +) # if available zoom into area provided by crop shp file in Inputs/CROPSHAPE cropFile, cropInfo, _ = gI.getAndCheckInputFiles( @@ -62,9 +65,11 @@ ) if cropInfo: cropLine = shpConv.readLine(cropFile, "cropFile", dem) - cropLine = gT.prepareArea(cropLine, dem, np.sqrt(2), combine=True, checkOverlap=False) + cropLine = gT.prepareArea( + cropLine, dem, np.sqrt(2), combine=True, checkOverlap=False + ) -if modName == 'com1DFA': +if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: # load dataFrame for all 
configurations of simulations in avalancheDir simDF = cfgUtils.createConfigurationInfo(avalancheDir) # create data frame that lists all available simulations and path to their result type result files @@ -80,35 +85,60 @@ # compute referenceMask and simulationMask and true positive, false positive and false neg. arrays # here thresholdValueReference is set to 0.9 as when converting the polygon to a raster, # values inside polygon are set to 1 and outside to 0 - refMask, compMask, indicatorDict = oPD.computeAreaDiff(referenceLine['rasterData'], - simData['rasterData'], - 0.9, - thresholdValueSimulation, - dem, - cropToArea=cropLine['rasterData']) + refMask, compMask, indicatorDict = oPD.computeAreaDiff( + referenceLine["rasterData"], + simData["rasterData"], + 0.9, + thresholdValueSimulation, + dem, + cropToArea=cropLine["rasterData"], + ) # plot differences - oPD.plotAreaDiff(referenceLine['rasterData'], refMask, simData['rasterData'], compMask, resType, simData['header'], - thresholdValueSimulation, outDir, - indicatorDict, row['simName'], cropFile=cropFile) + oPD.plotAreaDiff( + referenceLine["rasterData"], + refMask, + simData["rasterData"], + compMask, + resType, + simData["header"], + thresholdValueSimulation, + outDir, + indicatorDict, + row["simName"], + cropFile=cropFile, + ) else: # load all result files - resultDir = pathlib.Path(avalancheDir, 'Outputs', modName, 'peakFiles') - peakFilesList = list(resultDir.glob("*_%s.tif" % resType)) + list(resultDir.glob("*_%s.asc" % resType)) + resultDir = pathlib.Path(avalancheDir, "Outputs", modName, "peakFiles") + peakFilesList = list(resultDir.glob("*_%s.tif" % resType)) + list( + resultDir.glob("*_%s.asc" % resType) + ) for pF in peakFilesList: simData = IOf.readRaster(pF) simName = pF.stem # compute referenceMask and simulationMask and true positive, false positive and false neg. 
arrays - refMask, compMask, indicatorDict = oPD.computeAreaDiff(referenceLine['rasterData'], - simData['rasterData'], - 0.9, - thresholdValueSimulation, - dem, - cropToArea=cropLine['rasterData']) + refMask, compMask, indicatorDict = oPD.computeAreaDiff( + referenceLine["rasterData"], + simData["rasterData"], + 0.9, + thresholdValueSimulation, + dem, + cropToArea=cropLine["rasterData"], + ) # plot differences - oPD.plotAreaDiff(referenceLine['rasterData'], refMask, simData['rasterData'], compMask, resType, - simData['header'], - thresholdValueSimulation, outDir, - indicatorDict, simName, cropFile=cropFile) + oPD.plotAreaDiff( + referenceLine["rasterData"], + refMask, + simData["rasterData"], + compMask, + resType, + simData["header"], + thresholdValueSimulation, + outDir, + indicatorDict, + simName, + cropFile=cropFile, + ) diff --git a/avaframe/tests/test_com1DFA.py b/avaframe/tests/test_com1DFA.py index 3bfd76dab..0b32a7e56 100644 --- a/avaframe/tests/test_com1DFA.py +++ b/avaframe/tests/test_com1DFA.py @@ -29,7 +29,9 @@ def test_prepareInputData(tmp_path): """test preparing input data""" # setup requuired input data - inputSimFiles = {"entResInfo": {"flagEnt": "Yes", "flagRes": "No", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "Yes", "flagRes": "No", "flagSecondaryRelease": "No"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / ".." / "data" / "avaAlr" relFile = avaDir / "Inputs" / "REL" / "relAlr.shp" @@ -72,7 +74,9 @@ def test_prepareInputData(tmp_path): assert inputSimLines["entLine"]["initializedFrom"] == "shapefile" # call function to be tested - inputSimFiles = {"entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / ".." 
/ "data" / "avaParabola" relFile = avaDir / "Inputs" / "REL" / "release1PF.shp" @@ -102,7 +106,9 @@ def test_prepareInputData(tmp_path): assert inputSimLines["resLine"]["initializedFrom"] == "shapefile" # call function to be tested - inputSimFiles = {"entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / ".." / "data" / "avaParabola" relFile = avaDir / "Inputs" / "REL" / "release1PF.shp" @@ -130,7 +136,9 @@ def test_prepareInputData(tmp_path): assert inputSimLines["relThField"] == "" # call function to be tested - inputSimFiles = {"entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / ".." / "data" / "avaParabola" relFile = avaDir / "Inputs" / "REL" / "release1PF.shp" @@ -159,11 +167,16 @@ def test_prepareInputData(tmp_path): assert inputSimLines["releaseLine"]["initializedFrom"] == "raster" assert inputSimLines["releaseLine"]["Name"] == "from raster" assert inputSimLines["releaseLine"]["thickness"] == "from raster" - assert inputSimLines["releaseLine"]["file"] == dirName / "data" / "relThFieldTestFile.asc" + assert ( + inputSimLines["releaseLine"]["file"] + == dirName / "data" / "relThFieldTestFile.asc" + ) assert inputSimLines["releaseLine"]["type"] == "Release from raster" # call function to be tested - inputSimFiles = {"entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "No", "flagRes": "Yes", "flagSecondaryRelease": "No"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / ".." 
/ "data" / "avaParabola" relFile = avaDir / "Inputs" / "REL" / "release1PF.shp" @@ -205,7 +218,9 @@ def test_prepareInputData(tmp_path): # ) # setup required input data - inputSimFiles = {"entResInfo": {"flagEnt": "No", "flagRes": "No", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "No", "flagRes": "No", "flagSecondaryRelease": "No"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / "data" / "avaTestRelTh" relFile = avaDir / "Inputs" / "REL" / "rel1.shp" @@ -246,7 +261,9 @@ def test_prepareInputData(tmp_path): assert inputSimLines["releaseLine"]["initializedFrom"] == "raster" # setup requuired input data - inputSimFiles = {"entResInfo": {"flagEnt": "No", "flagRes": "No", "flagSecondaryRelease": "Yes"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "No", "flagRes": "No", "flagSecondaryRelease": "Yes"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / "data" / "avaTestRelTh" relFile = avaDir / "Inputs" / "REL" / "rel1.shp" @@ -255,7 +272,9 @@ def test_prepareInputData(tmp_path): inputSimFiles["secondaryRelScenario"] = secrelFile inputSimFiles["demFile"] = avaDir / "Inputs" / "testDEM.asc" inputSimFiles["relThFile"] = None - inputSimFiles["secondaryRelThFile"] = avaDir / "Inputs" / "SECREL" / "testSecRel2.asc" + inputSimFiles["secondaryRelThFile"] = ( + avaDir / "Inputs" / "SECREL" / "testSecRel2.asc" + ) inputSimFiles["muFile"] = None inputSimFiles["xiFile"] = None inputSimFiles["kFile"] = None @@ -283,18 +302,24 @@ def test_prepareInputData(tmp_path): assert demOri["header"]["nrows"] == 22 assert inputSimLines["releaseLine"]["thickness"] == ["1.5", "0.7"] assert np.array_equal(inputSimLines["releaseLine"]["Start"], np.asarray([0.0, 9.0])) - assert np.array_equal(inputSimLines["releaseLine"]["Length"], np.asarray([9.0, 5.0])) + assert np.array_equal( + inputSimLines["releaseLine"]["Length"], np.asarray([9.0, 5.0]) + ) assert inputSimLines["releaseLine"]["Name"] == ["releaseNew1", "releaseNew2"] 
assert inputSimLines["releaseLine"]["ci95"] == ["0.4", "0.1"] assert inputSimLines["secondaryReleaseLine"]["Name"] == "from raster" assert inputSimLines["secondaryReleaseLine"]["thickness"] == "from raster" assert inputSimLines["secondaryReleaseLine"]["initializedFrom"] == "raster" - assert inputSimLines["secondaryReleaseLine"]["type"] == "Secondary release from raster" + assert ( + inputSimLines["secondaryReleaseLine"]["type"] == "Secondary release from raster" + ) assert inputSimLines["releaseLine"]["type"] == "Release" assert inputSimLines["releaseLine"]["initializedFrom"] == "shapefile" # setup requuired input data - inputSimFiles = {"entResInfo": {"flagEnt": "No", "flagRes": "No", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "No", "flagRes": "No", "flagSecondaryRelease": "No"} + } dirName = pathlib.Path(__file__).parents[0] avaDir = dirName / "data" / "avaTestRelTh" relFile = avaDir / "Inputs" / "REL" / "testRel2.asc" @@ -343,7 +368,10 @@ def test_prepareInputData(tmp_path): with pytest.raises(AssertionError) as e: assert com1DFA.prepareInputData(inputSimFiles, cfg) - assert "One or more release features in relAlr2.shp have holes - check error plots in" in str(e.value) + assert ( + "One or more release features in relAlr2.shp have holes - check error plots in" + in str(e.value) + ) def test_prepareReleaseEntrainment(tmp_path): @@ -386,7 +414,9 @@ def test_prepareReleaseEntrainment(tmp_path): rel = pathlib.Path(tmp_path, "release1PF_test.shp") # call function to be tested - relName, inputSimLines, badName = com1DFA.prepareReleaseEntrainment(cfg, rel, inputSimLines) + relName, inputSimLines, badName = com1DFA.prepareReleaseEntrainment( + cfg, rel, inputSimLines + ) assert relName == "release1PF_test" assert inputSimLines["entResInfo"]["flagSecondaryRelease"] == "Yes" @@ -429,7 +459,9 @@ def test_prepareReleaseEntrainment(tmp_path): rel = pathlib.Path(tmp_path, "release1PF_test.shp") # call function to be tested - relName2, 
inputSimLines2, badName2 = com1DFA.prepareReleaseEntrainment(cfg, rel, inputSimLines) + relName2, inputSimLines2, badName2 = com1DFA.prepareReleaseEntrainment( + cfg, rel, inputSimLines + ) assert relName2 == "release1PF_test" assert inputSimLines2["entResInfo"]["flagSecondaryRelease"] == "Yes" @@ -473,7 +505,9 @@ def test_prepareReleaseEntrainment(tmp_path): rel = pathlib.Path(tmp_path, "release1PF_test.shp") # call function to be tested - relName2, inputSimLines2, badName2 = com1DFA.prepareReleaseEntrainment(cfg, rel, inputSimLines) + relName2, inputSimLines2, badName2 = com1DFA.prepareReleaseEntrainment( + cfg, rel, inputSimLines + ) # print( # "Test", @@ -492,7 +526,9 @@ def test_prepareReleaseEntrainment(tmp_path): # call function to be tested cfg["GENERAL"]["secRelArea"] = "False" - relName3, inputSimLines3, badName3 = com1DFA.prepareReleaseEntrainment(cfg, rel, inputSimLines) + relName3, inputSimLines3, badName3 = com1DFA.prepareReleaseEntrainment( + cfg, rel, inputSimLines + ) assert relName3 == "release1PF_test" assert inputSimLines3["entResInfo"]["flagSecondaryRelease"] == "No" @@ -516,7 +552,9 @@ def test_prepareReleaseEntrainment(tmp_path): cfg["GENERAL"]["relTh"] = "1.32" # call function to test - relName4, inputSimLines4, badName4 = com1DFA.prepareReleaseEntrainment(cfg, rel, inputSimLines) + relName4, inputSimLines4, badName4 = com1DFA.prepareReleaseEntrainment( + cfg, rel, inputSimLines + ) assert relName4 == "release1PF_test" assert inputSimLines4["entResInfo"]["flagSecondaryRelease"] == "No" @@ -559,7 +597,9 @@ def test_prepareReleaseEntrainment(tmp_path): "id": ["0", "1"], "initializedFrom": "shapefile", } - relName5, inputSimLines5, badName5 = com1DFA.prepareReleaseEntrainment(cfg, rel, inputSimLines) + relName5, inputSimLines5, badName5 = com1DFA.prepareReleaseEntrainment( + cfg, rel, inputSimLines + ) assert relName5 == "release1PF_test" assert inputSimLines5["entResInfo"]["flagSecondaryRelease"] == "No" @@ -600,7 +640,9 @@ def 
test_setThickness(): assert lineTh["thickness"] == [1.0, 1.0] assert lineTh["thicknessSource"] == ["ini file", "ini file"] - assert np.array_equal(lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0])) + assert np.array_equal( + lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0]) + ) # call function to be tested lineTh = { @@ -617,7 +659,9 @@ def test_setThickness(): assert lineTh["thickness"] == [1.0, 1.0] assert lineTh["thicknessSource"] == ["ini file", "ini file"] - assert np.array_equal(lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0])) + assert np.array_equal( + lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0]) + ) # call function to be tested cfg["GENERAL"]["entThFromFile"] = "True" @@ -641,7 +685,9 @@ def test_setThickness(): assert lineTh["thickness"] == [1.0, 0.7] assert lineTh["thicknessSource"] == ["shp file", "shp file"] - assert np.array_equal(lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0])) + assert np.array_equal( + lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0]) + ) # call function to be tested cfg["GENERAL"]["entThFromFile"] = "True" @@ -661,7 +707,9 @@ def test_setThickness(): assert lineTh["thickness"] == [1.2, 0.7] assert lineTh["thicknessSource"] == ["shp file", "shp file"] - assert np.array_equal(lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0])) + assert np.array_equal( + lineTh["x"], np.asarray([0, 10.0, 10.0, 0.0, 0.0, 20.0, 26.0, 26.0, 20.0, 20.0]) + ) def test_createReportDict(): @@ -698,7 +746,9 @@ def test_createReportDict(): } # call function to be tested - reportST = com1DFA.createReportDict(avaDir, logName, relName, inputSimLines, cfg, reportAreaInfo) + reportST = com1DFA.createReportDict( + avaDir, logName, relName, inputSimLines, cfg, reportAreaInfo + ) assert "Simulation Parameters" in reportST assert "Program version" 
in reportST["Simulation Parameters"] @@ -1071,12 +1121,28 @@ def test_initializeMesh(): assert np.all(np.isnan(dem["rasterData"][0:5, 4])) assert abs(dem["Nx"][2, 2]) == abs(dem["Nz"][2, 2]) assert np.isclose(dem["areaRaster"][2, 2], demTest["areaRaster"][2, 2]) - assert dem["headerNeighbourGrid"]["xllcenter"] == demTest["headerNeighbourGrid"]["xllcenter"] - assert dem["headerNeighbourGrid"]["yllcenter"] == demTest["headerNeighbourGrid"]["yllcenter"] - assert dem["headerNeighbourGrid"]["ncols"] == demTest["headerNeighbourGrid"]["ncols"] - assert dem["headerNeighbourGrid"]["nrows"] == demTest["headerNeighbourGrid"]["nrows"] - assert dem["headerNeighbourGrid"]["cellsize"] == demTest["headerNeighbourGrid"]["cellsize"] - assert dem["headerNeighbourGrid"]["yllcenter"] == demTest["headerNeighbourGrid"]["yllcenter"] + assert ( + dem["headerNeighbourGrid"]["xllcenter"] + == demTest["headerNeighbourGrid"]["xllcenter"] + ) + assert ( + dem["headerNeighbourGrid"]["yllcenter"] + == demTest["headerNeighbourGrid"]["yllcenter"] + ) + assert ( + dem["headerNeighbourGrid"]["ncols"] == demTest["headerNeighbourGrid"]["ncols"] + ) + assert ( + dem["headerNeighbourGrid"]["nrows"] == demTest["headerNeighbourGrid"]["nrows"] + ) + assert ( + dem["headerNeighbourGrid"]["cellsize"] + == demTest["headerNeighbourGrid"]["cellsize"] + ) + assert ( + dem["headerNeighbourGrid"]["yllcenter"] + == demTest["headerNeighbourGrid"]["yllcenter"] + ) def test_getSimTypeList(): @@ -1086,10 +1152,14 @@ def test_getSimTypeList(): standardCfg = configparser.ConfigParser() standardCfg["GENERAL"] = {"secRelArea": "False"} simTypeList = ["ent", "res", "null", "available", "entres"] - inputSimFiles = {"entResInfo": {"flagEnt": "Yes", "flagRes": "Yes", "flagSecondaryRelease": "No"}} + inputSimFiles = { + "entResInfo": {"flagEnt": "Yes", "flagRes": "Yes", "flagSecondaryRelease": "No"} + } # call function to be tested - standardCfg, simTypeList = com1DFA.getSimTypeList(standardCfg, simTypeList, inputSimFiles) + 
standardCfg, simTypeList = com1DFA.getSimTypeList( + standardCfg, simTypeList, inputSimFiles + ) # setup test result simTypeListTest = ["ent", "null", "res", "entres"] @@ -1100,7 +1170,9 @@ def test_getSimTypeList(): # call function to be tested simTypeList = ["ent", "null", "available"] inputSimFiles["entResInfo"]["flagRes"] = "No" - standardCfg2, simTypeList2 = com1DFA.getSimTypeList(standardCfg, simTypeList, inputSimFiles) + standardCfg2, simTypeList2 = com1DFA.getSimTypeList( + standardCfg, simTypeList, inputSimFiles + ) # setup test result simTypeListTest2 = ["ent", "null"] @@ -1114,7 +1186,9 @@ def test_getSimTypeList(): simTypeList = ["res", "null", "available"] inputSimFiles["entResInfo"]["flagEnt"] = "No" inputSimFiles["entResInfo"]["flagRes"] = "Yes" - standardCfg3, simTypeList3 = com1DFA.getSimTypeList(standardCfg, simTypeList, inputSimFiles) + standardCfg3, simTypeList3 = com1DFA.getSimTypeList( + standardCfg, simTypeList, inputSimFiles + ) # setup test result simTypeListTest3 = ["res", "null"] @@ -1336,11 +1410,19 @@ def test_releaseSecRelArea(): # print("particles IN pytest socond", particles2) assert particles["nPart"] == 6 - assert np.array_equal(particles["x"], np.asarray([6.0, 7.0, 6.75, 7.25, 6.75, 7.25])) - assert np.array_equal(particles["totalEnthalpy"], np.asarray([6.0, 7.0, pEnt, pEnt, pEnt, pEnt])) - assert np.array_equal(particles["y"], np.asarray([6.0, 7.0, 6.75, 6.75, 7.25, 7.25])) + assert np.array_equal( + particles["x"], np.asarray([6.0, 7.0, 6.75, 7.25, 6.75, 7.25]) + ) + assert np.array_equal( + particles["totalEnthalpy"], np.asarray([6.0, 7.0, pEnt, pEnt, pEnt, pEnt]) + ) + assert np.array_equal( + particles["y"], np.asarray([6.0, 7.0, 6.75, 6.75, 7.25, 7.25]) + ) assert np.array_equal(zPartArray0New, np.asarray([2, 3, 1.0, 1.0, 1.0, 1.0])) - assert np.array_equal(particles["m"], np.asarray([1250.0, 1250.0, 50.0, 50.0, 50.0, 50.0])) + assert np.array_equal( + particles["m"], np.asarray([1250.0, 1250.0, 50.0, 50.0, 50.0, 50.0]) + ) 
assert particles["mTot"] == 2700.0 assert particles2["nPart"] == 11 assert np.array_equal( @@ -1351,10 +1433,14 @@ def test_releaseSecRelArea(): particles2["y"], np.asarray([6.0, 7.0, 9.1, 6.75, 6.75, 7.25, 7.25, 8.75, 8.75, 9.25, 9.25]), ) - assert np.array_equal(zPartArray0New2, np.asarray([1, 2, 3, 1, 1, 1, 1, 1, 1, 1, 1])) + assert np.array_equal( + zPartArray0New2, np.asarray([1, 2, 3, 1, 1, 1, 1, 1, 1, 1, 1]) + ) assert np.array_equal( particles2["m"], - np.asarray([1250.0, 1250.0, 1250.0, 50.0, 50.0, 50.0, 50.0, 25.0, 25.0, 25.0, 25.0]), + np.asarray( + [1250.0, 1250.0, 1250.0, 50.0, 50.0, 50.0, 50.0, 25.0, 25.0, 25.0, 25.0] + ), ) assert particles2["mTot"] == 4050.0 @@ -1365,18 +1451,28 @@ def test_getRelThFromPart(): # setup required input cfg = configparser.ConfigParser() cfg["GENERAL"] = {"relThFromFile": "True", "relTh": ""} - inputSimLines = {"releaseLine": {"thickness": ["1.2", "1.5"], "id": ["0", "1"], "type": "Release"}} + inputSimLines = { + "releaseLine": { + "thickness": ["1.2", "1.5"], + "id": ["0", "1"], + "type": "Release", + } + } relThField = "" # call function to be tested - relThFromPart = com1DFA.getRelThFromPart(cfg["GENERAL"], inputSimLines["releaseLine"], relThField, "rel") + relThFromPart = com1DFA.getRelThFromPart( + cfg["GENERAL"], inputSimLines["releaseLine"], relThField, "rel" + ) assert relThFromPart == 1.5 cfg["GENERAL"]["relThFromFile"] = "False" cfg["GENERAL"]["relTh"] = "2.0" # call function to be tested - relThFromPart = com1DFA.getRelThFromPart(cfg["GENERAL"], inputSimLines["releaseLine"], relThField, "rel") + relThFromPart = com1DFA.getRelThFromPart( + cfg["GENERAL"], inputSimLines["releaseLine"], relThField, "rel" + ) assert relThFromPart == 2.0 @@ -1385,7 +1481,9 @@ def test_getRelThFromPart(): relThField = np.zeros((10, 10)) relThField[0:10, 1] = 10.0 # call function to be tested - relThFromPart = com1DFA.getRelThFromPart(cfg["GENERAL"], inputSimLines["releaseLine"], relThField, "rel") + relThFromPart = 
com1DFA.getRelThFromPart( + cfg["GENERAL"], inputSimLines["releaseLine"], relThField, "rel" + ) assert relThFromPart == 10.0 @@ -1812,7 +1910,9 @@ def test_exportFields(tmp_path): fieldsList = [fields1, fields2, fields3, fields4, fields5] # call function to be tested - com1DFA.exportFields(cfg, 10.00, fields2, dem, outDir, logName, TSave="intermediate") + com1DFA.exportFields( + cfg, 10.00, fields2, dem, outDir, logName, TSave="intermediate" + ) com1DFA.exportFields(cfg, 40.00, fields5, dem, outDir, logName, TSave="final") # read fields @@ -1848,9 +1948,15 @@ def test_exportFields(tmp_path): cfg["REPORT"] = {} com1DFA.exportFields(cfg, 0.00, fields1, dem, outDir2, logName, TSave="initial") - com1DFA.exportFields(cfg, 10.00, fields2, dem, outDir2, logName, TSave="intermediate") - com1DFA.exportFields(cfg, 15.00, fields3, dem, outDir2, logName, TSave="intermediate") - com1DFA.exportFields(cfg, 25.00, fields4, dem, outDir2, logName, TSave="intermediate") + com1DFA.exportFields( + cfg, 10.00, fields2, dem, outDir2, logName, TSave="intermediate" + ) + com1DFA.exportFields( + cfg, 15.00, fields3, dem, outDir2, logName, TSave="intermediate" + ) + com1DFA.exportFields( + cfg, 25.00, fields4, dem, outDir2, logName, TSave="intermediate" + ) com1DFA.exportFields(cfg, 40.00, fields5, dem, outDir2, logName, TSave="final") # read fields @@ -2519,7 +2625,9 @@ def test_runCom1DFA(tmp_path, caplog): "reportOneFile": "True", "debugPlot": "False", } - modCfg, modInfo = cfgUtils.getModuleConfig(com1DFA, fileOverride=cfgFile, modInfo=True) + modCfg, modInfo = cfgUtils.getModuleConfig( + com1DFA, fileOverride=cfgFile, modInfo=True + ) dem, plotDict, reportDictList, simDF = com1DFA.com1DFAMain(cfgMain, cfgInfo=cfgFile) @@ -2609,16 +2717,26 @@ def test_runCom1DFA(tmp_path, caplog): # print(simDF["simName"]) outDir = avaDir / "Outputs" / "com1DFA" for ext in ["ppr", "pft", "pfv"]: - assert (outDir / "peakFiles" / ("%s_%s.asc" % (simDF["simName"].iloc[0], ext))).is_file() - assert (outDir / 
"peakFiles" / ("%s_%s.asc" % (simDF["simName"].iloc[1], ext))).is_file() - - assert (outDir / "configurationFiles" / ("%s.ini" % (simDF["simName"].iloc[0]))).is_file() - assert (outDir / "configurationFiles" / ("%s.ini" % (simDF["simName"].iloc[1]))).is_file() + assert ( + outDir / "peakFiles" / ("%s_%s.asc" % (simDF["simName"].iloc[0], ext)) + ).is_file() + assert ( + outDir / "peakFiles" / ("%s_%s.asc" % (simDF["simName"].iloc[1], ext)) + ).is_file() + + assert ( + outDir / "configurationFiles" / ("%s.ini" % (simDF["simName"].iloc[0])) + ).is_file() + assert ( + outDir / "configurationFiles" / ("%s.ini" % (simDF["simName"].iloc[1])) + ).is_file() assert (outDir / "configurationFiles" / ("allConfigurations.csv")).is_file() initProj.cleanModuleFiles(avaDir, com1DFA, deleteOutput=False) with caplog.at_level(logging.WARNING): - dem, plotDict, reportDictList, simDF = com1DFA.com1DFAMain(cfgMain, cfgInfo=cfgFile) + dem, plotDict, reportDictList, simDF = com1DFA.com1DFAMain( + cfgMain, cfgInfo=cfgFile + ) assert "There is no simulation to be performed" in caplog.text @@ -2635,7 +2753,9 @@ def test_runOrLoadCom1DFA(tmp_path, caplog): testDir = pathlib.Path(__file__).parents[0] avalancheDir = testDir / ".." / ".." 
/ "benchmarks" / "avaHockeyChannelPytest" cfgMain = configparser.ConfigParser() - dem, simDF, resTypeList = com1DFA.runOrLoadCom1DFA(avalancheDir, cfgMain, runDFAModule=False, cfgFile="") + dem, simDF, resTypeList = com1DFA.runOrLoadCom1DFA( + avalancheDir, cfgMain, runDFAModule=False, cfgFile="" + ) # print(simDF.index) # print(simDF.columns) assert "pft" in resTypeList @@ -2674,7 +2794,9 @@ def test_fetchRelVolume(tmp_path): dem["rasterData"] = np.ones((10, 20)) demPath = pathlib.Path(avaDir, "Inputs", "testDem.asc") fU.makeADir(pathlib.Path(avaDir, "Inputs")) - IOf.writeResultToRaster(dem["header"], dem["rasterData"], demPath.parent / demPath.stem, flip=False) + IOf.writeResultToRaster( + dem["header"], dem["rasterData"], demPath.parent / demPath.stem, flip=False + ) # subprocess.run(["cat", demPath]) # write relThField @@ -2807,7 +2929,9 @@ def test_adaptDEM(): dem = geoTrans.getNormalMesh(dem, num=cfg["GENERAL"].getfloat("methodMeshNormal")) dem = DFAtls.getAreaMesh(dem, cfg["GENERAL"].getfloat("methodMeshNormal")) - _, _, NzNormed = DFAtls.normalize(dem["Nx"].copy(), dem["Ny"].copy(), dem["Nz"].copy()) + _, _, NzNormed = DFAtls.normalize( + dem["Nx"].copy(), dem["Ny"].copy(), dem["Nz"].copy() + ) demInput = dem.copy() fieldsInput = fields.copy() @@ -2942,17 +3066,21 @@ def test_tSteps_output_behavior(tmp_path, caplog): # Get main configuration cfgMain = cfgUtils.getGeneralConfig() - cfgMain['MAIN']['avalancheDir'] = str(avaDir1) + cfgMain["MAIN"]["avalancheDir"] = str(avaDir1) # Modify config to have empty tSteps and NO parameter variations cfg = cfgUtils.getModuleConfig(com1DFA, cfgFile1) cfg["GENERAL"]["tSteps"] = "" cfg["GENERAL"]["tEnd"] = "10" # Short simulation cfg["GENERAL"]["dt"] = "0.1" # Single value, no variations - cfg["GENERAL"]["simTypeList"] = "null" # Simple simulation, no entrainment/resistance + cfg["GENERAL"]["simTypeList"] = ( + "null" # Simple simulation, no entrainment/resistance + ) with open(cfgFile1, "w") as f: cfg.write(f) - dem, 
plotDict, reportDictList, simDF = com1DFA.com1DFAMain(cfgMain, cfgInfo=cfgFile1) + dem, plotDict, reportDictList, simDF = com1DFA.com1DFAMain( + cfgMain, cfgInfo=cfgFile1 + ) # Check that only final timestep files exist in timeSteps directory timeStepsDir1 = avaDir1 / "Outputs" / "com1DFA" / "peakFiles" / "timeSteps" @@ -2961,38 +3089,46 @@ def test_tSteps_output_behavior(tmp_path, caplog): # Should only have final timestep files (one per result type: ppr, pft, pfv) # Not initial timestep at t=0 for tFile in tStepFiles1: - assert "_t0.0" not in tFile.stem, f"Found initial timestep file {tFile} but tSteps was empty" + assert "_t0.0" not in tFile.stem, ( + f"Found initial timestep file {tFile} but tSteps was empty" + ) # Test 2: Explicit tSteps with t=0 should export t=0 timestep avaDir2 = pathlib.Path(tmp_path, "testExplicitTSteps") shutil.copytree(inputDir, avaDir2) cfgFile2 = avaDir2 / "test_com1DFACfg.ini" - cfgMain['MAIN']['avalancheDir'] = str(avaDir2) + cfgMain["MAIN"]["avalancheDir"] = str(avaDir2) # Modify config to have explicit tSteps including t=0 and NO parameter variations cfg2 = cfgUtils.getModuleConfig(com1DFA, cfgFile2) cfg2["GENERAL"]["tSteps"] = "0|5" cfg2["GENERAL"]["tEnd"] = "10" # Short simulation cfg2["GENERAL"]["dt"] = "0.1" # Single value, no variations - cfg2["GENERAL"]["simTypeList"] = "null" # Simple simulation, no entrainment/resistance + cfg2["GENERAL"]["simTypeList"] = ( + "null" # Simple simulation, no entrainment/resistance + ) with open(cfgFile2, "w") as f: cfg2.write(f) - dem2, plotDict2, reportDictList2, simDF2 = com1DFA.com1DFAMain(cfgMain, cfgInfo=cfgFile2) + dem2, plotDict2, reportDictList2, simDF2 = com1DFA.com1DFAMain( + cfgMain, cfgInfo=cfgFile2 + ) # Check that t=0 timestep files exist timeStepsDir2 = avaDir2 / "Outputs" / "com1DFA" / "peakFiles" / "timeSteps" assert timeStepsDir2.exists(), "timeSteps directory should exist" tStepFiles2 = list(timeStepsDir2.glob("*_t0.0*.asc")) - assert len(tStepFiles2) > 0, "Should have 
initial timestep files at t=0 when tSteps includes 0" + assert len(tStepFiles2) > 0, ( + "Should have initial timestep files at t=0 when tSteps includes 0" + ) # Test 3: exportData = False should trigger contour fetching in else block avaDir3 = pathlib.Path(tmp_path, "testExportDataFalse") shutil.copytree(inputDir, avaDir3) cfgFile3 = avaDir3 / "test_com1DFACfg.ini" - cfgMain['MAIN']['avalancheDir'] = str(avaDir3) + cfgMain["MAIN"]["avalancheDir"] = str(avaDir3) # Modify config to have exportData = False cfg3 = cfgUtils.getModuleConfig(com1DFA, cfgFile3) @@ -3004,13 +3140,164 @@ def test_tSteps_output_behavior(tmp_path, caplog): with open(cfgFile3, "w") as f: cfg3.write(f) - dem3, plotDict3, reportDictList3, simDF3 = com1DFA.com1DFAMain(cfgMain, cfgInfo=cfgFile3) + dem3, plotDict3, reportDictList3, simDF3 = com1DFA.com1DFAMain( + cfgMain, cfgInfo=cfgFile3 + ) # Check that contour data was generated (stored in reportDict) instead of exported files - assert len(reportDictList3) > 0, "Should have report dict even with exportData=False" + assert len(reportDictList3) > 0, ( + "Should have report dict even with exportData=False" + ) # Verify that timeSteps directory doesn't exist (no data exported) timeStepsDir3 = avaDir3 / "Outputs" / "com1DFA" / "peakFiles" / "timeSteps" if timeStepsDir3.exists(): tStepFiles3 = list(timeStepsDir3.glob("*.asc")) # With exportData=False, intermediate timesteps should not be exported - assert len(tStepFiles3) == 0, "No timestep files should be exported when exportData=False" + assert len(tStepFiles3) == 0, ( + "No timestep files should be exported when exportData=False" + ) + + +def test_getModuleNames(): + """Test getModuleNames function for extracting module names from call stack""" + import inspect + from unittest.mock import patch, MagicMock + + # Test 1: Direct call from com1DFA module + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock( + frame=MagicMock(f_globals={"__name__": 
"avaframe.com1DFA.com1DFA"}) + ), + MagicMock(frame=MagicMock(f_globals={"__name__": "avaframe.com1DFA"})), + ] + result = com1DFA.getModuleNames(com1DFA) + assert result == ("com1DFA", "com1"), ( + f"Expected ('com1DFA', 'com1'), got {result}" + ) + + # Test 2: Call from wrapper module com5SnowSlide + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock( + frame=MagicMock( + f_globals={"__name__": "avaframe.com5SnowSlide.com5SnowSlide"} + ) + ), + MagicMock( + frame=MagicMock(f_globals={"__name__": "avaframe.com1DFA.com1DFA"}) + ), + ] + result = com1DFA.getModuleNames(com1DFA) + assert result == ("com5SnowSlide", "com5"), ( + f"Expected ('com5SnowSlide', 'com5'), got {result}" + ) + + # Test 3: Call from wrapper module com6RockAvalanche + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock( + frame=MagicMock( + f_globals={ + "__name__": "avaframe.com6RockAvalanche.com6RockAvalanche" + } + ) + ), + MagicMock( + frame=MagicMock(f_globals={"__name__": "avaframe.com1DFA.com1DFA"}) + ), + ] + result = com1DFA.getModuleNames(com1DFA) + assert result == ("com6RockAvalanche", "com6"), ( + f"Expected ('com6RockAvalanche', 'com6'), got {result}" + ) + + # Test 4: Call from wrapper module com8MoTPSA + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock( + frame=MagicMock( + f_globals={"__name__": "avaframe.com8MoTPSA.com8MoTPSA"} + ) + ), + MagicMock( + frame=MagicMock(f_globals={"__name__": "avaframe.com1DFA.com1DFA"}) + ), + ] + result = com1DFA.getModuleNames(com1DFA) + assert result == ("com8MoTPSA", "com8"), ( + f"Expected ('com8MoTPSA', 'com8'), got {result}" + ) + + # Test 5: Call from wrapper module com9MoTVoellmy + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock( + frame=MagicMock( + f_globals={"__name__": "avaframe.com9MoTVoellmy.com9MoTVoellmy"} + ) + ), + MagicMock( + frame=MagicMock(f_globals={"__name__": 
"avaframe.com1DFA.com1DFA"}) + ), + ] + result = com1DFA.getModuleNames(com1DFA) + assert result == ("com9MoTVoellmy", "com9"), ( + f"Expected ('com9MoTVoellmy', 'com9'), got {result}" + ) + + # Test 6: Non-com module (fallback to passed module) + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock(frame=MagicMock(f_globals={"__name__": "some.other.module"})), + MagicMock(frame=MagicMock(f_globals={"__name__": "another.module"})), + ] + # Create a mock module object + mock_module = MagicMock() + mock_module.__name__ = "avaframe.someModule" + result = com1DFA.getModuleNames(mock_module) + assert result == ("someModule", "someModule"), ( + f"Expected ('someModule', 'someModule'), got {result}" + ) + + # Test 7: Module without "com" prefix in name (fallback) + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock( + frame=MagicMock( + f_globals={"__name__": "avaframe.otherModule.otherModule"} + ) + ), + ] + # Create a mock module object + mock_module = MagicMock() + mock_module.__name__ = "avaframe.otherModule" + result = com1DFA.getModuleNames(mock_module) + assert result == ("otherModule", "otherModule"), ( + f"Expected ('otherModule', 'otherModule'), got {result}" + ) + + # Test 8: Deep call stack with multiple com modules (should pick first non-com1DFA.com1DFA) + with patch("inspect.stack") as mock_stack: + mock_stack.return_value = [ + MagicMock( + frame=MagicMock(f_globals={"__name__": "avaframe.com1DFA.com1DFA"}) + ), # Should be skipped + MagicMock( + frame=MagicMock( + f_globals={"__name__": "avaframe.com5SnowSlide.com5SnowSlide"} + ) + ), # Should be picked + MagicMock( + frame=MagicMock( + f_globals={ + "__name__": "avaframe.com6RockAvalanche.com6RockAvalanche" + } + ) + ), # Should be ignored + MagicMock(frame=MagicMock(f_globals={"__name__": "avaframe.com1DFA"})), + ] + result = com1DFA.getModuleNames(com1DFA) + assert result == ("com5SnowSlide", "com5"), ( + f"Expected ('com5SnowSlide', 
'com5'), got {result}" + ) diff --git a/avaframe/tests/test_scarp.py b/avaframe/tests/test_scarp.py index fbc40c61c..b46c84a9e 100644 --- a/avaframe/tests/test_scarp.py +++ b/avaframe/tests/test_scarp.py @@ -90,7 +90,9 @@ def temp_output_dir(tmp_path): def test_readPerimeterSHP(scarp_test_data): """Test perimeter shapefile reading and rasterization""" # Get paths to test data - perimeterShp = scarp_test_data / "Inputs" / "POLYGONS" / "scarpFluchthorn_perimeter.shp" + perimeterShp = ( + scarp_test_data / "Inputs" / "POLYGONS" / "scarpFluchthorn_perimeter.shp" + ) demPath = scarp_test_data / "Inputs" / "fluchthorn.tif" # Read DEM to get transform and shape @@ -104,7 +106,9 @@ def test_readPerimeterSHP(scarp_test_data): # Assertions assert periData.shape == elevShape, "Perimeter shape should match DEM shape" assert periData.dtype == np.uint8, "Perimeter should be uint8 type" - assert np.all((periData == 0) | (periData == 1)), "Perimeter should only contain 0 and 1" + assert np.all((periData == 0) | (periData == 1)), ( + "Perimeter should only contain 0 and 1" + ) assert np.sum(periData) > 0, "Perimeter should contain some pixels marked as 1" assert np.sum(periData) < periData.size, "Perimeter should not mark all pixels" @@ -121,9 +125,13 @@ def test_plane_parameter_extraction(scarp_test_data): planesSlope = list(map(float, SHPdata["slopeangle"])) # Assertions - assert len(planesZseed) == SHPdata["nFeatures"], "Should have zseed for each feature" + assert len(planesZseed) == SHPdata["nFeatures"], ( + "Should have zseed for each feature" + ) assert len(planesDip) == SHPdata["nFeatures"], "Should have dip for each feature" - assert len(planesSlope) == SHPdata["nFeatures"], "Should have slope for each feature" + assert len(planesSlope) == SHPdata["nFeatures"], ( + "Should have slope for each feature" + ) assert SHPdata["nFeatures"] == 2, "Test data should have 2 features" # Build feature string @@ -139,7 +147,9 @@ def test_plane_parameter_extraction(scarp_test_data): 
features = ",".join(map(str, planeFeatures)) # Should have 5 parameters per feature - assert len(planeFeatures) == SHPdata["nFeatures"] * 5, "Should have 5 params per plane" + assert len(planeFeatures) == SHPdata["nFeatures"] * 5, ( + "Should have 5 params per plane" + ) assert len(features) > 0, "Feature string should not be empty" assert features.count(",") == len(planeFeatures) - 1, "Comma count should match" @@ -163,7 +173,9 @@ def test_plane_geometry_calculations(): west, north = 150.0, 250.0 # Point coordinates # Plane equation: z = zSeed + (north - ySeed) * betaY - (west - xSeed) * betaX - scarpVal = zSeed + (north - ySeed) * expected_betaY - (west - xSeed) * expected_betaX + scarpVal = ( + zSeed + (north - ySeed) * expected_betaY - (west - xSeed) * expected_betaX + ) # Manual calculation expected_scarpVal = 1000.0 + (50.0 * expected_betaY) - (50.0 * expected_betaX) @@ -171,7 +183,9 @@ def test_plane_geometry_calculations(): assert abs(scarpVal - expected_scarpVal) < 0.001, "Plane equation should be correct" -def test_calculateScarpWithPlanes_single_plane(mock_dem, mock_perimeter, mock_transform): +def test_calculateScarpWithPlanes_single_plane( + mock_dem, mock_perimeter, mock_transform +): """Test plane-based scarp calculation with single plane""" # Create a simple plane definition # Place seed point at center of grid with known parameters @@ -182,7 +196,9 @@ def test_calculateScarpWithPlanes_single_plane(mock_dem, mock_perimeter, mock_tr planes = f"{xSeed},{ySeed},{zSeed},{dip},{slope}" # Call function under test - scarpData = scarp.calculateScarpWithPlanes(mock_dem, mock_perimeter, mock_transform, planes) + scarpData = scarp.calculateScarpWithPlanes( + mock_dem, mock_perimeter, mock_transform, planes + ) # Assertions assert scarpData.shape == mock_dem.shape, "Scarp should have same shape as DEM" @@ -190,18 +206,20 @@ def test_calculateScarpWithPlanes_single_plane(mock_dem, mock_perimeter, mock_tr # Outside perimeter, scarp should equal DEM outside_mask = 
mock_perimeter == 0 - assert np.allclose( - scarpData[outside_mask], mock_dem[outside_mask] - ), "Outside perimeter, scarp should equal DEM" + assert np.allclose(scarpData[outside_mask], mock_dem[outside_mask]), ( + "Outside perimeter, scarp should equal DEM" + ) # Inside perimeter, scarp should be <= DEM inside_mask = mock_perimeter > 0 - assert np.all( - scarpData[inside_mask] <= mock_dem[inside_mask] + 0.001 - ), "Inside perimeter, scarp should not exceed DEM" + assert np.all(scarpData[inside_mask] <= mock_dem[inside_mask] + 0.001), ( + "Inside perimeter, scarp should not exceed DEM" + ) -def test_calculateScarpWithPlanes_multiple_planes(mock_dem, mock_perimeter, mock_transform): +def test_calculateScarpWithPlanes_multiple_planes( + mock_dem, mock_perimeter, mock_transform +): """Test plane calculation with multiple planes (maximum selection)""" # Create two planes with different seed points # Plane 1 @@ -215,16 +233,18 @@ def test_calculateScarpWithPlanes_multiple_planes(mock_dem, mock_perimeter, mock planes = f"{xSeed1},{ySeed1},{zSeed1},{dip1},{slope1},{xSeed2},{ySeed2},{zSeed2},{dip2},{slope2}" # Call function under test - scarpData = scarp.calculateScarpWithPlanes(mock_dem, mock_perimeter, mock_transform, planes) + scarpData = scarp.calculateScarpWithPlanes( + mock_dem, mock_perimeter, mock_transform, planes + ) # Assertions assert scarpData.shape == mock_dem.shape, "Scarp should have same shape as DEM" # Outside perimeter, scarp should equal DEM outside_mask = mock_perimeter == 0 - assert np.allclose( - scarpData[outside_mask], mock_dem[outside_mask] - ), "Outside perimeter, scarp should equal DEM" + assert np.allclose(scarpData[outside_mask], mock_dem[outside_mask]), ( + "Outside perimeter, scarp should equal DEM" + ) def test_calculateScarpWithPlanes_edge_cases(mock_dem, mock_perimeter, mock_transform): @@ -234,17 +254,23 @@ def test_calculateScarpWithPlanes_edge_cases(mock_dem, mock_perimeter, mock_tran dip, slope = 0.0, 0.0 # Zero slope planes = 
f"{xSeed},{ySeed},{zSeed},{dip},{slope}" - scarpData = scarp.calculateScarpWithPlanes(mock_dem, mock_perimeter, mock_transform, planes) + scarpData = scarp.calculateScarpWithPlanes( + mock_dem, mock_perimeter, mock_transform, planes + ) # With zero slope, plane should be horizontal at zSeed inside_mask = mock_perimeter > 0 expected = np.minimum(mock_dem[inside_mask], zSeed) - assert np.allclose(scarpData[inside_mask], expected), "Zero slope should create horizontal plane" + assert np.allclose(scarpData[inside_mask], expected), ( + "Zero slope should create horizontal plane" + ) # Test case 2: Vertical dip (90 degrees) dip, slope = 90.0, 10.0 planes = f"{xSeed},{ySeed},{zSeed},{dip},{slope}" - scarpData = scarp.calculateScarpWithPlanes(mock_dem, mock_perimeter, mock_transform, planes) + scarpData = scarp.calculateScarpWithPlanes( + mock_dem, mock_perimeter, mock_transform, planes + ) # Should not crash and produce valid output assert scarpData.shape == mock_dem.shape, "Should handle 90 degree dip" @@ -256,7 +282,9 @@ def test_calculateScarpWithPlanes_edge_cases(mock_dem, mock_perimeter, mock_tran # ============================================================================ -def test_scarpAnalysisMain_plane_method(scarp_test_data, scarp_config, tmp_path, caplog): +def test_scarpAnalysisMain_plane_method( + scarp_test_data, scarp_config, tmp_path, caplog +): """End-to-end test using plane method with real test data""" # Set caplog to capture INFO level logs caplog.set_level("INFO") @@ -285,14 +313,18 @@ def test_scarpAnalysisMain_plane_method(scarp_test_data, scarp_config, tmp_path, assert src.height == 220, "Output should have correct height" assert src.width == 300, "Output should have correct width" - assert np.all(np.isfinite(scarp_data[scarp_data != src.nodata])), "Scarp data should be finite" + assert np.all(np.isfinite(scarp_data[scarp_data != src.nodata])), ( + "Scarp data should be finite" + ) with rasterio.open(hrel_file) as src: hrel_data = src.read(1) # 
hRel should be non-negative where valid (DEM - scarp >= 0) valid_mask = hrel_data != src.nodata - assert np.all(hrel_data[valid_mask] >= -0.001), "hRel values should be non-negative" + assert np.all(hrel_data[valid_mask] >= -0.001), ( + "hRel values should be non-negative" + ) # Check logging output assert "Perimeterfile is:" in caplog.text, "Should log perimeter file" @@ -304,7 +336,9 @@ def test_scarpAnalysisMain_plane_method(scarp_test_data, scarp_config, tmp_path, # ============================================================================ -def test_scarpAnalysisMain_missing_perimeter_file(scarp_config, temp_output_dir, caplog): +def test_scarpAnalysisMain_missing_perimeter_file( + scarp_config, temp_output_dir, caplog +): """Test error handling when perimeter shapefile is missing""" # Create a minimal test setup with missing perimeter file # Create a dummy DEM @@ -320,12 +354,14 @@ def test_scarpAnalysisMain_missing_perimeter_file(scarp_config, temp_output_dir, scarp.scarpAnalysisMain(scarp_config, str(temp_output_dir)) # Check that error was logged - assert ( - "not found" in caplog.text.lower() or "error" in caplog.text.lower() - ), "Should log error about missing file" + assert "not found" in caplog.text.lower() or "error" in caplog.text.lower(), ( + "Should log error about missing file" + ) -def test_scarpAnalysisMain_invalid_shapefile_attributes(scarp_config, temp_output_dir, caplog): +def test_scarpAnalysisMain_invalid_shapefile_attributes( + scarp_config, temp_output_dir, caplog +): """Test validation of shapefile attributes for plane method""" # This test would require creating a shapefile with missing attributes # For now, we test the error path by checking the ValueError is raised @@ -339,7 +375,9 @@ def test_scarpAnalysisMain_invalid_shapefile_attributes(scarp_config, temp_outpu assert True, "Attribute validation tested through code inspection" -def test_scarpAnalysisMain_useShapefiles_false(scarp_config, scarp_test_data, tmp_path, caplog): +def 
test_scarpAnalysisMain_useShapefiles_false( + scarp_config, scarp_test_data, tmp_path, caplog +): """Test configuration validation when useShapefiles is False""" # Copy test data to temporary directory to have a valid DEM test_dir = tmp_path / "scarpTestNoShapefiles" @@ -356,7 +394,9 @@ def test_scarpAnalysisMain_useShapefiles_false(scarp_config, scarp_test_data, tm pass # Expected to fail after logging error # Check error message was logged - assert "Shapefile option not selected" in caplog.text, "Should log error about shapefile option" + assert "Shapefile option not selected" in caplog.text, ( + "Should log error about shapefile option" + ) def test_scarpAnalysisMain_invalid_method(scarp_test_data, scarp_config, tmp_path): @@ -390,3 +430,42 @@ def test_scarpAnalysisMain_missing_required_attributes(scarp_test_data, tmp_path # The test data has correct attributes, so we can't test the error path easily # without creating invalid shapefiles. We document this limitation. assert True, "Error path for missing attributes tested through code inspection" + + +def test_error_message_attribute_names(): + """Test that error messages reference correct attribute names""" + # This test verifies that error messages in scarp.py reference + # the same attribute names that are actually used in the code + + import avaframe.com6RockAvalanche.scarp as scarp_module + import inspect + + # Read the scarp.py file to check error messages + scarp_path = pathlib.Path(scarp_module.__file__) + scarp_content = scarp_path.read_text() + + # Check line 90 error message + line_90_match = None + for i, line in enumerate(scarp_content.split("\\n"), 1): + if i == 90: + line_90_match = line + break + + # The error message should reference 'dipAngle' not 'dipangle' + if line_90_match: + assert "'dipAngle'" in line_90_match or "'dipangle'" in line_90_match, ( + f"Line 90 should reference dipAngle attribute: {line_90_match}" + ) + + # Check line 121 error message + line_121_match = None + for i, line in 
enumerate(scarp_content.split("\\n"), 1): + if i == 121: + line_121_match = line + break + + # The error message should reference 'rotAngle' not 'rotangle' + if line_121_match: + assert "'rotAngle'" in line_121_match or "'rotangle'" in line_121_match, ( + f"Line 121 should reference rotAngle attribute: {line_121_match}" + ) diff --git a/pyproject.toml b/pyproject.toml index 0791b0b29..401a71199 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -122,6 +122,11 @@ python = "==3.13.7" #[tool.pixi.feature.rcs.pypi-dependencies] #avaframe = "==1.13rc4" +[tool.pixi.tasks] +build = "python setup.py build_ext --inplace" +clean = "find avaframe -type f \\( -name '*.so' -o -name '*.c' \\) -delete" +rebuild = { depends-on = ["clean", "build"] } + #Feature qgis [tool.pixi.feature.qgis.dependencies] numpy = "<2.0" From c399303aa14d33f7546de552dc5a2c059170188a Mon Sep 17 00:00:00 2001 From: Felix Oesterle <6945681+fso42@users.noreply.github.com> Date: Thu, 8 Jan 2026 09:25:50 +0100 Subject: [PATCH 5/5] Address code review; qlty configuration refactor(core): apply black formatting feat(out1Peak, ana4Stats): add `com8MoTPSA` and `com9MoTVoellmy` module support - Extended support for `com8MoTPSA` and `com9MoTVoellmy` across workflows, including DEM handling, simulation filtering, and result file processing. 
--- .qlty/qlty.toml | 6 +- avaframe/ana4Stats/probAna.py | 141 ++----- avaframe/com1DFA/com1DFA.py | 470 ++++++--------------- avaframe/com1DFA/com1DFATools.py | 75 +--- avaframe/in3Utils/cfgUtils.py | 2 +- avaframe/out1Peak/outPlotAllPeak.py | 19 +- avaframe/runScripts/runPlotAreaRefDiffs.py | 14 +- avaframe/tests/test_com1DFA.py | 1 - avaframe/tests/test_scarp.py | 1 - 9 files changed, 206 insertions(+), 523 deletions(-) diff --git a/.qlty/qlty.toml b/.qlty/qlty.toml index a08286ac5..de8fa4ced 100644 --- a/.qlty/qlty.toml +++ b/.qlty/qlty.toml @@ -104,12 +104,8 @@ threshold = 10 enabled = true [smells.function_complexity] -threshold = 10 -enabled = true - -[smells.duplication] +threshold = 15 enabled = true -threshold = 20 [[source]] name = "default" diff --git a/avaframe/ana4Stats/probAna.py b/avaframe/ana4Stats/probAna.py index 27ac3a431..eb7d5dc5f 100644 --- a/avaframe/ana4Stats/probAna.py +++ b/avaframe/ana4Stats/probAna.py @@ -58,9 +58,7 @@ def createComModConfig(cfgProb, avaDir, modName): variationsDict = makeDictFromVars(cfgProb["PROBRUN"]) if cfgProb["PROBRUN"].getint("samplingStrategy") == 2: - log.info( - "Probability run performed by varying one parameter at a time - local approach." 
- ) + log.info("Probability run performed by varying one parameter at a time - local approach.") cfgFiles = cfgFilesLocalApproach(variationsDict, cfgProb, modName, outDir) else: log.info("Probability run perfromed drawing parameter set from full sample.") @@ -105,9 +103,7 @@ def cfgFilesGlobalApproach(avaDir, cfgProb, modName, outDir): if len(paramValuesD["varParNamesInitial"]) == 2: sP.plotThSampleFromVals(paramValuesD, plotDir) else: - log.debug( - "More or less than two parameters have been varied - no plot of sample available" - ) + log.debug("More or less than two parameters have been varied - no plot of sample available") # write cfg files one for each parameter set drawn from full sample cfgFiles = createCfgFiles(paramValuesDList, modName, cfgProb, cfgPath=outDir) @@ -195,13 +191,9 @@ def updateCfgRange(cfg, cfgProb, varName, varDict): fileOverride="", modInfo=False, toPrint=False, - onlyDefault=cfgProb["in1Data_computeFromDistribution_override"].getboolean( - "defaultConfig" - ), - ) - cfgDist, cfgProb = cfgHandling.applyCfgOverride( - cfgDist, cfgProb, cP, addModValues=False + onlyDefault=cfgProb["in1Data_computeFromDistribution_override"].getboolean("defaultConfig"), ) + cfgDist, cfgProb = cfgHandling.applyCfgOverride(cfgDist, cfgProb, cP, addModValues=False) # set variation in configuration if varName in ["relTh", "entTh", "secondaryRelTh"]: @@ -258,9 +250,7 @@ def updateCfgRange(cfg, cfgProb, varName, varDict): cfgDist = { "sampleSize": valSteps, "mean": valVal, - "buildType": cfgProb["in1Data_computeFromDistribution_override"][ - "buildType" - ], + "buildType": cfgProb["in1Data_computeFromDistribution_override"]["buildType"], "buildValue": valVariation, "minMaxInterval": cfgDist["GENERAL"]["minMaxInterval"], "support": cfgDist["GENERAL"]["support"], @@ -374,10 +364,7 @@ def checkIfParameterInConfig(cfg, varPar): # Check for duplicates if len(exactKeyMatch) != 1: - message = ( - "Parameter '%s' does not uniquely match a single configuration parameter." 
- % varPar - ) + message = "Parameter '%s' does not uniquely match a single configuration parameter." % varPar log.error(message) raise AssertionError(message) @@ -426,25 +413,17 @@ def checkForNumberOfReferenceValues(cfgGen, varPar): thRCiV = varPar + "RangeFromCiVariation" # check if variation is set - if ( - cfgGen[thPV] != "" - or cfgGen[thRV] != "" - or cfgGen[thDV] != "" - or cfgGen[thRCiV] != "" - ): - message = ( - "Only one reference value is allowed for %s: but %s %s, %s %s, %s %s, %s %s is given" - % ( - varPar, - thPV, - cfgGen[thPV], - thRV, - cfgGen[thRV], - thDV, - cfgGen[thDV], - thRCiV, - cfgGen[thRCiV], - ) + if cfgGen[thPV] != "" or cfgGen[thRV] != "" or cfgGen[thDV] != "" or cfgGen[thRCiV] != "": + message = "Only one reference value is allowed for %s: but %s %s, %s %s, %s %s, %s %s is given" % ( + varPar, + thPV, + cfgGen[thPV], + thRV, + cfgGen[thRV], + thDV, + cfgGen[thDV], + thRCiV, + cfgGen[thRCiV], ) log.error(message) raise AssertionError(message) @@ -452,9 +431,7 @@ def checkForNumberOfReferenceValues(cfgGen, varPar): return True -def probAnalysis( - avaDir, cfg, modName, parametersDict="", inputDir="", probConf="", simDFActual="" -): +def probAnalysis(avaDir, cfg, modName, parametersDict="", inputDir="", probConf="", simDFActual=""): """Compute probability map of a given set of simulation result exceeding a particular threshold and save to outDir Parameters @@ -484,10 +461,8 @@ def probAnalysis( fU.makeADir(outDir) # fetch all result files and filter simulations according to parametersDict - if modName.lower() in ["com1dfa", "com5snowslide", "com6rockavalanche"]: - simNameList = cfgHandling.filterSims( - avaDir, parametersDict, specDir=inputDir, simDF=simDFActual - ) + if modName.lower() in ["com1dfa", "com5snowslide", "com6rockavalanche", "com8motpsa", "com9motvoellmy"]: + simNameList = cfgHandling.filterSims(avaDir, parametersDict, specDir=inputDir, simDF=simDFActual) filtering = True else: simNameList = [] @@ -539,10 +514,7 @@ def 
probAnalysis( # check if extent is the same as first loaded dataset # if not - remesh and print warning - if ( - fileData["header"]["nrows"] != nRows - or fileData["header"]["ncols"] != nCols - ): + if fileData["header"]["nrows"] != nRows or fileData["header"]["ncols"] != nCols: log.warning( "datasets used to create probMap do not match in extent - remeshing: %s to cellSize %s" % (fileName, header["cellsize"]) @@ -562,10 +534,7 @@ def probAnalysis( ) contourDict[fileName.stem] = contourDictXY - log.info( - "File Name: %s , simulation parameter %s " - % (fileName, cfg["GENERAL"]["peakVar"]) - ) + log.info("File Name: %s , simulation parameter %s " % (fileName, cfg["GENERAL"]["peakVar"])) # Check if peak values exceed desired threshold dataLim[data > float(cfg["GENERAL"]["peakLim"])] = 1.0 @@ -577,8 +546,7 @@ def probAnalysis( unit = pU.cfgPlotUtils["unit%s" % cfg["GENERAL"]["peakVar"]] log.info( "probability analysis performed for peak parameter: %s and a peak value " - "threshold of: %s %s" - % (cfg["GENERAL"]["peakVar"], cfg["GENERAL"]["peakLim"], unit) + "threshold of: %s %s" % (cfg["GENERAL"]["peakVar"], cfg["GENERAL"]["peakLim"], unit) ) log.info("%s peak fields added to analysis" % count) @@ -629,12 +597,7 @@ def makeDictFromVars(cfg): log.error(message) raise AssertionError(message) - if ( - len(varParList) - == len(varValues) - == len(cfg[lengthsPar].split("|")) - == len(varTypes) - ) is False: + if (len(varParList) == len(varValues) == len(cfg[lengthsPar].split("|")) == len(varTypes)) is False: message = ( "For every parameter in varParList a variationValue, %s and variationType needs to be provided" % lengthsPar @@ -730,6 +693,7 @@ def createSampleFromConfig(avaDir, cfgProb, comMod): "com5snowslide", "com6rockavalanche", "com8motpsa", + "com9motvoellmy", ]: # check if thickness parameters are actually read from shp file _, thReadFromShp = checkParameterSettings(cfgStart, varParList) @@ -762,9 +726,7 @@ def createSampleFromConfig(avaDir, cfgProb, comMod): 
return paramValuesDList -def createSampleWithVariationStandardParameters( - cfgProb, cfgStart, varParList, valVariationValue, varType -): +def createSampleWithVariationStandardParameters(cfgProb, cfgStart, varParList, valVariationValue, varType): """create a sample for a parameter variation using latin hypercube sampling Parameters @@ -893,9 +855,7 @@ def createSampleWithVariationForThParameters( ) # add to list all the parameter names fullListOfParameters = fullListOfParameters + thFeatureNames - parentParameterId = parentParameterId + [ - varParList.index(varPar) - ] * len(thFeatureNames) + parentParameterId = parentParameterId + [varParList.index(varPar)] * len(thFeatureNames) thValues = np.append(thValues, thV) ciValues = np.append(ciValues, ciV) else: @@ -919,9 +879,7 @@ def createSampleWithVariationForThParameters( ) fullValVar = np.asarray( [ - float(valVariationValue[i]) - if valVariationValue[i] != "ci95" - else np.nan + float(valVariationValue[i]) if valVariationValue[i] != "ci95" else np.nan for i in parentParameterId ] ) @@ -930,16 +888,12 @@ def createSampleWithVariationForThParameters( upperBounds = np.asarray([None] * len(fullListOfParameters)) # set lower and upper bounds depending on varType (percent, range, rangefromci) - lowerBounds[fullVarType == "percent"] = varValList[ + lowerBounds[fullVarType == "percent"] = varValList[fullVarType == "percent"] - varValList[ fullVarType == "percent" - ] - varValList[fullVarType == "percent"] * ( - fullValVar[fullVarType == "percent"] / 100.0 - ) - upperBounds[fullVarType == "percent"] = varValList[ + ] * (fullValVar[fullVarType == "percent"] / 100.0) + upperBounds[fullVarType == "percent"] = varValList[fullVarType == "percent"] + varValList[ fullVarType == "percent" - ] + varValList[fullVarType == "percent"] * ( - fullValVar[fullVarType == "percent"] / 100.0 - ) + ] * (fullValVar[fullVarType == "percent"] / 100.0) lowerBounds[fullVarType == "range"] = ( varValList[fullVarType == "range"] - 
fullValVar[fullVarType == "range"] @@ -949,12 +903,10 @@ def createSampleWithVariationForThParameters( ) lowerBounds[fullVarType == "rangefromci"] = ( - varValList[fullVarType == "rangefromci"] - - ciValues[fullVarType == "rangefromci"] + varValList[fullVarType == "rangefromci"] - ciValues[fullVarType == "rangefromci"] ) upperBounds[fullVarType == "rangefromci"] = ( - varValList[fullVarType == "rangefromci"] - + ciValues[fullVarType == "rangefromci"] + varValList[fullVarType == "rangefromci"] + ciValues[fullVarType == "rangefromci"] ) # create a sample of parameter values using scipy latin hypercube or morris sampling @@ -971,9 +923,7 @@ def createSampleWithVariationForThParameters( ) ) else: - fullSample = np.zeros( - (int(cfgProb["PROBRUN"]["nSample"]), len(fullListOfParameters)) - ) + fullSample = np.zeros((int(cfgProb["PROBRUN"]["nSample"]), len(fullListOfParameters))) for idx, varPar in enumerate(fullListOfParameters): lB = [0] * len(varParList) @@ -984,9 +934,7 @@ def createSampleWithVariationForThParameters( fullSample[:, idx] = parSample[:, parentParameterId[idx]] # create dictionary with all the info - thFromIni = cfgUtils.convertToCfgList( - list(set(varParList).symmetric_difference(set(staParameter))) - ) + thFromIni = cfgUtils.convertToCfgList(list(set(varParList).symmetric_difference(set(staParameter)))) paramValuesD = { "names": fullListOfParameters, "values": fullSample, @@ -1043,10 +991,7 @@ def createSample(cfgProb, varParList): sample = sampler.random(n=int(cfgProb["PROBRUN"]["nSample"])) log.info("Parameter sample created using latin hypercube sampling") else: - message = ( - "Sampling method: %s not a valid option" - % cfgProb["PROBRUN"]["sampleMethod"] - ) + message = "Sampling method: %s not a valid option" % cfgProb["PROBRUN"]["sampleMethod"] log.error(message) raise AssertionError(message) @@ -1144,9 +1089,7 @@ def createCfgFiles(paramValuesDList, comMod, cfg, cfgPath=""): cfgStart["VISUALISATION"]["scenario"] = str(count1) 
cfgStart["INPUT"]["thFromIni"] = paramValuesD["thFromIni"] if "releaseScenario" in paramValuesD.keys(): - cfgStart["INPUT"]["releaseScenario"] = paramValuesD[ - "releaseScenario" - ] + cfgStart["INPUT"]["releaseScenario"] = paramValuesD["releaseScenario"] cfgF = pathlib.Path(cfgPath, ("%d_%sCfg.ini" % (countS, modName))) with open(cfgF, "w") as configfile: cfgStart.write(configfile) @@ -1182,15 +1125,11 @@ def fetchStartCfg(comMod, cfgProb): comMod, fileOverride="", toPrint=False, - onlyDefault=cfgProb["%s_%s_override" % (modP, modName)].getboolean( - "defaultConfig" - ), + onlyDefault=cfgProb["%s_%s_override" % (modP, modName)].getboolean("defaultConfig"), ) # override with parameters set in in the cfgProb comMod_override section - cfgStart, cfgProb = cfgHandling.applyCfgOverride( - cfgStart, cfgProb, comMod, addModValues=False - ) + cfgStart, cfgProb = cfgHandling.applyCfgOverride(cfgStart, cfgProb, comMod, addModValues=False) return cfgStart diff --git a/avaframe/com1DFA/com1DFA.py b/avaframe/com1DFA/com1DFA.py index 2370267a1..021b49b42 100644 --- a/avaframe/com1DFA/com1DFA.py +++ b/avaframe/com1DFA/com1DFA.py @@ -104,16 +104,12 @@ def com1DFAPreprocess(cfgMain, typeCfgInfo, cfgInfo, module=com1DFA): cfgStart = cfgInfo # fetch input data and create work and output directories - inputSimFilesAll, outDir, simDFExisting, simNameExisting = ( - com1DFATools.initializeInputs( - avalancheDir, cfgStart["GENERAL"].getboolean("cleanRemeshedRasters"), module - ) + inputSimFilesAll, outDir, simDFExisting, simNameExisting = com1DFATools.initializeInputs( + avalancheDir, cfgStart["GENERAL"].getboolean("cleanRemeshedRasters"), module ) # create dictionary with one key for each simulation that shall be performed - simDict = dP.createSimDict( - avalancheDir, module, cfgStart, inputSimFilesAll, simNameExisting - ) + simDict = dP.createSimDict(avalancheDir, module, cfgStart, inputSimFilesAll, simNameExisting) return simDict, outDir, inputSimFilesAll, simDFExisting @@ -150,14 
+146,10 @@ def com1DFAMain(cfgMain, cfgInfo=""): typeCfgInfo = com1DFATools.checkCfgInfoType(cfgInfo) if typeCfgInfo == "cfgFromDir": # preprocessing to create configuration objects for all simulations to run by reading multiple cfg files - simDict, inputSimFiles, simDFExisting, outDir = ( - com1DFATools.createSimDictFromCfgs(cfgMain, cfgInfo) - ) + simDict, inputSimFiles, simDFExisting, outDir = com1DFATools.createSimDictFromCfgs(cfgMain, cfgInfo) else: # preprocessing to create configuration objects for all simulations to run - simDict, outDir, inputSimFiles, simDFExisting = com1DFAPreprocess( - cfgMain, typeCfgInfo, cfgInfo - ) + simDict, outDir, inputSimFiles, simDFExisting = com1DFAPreprocess(cfgMain, typeCfgInfo, cfgInfo) log.info("The following simulations will be performed") for key in simDict: @@ -182,9 +174,7 @@ def com1DFAMain(cfgMain, cfgInfo=""): nCPU = cfgUtils.getNumberOfProcesses(cfgMain, len(simDict)) # Supply compute task with inputs - com1DFACoreTaskWithInput = partial( - com1DFACoreTask, simDict, inputSimFiles, avalancheDir, outDir - ) + com1DFACoreTaskWithInput = partial(com1DFACoreTask, simDict, inputSimFiles, avalancheDir, outDir) # Create parallel pool and run # with multiprocessing.Pool(processes=nCPU) as pool: @@ -243,10 +233,7 @@ def com1DFACoreTask(simDict, inputSimFiles, avalancheDir, outDir, cuSim): # fetch simHash for current sim simHash = simDict[cuSim]["simHash"] - log.info( - "%s runs as process: %s, %s" - % (cuSim, os.getpid(), threading.current_thread().ident) - ) + log.info("%s runs as process: %s, %s" % (cuSim, os.getpid(), threading.current_thread().ident)) # append configuration to dataframe simDF = cfgUtils.appendCgf2DF(simHash, cuSim, cfg, simDF) @@ -261,9 +248,7 @@ def com1DFACoreTask(simDict, inputSimFiles, avalancheDir, outDir, cuSim): cfgFinal, tCPU, nPartInitial, - ) = com1DFA.com1DFACore( - cfg, avalancheDir, cuSim, inputSimFiles, outDir, simHash=simHash - ) + ) = com1DFA.com1DFACore(cfg, avalancheDir, cuSim, 
inputSimFiles, outDir, simHash=simHash) simDF.at[simHash, "nPart"] = str(int(nPartInitial)) @@ -273,9 +258,7 @@ def com1DFACoreTask(simDict, inputSimFiles, avalancheDir, outDir, cuSim): # create hash to check if configuration didn't change simHashFinal = cfgUtils.cfgHash(cfgFinal) if simHashFinal != simHash: - cfgUtils.writeCfgFile( - avalancheDir, com1DFA, cfg, fileName="%s_butModified" % simHash - ) + cfgUtils.writeCfgFile(avalancheDir, com1DFA, cfg, fileName="%s_butModified" % simHash) message = "Simulation configuration has been changed since start" log.error(message) raise AssertionError(message) @@ -284,9 +267,7 @@ def com1DFACoreTask(simDict, inputSimFiles, avalancheDir, outDir, cuSim): return simDF, tCPUDF, dem, reportDict -def com1DFAPostprocess( - simDF, tCPUDF, simDFExisting, cfgMain, dem, reportDictList, exportData -): +def com1DFAPostprocess(simDF, tCPUDF, simDFExisting, cfgMain, dem, reportDictList, exportData): """postprocessing of simulation results: save configuration to csv, create plots and report Parameters @@ -331,9 +312,7 @@ def com1DFAPostprocess( # write the actually simulated sims to a separate csv file, # this is used for the qgis connector - cfgUtils.writeAllConfigurationInfo( - avalancheDir, simDF, specDir="", csvName="latestSims.csv" - ) + cfgUtils.writeAllConfigurationInfo(avalancheDir, simDF, specDir="", csvName="latestSims.csv") # append new simulations configuration to old ones (if they exist), # return total dataFrame and write it to csv @@ -354,9 +333,7 @@ def com1DFAPostprocess( else: plotDict = "" # create contour line plot - reportDictList, _ = outCom1DFA.createContourPlot( - reportDictList, avalancheDir, simDF - ) + reportDictList, _ = outCom1DFA.createContourPlot(reportDictList, avalancheDir, simDF) if cfgMain["FLAGS"].getboolean("createReport"): # write report @@ -451,9 +428,7 @@ def com1DFACore(cfg, avaDir, cuSimName, inputSimFiles, outDir, simHash=""): log.info(("cpu time DFA = %s s" % (tCPUDFA))) # write report dictionary 
- reportDict = createReportDict( - avaDir, cuSimName, relName, inputSimLines, cfg, reportAreaInfo - ) + reportDict = createReportDict(avaDir, cuSimName, relName, inputSimLines, cfg, reportAreaInfo) # add time and mass info to report reportDict = reportAddTimeMassInfo(reportDict, tCPUDFA, infoDict) @@ -463,9 +438,7 @@ def com1DFACore(cfg, avaDir, cuSimName, inputSimFiles, outDir, simHash=""): # write text file to Outputs/com1DFA/configurationFilesDone to indicate that this simulation has been performed configFileName = "%s.ini" % cuSimName for saveDir in ["configurationFilesDone", "configurationFilesLatest"]: - configDir = pathlib.Path( - avaDir, "Outputs", "com1DFA", "configurationFiles", saveDir - ) + configDir = pathlib.Path(avaDir, "Outputs", "com1DFA", "configurationFiles", saveDir) with open((configDir / configFileName), "w") as fi: fi.write("see directory configurationFiles for info on config") fi.close() @@ -514,9 +487,7 @@ def prepareReleaseEntrainment(cfg, rel, inputSimLines): if cfg["GENERAL"].getboolean("iniStep"): # set release thickness for buffer - releaseLineBuffer = setThickness( - cfg, inputSimLines["releaseLineBuffer"], "relTh" - ) + releaseLineBuffer = setThickness(cfg, inputSimLines["releaseLineBuffer"], "relTh") inputSimLines["releaseLineBuffer"] = releaseLineBuffer if ( @@ -524,19 +495,14 @@ def prepareReleaseEntrainment(cfg, rel, inputSimLines): and inputSimLines["entResInfo"]["flagSecondaryRelease"] == "Yes" ): if cfg["INPUT"]["secondaryRelThFile"] == "": - secondaryReleaseLine = setThickness( - cfg, inputSimLines["secondaryReleaseLine"], "secondaryRelTh" - ) + secondaryReleaseLine = setThickness(cfg, inputSimLines["secondaryReleaseLine"], "secondaryRelTh") inputSimLines["secondaryReleaseLine"] = secondaryReleaseLine else: inputSimLines["entResInfo"]["flagSecondaryRelease"] = "No" secondaryReleaseLine = None inputSimLines["secondaryReleaseLine"] = secondaryReleaseLine - if ( - cfg["GENERAL"]["simTypeActual"] in ["ent", "entres"] - and 
cfg["INPUT"]["entThFile"] == "" - ): + if cfg["GENERAL"]["simTypeActual"] in ["ent", "entres"] and cfg["INPUT"]["entThFile"] == "": # set entrainment thickness entLine = setThickness(cfg, inputSimLines["entLine"], "entTh") inputSimLines["entLine"] = entLine @@ -636,9 +602,7 @@ def prepareInputData(inputSimFiles, cfg): relFile = inputSimFiles["releaseScenario"] # get dem dictionary - already read DEM with correct mesh cell size - demOri = gI.initializeDEM( - cfg["GENERAL"]["avalancheDir"], demPath=cfg["INPUT"]["DEM"] - ) + demOri = gI.initializeDEM(cfg["GENERAL"]["avalancheDir"], demPath=cfg["INPUT"]["DEM"]) dOHeader = demOri["header"] # read data from relThFile if needed, already with correct mesh cell size @@ -657,9 +621,7 @@ def prepareInputData(inputSimFiles, cfg): ) relThFieldData = "" else: - relRasterPath = pathlib.Path( - cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["relThFile"] - ) + relRasterPath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["relThFile"]) relRasterDict = IOf.readRaster(relRasterPath) relThFieldData = relRasterDict["rasterData"] releaseLine = { @@ -677,9 +639,7 @@ def prepareInputData(inputSimFiles, cfg): if entResInfo["flagSecondaryRelease"] == "Yes": if cfg["INPUT"]["secondaryRelThFile"] == "": secondaryReleaseFile = inputSimFiles["secondaryRelFile"] - secondaryReleaseLine = shpConv.readLine( - secondaryReleaseFile, "", demOri - ) + secondaryReleaseLine = shpConv.readLine(secondaryReleaseFile, "", demOri) secondaryReleaseLine["fileName"] = secondaryReleaseFile secondaryReleaseLine["type"] = "Secondary release" secondaryReleaseLine["initializedFrom"] = "shapefile" @@ -731,9 +691,7 @@ def prepareInputData(inputSimFiles, cfg): cfg["GENERAL"]["avalancheDir"], entLine, "com1DFA", type="entrainment" ) else: - entRasterPath = pathlib.Path( - cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["entThFile"] - ) + entRasterPath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["entThFile"]) 
entLineDict = IOf.readRaster(entRasterPath) entLine = { "rasterData": entLineDict["rasterData"], @@ -762,9 +720,7 @@ def prepareInputData(inputSimFiles, cfg): cfg["GENERAL"]["avalancheDir"], resLine, "com1DFA", type="resistance" ) else: - resRasterPath = pathlib.Path( - cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["resFile"] - ) + resRasterPath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"]["resFile"]) resLineDict = IOf.readRaster(resRasterPath) resLine = { "rasterData": resLineDict["rasterData"], @@ -1025,12 +981,8 @@ def reportAddTimeMassInfo(reportDict, tCPUDFA, infoDict): """Add time and mass info to report""" # add mass info - reportDict["Simulation Parameters"].update( - {"Initial mass [kg]": ("%.2f" % infoDict["initial mass"])} - ) - reportDict["Simulation Parameters"].update( - {"Final mass [kg]": ("%.2f" % infoDict["final mass"])} - ) + reportDict["Simulation Parameters"].update({"Initial mass [kg]": ("%.2f" % infoDict["initial mass"])}) + reportDict["Simulation Parameters"].update({"Final mass [kg]": ("%.2f" % infoDict["final mass"])}) reportDict["Simulation Parameters"].update( {"Entrained mass [kg]": ("%.2f" % infoDict["entrained mass"])} ) @@ -1173,9 +1125,7 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): releaseLine = inputSimLines["releaseLineBuffer"] releaseLineReal = inputSimLines["releaseLine"] # check if release features overlap between features - geoTrans.prepareArea( - releaseLineReal, dem, thresholdPointInPoly, combine=True, checkOverlap=True - ) + geoTrans.prepareArea(releaseLineReal, dem, thresholdPointInPoly, combine=True, checkOverlap=True) buffer1 = ( cfg["GENERAL"].getfloat("sphKernelRadius") * cfg["GENERAL"].getfloat("additionallyFixedFactor") @@ -1203,9 +1153,7 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): # create release area raster if not read from file if inputSimLines["releaseLine"]["initializedFrom"] == "shapefile": # check if release features 
overlap between features - geoTrans.prepareArea( - releaseLine, dem, thresholdPointInPoly, combine=True, checkOverlap=True - ) + geoTrans.prepareArea(releaseLine, dem, thresholdPointInPoly, combine=True, checkOverlap=True) # if no release thickness field or function - set release according to shapefile or ini file # this is a list of release rasters that we want to combine @@ -1228,8 +1176,8 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): # for area computation use smaller threshold to identify raster cells that lie within release line # as for creating particles a bigger radius is chosen as particles that lie outside are removed afterwards releaseInfoDict = copy.deepcopy(releaseLine) - relAreaActualList, relAreaProjectedList, releaseInfoDict = ( - gI.computeAreasFromRasterAndLine(releaseInfoDict, dem) + relAreaActualList, relAreaProjectedList, releaseInfoDict = gI.computeAreasFromRasterAndLine( + releaseInfoDict, dem ) relAreaProjected = np.sum(relAreaProjectedList) relAreaActual = np.sum(relAreaActualList) @@ -1292,9 +1240,7 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): # perform initialisation step for redistributing particles if cfg["GENERAL"].getboolean("iniStep"): startTimeIni = time.time() - particles, fields = pI.getIniPosition( - cfg, particles, dem, fields, inputSimLines, relThField - ) + particles, fields = pI.getIniPosition(cfg, particles, dem, fields, inputSimLines, relThField) tIni = time.time() - startTimeIni log.info( "Ini step for initialising particles finalized, total mass: %.2f, number of particles: %d" @@ -1321,12 +1267,8 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): ) # check if entrainment and release overlap - entrMassRaster = geoTrans.checkOverlap( - entrMassRaster, relRaster, "Entrainment", "Release", crop=True - ) - entrEnthRaster = geoTrans.checkOverlap( - entrEnthRaster, relRaster, "Entrainment", "Release", crop=True - ) + entrMassRaster = 
geoTrans.checkOverlap(entrMassRaster, relRaster, "Entrainment", "Release", crop=True) + entrEnthRaster = geoTrans.checkOverlap(entrEnthRaster, relRaster, "Entrainment", "Release", crop=True) # check for overlap with the secondary release area if secondaryReleaseInfo["flagSecondaryRelease"] == "Yes": for secIndex, secRelRaster in enumerate(secondaryReleaseInfo["rasterData"]): @@ -1346,9 +1288,7 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): ) # export secondary release raster used for computations (after cutting potential overlap with release) if cfg["EXPORTS"].getboolean("exportRasters"): - outDir = pathlib.Path( - cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters" - ) + outDir = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters") useCompression = cfg["EXPORTS"].getboolean("useCompression") IOf.writeResultToRaster( dem["originalHeader"], @@ -1366,9 +1306,7 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): ) # export entrainment raster used for computations (after cutting potential overlap with release or secondary release) if cfg["EXPORTS"].getboolean("exportRasters"): - outDir = pathlib.Path( - cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters" - ) + outDir = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Outputs", "internalRasters") useCompression = cfg["EXPORTS"].getboolean("useCompression") IOf.writeResultToRaster( dem["originalHeader"], @@ -1435,9 +1373,7 @@ def initializeSimulation(cfg, outDir, demOri, inputSimLines, logName): return particles, fields, dem, reportAreaInfo -def initializeParticles( - cfg, releaseLine, dem, inputSimLines="", logName="", relThField="", thName="rel" -): +def initializeParticles(cfg, releaseLine, dem, inputSimLines="", logName="", relThField="", thName="rel"): """Initialize DFA simulation Create particles and fields dictionary according to config parameters @@ -1499,9 +1435,7 @@ def initializeParticles( # find all non empty cells (meaning 
release area) indRelY, indRelX = np.nonzero(relRasterMask) if inputSimLines != "": - indRelYReal, indRelXReal = np.nonzero( - inputSimLines["releaseLine"]["rasterData"] - ) + indRelYReal, indRelXReal = np.nonzero(inputSimLines["releaseLine"]["rasterData"]) else: indRelYReal, indRelXReal = np.nonzero(relRaster) iReal = list(zip(indRelYReal, indRelXReal)) @@ -1514,7 +1448,9 @@ def initializeParticles( # make option available to read initial particle distribution from file if cfg.getboolean("initialiseParticlesFromFile"): if cfg.getboolean("iniStep"): - message = "If initialiseParticlesFromFile is used, iniStep cannot be performed - chose only one option" + message = ( + "If initialiseParticlesFromFile is used, iniStep cannot be performed - chose only one option" + ) log.error(message) raise AssertionError(message) particles, hPartArray = particleTools.initialiseParticlesFromFile( @@ -1534,10 +1470,7 @@ def initializeParticles( if len(relThField) != 0 and cfg.getboolean("iniStep"): # set release thickness to a constant value for initialisation relRaster = np.where(relRaster > 0.0, cfg.getfloat("%sTh" % thName), 0.0) - log.warning( - "%sThField!= 0, but relRaster set to %sTh value (from ini)" - % (thName, thName) - ) + log.warning("%sThField!= 0, but relRaster set to %sTh value (from ini)" % (thName, thName)) # loop on non empty cells for indRelx, indRely in zip(indRelX, indRelY): # compute number of particles for this cell @@ -1601,11 +1534,7 @@ def initializeParticles( particles["trajectoryAngle"] = np.zeros(np.shape(hPartArray)) particles["stoppCriteria"] = False mPartArray = particles["m"] - kineticEne = np.sum( - 0.5 - * mPartArray - * DFAtls.norm2(particles["ux"], particles["uy"], particles["uz"]) - ) + kineticEne = np.sum(0.5 * mPartArray * DFAtls.norm2(particles["ux"], particles["uy"], particles["uz"])) particles["kineticEne"] = kineticEne particles["potentialEne"] = np.sum(gravAcc * mPartArray * particles["z"]) particles["peakKinEne"] = kineticEne @@ -1755,16 
+1684,12 @@ def initializeFields(cfg, dem, particles, releaseLine): fields["Vx"] = np.zeros((nrows, ncols)) # velocity in x direction [m/s] fields["Vy"] = np.zeros((nrows, ncols)) # velocity in y direction [m/s] fields["Vz"] = np.zeros((nrows, ncols)) # velocity in z direction [m/s] - fields["dmDet"] = np.zeros( - (nrows, ncols) - ) # flowing mass change due to detrainment [kg] + fields["dmDet"] = np.zeros((nrows, ncols)) # flowing mass change due to detrainment [kg] fields["FTStop"] = np.zeros((nrows, ncols)) # flow thickness that is stopped [m] fields["FTDet"] = np.zeros((nrows, ncols)) # flow thickness that is detrained [m] fields["FTEnt"] = np.zeros((nrows, ncols)) # flow thickness that is entrained [m] fields["sfcChange"] = np.zeros((nrows, ncols)) # depth that changes the surface [m] - fields["sfcChangeTotal"] = np.zeros( - (nrows, ncols) - ) # total depth that changed the surface [m] + fields["sfcChangeTotal"] = np.zeros((nrows, ncols)) # total depth that changed the surface [m] fields["demAdapted"] = np.zeros((nrows, ncols)) # adapted topography [m] fields["timeInfo"] = np.zeros((nrows, ncols)) # first time # for optional fields, initialize with dummys (minimum size array). 
The cython functions then need something @@ -1812,12 +1737,9 @@ def initializeFields(cfg, dem, particles, releaseLine): # Cleaning up the triangles (remove unwanted bonds) # masking triangles exiting the release (plan small buffer to be sure to keep all inner edges) # this happends on non-convex release areas - outline = sPolygon(zip(xOutline, yOutline)).buffer( - cfgGen.getfloat("thresholdPointInPoly") - ) + outline = sPolygon(zip(xOutline, yOutline)).buffer(cfgGen.getfloat("thresholdPointInPoly")) mask = [ - not outline.contains(sPolygon(zip(x[tri], y[tri]))) - for tri in triangles.get_masked_triangles() + not outline.contains(sPolygon(zip(x[tri], y[tri]))) for tri in triangles.get_masked_triangles() ] triangles.set_mask(mask) # masking triangles with sidelength bigger than some threshold @@ -1876,9 +1798,7 @@ def initializeSecRelease(inputSimLines, dem, relRaster, reportAreaInfo): """ if inputSimLines["entResInfo"]["flagSecondaryRelease"] == "Yes": secondaryReleaseInfo = inputSimLines["secondaryReleaseLine"] - log.info( - "Initializing secondary release area: %s" % secondaryReleaseInfo["fileName"] - ) + log.info("Initializing secondary release area: %s" % secondaryReleaseInfo["fileName"]) log.info("Secondary release area features: %s" % (secondaryReleaseInfo["Name"])) secondaryReleaseInfo["header"] = dem["originalHeader"] @@ -1940,9 +1860,7 @@ def initializeSecRelease(inputSimLines, dem, relRaster, reportAreaInfo): return secondaryReleaseInfo, reportAreaInfo -def initializeMassEnt( - dem, simTypeActual, entLine, reportAreaInfo, thresholdPointInPoly, cfg -): +def initializeMassEnt(dem, simTypeActual, entLine, reportAreaInfo, thresholdPointInPoly, cfg): """Initialize mass for entrainment Parameters @@ -1977,9 +1895,7 @@ def initializeMassEnt( log.info("Initializing entrainment area: %s" % (entrainmentArea)) log.info("Entrainment area features: %s" % (entLine["Name"])) if entLine["initializedFrom"] == "shapefile": - entLine = geoTrans.prepareArea( - entLine, dem, 
thresholdPointInPoly, thList=entLine["thickness"] - ) + entLine = geoTrans.prepareArea(entLine, dem, thresholdPointInPoly, thList=entLine["thickness"]) entrMassRaster = entLine["rasterData"] # ToDo: not used in samos but implemented # tempRaster = cfg['GENERAL'].getfloat('entTempRef') + (dem['rasterData'] - cfg['GENERAL'].getfloat('entMinZ')) @@ -2002,9 +1918,7 @@ def initializeMassEnt( return entrMassRaster, entrEnthRaster, reportAreaInfo -def initializeResistance( - cfg, dem, simTypeActual, resLine, reportAreaInfo, thresholdPointInPoly -): +def initializeResistance(cfg, dem, simTypeActual, resLine, reportAreaInfo, thresholdPointInPoly): """Initialize resistance matrix Parameters @@ -2064,9 +1978,7 @@ def initializeResistance( reportAreaInfo["resistance"] = "Yes" if detrainment: - log.info( - "Initializing detrainment (resistance) area: %s" % (resistanceArea) - ) + log.info("Initializing detrainment (resistance) area: %s" % (resistanceArea)) log.info("Detrainment (Resistance) area features: %s" % (resLine["Name"])) detRaster = K * mask reportAreaInfo["detrainment"] = "Yes" @@ -2082,9 +1994,7 @@ def initializeResistance( return cResRaster, detRaster, reportAreaInfo -def DFAIterate( - cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, simHash="" -): +def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, simHash=""): """Perform time loop for DFA simulation Save results at desired intervals @@ -2161,9 +2071,7 @@ def DFAIterate( ] frictModel = cfgGen["frictModel"].lower() frictType = frictModelsList.index(frictModel) + 1 - log.debug( - "Friction Model used: %s, %s" % (frictModelsList[frictType - 1], frictType) - ) + log.debug("Friction Model used: %s, %s" % (frictModelsList[frictType - 1], frictType)) # turn resistance model into integer ResModel = cfgGen["ResistanceModel"].lower() @@ -2171,10 +2079,7 @@ def DFAIterate( "default", ] resistanceType = ResModelsList.index(ResModel) + 1 - log.debug( - "Resistance Model used: %s, %s" - % 
(ResModelsList[resistanceType - 1], resistanceType) - ) + log.debug("Resistance Model used: %s, %s" % (ResModelsList[resistanceType - 1], resistanceType)) # Initialise Lists to save fields and add initial time step contourDictXY = None @@ -2186,9 +2091,7 @@ def DFAIterate( pfvTimeMax = [] # setup a result fields info data frame to save max values of fields and avalanche front - resultsDF = setupresultsDF( - resTypes, cfg["VISUALISATION"].getboolean("createRangeTimeDiagram") - ) + resultsDF = setupresultsDF(resTypes, cfg["VISUALISATION"].getboolean("createRangeTimeDiagram")) # Add different time stepping options here log.debug("Use standard time stepping") @@ -2240,9 +2143,7 @@ def DFAIterate( # check if range-time diagram should be performed, if yes - initialize if cfg["VISUALISATION"].getboolean("createRangeTimeDiagram"): demRT = dtAna.setDemOrigin(dem) - mtiInfo, dtRangeTime, cfgRangeTime = dtAna.initializeRangeTime( - dtAna, cfg, demRT, simHash - ) + mtiInfo, dtRangeTime, cfgRangeTime = dtAna.initializeRangeTime(dtAna, cfg, demRT, simHash) # fetch initial time step too mtiInfo, dtRangeTime = dtAna.fetchRangeTimeInfo( cfgRangeTime, cfg, dtRangeTime, t, demRT["header"], fields, mtiInfo @@ -2291,9 +2192,7 @@ def DFAIterate( rangeValue = mtiInfo["rangeList"][-1] else: rangeValue = "" - resultsDF = addMaxValuesToDF( - resultsDF, fields, t, resTypes, rangeValue=rangeValue - ) + resultsDF = addMaxValuesToDF(resultsDF, fields, t, resTypes, rangeValue=rangeValue) tCPU["nSave"] = nSave particles["t"] = t @@ -2310,18 +2209,13 @@ def DFAIterate( # create range time diagram # determine avalanche front and flow characteristics in respective coodrinate system - if ( - cfg["VISUALISATION"].getboolean("createRangeTimeDiagram") - and t >= dtRangeTime[0] - ): + if cfg["VISUALISATION"].getboolean("createRangeTimeDiagram") and t >= dtRangeTime[0]: mtiInfo, dtRangeTime = dtAna.fetchRangeTimeInfo( cfgRangeTime, cfg, dtRangeTime, t, demRT["header"], fields, mtiInfo ) # create plots for 
tt diagram animation - if cfgRangeTime["PLOTS"].getboolean("animate") and cfg[ - "VISUALISATION" - ].getboolean("TTdiagram"): + if cfgRangeTime["PLOTS"].getboolean("animate") and cfg["VISUALISATION"].getboolean("TTdiagram"): TTResType = cfgRangeTime["GENERAL"]["rangeTimeResType"] dtAnaPlots.animationPlot( demRT, @@ -2337,9 +2231,7 @@ def DFAIterate( if t >= (dtSave[0] - 1.0e-8): Tsave.append(t) log.debug("Saving results for time step t = %f s", t) - log.debug( - "MTot = %f kg, %s particles" % (particles["mTot"], particles["nPart"]) - ) + log.debug("MTot = %f kg, %s particles" % (particles["mTot"], particles["nPart"])) log.debug(("cpu time Force = %s s" % (tCPU["timeForce"] / nIter))) log.debug(("cpu time ForceSPH = %s s" % (tCPU["timeForceSPH"] / nIter))) log.debug(("cpu time Position = %s s" % (tCPU["timePos"] / nIter))) @@ -2348,9 +2240,7 @@ def DFAIterate( # Result parameters to be exported if cfg["EXPORTS"].getboolean("exportData"): - exportFields( - cfg, t, fields, dem, outDir, cuSimName, TSave="intermediate" - ) + exportFields(cfg, t, fields, dem, outDir, cuSimName, TSave="intermediate") # export particles dictionaries of saving time steps if "particles" in resTypes: @@ -2475,9 +2365,7 @@ def DFAIterate( dtAna.exportData(mtiInfo, cfgRangeTime, "com1DFA") # save resultsDF to file - resultsDFPath = pathlib.Path( - cfgGen["avalancheDir"], "Outputs", "com1DFA", "resultsDF_%s.csv" % simHash - ) + resultsDFPath = pathlib.Path(cfgGen["avalancheDir"], "Outputs", "com1DFA", "resultsDF_%s.csv" % simHash) resultsDF.to_csv(resultsDFPath) if cfg["EXPORTS"].getboolean("exportData"): @@ -2741,17 +2629,13 @@ def computeEulerTimeStep( # update resistance area fields using thresholds fields = com1DFATools.updateResCoeffFields(fields, cfg) if debugPlot: - outCom1DFA.plotResFields( - fields, cfg, particles["tPlot"], dem, particles["mTot"] - ) + outCom1DFA.plotResFields(fields, cfg, particles["tPlot"], dem, particles["mTot"]) # get forces startTime = time.time() # loop version of 
the compute force log.debug("Compute Force C") - particles, force, fields = DFAfunC.computeForceC( - cfg, particles, fields, dem, frictType, resistanceType - ) + particles, force, fields = DFAfunC.computeForceC(cfg, particles, fields, dem, frictType, resistanceType) tCPUForce = time.time() - startTime tCPU["timeForce"] = tCPU["timeForce"] + tCPUForce @@ -2826,15 +2710,9 @@ def computeEulerTimeStep( # adapt DEM considering erosion and deposition # only adapt DEM when in one grid cell the changing height > threshold thresholdAdaptSfc = cfg.getfloat("thresholdAdaptSfc") - adaptStop = cfg.getboolean("adaptSfcStopped") and np.any( - abs(fields["FTStop"]) > thresholdAdaptSfc - ) - adaptDet = cfg.getboolean("adaptSfcDetrainment") and np.any( - abs(fields["FTDet"]) > thresholdAdaptSfc - ) - adaptEnt = cfg.getboolean("adaptSfcEntrainment") and np.any( - abs(fields["FTEnt"]) > thresholdAdaptSfc - ) + adaptStop = cfg.getboolean("adaptSfcStopped") and np.any(abs(fields["FTStop"]) > thresholdAdaptSfc) + adaptDet = cfg.getboolean("adaptSfcDetrainment") and np.any(abs(fields["FTDet"]) > thresholdAdaptSfc) + adaptEnt = cfg.getboolean("adaptSfcEntrainment") and np.any(abs(fields["FTEnt"]) > thresholdAdaptSfc) if particles["t"] > 0: if adaptStop or adaptDet or adaptEnt: dem, fields = adaptDEM(dem, fields, cfg) @@ -2863,15 +2741,11 @@ def releaseSecRelArea(cfg, particles, fields, dem, zPartArray0, reportAreaInfo): mask = (secRelRaster > 0) & (flowThicknessField > 0) if mask.any(): # create secondary release area particles - log.info( - "Initializing secondary release area feature %s" % secRelRasterName - ) + log.info("Initializing secondary release area feature %s" % secRelRasterName) if secondaryReleaseInfo["initializedFrom"] == "shapefile": secRelInfo = shpConv.extractFeature(secondaryReleaseInfo, count) secRelInfo["rasterData"] = secRelRaster - secRelParticles = initializeParticles( - cfg, secRelInfo, dem, thName="secondaryRel" - ) + secRelParticles = initializeParticles(cfg, 
secRelInfo, dem, thName="secondaryRel") else: secondaryReleaseInfo["rasterData"] = secRelRaster secRelParticles = initializeParticles( @@ -2883,8 +2757,7 @@ def releaseSecRelArea(cfg, particles, fields, dem, zPartArray0, reportAreaInfo): ) # release secondary release area by just appending the particles log.info( - "Releasing secondary release area %s at t = %.2f s" - % (secRelRasterName, particles["t"]) + "Releasing secondary release area %s at t = %.2f s" % (secRelRasterName, particles["t"]) ) particles = particleTools.mergeParticleDict(particles, secRelParticles) # save index of secRel feature @@ -2933,15 +2806,11 @@ def savePartToPickle(dictList, outDir, logName): if isinstance(dictList, list): for dict in dictList: - fi = open( - outDir / ("particles_%s_%09.4f.pickle" % (logName, dict["t"])), "wb" - ) + fi = open(outDir / ("particles_%s_%09.4f.pickle" % (logName, dict["t"])), "wb") pickle.dump(dict, fi) fi.close() else: - fi = open( - outDir / ("particles_%s_%09.4f.pickle" % (logName, dictList["t"])), "wb" - ) + fi = open(outDir / ("particles_%s_%09.4f.pickle" % (logName, dictList["t"])), "wb") pickle.dump(dictList, fi) fi.close() @@ -2985,9 +2854,7 @@ def trackParticles(cfgTrackPart, dem, particlesList): if particleProperties == "": particleProperties = ["x", "y", "z", "ux", "uy", "uz", "m", "h"] else: - particleProperties = set( - ["x", "y", "z", "ux", "uy", "uz", "m", "h"] + particleProperties.split("|") - ) + particleProperties = set(["x", "y", "z", "ux", "uy", "uz", "m", "h"] + particleProperties.split("|")) # read location of particle to be tracked radius = cfgTrackPart.getfloat("radius") centerList = cfgTrackPart["centerTrackPartPoint"] @@ -2996,12 +2863,8 @@ def trackParticles(cfgTrackPart, dem, particlesList): "x": np.array([float(centerList[0])]), "y": np.array([float(centerList[1])]), } - centerTrackPartPoint["x"] = ( - centerTrackPartPoint["x"] - dem["originalHeader"]["xllcenter"] - ) - centerTrackPartPoint["y"] = ( - centerTrackPartPoint["y"] - 
dem["originalHeader"]["yllcenter"] - ) + centerTrackPartPoint["x"] = centerTrackPartPoint["x"] - dem["originalHeader"]["xllcenter"] + centerTrackPartPoint["y"] = centerTrackPartPoint["y"] - dem["originalHeader"]["yllcenter"] # start by finding the particles to be tracked particles2Track, track = particleTools.findParticles2Track( @@ -3009,9 +2872,7 @@ def trackParticles(cfgTrackPart, dem, particlesList): ) if track: # find those same particles and their children in the particlesList - particlesList, nPartTracked = particleTools.getTrackedParticles( - particlesList, particles2Track - ) + particlesList, nPartTracked = particleTools.getTrackedParticles(particlesList, particles2Track) # extract the wanted properties for the tracked particles trackedPartProp = particleTools.getTrackedParticlesProperties( @@ -3081,9 +2942,7 @@ def readFields( else: name = "*_" + r + "*.*" FieldsNameList = list(inDir.glob(name)) - timeListTemp = [ - float(element.stem.split("_t")[-1]) for element in FieldsNameList - ] + timeListTemp = [float(element.stem.split("_t")[-1]) for element in FieldsNameList] FieldsNameList = [x for _, x in sorted(zip(timeListTemp, FieldsNameList))] count = 0 for fieldsName in FieldsNameList: @@ -3213,6 +3072,30 @@ def exportFields( ) +def _findWrapperModuleInStack(): + """Find wrapper module name by inspecting the call stack. + + Searches the call stack for wrapper modules (e.g., com5SnowSlide, com6RockAvalanche) + that are calling into com1DFA functions. 
+ + Returns + ------- + str or None + Wrapper module name if found (e.g., "com6RockAvalanche"), None otherwise + """ + for frameInfo in inspect.stack(): + frameModule = frameInfo.frame.f_globals.get("__name__", "") + # Look for modules matching comN{Name}.comN{Name} pattern + # but not com1DFA.com1DFA itself + if frameModule.startswith("avaframe.com"): + # Extract the last component (the actual module name) + moduleName = frameModule.split(".")[-1] + # Check if it matches the comN pattern (starts with "com" followed by a digit) + if re.match(r"^com\d+", moduleName) and not frameModule.endswith("com1DFA.com1DFA"): + return moduleName + return None + + def getModuleNames(module): """Extract module name and short form by checking the call stack for wrapper modules. @@ -3231,43 +3114,25 @@ def getModuleNames(module): (modName, modNameShort) where modName is the full name (e.g., "com1DFA") and modNameShort is the short form (e.g., "com1") """ - # First check if we're being called from a wrapper module (com5SnowSlide, com6RockAvalanche, etc.) 
- callerModName = None - for frameInfo in inspect.stack(): - frameModule = frameInfo.frame.f_globals.get("__name__", "") - # Look for modules matching comN{Name}.comN{Name} pattern (e.g., avaframe.com6RockAvalanche.com6RockAvalanche) - # but not com1DFA.com1DFA itself, unless nothing else found - if frameModule.startswith("avaframe.com"): - # Extract the last component (the actual module name) - moduleName = frameModule.split(".")[-1] - # Check if it matches the comN pattern (starts with "com" followed by a digit) - if re.match(r"^com\d+", moduleName) and not frameModule.endswith( - "com1DFA.com1DFA" - ): - callerModName = moduleName - break + # Check for wrapper module in call stack + modName = _findWrapperModuleInStack() - # Use caller module name if found, otherwise fall back to the passed module parameter - if callerModName: - modName = callerModName - else: - modName = module.__name__.split(".")[-1] # Full name: com1DFA, com8MoTPSA, etc. + # Fall back to passed module if no wrapper found + if not modName: + modName = module.__name__.split(".")[-1] # Special case: com7Regional should be treated as com1DFA if modName == "com7Regional": modName = "com1DFA" + # Extract short name (com1, com8, etc.) shortModMatch = re.match(r"^(com\d+)", modName) - modNameShort = ( - shortModMatch.group(1) if shortModMatch else modName - ) # Short name: com1, com8, etc. 
+ modNameShort = shortModMatch.group(1) if shortModMatch else modName return modName, modNameShort -def prepareVarSimDict( - standardCfg, inputSimFiles, variationDict, simNameExisting="", module=com1DFA -): +def prepareVarSimDict(standardCfg, inputSimFiles, variationDict, simNameExisting="", module=com1DFA): """Prepare a dictionary with simulations that shall be run with varying parameters following the variation dict Parameters @@ -3306,9 +3171,7 @@ def prepareVarSimDict( # set simTypeList (that has been checked if available) as parameter in variationDict variationDict["simTypeList"] = simTypeList # create a dataFrame with all possible combinations of the variationDict values - variationDF = pd.DataFrame( - product(*variationDict.values()), columns=variationDict.keys() - ) + variationDF = pd.DataFrame(product(*variationDict.values()), columns=variationDict.keys()) # generate a dictionary of full simulation info for all simulations to be performed # simulation info must contain: simName, releaseScenario, relFile, configuration as dictionary @@ -3379,16 +3242,12 @@ def prepareVarSimDict( # check if DEM in Inputs has desired mesh size pathToDem = dP.checkRasterMeshSize(cfgSim, inputSimFiles["demFile"], "DEM") cfgSim["INPUT"]["DEM"] = pathToDem - dem = IOf.readRaster( - pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) - ) + dem = IOf.readRaster(pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem)) # check extent of inputs read from raster have correct extent and cellSize # first release area if inputSimFiles["entResInfo"]["relThFileType"] in [".asc", ".tif"]: - pathToRel, pathToRelFull, remeshedRel = dP.checkExtentAndCellSize( - cfgSim, relThFile, dem, "rel" - ) + pathToRel, pathToRelFull, remeshedRel = dP.checkExtentAndCellSize(cfgSim, relThFile, dem, "rel") cfgSim["INPUT"]["relThFile"] = pathToRel inputSimFiles["entResInfo"]["relRemeshed"] = remeshedRel @@ -3406,9 +3265,7 @@ def prepareVarSimDict( if modName in ["com1DFA", 
"com5SnowSlide", "com6RockAvalanche"]: # check if spatialVoellmy is chosen that friction fields have correct extent if cfgSim["GENERAL"]["frictModel"].lower() == "spatialvoellmy": - dem = IOf.readRaster( - pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) - ) + dem = IOf.readRaster(pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem)) for fric in ["mu", "xi"]: if inputSimFiles["entResInfo"][fric] == "Yes": pathToFric, _, remeshedFric = dP.checkExtentAndCellSize( @@ -3425,19 +3282,12 @@ def prepareVarSimDict( raise FileNotFoundError(message) # add info about dam file path to the cfg - if ( - cfgSim["GENERAL"]["dam"] == "True" - and inputSimFiles["damFile"] is not None - ): - cfgSim["INPUT"]["DAM"] = str( - pathlib.Path("DAM", inputSimFiles["damFile"].name) - ) + if cfgSim["GENERAL"]["dam"] == "True" and inputSimFiles["damFile"] is not None: + cfgSim["INPUT"]["DAM"] = str(pathlib.Path("DAM", inputSimFiles["damFile"].name)) # if tauC, mu, k used in com8 and com9 check extent of cellSize if modName in ["com8MoTPSA", "com9MoTVoellmy"]: - dem = IOf.readRaster( - pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) - ) + dem = IOf.readRaster(pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem)) if inputSimFiles["entResInfo"]["tauC"] == "Yes": pathToFric, pathToFricFull, remeshedFric = dP.checkExtentAndCellSize( @@ -3457,60 +3307,43 @@ def prepareVarSimDict( if cfgSim["Physical_parameters"]["Parameters"] == "auto": for fric in ["mu", "k"]: if inputSimFiles["entResInfo"][fric] == "Yes": - pathToFric, pathToFricFull, remeshedFric = ( - dP.checkExtentAndCellSize( - cfgSim, inputSimFiles["%sFile" % fric], dem, fric - ) + pathToFric, pathToFricFull, remeshedFric = dP.checkExtentAndCellSize( + cfgSim, inputSimFiles["%sFile" % fric], dem, fric ) cfgSim["INPUT"]["%sFile" % fric] = pathToFric inputSimFiles["entResInfo"]["%sRemeshed" % fric] = remeshedFric # check if forest effects = auto is chosen that forest 
parameter fields have correct extent - if ( - "res" in row._asdict()["simTypeList"] - and inputSimFiles["resFile"] is not None - ): + if "res" in row._asdict()["simTypeList"] and inputSimFiles["resFile"] is not None: if ( cfgSim["FOREST_EFFECTS"]["Forest effects"] == "auto" and inputSimFiles["entResInfo"]["bhd"] == "Yes" ): - pathToForest, pathToForestFull, remeshedForest = ( - dP.checkExtentAndCellSize( - cfgSim, inputSimFiles["%sFile" % "bhd"], dem, "bhd" - ) + pathToForest, pathToForestFull, remeshedForest = dP.checkExtentAndCellSize( + cfgSim, inputSimFiles["%sFile" % "bhd"], dem, "bhd" ) cfgSim["INPUT"]["%sFile" % "bhd"] = pathToForest inputSimFiles["entResInfo"]["%sRemeshed" % "bhd"] = remeshedForest # add info about entrainment file path to the cfg - if ( - "ent" in row._asdict()["simTypeList"] - and inputSimFiles["entFile"] is not None - ): + if "ent" in row._asdict()["simTypeList"] and inputSimFiles["entFile"] is not None: if inputSimFiles["entResInfo"]["entThFileType"] != ".shp": pathToEnt, pathToEntFull, remeshedEnt = dP.checkExtentAndCellSize( cfgSim, inputSimFiles["entThFile"], dem, "ent" ) cfgSim["INPUT"]["entThFile"] = pathToEnt inputSimFiles["entResInfo"]["entRemeshed"] = remeshedEnt - cfgSim["INPUT"]["entrainmentScenario"] = str( - pathlib.Path("ENT", inputSimFiles["entFile"].name) - ) + cfgSim["INPUT"]["entrainmentScenario"] = str(pathlib.Path("ENT", inputSimFiles["entFile"].name)) # add info about resistance file path to the cfg - if ( - "res" in row._asdict()["simTypeList"] - and inputSimFiles["resFile"] is not None - ): + if "res" in row._asdict()["simTypeList"] and inputSimFiles["resFile"] is not None: if inputSimFiles["entResInfo"]["resFileType"] != ".shp": pathToRes, pathToResFull, remeshedRes = dP.checkExtentAndCellSize( cfgSim, inputSimFiles["resFile"], dem, "res" ) cfgSim["INPUT"]["resFile"] = pathToRes inputSimFiles["entResInfo"]["resRemeshed"] = remeshedRes - cfgSim["INPUT"]["resistanceScenario"] = str( - pathlib.Path("RES", 
inputSimFiles["resFile"].name) - ) + cfgSim["INPUT"]["resistanceScenario"] = str(pathlib.Path("RES", inputSimFiles["resFile"].name)) # add thickness values if read from shp and not varied cfgSim = dP.appendThicknessToCfg(cfgSim) @@ -3522,16 +3355,12 @@ def prepareVarSimDict( frictIndi = None volIndi = None - pathToDemFull = pathlib.Path( - cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem - ) + pathToDemFull = pathlib.Path(cfgSim["GENERAL"]["avalancheDir"], "Inputs", pathToDem) if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: # if frictModel is samosATAuto compute release vol if cfgSim["GENERAL"]["frictModel"].lower() == "samosatauto": - relVolume = fetchRelVolume( - rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"] - ) + relVolume = fetchRelVolume(rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"]) else: relVolume = "" @@ -3539,17 +3368,13 @@ def prepareVarSimDict( cfgSim = checkCfg.checkCellSizeKernelRadius(cfgSim) # only keep friction model parameters that are used - cfgSim = checkCfg.checkCfgFrictionModel( - cfgSim, inputSimFiles, relVolume=relVolume - ) + cfgSim = checkCfg.checkCfgFrictionModel(cfgSim, inputSimFiles, relVolume=relVolume) # set frictModelIndicator, this needs to happen AFTER checkCfgFrictModel frictIndi = com1DFATools.setFrictTypeIndicator(cfgSim) elif modName in ["com8MoTPSA", "com9MoTVoellmy"]: - relVolume = fetchRelVolume( - rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"] - ) + relVolume = fetchRelVolume(rel, cfgSim, pathToDemFull, inputSimFiles["secondaryRelFile"]) # set Volume class identificator volIndi = setVolumeIndicator(cfgSim, relVolume) @@ -3630,9 +3455,7 @@ def getSimTypeList(standardCfg, simTypeList, inputSimFiles): validSimTypes = validSimTypesStr.split("|") validArray = [True if item in validSimTypes else False for item in simTypeList] if False in validArray: - message = ( - "A non-valid entry found in simType, valid Types are %s" % validSimTypesStr - ) + message = "A 
non-valid entry found in simType, valid Types are %s" % validSimTypesStr log.error(message) raise AssertionError(message) @@ -3666,17 +3489,12 @@ def getSimTypeList(standardCfg, simTypeList, inputSimFiles): if entResInfo["flagSecondaryRelease"] == "No": standardCfg["GENERAL"]["secRelArea"] = "False" else: - log.info( - "Using the secondary release area file: %s" - % inputSimFiles["secondaryRelFile"] - ) + log.info("Using the secondary release area file: %s" % inputSimFiles["secondaryRelFile"]) return standardCfg, simTypeList -def runOrLoadCom1DFA( - avalancheDir, cfgMain, runDFAModule=True, cfgFile="", deleteOutput=True -): +def runOrLoadCom1DFA(avalancheDir, cfgMain, runDFAModule=True, cfgFile="", deleteOutput=True): """Run or load DFA results depending on runDFAModule=True or False Parameters @@ -3714,16 +3532,11 @@ def runOrLoadCom1DFA( # load DFA results simDF, _ = cfgUtils.readAllConfigurationInfo(avalancheDir) if simDF is None: - message = ( - "Did not find any com1DFA simulations in %s/Outputs/com1DFA/" - % avalancheDir - ) + message = "Did not find any com1DFA simulations in %s/Outputs/com1DFA/" % avalancheDir log.error(message) raise FileExistsError(message) - dataDF, resTypeList = fU.makeSimFromResDF( - avalancheDir, "com1DFA", inputDir="", simName="" - ) + dataDF, resTypeList = fU.makeSimFromResDF(avalancheDir, "com1DFA", inputDir="", simName="") simDF = simDF.reset_index().merge(dataDF, on="simName").set_index("index") return dem, simDF, resTypeList @@ -3766,9 +3579,7 @@ def fetchRelVolume(releaseFile, cfg, pathToDem, secondaryReleaseFile, radius=0.0 demVol = DFAtls.getAreaMesh(demVol, methodMeshNormal) # compute volume of release area - relVolume = initializeRelVol( - cfg, demVol, releaseFile, radius, releaseType="primary" - ) + relVolume = initializeRelVol(cfg, demVol, releaseFile, radius, releaseType="primary") if cfg["GENERAL"]["secRelArea"] == "True": # compute volume of secondary release area @@ -3785,8 +3596,7 @@ def fetchRelVolume(releaseFile, 
cfg, pathToDem, secondaryReleaseFile, radius=0.0 relVolume = relVolume + secondaryRelVolume else: log.info( - "%.2f meter grid based release volume is: %.2f m3" - % (demVol["header"]["cellsize"], relVolume) + "%.2f meter grid based release volume is: %.2f m3" % (demVol["header"]["cellsize"], relVolume) ) return relVolume @@ -3822,9 +3632,7 @@ def initializeRelVol(cfg, demVol, releaseFile, radius, releaseType="primary"): # check if release thickness provided as field or constant value if cfg["INPUT"][(typeTh + "File")] != "": # read relThField from file - relThFilePath = pathlib.Path( - cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"][typeTh + "File"] - ) + relThFilePath = pathlib.Path(cfg["GENERAL"]["avalancheDir"], "Inputs", cfg["INPUT"][typeTh + "File"]) relThFieldFull = IOf.readRaster(relThFilePath) relThField = relThFieldFull["rasterData"] @@ -3839,9 +3647,7 @@ def initializeRelVol(cfg, demVol, releaseFile, radius, releaseType="primary"): releaseLine = shpConv.readLine(releaseFile, "release1", demVol) # check if release features overlap between features thresholdPointInPoly = cfg["GENERAL"].getfloat("thresholdPointInPoly") - geoTrans.prepareArea( - releaseLine, demVol, thresholdPointInPoly, combine=True, checkOverlap=True - ) + geoTrans.prepareArea(releaseLine, demVol, thresholdPointInPoly, combine=True, checkOverlap=True) releaseLine["type"] = "Release" # set thickness values on releaseLine releaseLine = setThickness(cfg, releaseLine, typeTh) @@ -3933,9 +3739,7 @@ def adaptDEM(dem, fields, cfg): sfcChange = np.zeros_like(FTDet) ZDEMadapt = ZDEM - _, _, NzNormed = DFAtls.normalize( - dem["Nx"].copy(), dem["Ny"].copy(), dem["Nz"].copy() - ) + _, _, NzNormed = DFAtls.normalize(dem["Nx"].copy(), dem["Ny"].copy(), dem["Nz"].copy()) if cfg.getboolean("adaptSfcStopped"): # compute thickness to depth diff --git a/avaframe/com1DFA/com1DFATools.py b/avaframe/com1DFA/com1DFATools.py index a1bdc0c8e..7c940f61b 100644 --- a/avaframe/com1DFA/com1DFATools.py +++ 
b/avaframe/com1DFA/com1DFATools.py @@ -53,17 +53,12 @@ def getPartInitMethod(cfg, csz, relThForPart): # derive mass per particle to define number of particles per cell: if massPerParticleDeterminationMethod == "MPPDIR": massPerPart = cfg.getfloat("massPerPart") - log.debug( - "Number of particles defined by: mass per particle %s" % cfg["massPerPart"] - ) + log.debug("Number of particles defined by: mass per particle %s" % cfg["massPerPart"]) elif massPerParticleDeterminationMethod == "MPPDH": deltaTh = cfg.getfloat("deltaTh") ds = min(csz, cfg.getfloat("sphKernelRadius")) massPerPart = rho * ds * ds * deltaTh - log.debug( - "Number of particles defined by: release thickness per particle: %s" - % cfg["deltaTh"] - ) + log.debug("Number of particles defined by: release thickness per particle: %s" % cfg["deltaTh"]) log.debug("mass per particle is %.2f" % massPerPart) elif massPerParticleDeterminationMethod == "MPPKR": sphKernelRadius = cfg.getfloat("sphKernelRadius") @@ -145,25 +140,14 @@ def compareSimCfgToDefaultCfgCom1DFA(simCfg, module=com1DFA): # If entrainment is requested, and it is set in shapefile, check if it contains the default entrainment thickness # in ALL features of the shapefile - if ( - simCfg["GENERAL"]["simTypeList"] == "ent" - and simCfg["GENERAL"]["entThFromFile"] == "True" - ): + if simCfg["GENERAL"]["simTypeList"] == "ent" and simCfg["GENERAL"]["entThFromFile"] == "True": defaultEntTh = defCfg["GENERAL"]["entThIfMissingInShp"] # this try handles raster instead of shapefile try: - if not all( - [ - x == defaultEntTh - for x in simCfg["INPUT"]["entThThickness"].split("|") - ] - ): + if not all([x == defaultEntTh for x in simCfg["INPUT"]["entThThickness"].split("|")]): defaultIdentifierString = "C" - log.info( - "Non-default entrainment value(s) used: %s" - % simCfg["INPUT"]["entThThickness"] - ) + log.info("Non-default entrainment value(s) used: %s" % simCfg["INPUT"]["entThThickness"]) except KeyError: defaultIdentifierString = "D" @@ -179,19 
+163,14 @@ def compareSimCfgToDefaultCfgCom1DFA(simCfg, module=com1DFA): # as changed if default is set to meshCellSize if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: if defCfg["GENERAL"]["sphKernelRadius"] == "meshCellSize": - if ( - simCfg["GENERAL"]["sphKernelRadius"] - == simCfg["GENERAL"]["meshCellSize"] - ): + if simCfg["GENERAL"]["sphKernelRadius"] == simCfg["GENERAL"]["meshCellSize"]: excludeItems.append("root['GENERAL']['sphKernelRadius']") # do the diff and analyse # this is the deepdiff > 8.0 version # TODO: remove this again in the future when deepdiff > 8.0 is wider established try: - diff = DeepDiff( - defCfg, simCfg, exclude_paths=excludeItems, threshold_to_diff_deeper=0 - ) + diff = DeepDiff(defCfg, simCfg, exclude_paths=excludeItems, threshold_to_diff_deeper=0) # for older deepdiff versions which don't know threshold_to_diff_deeper except ValueError: diff = DeepDiff(defCfg, simCfg, exclude_paths=excludeItems) @@ -299,9 +278,7 @@ def createSimDictFromCfgs(cfgMain, cfgPath, module=com1DFA): # fetch input data and create work and output directories # TODO: so for now remeshed dir is cleaned before a run - inputSimFilesAll, outDir, simDFExisting, simNameExisting = initializeInputs( - avalancheDir, True, module - ) + inputSimFilesAll, outDir, simDFExisting, simNameExisting = initializeInputs(avalancheDir, True, module) # save dem file path as it is deleted from input sim files dict once it is set in the config demFile = inputSimFilesAll["demFile"] @@ -310,9 +287,7 @@ def createSimDictFromCfgs(cfgMain, cfgPath, module=com1DFA): cfgDir = pathlib.Path(cfgPath) cfgFilesAll = list(cfgDir.glob("*.ini")) if len(cfgFilesAll) == 0: - message = "No configuration file found to create simulation runs in: %s" % str( - cfgDir - ) + message = "No configuration file found to create simulation runs in: %s" % str(cfgDir) log.error(message) raise FileNotFoundError(message) else: @@ -324,16 +299,12 @@ def createSimDictFromCfgs(cfgMain, cfgPath, 
module=com1DFA): # loop over all cfgFiles and create simDict for index, cfgFile in enumerate(cfgFilesAll): # read configuration - cfgFromFile = cfgUtils.getModuleConfig( - module, fileOverride=cfgFile, toPrint=False - ) + cfgFromFile = cfgUtils.getModuleConfig(module, fileOverride=cfgFile, toPrint=False) # create dictionary with one key for each simulation that shall be performed # NOTE: sims that are added don't need to be added to the simNameExisting list as # if new identical sims are added the simDict entry is just updated and not a duplicate one added - simDict = dP.createSimDict( - avalancheDir, module, cfgFromFile, inputSimFilesAll, simNameExisting - ) + simDict = dP.createSimDict(avalancheDir, module, cfgFromFile, inputSimFilesAll, simNameExisting) simDictAll.update(simDict) # reset dem file @@ -457,18 +428,10 @@ def updateResCoeffFields(fields, cfg): thMax = cfg.getfloat("forestThMax") # create new rasters using FV, FT and thresholds to mask - detRasterInt = np.where( - ((fields["FV"] <= vMin) | (fields["FT"] <= thMin)), detOrig, 0.0 - ) - detRaster = np.where( - ((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, detRasterInt - ) - cResRasterInt = np.where( - ((fields["FV"] > vMin) & (fields["FT"] > thMin)), cResOrig, 0.0 - ) - cResRaster = np.where( - ((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResRasterInt - ) + detRasterInt = np.where(((fields["FV"] <= vMin) | (fields["FT"] <= thMin)), detOrig, 0.0) + detRaster = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, detRasterInt) + cResRasterInt = np.where(((fields["FV"] > vMin) & (fields["FT"] > thMin)), cResOrig, 0.0) + cResRaster = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResRasterInt) if len(np.where((detRaster > 0) & (cResRaster > 0))[0]) > 0: message = "Detrainment and increased friction within same cell!" 
@@ -478,9 +441,7 @@ def updateResCoeffFields(fields, cfg): # if max thresholds are exceeded: forest destroyed remove forest lTh = len(np.where((fields["FV"] > vMax) | (fields["FT"] > thMax))[0]) if lTh > 0: - cResOrig = np.where( - ((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResOrig - ) + cResOrig = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, cResOrig) detOrig = np.where(((fields["FV"] > vMax) | (fields["FT"] > thMax)), 0, detOrig) fields["cResRasterOrig"] = cResOrig fields["detRasterOrig"] = detOrig @@ -541,8 +502,6 @@ def updateTimeField(fields, timeStep): FT = fields["FT"] # set time step to previously not affected cells - fields["timeInfo"] = np.where( - ((fields["timeInfo"] == 0) & (FT != 0)), timeStep, fields["timeInfo"] - ) + fields["timeInfo"] = np.where(((fields["timeInfo"] == 0) & (FT != 0)), timeStep, fields["timeInfo"]) return fields diff --git a/avaframe/in3Utils/cfgUtils.py b/avaframe/in3Utils/cfgUtils.py index 5d963de27..d4c9ef4b5 100644 --- a/avaframe/in3Utils/cfgUtils.py +++ b/avaframe/in3Utils/cfgUtils.py @@ -20,7 +20,7 @@ - relName: Release area scenario name (required) - simHash: Configuration hash (required, 10 characters) - modName: Short module name - "com1", "com2", etc. (new format only) - - defID: Default indicator - "C" or "D" (defaults to "C") + - defID: Default indicator - "C" or "D" (required; defaults to "C") - frictIndi: Friction calibration - "S", "M", or "L" (optional) - simType: Simulation type - "null", "ent", "res", "entres" (required) - modelType: Model type - "dfa", etc. 
(required) diff --git a/avaframe/out1Peak/outPlotAllPeak.py b/avaframe/out1Peak/outPlotAllPeak.py index d800e0dbd..045511e37 100644 --- a/avaframe/out1Peak/outPlotAllPeak.py +++ b/avaframe/out1Peak/outPlotAllPeak.py @@ -54,15 +54,13 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): inDir = avaDir / "Inputs" peakFilesDF = fU.makeSimDF(inputDir, avaDir=avaDir) if ( - modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche", "com9MoTVoellmy"] + modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche", "com9MoTVoellmy", "com8MoTPSA"] and demData == "" ): configurationDF = cfgUtils.createConfigurationInfo(avaDir, comModule=modName) configurationDF = configurationDF.rename(columns={"resType": "resTypeList"}) peakFilesDF = ( - peakFilesDF.reset_index() - .merge(configurationDF, on=["simName", "modelType"]) - .set_index("index") + peakFilesDF.reset_index().merge(configurationDF, on=["simName", "modelType"]).set_index("index") ) if demData == "": @@ -114,6 +112,7 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): "com5SnowSlide", "com6RockAvalanche", "com9MoTVoellmy", + "com8MoTPSA", ]: demFile = inDir / row["DEM"] demDataRaster = IOf.readRaster(demFile, noDataToNan=True) @@ -171,9 +170,7 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): elif fileSType in [".asc", ".tif"]: sarea = IOf.readRaster(sFile, noDataToNan=True) xGrid, yGrid, _, _ = gT.makeCoordGridFromHeader(sarea["header"]) - contourDictXY = pU.fetchContourCoords( - xGrid, yGrid, sarea["rasterData"], 0.001 - ) + contourDictXY = pU.fetchContourCoords(xGrid, yGrid, sarea["rasterData"], 0.001) for key in contourDictXY: ax.plot( contourDictXY[key]["x"], @@ -218,9 +215,7 @@ def plotAllPeakFields(avaDir, cfgFLAGS, modName, demData=""): return plotDict -def addConstrainedDataField( - fileName, resType, demField, ax, cellSize, alpha=1.0, oneColor="" -): +def addConstrainedDataField(fileName, resType, demField, ax, cellSize, alpha=1.0, oneColor=""): """find fileName data, 
constrain data and demField to where there is data, create colormap, define extent, add hillshade contours, add to axes and add colorbar @@ -405,9 +400,7 @@ def plotAllFields(avaDir, inputDir, outDir, unit="", constrainData=True): aspect=nx / ny, ) else: - extentCellCenters, extentCellCorners = pU.createExtentMinMax( - data, header, originLLCenter=True - ) + extentCellCenters, extentCellCorners = pU.createExtentMinMax(data, header, originLLCenter=True) im1 = ax.imshow( data, cmap=cmap, diff --git a/avaframe/runScripts/runPlotAreaRefDiffs.py b/avaframe/runScripts/runPlotAreaRefDiffs.py index 85f842fc2..9afb82ebf 100644 --- a/avaframe/runScripts/runPlotAreaRefDiffs.py +++ b/avaframe/runScripts/runPlotAreaRefDiffs.py @@ -55,9 +55,7 @@ ) # convert polygon to raster with value 1 inside polygon and 0 outside the polygon referenceLine = shpConv.readLine(referenceFile, "reference", dem) -referenceLine = gT.prepareArea( - referenceLine, dem, np.sqrt(2), combine=True, checkOverlap=False -) +referenceLine = gT.prepareArea(referenceLine, dem, np.sqrt(2), combine=True, checkOverlap=False) # if available zoom into area provided by crop shp file in Inputs/CROPSHAPE cropFile, cropInfo, _ = gI.getAndCheckInputFiles( @@ -65,11 +63,9 @@ ) if cropInfo: cropLine = shpConv.readLine(cropFile, "cropFile", dem) - cropLine = gT.prepareArea( - cropLine, dem, np.sqrt(2), combine=True, checkOverlap=False - ) + cropLine = gT.prepareArea(cropLine, dem, np.sqrt(2), combine=True, checkOverlap=False) -if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche"]: +if modName in ["com1DFA", "com5SnowSlide", "com6RockAvalanche", "com8MoTPSA", "com9MoTVoellmy"]: # load dataFrame for all configurations of simulations in avalancheDir simDF = cfgUtils.createConfigurationInfo(avalancheDir) # create data frame that lists all available simulations and path to their result type result files @@ -111,9 +107,7 @@ else: # load all result files resultDir = pathlib.Path(avalancheDir, "Outputs", modName, 
"peakFiles") - peakFilesList = list(resultDir.glob("*_%s.tif" % resType)) + list( - resultDir.glob("*_%s.asc" % resType) - ) + peakFilesList = list(resultDir.glob("*_%s.tif" % resType)) + list(resultDir.glob("*_%s.asc" % resType)) for pF in peakFilesList: simData = IOf.readRaster(pF) simName = pF.stem diff --git a/avaframe/tests/test_com1DFA.py b/avaframe/tests/test_com1DFA.py index 0b32a7e56..ea96aa329 100644 --- a/avaframe/tests/test_com1DFA.py +++ b/avaframe/tests/test_com1DFA.py @@ -3160,7 +3160,6 @@ def test_tSteps_output_behavior(tmp_path, caplog): def test_getModuleNames(): """Test getModuleNames function for extracting module names from call stack""" - import inspect from unittest.mock import patch, MagicMock # Test 1: Direct call from com1DFA module diff --git a/avaframe/tests/test_scarp.py b/avaframe/tests/test_scarp.py index b46c84a9e..cb1b89071 100644 --- a/avaframe/tests/test_scarp.py +++ b/avaframe/tests/test_scarp.py @@ -438,7 +438,6 @@ def test_error_message_attribute_names(): # the same attribute names that are actually used in the code import avaframe.com6RockAvalanche.scarp as scarp_module - import inspect # Read the scarp.py file to check error messages scarp_path = pathlib.Path(scarp_module.__file__)