package scratch.kevin.nshm23;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.opensha.commons.geo.GriddedRegion;
import org.opensha.commons.geo.json.Feature;
import org.opensha.commons.hpc.JavaShellScriptWriter;
import org.opensha.commons.hpc.mpj.FastMPJShellScriptWriter;
import org.opensha.commons.hpc.mpj.NoMPJSingleNodeShellScriptWriter;
import org.opensha.commons.hpc.pbs.BatchScriptWriter;
import org.opensha.commons.hpc.pbs.HovenweepScriptWriter;
import org.opensha.commons.hpc.pbs.USC_CARC_ScriptWriter;
import org.opensha.sha.earthquake.faultSysSolution.hazard.mpj.MPJ_SingleSolHazardCalc;
import org.opensha.sha.earthquake.param.IncludeBackgroundOption;
import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader;

import com.google.common.base.Preconditions;

import edu.usc.kmilner.mpj.taskDispatch.MPJTaskCalculator;

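/**
 * Writes SLURM batch scripts that run {@link MPJ_SingleSolHazardCalc} for a branch-averaged
 * (or true-mean) NSHM23 solution on an HPC cluster, one job per background seismicity option.
 */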
public class BranchAveragedHazardScriptWriter {

    public static void main(String[] args) throws IOException {
        String baseDirName = "2024_02_02-nshm23_branches-WUS_FM_v3";

//      String suffix = "true_mean";
//      String solFileName = "true_mean_solution.zip";

        String suffix = "ba_only";
        String solFileName = "results_WUS_FM_v3_branch_averaged_gridded.zip";

        boolean noMFDs = false;

        GriddedRegion gridReg = new GriddedRegion(
                NSHM23_RegionLoader.loadFullConterminousWUS(), 0.1, GriddedRegion.ANCHOR_0_0);

        IncludeBackgroundOption[] bgOps = IncludeBackgroundOption.values();

        String dirName = baseDirName+"-"+suffix;
        if (noMFDs)
            dirName += "-no_mfds";

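        // local directory where the job scripts and region file are written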
        File localMainDir = new File("/home/kevin/OpenSHA/UCERF4/batch_inversions");
        File localDir = new File(localMainDir, dirName);
        Preconditions.checkState(localDir.exists() || localDir.mkdir());

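        // alternate configuration for the USC CARC cluster (currently commented out)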
//      File remoteMainDir = new File("/project/scec_608/kmilner/nshm23/batch_inversions");
//      int remoteTotalThreads = 20;
//      int remoteTotalMemGB = 50;
//      String queue = "scec";
//      int nodes = 36;
//      int mins = 600;
////        int nodes = 18;
////        JavaShellScriptWriter mpjWrite = new MPJExpressShellScriptWriter(
////                USC_CARC_ScriptWriter.JAVA_BIN, remoteTotalMemGB*1024, null, USC_CARC_ScriptWriter.MPJ_HOME);
//      JavaShellScriptWriter parallelMPJWrite = new FastMPJShellScriptWriter(
//              USC_CARC_ScriptWriter.JAVA_BIN, remoteTotalMemGB*1024, null, USC_CARC_ScriptWriter.FMPJ_HOME);
//      JavaShellScriptWriter singleMPJWrite = new NoMPJSingleNodeShellScriptWriter(USC_CARC_ScriptWriter.JAVA_BIN,
//              remoteTotalMemGB*1024, null);
//      BatchScriptWriter pbsWrite = new USC_CARC_ScriptWriter();

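        // active configuration targets the USGS Hovenweep cluster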
        File remoteMainDir = new File("/caldera/hovenweep/projects/usgs/hazards/ehp/kmilner/nshm23/batch_inversions");
        int remoteTotalThreads = 128;
        int remoteTotalMemGB = 448;
        String queue = null;
        int nodes = 4;
        int mins = 180;
//      int nodes = 18;
//      JavaShellScriptWriter mpjWrite = new MPJExpressShellScriptWriter(
//              USC_CARC_ScriptWriter.JAVA_BIN, remoteTotalMemGB*1024, null, USC_CARC_ScriptWriter.MPJ_HOME);
        JavaShellScriptWriter parallelMPJWrite = new FastMPJShellScriptWriter(
                HovenweepScriptWriter.JAVA_BIN, remoteTotalMemGB*1024, null, HovenweepScriptWriter.FMPJ_HOME);
        JavaShellScriptWriter singleMPJWrite = new NoMPJSingleNodeShellScriptWriter(HovenweepScriptWriter.JAVA_BIN,
                remoteTotalMemGB*1024, null);
        BatchScriptWriter pbsWrite = new HovenweepScriptWriter();

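        // point the generated scripts at the remote directories via environment variables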
        parallelMPJWrite.setEnvVar("MAIN_DIR", remoteMainDir.getAbsolutePath());
        singleMPJWrite.setEnvVar("MAIN_DIR", remoteMainDir.getAbsolutePath());
        String mainDirPath = "$MAIN_DIR";
        parallelMPJWrite.setEnvVar("DIR", mainDirPath+"/"+dirName);
        singleMPJWrite.setEnvVar("DIR", mainDirPath+"/"+dirName);
        String dirPath = "$DIR";

        List<File> classpath = new ArrayList<>();
        classpath.add(new File(dirPath+"/opensha-dev-all.jar"));
        parallelMPJWrite.setClasspath(classpath);

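        // single-node jobs run without the FastMPJ runtime, so the MPJ API jar is added explicitly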
        List<File> singleClasspath = new ArrayList<>(classpath);
        singleClasspath.add(new File("/project/scec_608/kmilner/git/opensha/lib/mpj-0.38.jar"));
        singleMPJWrite.setClasspath(singleClasspath);

        // write the region
        File localReg = new File(localDir, "gridded_region.json");
        Feature.write(gridReg.toFeature(), localReg);

        String resultsPath = dirPath+"/results";
        String regPath = dirPath+"/"+localReg.getName();

        String solFilePath = "$DIR/"+solFileName;

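        // setup lines symlink the solution zip from the base directory into the job directory if it isn't already there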
        List<String> setupLines = new ArrayList<>();
        setupLines.add("if [[ ! -e "+solFilePath+" ]];then");
        setupLines.add(" ln -s $MAIN_DIR/"+baseDirName+"/"+solFileName+" "+solFilePath);
        setupLines.add("fi");
        parallelMPJWrite.setCustomSetupLines(setupLines);
        singleMPJWrite.setCustomSetupLines(setupLines);

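        // write one job script per background (gridded) seismicity option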
        for (IncludeBackgroundOption bgOp : bgOps) {
            int myNodes;
            JavaShellScriptWriter mpjWrite;
            String dispatchArgs;
            if (bgOp == IncludeBackgroundOption.INCLUDE && bgOps.length == 3) {
                // do single node
                myNodes = 1;
                mpjWrite = singleMPJWrite;
                dispatchArgs = MPJTaskCalculator.argumentBuilder().exactDispatch(gridReg.getNodeCount()).threads(remoteTotalThreads).build();
            } else {
                // do parallel
                myNodes = nodes;
                mpjWrite = parallelMPJWrite;

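                // size task dispatch batches according to the number of grid nodes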
                int maxDispatch;
                if (gridReg.getNodeCount() > 50000)
                    maxDispatch = 1000;
                else if (gridReg.getNodeCount() > 10000)
                    maxDispatch = 500;
                else if (gridReg.getNodeCount() > 5000)
                    maxDispatch = remoteTotalThreads*5;
                else
                    maxDispatch = remoteTotalThreads*3;
                dispatchArgs = MPJTaskCalculator.argumentBuilder().minDispatch(remoteTotalThreads)
                        .maxDispatch(maxDispatch).threads(remoteTotalThreads).build();
            }

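            // command line arguments for MPJ_SingleSolHazardCalc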
            String argz = "--input-file "+dirPath+"/"+solFileName;
            argz += " --output-dir "+resultsPath;
            argz += " --output-file "+resultsPath+"_hazard_"+bgOp.name()+".zip";
            argz += " --region "+regPath;
            if (noMFDs)
                argz += " --no-mfds";
            argz += " --gridded-seis "+bgOp.name();
            argz += " "+dispatchArgs;

            File jobFile = new File(localDir, "batch_hazard_"+bgOp.name()+".slurm");

            List<String> script = mpjWrite.buildScript(MPJ_SingleSolHazardCalc.class.getName(), argz);

            System.out.println("Writing "+jobFile.getAbsolutePath());

            pbsWrite.writeScript(jobFile, script, mins, myNodes, remoteTotalThreads, queue);
        }
    }

}