Merge pull request #53 from opensha/2024_09-point_source_dist_corr_refactor

Updates for point source refactor
kevinmilner authored Dec 9, 2024
2 parents a1c0100 + fd6e42c commit 1ffabc3
Showing 34 changed files with 1,843 additions and 397 deletions.
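
As a quick orientation for the diffs that follow: point-source distance corrections become explicit arguments (a PointSourceDistanceCorrections selection when building an individual source, or a GriddedSeismicitySettings bundle when asking a grid source provider for sources), and PointSource13b is swapped for PointSourceNshm. A minimal before/after sketch, assembled from the Idriss2013Debug hunks below (variable names as they appear there; the trailing null added to the Point2Vert_FaultPoisSource calls is presumably the new correction argument):

import org.opensha.sha.earthquake.rupForecastImpl.PointSourceNshm;
import org.opensha.sha.faultSurface.utils.PointSourceDistanceCorrections;

// old pattern (removed in this commit): hard-coded depths array + PointSource13b
// double[] DEPTHS = new double[] {5.0, 1.0};
// pointSource = new PointSource13b(faultLoc, mfd, duration, DEPTHS, mechMap);

// new pattern: PointSourceNshm takes an explicit distance-correction choice
pointSource = new PointSourceNshm(faultLoc, mfd, duration, mechMap,
        PointSourceDistanceCorrections.NONE.get());
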
10 changes: 5 additions & 5 deletions src/main/java/scratch/kevin/nga/Idriss2013Debug.java
@@ -10,10 +10,11 @@
import org.opensha.sha.earthquake.param.BackgroundRupType;
import org.opensha.sha.earthquake.param.ProbabilityModelOptions;
import org.opensha.sha.earthquake.rupForecastImpl.FaultRuptureSource;
import org.opensha.sha.earthquake.rupForecastImpl.PointSource13b;
import org.opensha.sha.earthquake.rupForecastImpl.PointSourceNshm;
import org.opensha.sha.earthquake.rupForecastImpl.WGCEP_UCERF_2_Final.griddedSeis.Point2Vert_FaultPoisSource;
import org.opensha.sha.faultSurface.EvenlyGriddedSurface;
import org.opensha.sha.faultSurface.FourPointEvenlyGriddedSurface;
import org.opensha.sha.faultSurface.utils.PointSourceDistanceCorrections;
import org.opensha.sha.imr.attenRelImpl.ngaw2.NGAW2_Wrappers.Idriss_2014_Wrapper;
import org.opensha.sha.imr.param.IntensityMeasureParams.PGA_Param;
import org.opensha.sha.imr.param.SiteParams.DepthTo1pt0kmPerSecParam;
@@ -66,27 +67,26 @@ public static void main(String[] args) {
double fracReverse = 0d;
WC1994_MagLengthRelationship magLenRel = new WC1994_MagLengthRelationship();
double ptSrcCutoff = 6.0;
double[] DEPTHS = new double[] {5.0, 1.0};

ProbEqkSource pointSource;

switch (bgRupType) {
case CROSSHAIR:
pointSource = new Point2Vert_FaultPoisSource(faultLoc, mfd, magLenRel, duration,
ptSrcCutoff, fracStrikeSlip, fracNormal,
fracReverse, true);
fracReverse, true, null);
break;
case FINITE:
pointSource = new Point2Vert_FaultPoisSource(faultLoc, mfd, magLenRel, duration,
ptSrcCutoff, fracStrikeSlip, fracNormal,
fracReverse, false);
fracReverse, false, null);
break;
case POINT:
Map<FocalMech, Double> mechMap = Maps.newHashMap();
mechMap.put(FocalMech.STRIKE_SLIP, fracStrikeSlip);
mechMap.put(FocalMech.REVERSE, fracReverse);
mechMap.put(FocalMech.NORMAL, fracNormal);
pointSource = new PointSource13b(faultLoc, mfd, duration, DEPTHS, mechMap);
pointSource = new PointSourceNshm(faultLoc, mfd, duration, mechMap, PointSourceDistanceCorrections.NONE.get());
break;
default:
throw new IllegalStateException("Unknown Background Rup Type: "+bgRupType);

@@ -1,4 +1,4 @@
package scratch.nshm23;
package scratch.kevin.nshm23;

import java.awt.Color;
import java.io.File;

@@ -33,6 +33,7 @@ public static void main(String[] args) throws IOException {
boolean linkFromBase = true;
Double vs30 = null;
double gridSpacing = 0.1;
boolean supersample = false;

double[] periods = { 0d, 0.2d, 1d, 5d };
AttenRelRef[] gmms = null;
@@ -77,7 +78,9 @@ public static void main(String[] args) throws IOException {
// String solFileName = "results_PRVI_SUB_FM_LARGE_branch_averaged_gridded.zip";

String baseDirName = "2024_11_19-prvi25_crustal_subduction_combined_branches";
String suffix = "ba_only";
// String suffix = "ba_only";
// String suffix = "ba_only-test_grid_branch";
String suffix = "ba_only-test_grid_branch-supersample"; supersample = true;
String solFileName = "combined_branch_averaged_solution.zip";

// String baseDirName = "2024_11_19-prvi25_crustal_branches-dmSample5x";
@@ -158,6 +161,9 @@ public static void main(String[] args) throws IOException {
// remoteTotalMemGB*1024, null);
// BatchScriptWriter pbsWrite = new HovenweepScriptWriter();

if (parallelMPJWrite instanceof FastMPJShellScriptWriter)
((FastMPJShellScriptWriter)parallelMPJWrite).setUseLaunchWrapper(true);

parallelMPJWrite.setEnvVar("MAIN_DIR", remoteMainDir.getAbsolutePath());
singleMPJWrite.setEnvVar("MAIN_DIR", remoteMainDir.getAbsolutePath());
String mainDirPath = "$MAIN_DIR";
@@ -239,6 +245,8 @@ else if (gridReg.getNodeCount() > 5000)
argz += (float)periods[p];
}
}
if (supersample)
argz += " --supersample";
argz += " "+dispatchArgs;

File jobFile = new File(localDir, "batch_hazard_"+bgOp.name()+".slurm");
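
The new supersample toggle above just threads through to the hazard job's arguments: when enabled it tags the run suffix and appends a --supersample flag to the MPJ command line, as sketched here with the values set in this diff:

boolean supersample = true;
String suffix = "ba_only-test_grid_branch-supersample";
// ...later, when assembling the batch job arguments:
if (supersample)
    argz += " --supersample";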

123 changes: 123 additions & 0 deletions src/main/java/scratch/kevin/nshm23/SectRIDistCSVWriter.java
@@ -0,0 +1,123 @@
package scratch.kevin.nshm23;

import java.awt.geom.Point2D;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.opensha.commons.data.CSVFile;
import org.opensha.commons.data.function.ArbDiscrEmpiricalDistFunc;
import org.opensha.commons.data.function.EvenlyDiscretizedFunc;
import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet;
import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution;
import org.opensha.sha.earthquake.faultSysSolution.modules.BranchSectParticMFDs;
import org.opensha.sha.earthquake.faultSysSolution.util.FaultSysTools;
import org.opensha.sha.faultSurface.FaultSection;
import org.opensha.sha.magdist.IncrementalMagFreqDist;

public class SectRIDistCSVWriter {

public static void main(String[] args) throws IOException {
FaultSystemSolution sol = FaultSystemSolution.load(new File("/home/kevin/OpenSHA/nshm23/batch_inversions/"
+ "2024_02_02-nshm23_branches-WUS_FM_v3/results_WUS_FM_v3_branch_averaged.zip"));
FaultSystemRupSet rupSet = sol.getRupSet();

BranchSectParticMFDs sectMFDs = sol.requireModule(BranchSectParticMFDs.class);

double[] mags = {0d, 6d, 6.5d, 7d, 7.5d};

double[] fractiles = {0.025,0.16, 0.84, 0.975};

List<String> riHeader = List.of("Subsection Index", "Parent Section ID", "Subsection Name"
,"Mean RI", "Min RI", "Max RI", "Std. Dev", "p2.5", "p16.0", "p84.0", "p97.5");
List<String> rateHeader = new ArrayList<>(riHeader);
for (int i=0; i<rateHeader.size(); i++)
rateHeader.set(i, rateHeader.get(i).replace("RI", "Rate"));
List<CSVFile<String>> riCSVs = new ArrayList<>(mags.length);
List<CSVFile<String>> rateCSVs = new ArrayList<>(mags.length);
for (int m=0; m<mags.length; m++) {
CSVFile<String> riCSV = new CSVFile<>(true);
riCSV.addLine(riHeader);
riCSVs.add(riCSV);
CSVFile<String> rateCSV = new CSVFile<>(true);
rateCSV.addLine(rateHeader);
rateCSVs.add(rateCSV);
}

EvenlyDiscretizedFunc refMFD = FaultSysTools.initEmptyMFD(5.05, 8.45);
List<String> mfdHeader = new ArrayList<>();
mfdHeader.addAll(riHeader.subList(0, 3));
for (int i=0; i<refMFD.size(); i++)
mfdHeader.add((float)refMFD.getX(i)+"");
CSVFile<String> mfdParticCSV = new CSVFile<>(true);
mfdParticCSV.addLine(mfdHeader);
CSVFile<String> mfdNuclCSV = new CSVFile<>(true);
mfdNuclCSV.addLine(mfdHeader);

for (int s=0; s<rupSet.getNumSections(); s++) {
FaultSection sect = rupSet.getFaultSectionData(s);
IncrementalMagFreqDist particMFD = sol.calcParticipationMFD_forSect(s, refMFD.getMinX(), refMFD.getMaxX(), refMFD.size());
IncrementalMagFreqDist nuclMFD = sol.calcNucleationMFD_forSect(s, refMFD.getMinX(), refMFD.getMaxX(), refMFD.size());
List<String> common = List.of(s+"", sect.getParentSectionId()+"", sect.getSectionName());
List<String> partic = new ArrayList<>(common);
List<String> nucl = new ArrayList<>(common);
for (int i=0; i<refMFD.size(); i++) {
partic.add((float)particMFD.getY(i)+"");
nucl.add((float)nuclMFD.getY(i)+"");
}
mfdParticCSV.addLine(partic);
mfdNuclCSV.addLine(nucl);

for (boolean rate : new boolean[] {false,true}) {
ArbDiscrEmpiricalDistFunc[] dists = new ArbDiscrEmpiricalDistFunc[mags.length];
for (int m=0; m<dists.length; m++)
dists[m] = new ArbDiscrEmpiricalDistFunc();

for (int b=0; b<sectMFDs.getNumBranches(); b++) {
double weight = sectMFDs.getBranchWeight(b);
IncrementalMagFreqDist mfd = sectMFDs.getSectionMFD(b, s);
double[] rates = new double[mags.length];
for (Point2D pt : mfd)
for (int m=0; m<mags.length; m++)
if ((float)pt.getX() >= (float)mags[m])
rates[m] += pt.getY();
if (!rate)
for (int m=0; m<mags.length; m++)
rates[m] = 1d/rates[m];

for (int m=0; m<mags.length; m++)
dists[m].set(rates[m], weight);
}
for (int m=0; m<mags.length; m++) {
List<String> line = new ArrayList<>(common);
line.add((float)dists[m].getMean()+"");
line.add((float)dists[m].getMinX()+"");
line.add((float)dists[m].getMinY()+"");
line.add((float)dists[m].getStdDev()+"");
for (double fract : fractiles)
line.add((float)dists[m].getInterpolatedFractile(fract)+"");
if (rate)
rateCSVs.get(m).addLine(line);
else
riCSVs.get(m).addLine(line);
}
}
}

File outputDir = new File("/tmp");
String prefix = "nshm23_wus";
mfdParticCSV.writeToFile(new File(outputDir, prefix+"_partic_mfds.csv"));
mfdNuclCSV.writeToFile(new File(outputDir, prefix+"_nucl_mfds.csv"));
for (int m=0; m<mags.length; m++) {
String magPrefix;
if (mags[m] == 0)
magPrefix = prefix+"_supra_seis";
else
magPrefix = prefix+"_m"+(float)mags[m];
rateCSVs.get(m).writeToFile(new File(outputDir, magPrefix+"_rate_dists.csv"));
riCSVs.get(m).writeToFile(new File(outputDir, magPrefix+"_ri_dists.csv"));
}
}

}

@@ -35,7 +35,6 @@
import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader;
import org.opensha.sha.faultSurface.PointSurface;
import org.opensha.sha.faultSurface.RuptureSurface;
import org.opensha.sha.faultSurface.utils.PtSrcDistCorr;
import org.opensha.sha.gui.infoTools.IMT_Info;
import org.opensha.sha.imr.AttenRelRef;
import org.opensha.sha.imr.ScalarIMR;
@@ -89,7 +88,6 @@ public static GriddedGeoDataSet[] distKernelSmooth(GriddedGeoDataSet[] momentMap

Location rupLoc = dist == 0d ? siteLoc : LocationUtils.location(siteLoc, 0d, dist);
PointSurface rupSurf = new PointSurface(rupLoc);
rupSurf.setDistCorrMagAndType(Double.NaN, PtSrcDistCorr.Type.NONE);
rupSurf.setAveDip(90d);
rupSurf.setAveStrike(0d);

@@ -319,7 +317,6 @@ public MomentScaledFixedMagERF(GriddedGeoDataSet momentMap, double refMag) {

Location rupLoc = momentMap.getLocation(i);
PointSurface rupSurf = new PointSurface(rupLoc);
rupSurf.setDistCorrMagAndType(Double.NaN, PtSrcDistCorr.Type.NONE);
rupSurf.setAveDip(90d);
rupSurf.setAveStrike(0d);

@@ -370,7 +367,6 @@ public MomentScaledMFDShapeERF(GriddedGeoDataSet momentMap, IncrementalMagFreqDi

Location rupLoc = momentMap.getLocation(i);
PointSurface rupSurf = new PointSurface(rupLoc);
rupSurf.setDistCorrMagAndType(Double.NaN, PtSrcDistCorr.Type.NONE);
rupSurf.setAveDip(90d);
rupSurf.setAveStrike(0d);

@@ -444,7 +440,6 @@ public RateScaledFixedMagERF(GriddedGeoDataSet rateMap, double refMag) {

Location rupLoc = rateMap.getLocation(i);
PointSurface rupSurf = new PointSurface(rupLoc);
rupSurf.setDistCorrMagAndType(Double.NaN, PtSrcDistCorr.Type.NONE);
rupSurf.setAveDip(90d);
rupSurf.setAveStrike(0d);

@@ -495,7 +490,6 @@ public RateScaledMFDShapeERF(GriddedGeoDataSet rateMap, IncrementalMagFreqDist m

Location rupLoc = rateMap.getLocation(i);
PointSurface rupSurf = new PointSurface(rupLoc);
rupSurf.setDistCorrMagAndType(Double.NaN, PtSrcDistCorr.Type.NONE);
rupSurf.setAveDip(90d);
rupSurf.setAveStrike(0d);
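
The hunks above show the matching simplification for bare PointSurface construction: the per-surface rupSurf.setDistCorrMagAndType(Double.NaN, PtSrcDistCorr.Type.NONE) calls, along with the PtSrcDistCorr import, are simply deleted, since the correction is no longer configured on the surface itself. A minimal sketch of the resulting pattern as it now appears in this file:

PointSurface rupSurf = new PointSurface(rupLoc);
rupSurf.setAveDip(90d);
rupSurf.setAveStrike(0d);
// no setDistCorrMagAndType(...) call; any distance correction is presumably
// applied where the source is assembled (e.g. via PointSourceDistanceCorrections)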

@@ -23,6 +23,7 @@
import org.apache.commons.cli.Options;
import org.opensha.commons.data.CSVFile;
import org.opensha.commons.data.Site;
import org.opensha.commons.data.WeightedList;
import org.opensha.commons.data.function.ArbitrarilyDiscretizedFunc;
import org.opensha.commons.data.function.DiscretizedFunc;
import org.opensha.commons.data.xyz.GriddedGeoDataSet;
@@ -45,6 +46,9 @@
import org.opensha.sha.earthquake.faultSysSolution.util.SolHazardMapCalc.ReturnPeriods;
import org.opensha.sha.earthquake.param.BackgroundRupType;
import org.opensha.sha.earthquake.param.IncludeBackgroundOption;
import org.opensha.sha.earthquake.util.GriddedSeismicitySettings;
import org.opensha.sha.faultSurface.utils.PointSourceDistanceCorrection;
import org.opensha.sha.faultSurface.utils.PointSourceDistanceCorrections;
import org.opensha.sha.gui.infoTools.IMT_Info;
import org.opensha.sha.imr.AttenRelRef;
import org.opensha.sha.imr.ScalarIMR;
@@ -85,6 +89,9 @@ public class MPJ_WrapperHazardCalc extends MPJTaskCalculator {
private static final IncludeBackgroundOption GRID_SEIS_DEFAULT = IncludeBackgroundOption.INCLUDE;
private IncludeBackgroundOption gridSeisOp = GRID_SEIS_DEFAULT;

// TODO: CLI
private GriddedSeismicitySettings gridSettings = GriddedSeismicitySettings.DEFAULT;

private boolean noSubduction = false;
private boolean noActive = false;
private boolean noStable = false;
@@ -414,7 +421,7 @@ public int getNumSources() {

@Override
public ProbEqkSource getSource(int idx) {
return externalGridProv.getSource(idx, 1d, null, BackgroundRupType.POINT);
return externalGridProv.getSource(idx, 1d, null, gridSettings);
}

@Override

@@ -36,7 +36,10 @@
import org.opensha.sha.earthquake.param.IncludeBackgroundParam;
import org.opensha.sha.earthquake.param.ProbabilityModelOptions;
import org.opensha.sha.earthquake.param.ProbabilityModelParam;
import org.opensha.sha.earthquake.util.GriddedSeismicitySettings;
import org.opensha.sha.faultSurface.RuptureSurface;
import org.opensha.sha.faultSurface.utils.PointSourceDistanceCorrection;
import org.opensha.sha.faultSurface.utils.PointSourceDistanceCorrections;
import org.opensha.sha.gui.infoTools.IMT_Info;
import org.opensha.sha.imr.AttenRelRef;
import org.opensha.sha.imr.ScalarIMR;
@@ -65,6 +68,8 @@ public static void main(String[] args) throws IOException {
boolean subduction = false;
Preconditions.checkState(outputDir.exists() || outputDir.mkdir());

GriddedSeismicitySettings gridSettings = GriddedSeismicitySettings.DEFAULT;

Set<TectonicRegionType> trts = EnumSet.of(TectonicRegionType.ACTIVE_SHALLOW);
if (subduction) {
trts.add(TectonicRegionType.SUBDUCTION_INTERFACE);
@@ -144,7 +149,7 @@ public static void main(String[] args) throws IOException {
calc.getHazardCurve(logXVals, site, gmpe, erf);
DiscretizedFunc wrapperFaultCurve = toLinear(logXVals, xVals);

ProbEqkSource modelTestSrc = gridProv.getSource(testIndex, 1d, null, BackgroundRupType.POINT);
ProbEqkSource modelTestSrc = gridProv.getSource(testIndex, 1d, null, gridSettings);
FaultSystemSolutionERF modelERF = new FaultSystemSolutionERF(baSol);
modelERF.setParameter(ApplyGardnerKnopoffAftershockFilterParam.NAME, false);
modelERF.setParameter(ProbabilityModelParam.NAME, ProbabilityModelOptions.POISSON);
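
Both wrappers above now hand the grid source provider a GriddedSeismicitySettings bundle instead of a bare BackgroundRupType when requesting a source. A minimal sketch of the changed call, as shown in the hunks above (both files currently hard-code GriddedSeismicitySettings.DEFAULT, and the TODO in MPJ_WrapperHazardCalc notes a CLI option is still pending):

GriddedSeismicitySettings gridSettings = GriddedSeismicitySettings.DEFAULT;
// previously: gridProv.getSource(testIndex, 1d, null, BackgroundRupType.POINT)
ProbEqkSource modelTestSrc = gridProv.getSource(testIndex, 1d, null, gridSettings);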
(remaining changed files not shown)
