Skip to content

Commit c529937

Browse files
author
Kevin Milner
committed
Updates for release-candidate PRVI model and paper
1 parent 29e7ad3 commit c529937

22 files changed

+1475
-155
lines changed
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
package scratch.kevin.prvi25;
2+
3+
import java.awt.geom.Point2D;
4+
import java.io.File;
5+
import java.io.IOException;
6+
import java.util.ArrayList;
7+
import java.util.List;
8+
9+
import org.opensha.commons.data.CSVFile;
10+
import org.opensha.commons.data.function.XY_DataSet;
11+
import org.opensha.commons.geo.BorderType;
12+
import org.opensha.commons.geo.Location;
13+
import org.opensha.commons.geo.LocationList;
14+
import org.opensha.commons.geo.Region;
15+
import org.opensha.commons.mapping.PoliticalBoundariesData;
16+
import org.opensha.commons.util.DataUtils.MinMaxAveTracker;
17+
18+
public class CA_COV_Compare {
19+
20+
public static void main(String[] args) throws IOException {
21+
XY_DataSet[] caOutlines = PoliticalBoundariesData.loadCAOutlines();
22+
List<Region> caRegions = new ArrayList<>();
23+
for (int i=0; i<caOutlines.length; i++) {
24+
LocationList outline = new LocationList();
25+
for (Point2D pt : caOutlines[i])
26+
outline.add(new Location(pt.getY(), pt.getX()));
27+
caRegions.add(new Region(outline, BorderType.MERCATOR_LINEAR));
28+
}
29+
30+
CSVFile<String> csv = CSVFile.readFile(new File("/tmp/1.0s_TWO_IN_50.csv"), false);
31+
32+
MinMaxAveTracker covTrack = new MinMaxAveTracker();
33+
for (int row=1; row<csv.getNumRows(); row++) {
34+
Location loc = new Location(csv.getDouble(row, 1), csv.getDouble(row, 2));
35+
boolean inside = false;
36+
for (Region reg : caRegions) {
37+
if (reg.contains(loc)) {
38+
inside = true;
39+
break;
40+
}
41+
}
42+
if (inside) {
43+
double cov = csv.getDouble(row, 7);
44+
covTrack.addValue(cov);
45+
}
46+
}
47+
System.out.println("CA COV: "+covTrack);
48+
}
49+
50+
}

src/main/java/scratch/kevin/prvi25/CrustalSubductionLogicTreeCombine.java

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,8 +64,16 @@ public static void main(String[] args) throws IOException {
6464
if (cmd.hasOption("hazard-dir-name"))
6565
hazardDirName = cmd.getOptionValue("hazard-dir-name");
6666
String gridRegFileName = GRID_REG_FILE_NAME_DEFAULT;
67-
if (cmd.hasOption("grid-reg-file-name"))
67+
if (cmd.hasOption("grid-reg-file-name")) {
6868
gridRegFileName = cmd.getOptionValue("grid-reg-file-name");
69+
} else if (hazardDirName != HAZARD_DIR_NAME_DEFAULT) {
70+
File testFile = new File(crustalDir, gridRegFileName);
71+
if (!testFile.exists()) {
72+
testFile = new File(crustalDir, hazardDirName+".zip");
73+
if (testFile.exists())
74+
gridRegFileName = testFile.getName();
75+
}
76+
}
6977
IncludeBackgroundOption bgOp = GRID_SEIS_DEFAULT;
7078
if (cmd.hasOption("gridded-seis"))
7179
bgOp = IncludeBackgroundOption.valueOf(cmd.getOptionValue("gridded-seis"));

src/main/java/scratch/kevin/prvi25/GMMFilterTests.java

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,16 +5,24 @@
55
import java.util.Map;
66
import java.util.function.Supplier;
77

8+
import org.apache.commons.cli.CommandLine;
9+
import org.apache.commons.cli.Options;
810
import org.opensha.commons.logicTree.LogicTree;
911
import org.opensha.commons.logicTree.LogicTreeBranch;
1012
import org.opensha.commons.logicTree.LogicTreeLevel;
1113
import org.opensha.commons.logicTree.LogicTreeNode;
1214
import org.opensha.sha.earthquake.faultSysSolution.hazard.mpj.MPJ_LogicTreeHazardCalc;
15+
import org.opensha.sha.earthquake.faultSysSolution.util.FaultSysHazardCalcSettings;
16+
import org.opensha.sha.earthquake.faultSysSolution.util.FaultSysTools;
1317
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_LogicTree;
1418
import org.opensha.sha.imr.AttenRelRef;
19+
import org.opensha.sha.imr.AttenRelSupplier;
1520
import org.opensha.sha.imr.ScalarIMR;
1621
import org.opensha.sha.imr.attenRelImpl.nshmp.NSHMP_GMM_Wrapper;
1722
import org.opensha.sha.imr.param.IntensityMeasureParams.PGA_Param;
23+
import org.opensha.sha.imr.param.OtherParams.SigmaTruncLevelParam;
24+
import org.opensha.sha.imr.param.OtherParams.SigmaTruncTypeParam;
25+
import org.opensha.sha.imr.param.SiteParams.Vs30_Param;
1826
import org.opensha.sha.util.TectonicRegionType;
1927

2028
import gov.usgs.earthquake.nshmp.gmm.GmmInput;
@@ -34,15 +42,30 @@ public static void main(String[] args) {
3442
LogicTree.buildExhaustive(allLevels, true)
3543
);
3644

45+
Options ops = new Options();
46+
FaultSysHazardCalcSettings.addCommonOptions(ops, false);
47+
String[] hazardArgs = {
48+
"--gmm-sigma-trunc-one-sided",
49+
"3.0",
50+
"--vs30",
51+
"260",
52+
};
53+
CommandLine cmd = FaultSysTools.parseOptions(ops, hazardArgs, GMMFilterTests.class);
54+
Map<TectonicRegionType, AttenRelSupplier> upstreamGMMs = FaultSysHazardCalcSettings.getGMMs(cmd);
55+
3756
for (LogicTree<?> tree : logicTrees) {
3857
System.out.println("Testing LogicTree with "+tree.size()+" levels");
3958
for (LogicTreeBranch<?> branch : tree) {
4059
System.out.println("\tBranch: "+branch);
41-
Map<TectonicRegionType, ? extends Supplier<ScalarIMR>> gmms = MPJ_LogicTreeHazardCalc.getGMM_Suppliers(branch, null);
60+
Map<TectonicRegionType, ? extends Supplier<ScalarIMR>> gmms = FaultSysHazardCalcSettings.getGMM_Suppliers(branch, upstreamGMMs, true);
4261
for (TectonicRegionType trt : gmms.keySet()) {
4362
NSHMP_GMM_Wrapper gmm = (NSHMP_GMM_Wrapper)gmms.get(trt).get();
4463
gmm.setIntensityMeasure(PGA_Param.NAME);
4564
System.out.println("\t\t"+trt.name()+" GMM: "+gmm.getName());
65+
System.out.println("\t\t\tVs30 is "+gmm.getSiteParams().getValue(Vs30_Param.NAME));
66+
System.out.println("\t\t\tTrunc is "+gmm.getParameter(SigmaTruncTypeParam.NAME).getValue());
67+
if (gmm.getOtherParams().containsParameter(SigmaTruncLevelParam.NAME))
68+
System.out.println("\t\t\tTrunc level is "+gmm.getParameter(SigmaTruncLevelParam.NAME).getValue());
4669
gmm.setCurrentGmmInput(GmmInput.builder().withDefaults().build());
4770
gov.usgs.earthquake.nshmp.tree.LogicTree<GroundMotion> gmmTree = gmm.getGroundMotionTree();
4871
System.out.println("\t\t\tTree has "+gmmTree.size()+" values: "+gmmTree);

src/main/java/scratch/kevin/prvi25/GMMLogicTreeWriter.java

Lines changed: 17 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,10 @@ public static void main(String[] args) throws IOException {
5050

5151
// GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_ModelBroad(), 0.1, GriddedRegion.ANCHOR_0_0);
5252
// GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_MapExtents(), 0.025, GriddedRegion.ANCHOR_0_0);
53-
GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_Tight(), 0.025, GriddedRegion.ANCHOR_0_0);
53+
// use for ERF+GMC
54+
GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_Tight(), 0.05, GriddedRegion.ANCHOR_0_0);
55+
// use for GMC
56+
// GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_Tight(), 0.025, GriddedRegion.ANCHOR_0_0);
5457
System.out.println("Region has "+gridReg.getNodeCount()+" nodes");
5558

5659
Double vs30 = null;
@@ -79,7 +82,7 @@ public static void main(String[] args) throws IOException {
7982
// // including gridded
8083
//// int mins = 1440*5;
8184
//// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
82-
//// erfSamples = 10000; gmmSamplesPerERF = 1; jobSuffix = "_sampled"; logicTreeOutputName = "logic_tree_full_gridded_sampled.json";
85+
//// erfSamples = 20000; gmmSamplesPerERF = 1; jobSuffix = "_sampled"; logicTreeOutputName = "logic_tree_full_gridded_sampled.json";
8386
////// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded_sampled.json"); jobSuffix = "_sampled";
8487
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;
8588

@@ -96,9 +99,9 @@ public static void main(String[] args) throws IOException {
9699
// File sourceDir = SUBDUCTION_DIR;
97100
// File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-gmTreeCalcs"+dirSuffix);
98101
// // supra-seis only
99-
//// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
100-
//// int mins = 1440;
101-
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
102+
// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
103+
// int mins = 1440;
104+
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
102105
// // interface gridded only
103106
//// int mins = 1440;
104107
////// File sourceTreeFile = new File(sourceDir, "logic_tree_gridded_only.json");
@@ -112,14 +115,14 @@ public static void main(String[] args) throws IOException {
112115
//// jobSuffix = "_interface";
113116
//// outputSuffix = jobSuffix;
114117
// // interface both (combine only)
115-
// combineOnly = true;
116-
// int mins = 1440;
117-
// forceInputFileName = "results_full_gridded_interface_only.zip";
118-
// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
119-
// logicTreeOutputName = "logic_tree_full_gridded_interface_only.json";
120-
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;
121-
// jobSuffix = "_interface";
122-
// outputSuffix = jobSuffix;
118+
//// combineOnly = true;
119+
//// int mins = 1440;
120+
//// forceInputFileName = "results_full_gridded_interface_only.zip";
121+
//// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
122+
//// logicTreeOutputName = "logic_tree_full_gridded_interface_only.json";
123+
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;
124+
//// jobSuffix = "_interface";
125+
//// outputSuffix = jobSuffix;
123126

124127
/*
125128
* Slab
@@ -303,7 +306,7 @@ else if (bgOp == IncludeBackgroundOption.EXCLUDE)
303306
List<Site> sites = new ArrayList<>();
304307
for (int row=1; row<csv.getNumRows(); row++) {
305308
String name = csv.get(row, 0);
306-
Location loc = new Location(csv.getDouble(row, 1), csv.getDouble(row, 2));
309+
Location loc = new Location(csv.getDouble(row, 2), csv.getDouble(row, 1));
307310
sites.add(new Site(loc, name));
308311
}
309312
csv = new CSVFile<>(true);

0 commit comments

Comments
 (0)