diff --git a/build.gradle b/build.gradle index a0d3a771..f2f1c759 100644 --- a/build.gradle +++ b/build.gradle @@ -13,6 +13,9 @@ compileJava.options.encoding = "UTF-8" apply from: '../opensha/build-git.gradle' dependencies { + implementation 'org.apache.sis.core:sis-referencing:1.4' + implementation 'org.apache.sis.non-free:sis-embedded-data:1.4' + implementation 'org.apache.sis.non-free:sis-epsg:1.4' /* no remote repo */ implementation files('python/share/py4j/py4j0.10.9.1.jar', //Py4j jar installed locally via `pip install py4j` 'lib/openmap.jar') diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/inversion/NZSHM22_CrustalInversionRunner.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/inversion/NZSHM22_CrustalInversionRunner.java index f8d0b191..599574a6 100644 --- a/src/main/java/nz/cri/gns/NZSHM22/opensha/inversion/NZSHM22_CrustalInversionRunner.java +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/inversion/NZSHM22_CrustalInversionRunner.java @@ -241,6 +241,30 @@ protected NZSHM22_CrustalInversionRunner configure() throws DocumentException, I initialSolution = variablePerturbationBasis.clone(); } + /* FIXME oakley, this is something for RSQSim + rupSet.removeRuptures(); + // rupSet.getFaultSectionDataList().forEach(s -> + // s.setSectionName(s.getSectionName().replace("Subsection ", ""))); + Set names = new HashSet<>(); + for (FaultSection section : rupSet.getFaultSectionDataList()) { + + section.setSectionName(section.getSectionId() + " " + section.getSectionName()); + } + // Preconditions.checkState(names.size() == rupSet.getNumSections()); + U3FaultSystemIO.writeRupSet(rupSet, new File("/tmp/NZSHM_crustal_u3_use_this_instead.zip")); + + List metaData = new ArrayList<>(); + metaData.add("NZSHM22 crustal fault sub sections"); + metaData.add("fault_model: CFM_1_0A_DOM_SANSTVZ"); + metaData.add("depth_scaling_tvz: 0.667"); + metaData.add("depth_scaling_sans: 0.8"); + + FaultSectionDataWriter.writeSectionsToFile( + rupSet.getFaultSectionDataList(), metaData, new File("/tmp/asciitest.txt"), false); + + + */ + InversionModels inversionModel = branch.getValue(InversionModels.class); // this contains all inversion weights diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/joint/ManipulatedClusterRupture.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/joint/ManipulatedClusterRupture.java index 4bb58952..f1d2c439 100644 --- a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/joint/ManipulatedClusterRupture.java +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/joint/ManipulatedClusterRupture.java @@ -3,15 +3,13 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; +import java.util.*; import java.util.stream.Collectors; import nz.cri.gns.NZSHM22.opensha.ruptures.downDip.DownDipFaultSubSectionCluster; import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture; import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster; import org.opensha.sha.earthquake.faultSysSolution.ruptures.Jump; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.MultiRuptureJump; import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.SectionDistanceAzimuthCalculator; import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.UniqueRupture; import 
org.opensha.sha.faultSurface.FaultSection;
@@ -59,6 +57,16 @@ static FaultSection last(FaultSubsectionCluster cluster) {
         return cluster.subSects.get(cluster.subSects.size() - 1);
     }
 
+    // FIXME use properties
+    static boolean isCrustal(FaultSection section) {
+        return !section.getSectionName().contains("row:");
+    }
+
+    // FIXME use properties
+    static boolean isSubduction(FaultSection section) {
+        return section.getSectionName().contains("row:");
+    }
+
     static ImmutableList<Jump> makeInternalJumps(
             ClusterRupture ruptureA,
             ClusterRupture ruptureB,
@@ -130,6 +138,18 @@ public static ManipulatedClusterRupture splay(
                 ruptureA.internalUnique);
     }
 
+    /**
+     * Attaches splayRupture as a splay of rupture, using a synthetic jump (nominal 5 km distance)
+     * between the two start sections.
+     */
+    public static ManipulatedClusterRupture splay(
+            ClusterRupture rupture, ClusterRupture splayRupture) {
+        Jump jump =
+                new Jump(
+                        rupture.clusters[0].startSect,
+                        rupture.clusters[0],
+                        splayRupture.clusters[0].startSect,
+                        splayRupture.clusters[0],
+                        5);
+        return splay(rupture, splayRupture, jump);
+    }
+
     /**
      * Safely reverses ruptures that may have a subduction cluster.
     *
@@ -163,4 +183,75 @@ public static ClusterRupture reverse(ClusterRupture rupture) {
                 ImmutableList.copyOf(jumps), rupture.unique);
     }
+
+    /** Builds a rupture from pre-made clusters, connecting consecutive clusters with synthetic jumps. */
+    public static ManipulatedClusterRupture makeFromClusters(
+            List<FaultSubsectionCluster> clusters) {
+        FaultSubsectionCluster[] clusterArray = clusters.toArray(new FaultSubsectionCluster[] {});
+        List<Jump> jumps = new ArrayList<>();
+        UniqueRupture uniqueRupture =
+                clusters.stream().map(UniqueRupture::forClusters).reduce(UniqueRupture::add).get();
+
+        for (int c = 1; c < clusterArray.length; c++) {
+            FaultSubsectionCluster fromCluster = clusterArray[c - 1];
+            FaultSection from = last(fromCluster);
+            FaultSubsectionCluster toCluster = clusterArray[c];
+            FaultSection to = first(toCluster);
+            double distance = 5;
+            jumps.add(new Jump(from, fromCluster, to, toCluster, distance));
+        }
+
+        return new ManipulatedClusterRupture(
+                clusterArray, ImmutableList.copyOf(jumps), uniqueRupture);
+    }
+
+    /** Groups sections into clusters by parent section id and builds a rupture from them. */
+    public static ManipulatedClusterRupture makeFromSections(List<FaultSection> sections) {
+        List<FaultSubsectionCluster> clusters =
+                sections.stream()
+                        .collect(Collectors.groupingBy(FaultSection::getParentSectionId))
+                        .values()
+                        .stream()
+                        .peek(list -> list.sort(Comparator.comparing(FaultSection::getSectionId)))
+                        .map(FaultSubsectionCluster::new)
+                        .collect(Collectors.toList());
+        return makeFromClusters(clusters);
+    }
+
+    /**
+     * Can be used if we get a jumbled list of FaultSections from RSQSim data.
+     *
+     * @param sections fault sections in no particular order
+     * @return a rupture with the crustal part splayed onto the subduction part
+     */
+    public static ClusterRupture makeRupture(List<FaultSection> sections) {
+        ManipulatedClusterRupture crustal =
+                makeFromSections(
+                        sections.stream()
+                                .filter(ManipulatedClusterRupture::isCrustal)
+                                .collect(Collectors.toList()));
+        ManipulatedClusterRupture subduction =
+                makeFromSections(
+                        sections.stream()
+                                .filter(ManipulatedClusterRupture::isSubduction)
+                                .collect(Collectors.toList()));
+        return splay(subduction, crustal);
+    }
+
+    /**
+     * Splits a rupture with a splay back into the two separate ruptures of a MultiRuptureJump so
+     * that we can apply Coulomb filters.
+     *
+     * @param rupture a rupture with exactly one splay
+     * @return the reconstructed jump
+     */
+    public static MultiRuptureJump reconstructJump(ClusterRupture rupture) {
+        List<FaultSection> fromSections =
+                Arrays.stream(rupture.clusters)
+                        .flatMap(c -> c.subSects.stream())
+                        .collect(Collectors.toList());
+        List<FaultSection> toSections =
+                rupture.splays.values().asList().get(0).buildOrderedSectionList();
+        ClusterRupture fromRupture = ManipulatedClusterRupture.makeFromSections(fromSections);
+        ClusterRupture toRupture = ManipulatedClusterRupture.makeFromSections(toSections);
+        return new MultiRuptureJump(
+                fromSections.get(0), fromRupture,
toSections.get(0), toRupture, 10); + } } diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/picklemapping.ipynb b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/picklemapping.ipynb new file mode 100644 index 00000000..c8f90d25 --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/picklemapping.ipynb @@ -0,0 +1,54 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 32, + "id": "a98437a0-d942-45ca-9f40-a8aa03edb6d8", + "metadata": {}, + "outputs": [], + "source": [ + "import pickle\n", + "import json\n", + "\n", + "mapping = pickle.load(open(\"hikkerm_discretized_trimmed_dict.pkl\", \"rb\"))\n", + "\n", + "indices = {k: [int(i) for i in mapping[k][\"triangle_indices\"]] for k in mapping}\n", + "\n", + "jsonString = json.dumps(indices, indent=2)\n", + "\n", + "f = open(\"hikkerm_discretized_trimmed_dict.json\", \"a\")\n", + "f.write(jsonString)\n", + "f.close()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2838bc29-cb92-4e53-bdf4-5c0db46c7883", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/ClusterAggregator.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/ClusterAggregator.java new file mode 100644 index 00000000..285b4afe --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/ClusterAggregator.java @@ -0,0 +1,136 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import com.google.common.base.Preconditions; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.SectionDistanceAzimuthCalculator; +import org.opensha.sha.faultSurface.FaultSection; + +public class ClusterAggregator { + + final SectionDistanceAzimuthCalculator disAzCalc; + + final double maxInternalJumpDist; + + public ClusterAggregator( + SectionDistanceAzimuthCalculator disAzCalc, double maxInternalJumpDist) { + this.disAzCalc = disAzCalc; + this.maxInternalJumpDist = maxInternalJumpDist; + } + + /** + * Turns an Event into two ruptures, one subduction, one crustal + * + * @param event + * @return + */ + public List makeRuptures(RsqSimEventLoader.Event event) { + List subductionSections = + event.sections.stream() + .filter(s -> s.getSectionName().contains("row:")) + .collect(Collectors.toList()); + List crustalSections = + event.sections.stream() + .filter(s -> !s.getSectionName().contains("row:")) + .collect(Collectors.toList()); + Preconditions.checkState(!subductionSections.isEmpty()); + Preconditions.checkState(!crustalSections.isEmpty()); + List ruptures = new ArrayList<>(); + ruptures.add(ClusterRupture.forOrderedSingleStrandRupture(subductionSections, disAzCalc)); + 
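// same single-strand construction for the crustal sections; RsqSimEventLoader.makeJump()
+        // later pairs the two ruptures into a MultiRuptureJump
+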
ruptures.add(ClusterRupture.forOrderedSingleStrandRupture(crustalSections, disAzCalc)); + return ruptures; + } + + class ClusterData { + FaultSubsectionCluster cluster; + List endPoints = new ArrayList<>(); + public boolean connected = false; + + ClusterData(FaultSubsectionCluster cluster) { + this.cluster = cluster; + endPoints.add(cluster.subSects.get(0)); + endPoints.add(cluster.subSects.get(cluster.subSects.size() - 1)); + } + + boolean isNear(FaultSection section) { + for (FaultSection candidate : endPoints) { + if (section == candidate + || disAzCalc.getDistance(candidate, section) <= maxInternalJumpDist) { + return true; + } + } + return false; + } + } + + /** + * Returns true if all clusters can transitively be connected through maxInternalJumpDist jumps. + * + * @param clusters + * @return + */ + public boolean allConnected(FaultSubsectionCluster[] clusters) { + if (clusters.length == 1) { + return true; + } + + // wrap clusters in ClusterData + List groups = + Arrays.stream(clusters).map(ClusterData::new).collect(Collectors.toList()); + + ClusterData first = groups.get(0); + List edge = new ArrayList<>(first.endPoints); + first.connected = true; + + // Go through all fault sections at the edge of the connected cluster. + // The edge may grow as we add more clusters into the connected cluster. + // See if we can jump from the selected fault section to a cluster that's so far + // unconnected. + // If so, add the cluster to the connected cluster, and expand the edge accordingly + for (int e = 0; e < edge.size(); e++) { + FaultSection section = edge.get(e); + for (ClusterData cluster : groups) { + if (cluster.connected) { + continue; + } + if (cluster.isNear(section)) { + cluster.connected = true; + edge.addAll(cluster.endPoints); + } + } + } + + // return true if all clusters are connected + for (ClusterData data : groups) { + if (!data.connected) { + return false; + } + } + return true; + } + + /** + * Returns true if all clusters in the rupture can be connected by maxInternalJumpDist jumps + * + * @param rupture + * @return + */ + public boolean allConnected(ClusterRupture rupture) { + return allConnected(rupture.clusters); + } + + /** + * Returns true if all clusters in each rupture can be connected by maxInternalJumpDist jumps + * Assumes that exactly two ruptures are passed in. + * + * @param ruptures + * @return + */ + public boolean allConnected(List ruptures) { + return allConnected(ruptures.get(0)) && allConnected(ruptures.get(1)); + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/CoordinateConverter.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/CoordinateConverter.java new file mode 100644 index 00000000..b0557bc6 --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/CoordinateConverter.java @@ -0,0 +1,102 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import com.google.common.base.Preconditions; +import org.apache.sis.geometry.DirectPosition2D; +import org.apache.sis.referencing.CRS; +import org.apache.sis.referencing.CommonCRS; +import org.opengis.geometry.DirectPosition; +import org.opengis.referencing.crs.CoordinateReferenceSystem; +import org.opengis.referencing.operation.CoordinateOperation; +import org.opengis.referencing.operation.TransformException; +import org.opengis.util.FactoryException; +import org.opensha.commons.geo.Location; + +/** + * Converts coordinates from a specific coordinate system into WGS84. + * + *
<p>
Implementing classes need to set the operation property and implement toSourcePosition(). + * toString() should be overwritten for better error messages + */ +public abstract class CoordinateConverter { + + static CoordinateReferenceSystem targetCRS = CommonCRS.WGS84.geographic(); + protected CoordinateOperation operation; + + /** + * Returns the equivalent WGS84 location. Elevation is not modified. + * + * @param easting + * @param northing + * @param elevation + * @return + */ + public Location toWGS84(double easting, double northing, double elevation) { + try { + DirectPosition2D original = toSourcePosition(easting, northing); + DirectPosition transformed = operation.getMathTransform().transform(original, null); + return new Location(transformed.getOrdinate(0), transformed.getOrdinate(1), elevation); + } catch (TransformException x) { + throw new RuntimeException( + this + " could not transform " + easting + ", " + northing, x); + } + } + + protected abstract DirectPosition2D toSourcePosition(double easting, double northing); + + /** Transforms Universal Transverse Mercator coordinates into WGS84 coordinates. */ + public static class UTM extends CoordinateConverter { + final int zone; + final boolean north; + + /** + * Creates a UTM transformer for a specific UTM zone. + * + * @param zone the zone + * @param north whether the zone is in the northern hemisphere + * @throws FactoryException + */ + public UTM(int zone, boolean north) throws FactoryException { + Preconditions.checkArgument(zone < 62 && zone > 0); + this.zone = zone; + this.north = north; + // https://docs.up42.com/data/reference/utm#utm-wgs84-north + int epsCode = (north ? 32600 : 32700) + zone; + CoordinateReferenceSystem sourceCRS = CRS.forCode("EPSG:" + epsCode); + operation = CRS.findOperation(sourceCRS, targetCRS, null); + } + + @Override + protected DirectPosition2D toSourcePosition(double easting, double northing) { + return new DirectPosition2D(easting, northing); + } + + @Override + public String toString() { + return "UTM " + zone + (north ? "N" : "S") + " -> WGS84"; + } + } + + /** Converts New Zealand Transverse Mercator coordinates to WGS84 https://epsg.io/2193 */ + public static class NZTM extends CoordinateConverter { + + /** + * Creates an NZTM transformer. 
+         *
+         * @throws FactoryException
+         */
+        public NZTM() throws FactoryException {
+            CoordinateReferenceSystem sourceCRS = CRS.forCode("EPSG:2193");
+            operation = CRS.findOperation(sourceCRS, targetCRS, null);
+        }
+
+        @Override
+        protected DirectPosition2D toSourcePosition(double easting, double northing) {
+            // EPSG:2193 declares its axes in (northing, easting) order
+            return new DirectPosition2D(northing, easting);
+        }
+
+        @Override
+        public String toString() {
+            return "NZTM -> WGS84";
+        }
+    }
+}
diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/CoulombTester.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/CoulombTester.java
new file mode 100644
index 00000000..f04f9089
--- /dev/null
+++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/CoulombTester.java
@@ -0,0 +1,216 @@
+package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims;
+
+import java.io.*;
+import java.util.*;
+import java.util.stream.Collectors;
+import nz.cri.gns.NZSHM22.opensha.ruptures.experimental.joint.ManipulatedClusterRupture;
+import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet;
+import org.opensha.sha.earthquake.faultSysSolution.modules.ClusterRuptures;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.ConcurrentCounter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.MultiRuptureJump;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.StiffnessCalcModule;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.impl.MultiRuptureCoulombFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.impl.MultiRuptureFractCoulombPositiveFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.impl.MultiRuptureNetCoulombPositiveFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.PlausibilityResult;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.impl.coulomb.ParentCoulombCompatibilityFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.SectionDistanceAzimuthCalculator;
+import org.opensha.sha.faultSurface.FaultSection;
+import org.opensha.sha.simulators.stiffness.AggregatedStiffnessCalculator;
+
+public class CoulombTester implements Closeable {
+
+    String stiffnessCache;
+    public FaultSystemRupSet rupSet;
+    final SectionDistanceAzimuthCalculator disAzCalc;
+    public StiffnessCalcModule stiffness;
+    public List<MultiRuptureCoulombFilter> filters = new ArrayList<>();
+    BufferedWriter writer = null;
+
+    public CoulombTester(FaultSystemRupSet rupSet, String stiffnessCache) throws IOException {
+        this(rupSet, stiffnessCache, null);
+    }
+
+    public CoulombTester(FaultSystemRupSet rupSet, String stiffnessCache, File statsFile)
+            throws IOException {
+        this.rupSet = rupSet;
+        this.stiffnessCache = stiffnessCache;
+        this.disAzCalc = new SectionDistanceAzimuthCalculator(rupSet.getFaultSectionDataList());
+        if (statsFile != null) {
+            writer = new BufferedWriter(new FileWriter(statsFile));
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        if (writer != null) {
+            writer.close();
+        }
+    }
+
+    public List<MultiRuptureCoulombFilter> getFilters() {
+        return filters;
+    }
+
+    public List<MultiRuptureJump> getJumps(FaultSystemRupSet rupSet) {
+        ClusterRuptures ruptures = rupSet.requireModule(ClusterRuptures.class);
+        return ruptures.getAll().stream()
+                .map(ManipulatedClusterRupture::reconstructJump)
+                .collect(Collectors.toList());
+    }
+
+    public void setupStiffness() {
+
+        stiffness = new StiffnessCalcModule(rupSet, 2, new File(stiffnessCache));
+
+        // what fraction of interactions should be positive? this number will take some tuning
+        float fractThreshold = 0.75f;
+        MultiRuptureFractCoulombPositiveFilter fractCoulombFilter =
+                new MultiRuptureFractCoulombPositiveFilter(
+                        stiffness.stiffnessCalc,
+                        fractThreshold,
+                        ParentCoulombCompatibilityFilter.Directionality.BOTH);
+        filters.add(fractCoulombFilter);
+
+        // force the net Coulomb from one rupture to the other to be positive; this more heavily
+        // weights nearby interactions
+        ParentCoulombCompatibilityFilter.Directionality netDirectionality =
+                ParentCoulombCompatibilityFilter.Directionality
+                        .BOTH; // require it to be positive from subduction to crustal AND from
+        // crustal to subduction
+        // Directionality netDirectionality = Directionality.EITHER; // require it to be
+        // positive from subduction to crustal OR from crustal to subduction
+        MultiRuptureNetCoulombPositiveFilter netCoulombFilter =
+                new MultiRuptureNetCoulombPositiveFilter(
+                        stiffness.stiffnessCalc, netDirectionality);
+        filters.add(netCoulombFilter);
+    }
+
+    public void saveCache() {
+        stiffness.checkUpdateStiffnessCache();
+    }
+
+    public String testSelfStiffnessFilter(List<RsqSimEventLoader.Event> events) {
+
+        SelfStiffnessCoulombFilter selfStiffness = new SelfStiffnessCoulombFilter(stiffness);
+
+        System.out.println("start");
+
+        ConcurrentCounter selfsub = new ConcurrentCounter();
+        ConcurrentCounter selfCru = new ConcurrentCounter();
+        ConcurrentCounter selfAll = new ConcurrentCounter();
+        ConcurrentCounter selfBoth = new ConcurrentCounter();
+
+        events.parallelStream()
+                .forEach(
+                        event -> {
+                            double[] stats = selfStiffness.statsData(event);
+                            if (stats[0] > 0) {
+                                selfsub.inc();
+                            }
+                            if (stats[1] > 0) {
+                                selfCru.inc();
+                            }
+                            if (stats[2] > 0) {
+                                selfAll.inc();
+                            }
+                            if (stats[0] > 0 && stats[1] > 0) {
+                                selfBoth.inc();
+                            }
+                        });
+
+        return "all->sub: "
+                + selfsub.get()
+                + " all->cru: "
+                + selfCru.get()
+                + " all->all: "
+                + selfAll.get()
+                + " both: "
+                + selfBoth.get();
+    }
+
+    public void writeStats(List<RsqSimEventLoader.Event> events, String fileName)
+            throws IOException {
+        int ruptureId = 0;
+        SelfStiffnessCoulombFilter selfStiffness = new SelfStiffnessCoulombFilter(stiffness);
+        try (BufferedWriter writer = new BufferedWriter(new FileWriter(fileName))) {
+            writer.write(
+                    "rupture_id, event_id, FractCoulomb_pass, NetCoulomb_pass, "
+                            + selfStiffness.statsHeader());
+            writer.newLine();
+
+            for (RsqSimEventLoader.Event event : events) {
+                PlausibilityResult filter0 = filters.get(0).apply(event.jump, false);
+                PlausibilityResult filter1 = filters.get(1).apply(event.jump, false);
+
+                writer.write(
+                        ruptureId
+                                + ", "
+                                + event.id
+                                + ", "
+                                + filter0.isPass()
+                                + ", "
+                                + filter1.isPass()
+                                + ", "
+                                + selfStiffness.stats(event));
+                writer.newLine();
+                ruptureId++;
+            }
+        }
+    }
+
+    public List<PlausibilityResult> applyCoulomb(MultiRuptureJump jump) {
+        PlausibilityResult r0 = filters.get(0).apply(jump, false);
+        PlausibilityResult r1 = filters.get(1).apply(jump, false);
+        PlausibilityResult result = r0.logicalAnd(r1);
+        //        if(writer != null) {
+        //            try {
+        //                int[] stats = filters.get(0).collectStats(jump);
+        //                for (int stat : stats) {
+        //                    writer.write(stat + ", ");
+        //                }
+        //                writer.write("\n");
+        //            }catch(IOException x) {
+        //                throw new RuntimeException(x);
+        //            }
+        //        }
+
+        return List.of(r0, r1, result);
+    }
+
+    /**
+     * helper function to fill the cache with subduction->subduction stiffness
+     *
+     * @param args
+     */
+    public static void main(String[] args) throws IOException {
+        FaultSystemRupSet rupSet1 =
+                FaultSystemRupSet.load(
+                        new File(
"C:\\Users\\user\\GNS\\science-playground\\WORKDIR\\rupSetBruceRundir5883.zip")); + // CoulombTester tester = new CoulombTester(rupSet1, + // "C:\\Users\\user\\GNS\\science-playground\\WORKDIR\\stiffnessCaches"); + CoulombTester tester = new CoulombTester(rupSet1, "/tmp/stiffnessCaches"); + tester.setupStiffness(); + AggregatedStiffnessCalculator aggCalc = tester.getFilters().get(1).getAggCalc(); + + List> sections = + rupSet1.getFaultSectionDataList().stream() + .filter(s -> s.getSectionName().startsWith("Puysegur")) + .map(List::of) + .collect(Collectors.toList()); + + System.out.println("section count: " + sections.size()); + + sections.stream() + .parallel() + .forEach( + section -> { + sections.forEach( + fromSection -> { + aggCalc.calc(fromSection, section); + }); + System.out.print("."); + }); + + tester.saveCache(); + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/Patch.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/Patch.java new file mode 100644 index 00000000..fe58434f --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/Patch.java @@ -0,0 +1,82 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import java.util.ArrayList; +import java.util.List; +import org.opensha.commons.geo.*; +import org.opensha.commons.geo.json.Feature; +import org.opensha.commons.geo.json.FeatureProperties; +import org.opensha.commons.geo.json.Geometry; +import org.opensha.sha.faultSurface.FaultSection; + +public class Patch { + public final LocationList locations; + public final int id; + public final double rake; + public final double slip; + public final double area; + public int sectionIdFromZname = -1; + + public String zname; + + public final String[] row; + + public List sections = new ArrayList<>(); + + public int getNameSectionId() { + return sectionIdFromZname; + } + + public double getMaxLat() { + return locations.stream().mapToDouble(l -> l.lat).max().getAsDouble(); + } + + public Patch( + int id, LocationList locations, double rake, double slip, double area, String[] row) { + this.id = id; + this.rake = rake; + this.slip = slip; + this.locations = locations; + this.row = row; + this.area = area; + } + + public static Patch create( + int id, + Location a, + Location b, + Location c, + double rake, + double slip, + double area, + String[] row) { + LocationList locs = new LocationList(); + locs.add(a); + locs.add(b); + locs.add(c); + return new Patch(id, locs, rake, slip, area, row); + } + + public Feature toFeature() { + LocationList list = new LocationList(); + list.addAll(locations); + list.add(locations.first()); + Geometry geometry = new Geometry.LineString(list); + FeatureProperties properties = new FeatureProperties(); + properties.set("patch_id", id); + properties.set("rake", rake); + properties.set("slip", slip); + return new Feature(id, geometry, properties); + } + + public Feature toPolygonFeature() { + LocationList list = new LocationList(); + list.addAll(locations); + list.add(locations.first()); + Geometry geometry = new Geometry.Polygon(list); + FeatureProperties properties = new FeatureProperties(); + properties.set("patch_id", id); + properties.set("rake", rake); + properties.set("slip", slip); + return new Feature(id, geometry, properties); + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/PatchesFile.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/PatchesFile.java new file 
mode 100644 index 00000000..4e1ee8ba --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/PatchesFile.java @@ -0,0 +1,100 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import com.google.common.base.Preconditions; +import java.io.BufferedReader; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import org.opensha.commons.geo.Location; +import org.opensha.commons.geo.PlaneUtils; + +/** + * Loads geometry as specified in https://zenodo.org/records/5534462 and as in + * https://github.com/uc-eqgeo/rsqsim-python-tools/blob/main/src/rsqsim_api/rsqsim_api/fault/multifault.py#L161 + * + *
<p>
Geometry File (ASCII): zfault_Deepen.in + * + *
<p>
ASCII file listing patch (triangular) geometry for the simulated faults in a UTM coordinate + * system (zone 11S). The primary columns are: + * + *
<p>
* /x1, y1, z1/ - UTM coordinates of the first vertex * /x2, y2, z2/ - UTM coordinates of the + * second vertex * /x3, y3, z3/ - UTM coordinates of the third vertex * /rake/ - Direction of the + * motion of the hanging wall relative to the footwall (in degrees, following the convention of Aki + * & Richards, 2002) * /slip_rate/ - Long-term average slip rate (in m/s) + * + *
<p>
The first line in the file (excluding comment lines that start with '#') is the patch with + * ID=1, the second ID=2, etc. Additional metadata columns may exist in each line beyond those + * listed and can be ignored. + */ +public class PatchesFile { + + String fileName; + CoordinateConverter coordinateConverter; + + public PatchesFile(String fileName, CoordinateConverter coordinateConverter) { + this.fileName = fileName; + this.coordinateConverter = coordinateConverter; + } + + Location toLatLon(double easting, double northing, double depth) { + return coordinateConverter.toWGS84(easting, northing, Math.abs(depth) / 1000); + } + + Patch loadPatch(int id, String line) { + String[] parts = line.split(" "); + Preconditions.checkArgument(parts.length >= 11, "Line must have at least 11 elements"); + double[] values = new double[11]; + for (int i = 0; i < 11; i++) { + Preconditions.checkArgument(!parts[i].isEmpty()); + values[i] = Double.parseDouble(parts[i]); + } + + // calculating triangle area + // we're assuming that the coordinates are in an orthographic coordinate system in meters + // (such as UTM) + double[] ab = + new double[] {values[3] - values[0], values[4] - values[1], values[5] - values[2]}; + double[] ac = + new double[] {values[6] - values[0], values[7] - values[1], values[8] - values[2]}; + double[] crossProduct = PlaneUtils.getCrossProduct(ab, ac, false); + double area = PlaneUtils.getMagnitude(crossProduct) / 2; + + // double[] cb = new double[]{values[6] - values[3], values[7] - values[4], values[8] + // - values[5]}; + // + // System.out.println(id +": " + PlaneUtils.getMagnitude(ab) + ", " + + // PlaneUtils.getMagnitude(ac)+", " + PlaneUtils.getMagnitude(cb) + ", " + area); + + Location l1 = toLatLon(values[0], values[1], values[2]); + Location l2 = toLatLon(values[3], values[4], values[5]); + Location l3 = toLatLon(values[6], values[7], values[8]); + Patch patch = null; + try { + patch = Patch.create(id, l1, l2, l3, values[9], values[10], area, parts); + } catch (Exception x) { + // x.printStackTrace(); + System.err.println("error at line " + id + " " + x.getMessage()); + } + return patch; + } + + public List loadPatches() throws IOException { + BufferedReader reader = new BufferedReader(new FileReader(fileName)); + List patches = new ArrayList<>(); + + String line; + int id = 1; + while ((line = reader.readLine()) != null) { + if (line.isEmpty() || line.startsWith("#")) { + continue; + } + Patch patch = loadPatch(id, line); + id++; + patches.add(patch); + } + + reader.close(); + return patches; + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimEventLoader.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimEventLoader.java new file mode 100644 index 00000000..df4dc02d --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimEventLoader.java @@ -0,0 +1,267 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import com.google.common.base.Preconditions; +import java.io.*; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.*; +import java.util.stream.Collectors; +import nz.cri.gns.NZSHM22.opensha.util.SimpleGeoJsonBuilder; +import org.opensha.commons.geo.json.FeatureProperties; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.MultiRuptureJump; 
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.SectionDistanceAzimuthCalculator; +import org.opensha.sha.faultSurface.FaultSection; + +public class RsqSimEventLoader { + + final File runDir; + final RsqSimPatchLoader patchLoader; + final List jointEvents = new ArrayList<>(); + final List events = new ArrayList<>(); + + public RsqSimEventLoader(File runDir, RsqSimPatchLoader patchLoader) { + this.runDir = runDir; + this.patchLoader = patchLoader; + } + + public static class Event { + public final int id; + public List patches = new ArrayList<>(); + public List sections; + public MultiRuptureJump jump; + + public Event(int id) { + this.id = id; + } + + public List getPatches() { + return patches; + } + + /** + * Returns true if the event has subduction and crustal patches. + * + * @return + */ + boolean isJointRupture() { + boolean hasSubduction = false; + boolean hasCrustal = false; + for (Patch patch : patches) { + if (!patch.sections.isEmpty() + && patch.sections.get(0).getSectionName().contains("row:")) { + hasSubduction = true; + } else { + hasCrustal = true; + } + if (hasCrustal && hasSubduction) { + return true; + } + } + return false; + } + + /** + * Returns true if the rupture has crustal and subduction sections + * + * @return + */ + boolean isOpenShaJointRupture() { + boolean hasSubduction = false; + boolean hasCrustal = false; + for (FaultSection section : sections) { + if (section.getSectionName().contains("row:")) { + hasSubduction = true; + } else { + hasCrustal = true; + } + if (hasCrustal && hasSubduction) { + return true; + } + } + return false; + } + + public Map getSectionFillArea() { + Map result = new HashMap<>(); + for (Patch patch : patches) { + for (FaultSection section : patch.sections) { + result.compute( + section, + (key, value) -> patch.area + (Objects.isNull(value) ? 0 : value)); + } + } + return result; + } + } + + public List toFaultSections(Event event) { + List sections = new ArrayList<>(); + Map fillArea = event.getSectionFillArea(); + for (FaultSection section : fillArea.keySet()) { + double fillRatio = fillArea.get(section) / section.getArea(false); + if (fillRatio > 0.5) { + sections.add(section); + } + } + return sections; + } + + public List getJointRuptures() { + return jointEvents.stream() + .filter(Event::isJointRupture) + .peek(event -> event.sections = toFaultSections(event)) + .filter(Event::isOpenShaJointRupture) + .collect(Collectors.toList()); + } + + public static MultiRuptureJump makeJump(List ruptures) { + return new MultiRuptureJump( + ruptures.get(0).clusters[0].startSect, + ruptures.get(0), + ruptures.get(1).clusters[0].startSect, + ruptures.get(1), + 5); + } + + /** + * Filters input down to single crustal joint ruptures. Side effect: event.jump is populated. + * + * @param events + * @return + */ + public List makeSingleJointRuptures(List events) { + SectionDistanceAzimuthCalculator disAzCalc = + new SectionDistanceAzimuthCalculator( + patchLoader.loadedRupSet.getFaultSectionDataList()); + + ClusterAggregator aggregator = new ClusterAggregator(disAzCalc, 5); + + List singleCrustalJointRuptures = + events.stream() + .peek( + event -> { + List rs = aggregator.makeRuptures(event); + if (aggregator.allConnected(rs)) { + event.jump = makeJump(rs); + } + }) // turn into rupture pairs + .filter(event -> event.jump != null) // check if there's a single crustal + // rupture + .collect(Collectors.toList()); + + return singleCrustalJointRuptures; + } + + public static File findFile(File path, String... 
candidateNames) throws FileNotFoundException { + for (String fileName : candidateNames) { + File file = new File(path, fileName); + if (file.exists()) { + return file; + } + } + throw new FileNotFoundException( + "Could not find candidate files in " + + path + + " first candidate: " + + candidateNames[0]); + } + + public List loadEvents() throws IOException { + + File eListFile = findFile(runDir, ".eList", "eList", "whole_nz.eList"); + File pListFile = findFile(runDir, ".pList", "pList", "whole_nz.pList"); + + List eList = loadCatalogFile(eListFile); + List pList = loadCatalogFile(pListFile); + + Preconditions.checkState(eList.size() == pList.size()); + + System.out.println( + "Loaded " + eList.size() + " rows with " + eList.get(eList.size() - 1) + " events"); + + Event event = new Event(eList.get(0)); + + for (int i = 0; i < eList.size(); i++) { + int eid = eList.get(i); + int pid = pList.get(i); + if (event.id != eid) { + events.add(event); + if (event.isJointRupture()) { + jointEvents.add(event); + } + event = new Event(eid); + } + Patch patch = patchLoader.getPatch(pid); + Preconditions.checkState(Objects.nonNull(patch)); + event.patches.add(patch); + } + + System.out.println("Identified " + jointEvents.size() + " joint ruptures"); + return events; + } + + public List loadCatalogFile(File file) throws IOException { + List result = new ArrayList<>(); + + FileInputStream in = new FileInputStream(file); + + byte[] data = in.readAllBytes(); + ByteBuffer buffer = ByteBuffer.wrap(data); + buffer.order(ByteOrder.LITTLE_ENDIAN); + buffer.rewind(); + + while (buffer.hasRemaining()) { + result.add(buffer.getInt()); + } + + in.close(); + + return result; + } + + Map calculateParticipation(List events) { + Map result = new HashMap<>(); + for (RsqSimEventLoader.Event event : events) { + for (FaultSection section : event.sections) { + result.compute( + section.getSectionId(), (key, value) -> value == null ? 1 : value + 1); + } + } + return result; + } + + /** + * writes debug GeoJSON files to show how often each section participates in a rupture. + * + * @param events + * @param rupSet + */ + public void writeParticipationRates( + List events, FaultSystemRupSet rupSet, String baseOutput) { + Map participation = calculateParticipation(events); + SimpleGeoJsonBuilder builder = new SimpleGeoJsonBuilder(); + for (FaultSection section : rupSet.getFaultSectionDataList()) { + FeatureProperties props = builder.addFaultSectionPolygon(section); + Integer part = participation.get(section.getSectionId()); + props.set("participation", Objects.nonNull(part) ? 
part : 0); + } + builder.toJSON(baseOutput + "participation.geojson"); + + builder = new SimpleGeoJsonBuilder(); + for (FaultSection section : rupSet.getFaultSectionDataList()) { + if (!section.getSectionName().contains("row:")) { + Integer part = participation.get(section.getSectionId()); + + if (part != null && part > 0) { + + FeatureProperties props = builder.addFaultSectionPolygon(section); + + props.set("participation", part); + } + } + } + builder.toJSON(baseOutput + "participationCrustal.geojson"); + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimMain.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimMain.java new file mode 100644 index 00000000..a61d78f5 --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimMain.java @@ -0,0 +1,249 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import nz.cri.gns.NZSHM22.opensha.ruptures.experimental.joint.ManipulatedClusterRupture; +import nz.cri.gns.NZSHM22.opensha.util.SimpleGeoJsonBuilder; +import org.opengis.util.FactoryException; +import org.opensha.commons.geo.json.FeatureProperties; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture; +import org.opensha.sha.faultSurface.FaultSection; +import scratch.UCERF3.enumTreeBranches.ScalingRelationships; + +public class RsqSimMain implements Closeable { + + final SourceType sourceType; + final String runDirVersion; + final String outputDir; + final String baseOutputPath; + final String basePath; + + final BufferedWriter log; + + @Override + public void close() throws IOException { + log.close(); + } + + public enum SourceType { + BRUCE, + CANTERBURY + } + + public RsqSimMain( + String baseInputDir, String rundirVersion, SourceType sourceType, String baseOutputDir) + throws IOException { + this.sourceType = sourceType; + this.runDirVersion = rundirVersion; + this.outputDir = baseOutputDir + "/" + sourceType + "_" + runDirVersion + "/"; + Files.createDirectories(Paths.get(outputDir)); + this.baseOutputPath = outputDir + runDirVersion + "_"; + this.basePath = baseInputDir + "/" + runDirVersion + "/"; + log = new BufferedWriter(new FileWriter(outputDir + "log.txt")); + } + + public void log(String line) throws IOException { + System.out.println(line); + log.write(line); + log.newLine(); + } + + public static void writeDebugGeoJSON( + List singleCrustalJointRuptures, + List passes, + String baseOutput) + throws IOException { + List gjs = new ArrayList<>(); + List gjsRupturesOnly = new ArrayList<>(); + List filteredGeoJson = new ArrayList<>(); + Set passFilter = new HashSet<>(passes); + int ruptureId = 0; + for (RsqSimEventLoader.Event event : + singleCrustalJointRuptures) { // List.of(ruptures.get(0), ruptures.get(1))) { + + boolean isPass = passFilter.contains(event); + SimpleGeoJsonBuilder builder3 = new SimpleGeoJsonBuilder(); + + for (FaultSection section : event.sections) { + FeatureProperties props = builder3.addFaultSectionPolygon(section); + builder3.setPolygonColour(props, "red"); + builder3.setLineColour(props, "red"); + } + if (isPass) { + gjsRupturesOnly.add(builder3.toJSON()); + } + for (Patch patch : event.getPatches()) { + 
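// overlay the raw RSQSim patches (drawn green) on top of the rupture's sections (drawn red)
+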
FeatureProperties props = builder3.addFeature(patch.toPolygonFeature()); + builder3.setPolygonColour(props, "green"); + } + if (isPass) { + gjs.add(builder3.toJSON()); + } + if (ruptureId == 264) { + filteredGeoJson.add(builder3.toJSON()); + System.out.println("event : " + event.id); + } + + ruptureId++; + } + + BufferedWriter writer = null; + + // writer = new BufferedWriter(new FileWriter(baseOutput + "ruptures.json")); + // List someRuptures = List.of(gjs.get(0), gjs.get(1)); + // writer.write("["); + // writer.write(String.join(",\n", someRuptures)); + // writer.write("]"); + // writer.close(); + + writer = new BufferedWriter(new FileWriter(baseOutput + "rupturesOnly.json")); + + writer.write("["); + writer.write(String.join(",\n", gjsRupturesOnly)); + writer.write("]"); + writer.close(); + + writer = new BufferedWriter(new FileWriter(baseOutput + "filteredRuptures.json")); + + writer.write("["); + writer.write(String.join(",\n", filteredGeoJson)); + writer.write("]"); + writer.close(); + } + + public void fillStiffnessCache(CoulombTester tester, List events) { + SelfStiffnessCoulombFilter selfStiffness = new SelfStiffnessCoulombFilter(tester.stiffness); + events.stream() + .flatMap( + event -> + event.sections.stream() + .flatMap( + s -> + event.sections.stream() + .map( + s2 -> + new FaultSection[] { + s, s2 + }))) + .parallel() + .forEach(ab -> selfStiffness.calc(ab[0], ab[1])); + tester.stiffness.checkUpdateStiffnessCache(); + } + + public void process() throws IOException, FactoryException { + + // load and match patches + RsqSimPatchLoader patchLoader = + sourceType == SourceType.BRUCE + ? RsqSimPatchLoader.getBrucePatches(basePath) + : RsqSimPatchLoader.getCanterburyPatches(basePath); + + patchLoader.writeDebugMappings(baseOutputPath); + + // ruptures + + RsqSimEventLoader eventLoader = new RsqSimEventLoader(new File(basePath), patchLoader); + List events = eventLoader.loadEvents(); + + log("- events: " + events.size()); + + events = eventLoader.getJointRuptures(); + + log("- joint events " + eventLoader.jointEvents.size()); + log("- reconstructed joint ruptures " + events.size()); + + List allSingleCrustalJointRuptures = + eventLoader.makeSingleJointRuptures(events); + + log("- single crustal joint ruptures " + allSingleCrustalJointRuptures.size()); + + List singleCrustalJointRuptures = + allSingleCrustalJointRuptures.stream() + .filter( + event -> + event.sections.stream() + .filter( + s -> + !s.getSectionName() + .contains("row:")) + .mapToDouble(s -> s.getArea(false)) + .sum() + >= 100000000 // 100 km^2 in m^2 + ) + .collect(Collectors.toList()); + log( + "- single crustal joint ruptures with crustal component >= 100km^2 " + + singleCrustalJointRuptures.size()); + + eventLoader.writeParticipationRates( + singleCrustalJointRuptures, patchLoader.loadedRupSet, baseOutputPath); + + CoulombTester tester = + new CoulombTester( + patchLoader.loadedRupSet, + "C:\\tmp\\stiffnessCaches"); // "C:\\Users\\user\\GNS\\rupture + // sets\\stiffnessCache-nzshm22_complete_merged\\"); + tester.setupStiffness(); + + fillStiffnessCache(tester, events); + + log("- self stiffness > 0 joint ruptures: " + tester.testSelfStiffnessFilter(events)); + log( + "- self stiffness > 0 single crustal joint ruptures: " + + tester.testSelfStiffnessFilter(singleCrustalJointRuptures)); + + // List> stiffness = ruptures.parallelStream().map(r -> + // r.jump).map(tester::applyCoulomb).collect(Collectors.toList()); + // System.out.println("passes: " +stiffness.stream().map(s -> s.get(2).isPass()).filter(p -> + // 
p).count()); + List passes = + allSingleCrustalJointRuptures.parallelStream() + .filter(event -> tester.applyCoulomb(event.jump).get(2).isPass()) + .collect(Collectors.toList()); + log("- original filter passes: " + passes.size()); + + List clusterRuptures = + singleCrustalJointRuptures.stream() + .map(event -> ManipulatedClusterRupture.makeRupture(event.sections)) + .collect(Collectors.toList()); + + FaultSystemRupSet resultRupSet = + FaultSystemRupSet.builderForClusterRups( + patchLoader.loadedRupSet.getFaultSectionDataList(), clusterRuptures) + .forScalingRelationship(ScalingRelationships.SHAW_2009_MOD) + .addModule(tester.stiffness) + .build(); + resultRupSet.write(new File(baseOutputPath + "rupset.zip")); + + tester.writeStats(singleCrustalJointRuptures, baseOutputPath + "filterStats.csv"); + + writeDebugGeoJSON(singleCrustalJointRuptures, passes, baseOutputPath); + } + + public static void processBruce5942() throws IOException, FactoryException { + RsqSimMain main = + new RsqSimMain("C:\\rsqsimsCatalogue\\", "rundir5942", SourceType.BRUCE, "/tmp/"); + main.process(); + main.close(); + } + + public static void processCanterbury() throws IOException, FactoryException { + RsqSimMain main = + new RsqSimMain( + "C:\\rsqsimsCatalogue\\", "fromAndyH", SourceType.CANTERBURY, "/tmp/"); + main.process(); + main.close(); + } + + public static void main(String[] args) throws FactoryException, IOException { + // processBruce5942(); + processCanterbury(); + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimPatchLoader.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimPatchLoader.java new file mode 100644 index 00000000..3bea4ee4 --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/RsqSimPatchLoader.java @@ -0,0 +1,493 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import com.google.common.base.Preconditions; +import java.awt.geom.Area; +import java.io.*; +import java.util.*; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import nz.cri.gns.NZSHM22.opensha.util.SimpleGeoJsonBuilder; +import org.opengis.util.FactoryException; +import org.opensha.commons.geo.Location; +import org.opensha.commons.geo.LocationList; +import org.opensha.commons.geo.json.FeatureProperties; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; +import org.opensha.sha.earthquake.faultSysSolution.modules.PolygonFaultGridAssociations; +import org.opensha.sha.faultSurface.FaultSection; +import org.opensha.sha.faultSurface.RuptureSurface; + +public class RsqSimPatchLoader { + + public static final String RSQSIMS_HIKURANGI = "Hikurangi"; + public static final String RSQSIMS_PUYSEGUR = "Puysegar"; + + final File zfaultDeepenIn; + final File znamesDeepenIn; + final File rupSet; + public FaultSystemRupSet loadedRupSet; + + final PatchesFile patchesFile; + + Map> nameToSection; + PolygonFaultGridAssociations polys; + + List hikurangi; + List puysegur; + + public List patches = new ArrayList<>(); + + public Map patchLookup = new HashMap<>(); + + public Patch getPatch(int id) { + return patchLookup.get(id); + } + + public static class SubductionSection { + + final Area area; + public final FaultSection section; + + public SubductionSection(FaultSection section) { + this.section = section; + RuptureSurface surf = section.getFaultSurface(1, false, false); + LocationList locations = surf.getPerimeter(); + area = new Area(locations.toPath()); + } + + 
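/**
+         * Coarse overlap test: the patch counts as overlapping this section if any of its three
+         * vertices lies inside the section surface's perimeter polygon.
+         */
+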
public boolean overlaps(Patch patch) { + + if (area.contains(patch.locations.first().lon, patch.locations.first().lat)) { + return true; + } + if (area.contains(patch.locations.get(1).lon, patch.locations.get(1).lat)) { + return true; + } + if (area.contains(patch.locations.last().lon, patch.locations.last().lat)) { + return true; + } + + return false; + } + } + + public RsqSimPatchLoader( + File zfaultDeepenIn, PatchesFile patchesFile, File znamesDeepenIn, File rupSet) + throws FactoryException { + this.zfaultDeepenIn = zfaultDeepenIn; + this.znamesDeepenIn = znamesDeepenIn; + this.rupSet = rupSet; + this.patchesFile = patchesFile; + } + + public List loadPatches() throws IOException { + patches = patchesFile.loadPatches(); + patches.forEach(p -> patchLookup.put(p.id, p)); + return patches; + } + + public void loadNames() throws IOException { + BufferedReader reader = new BufferedReader(new FileReader(znamesDeepenIn)); + int index = 0; + String line = null; + while ((line = reader.readLine()) != null) { + line = line.substring(4, line.length() - 3); + patches.get(index).zname = line; + String[] lineParts = line.split(" "); + if (lineParts.length > 1) { + patches.get(index).sectionIdFromZname = Integer.parseInt(lineParts[0]); + } + index++; + } + Preconditions.checkState(index == patches.size()); + reader.close(); + } + + Map patchCountPerSection = new HashMap<>(); + + public void addSectionToPatch(Patch patch, FaultSection section) { + + // reject patches that are too low or too high + // only do this if the section has a big enough dip, otherwise it might be too restrictive. + // only do this for crustal. subduction is modelled too coarsely by Bruce + if (!section.getSectionName().contains("row:") && section.getAveDip() > 20) { + boolean keep = false; + for (Location location : patch.locations) { + if (location.depth > section.getOrigAveUpperDepth() + && location.depth < section.getAveLowerDepth()) { + keep = true; + break; + } + } + if (!keep) { + return; + } + } + + patch.sections.add(section); + patchCountPerSection.compute( + section, (key, value) -> Objects.isNull(value) ? 1 : value + 1); + } + + public void findSubductionSections(Patch patch) { + List sections = + patch.zname.equals(RSQSIMS_HIKURANGI) ? 
hikurangi : puysegur; + + for (SubductionSection candidate : sections) { + if (candidate.overlaps(patch)) { + addSectionToPatch(patch, candidate.section); + } + } + } + + static class LambdaCounter { + public int count = 0; + + public void inc() { + count++; + } + + public String toString() { + return "" + count; + } + } + + /** + * For Bruce's rundir5883, where crustal patches have the subsection id in the zname + * + * @throws IOException + */ + public void loadRupSetNewBruce() throws IOException { + loadedRupSet = FaultSystemRupSet.load(this.rupSet); + hikurangi = new ArrayList<>(); + puysegur = new ArrayList<>(); + // load up the lists needed by findSubductionSections() + loadedRupSet + .getFaultSectionDataList() + .forEach( + s -> { + if (s.getSectionName().startsWith("Hikurangi")) { + hikurangi.add(new SubductionSection(s)); + } + if (s.getSectionName().startsWith("Puysegur")) { + puysegur.add(new SubductionSection(s)); + } + }); + + // crustal sections should be matched directly by id + patches.forEach( + p -> { + if (p.sectionIdFromZname != -1) { + addSectionToPatch( + p, loadedRupSet.getFaultSectionData(p.sectionIdFromZname)); + } + }); + // subduction is done geometrically + patches.stream() + .filter(p -> p.zname.equals(RSQSIMS_HIKURANGI) || p.zname.equals(RSQSIMS_PUYSEGUR)) + .forEach(this::findSubductionSections); + } + + void loadRupSetCanterbury(String mappingsFile, int sectionOffset) throws FileNotFoundException { + Map> patchIds = UCMappingsFile.read(mappingsFile, sectionOffset); + + patchIds.keySet() + .forEach( + sectionId -> { + patchIds.get(sectionId) + .forEach( + patchId -> { + Patch patch = patches.get(patchId - 1); + Preconditions.checkState(patch.id == patchId); + addSectionToPatch( + patch, + loadedRupSet.getFaultSectionData( + sectionId)); + }); + }); + } + + public void loadRupSetCanterbury(String basePath) throws IOException { + loadedRupSet = FaultSystemRupSet.load(this.rupSet); + loadRupSetCanterbury(basePath + "rsqsim_crustal_discretized_trimmed_dict.json", 0); + loadRupSetCanterbury(basePath + "hikkerm_discretized_trimmed_dict.json", 2596); + loadRupSetCanterbury(basePath + "puysegur_discretized_trimmed_dict.json", 2325); + } + + /** + * Writes two CSV files that contain the mappings between patches and sections. outputFile will + * be a CSV file that has a section id in the first column and matching patch ids in the + * following columns. outputFile2 has a patch id in the first column and matching section ids in + * the following columns. 
+ * + * @param outputFile + * @param outputFile2 + * @throws IOException + */ + public void writeMappingsToCsv(String outputFile, String outputFile2) throws IOException { + + Map> bySection = new HashMap<>(); + patches.forEach( + patch -> { + patch.sections.forEach( + section -> { + bySection.compute( + section.getSectionId(), + (key, value) -> { + if (value == null) { + value = new ArrayList<>(); + } + value.add(patch); + return value; + }); + }); + }); + + BufferedWriter writer = new BufferedWriter(new FileWriter(outputFile)); + bySection.keySet().stream() + .sorted() + .forEach( + sectionId -> { + try { + writer.write( + sectionId + + ", " + + loadedRupSet + .getFaultSectionDataList() + .get(sectionId) + .getSectionName() + + ", "); + List patches = bySection.get(sectionId); + if (patches != null) { + String patchIds = + patches.stream() + .map(p -> p.id + "") + .collect(Collectors.joining(", ")); + writer.write(patchIds); + } + writer.write("\n"); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + writer.close(); + + BufferedWriter writer2 = new BufferedWriter(new FileWriter(outputFile2)); + patches.stream() + .sorted(Comparator.comparing(p -> p.id)) + .forEach( + patch -> { + try { + writer2.write(patch.id + ", "); + String sectionIds = + patch.sections.stream() + .map(s -> s.getSectionId() + "") + .collect(Collectors.joining(", ")); + writer2.write(sectionIds); + writer2.write("\n"); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + writer2.close(); + } + + public void writeMappingsToFile(String outputFile, Predicate sectionFilter) + throws IOException { + + Map> bySection = new HashMap<>(); + patches.forEach( + patch -> { + patch.sections.forEach( + section -> { + if (sectionFilter.test(section)) { + bySection.compute( + section.getSectionId(), + (key, value) -> { + if (value == null) { + value = new ArrayList<>(); + } + value.add(patch); + return value; + }); + } + }); + }); + + List geojsons = new ArrayList<>(); + bySection + .keySet() + .forEach( + sectionId -> { + if (sectionId == -1) { + return; + } + SimpleGeoJsonBuilder patchBuilder = new SimpleGeoJsonBuilder(); + FeatureProperties props = + patchBuilder.addFaultSectionPolygon( + loadedRupSet.getFaultSectionData(sectionId)); + patchBuilder.setPolygonColour(props, "rgba(255, 0, 0, 0.8)"); + bySection + .get(sectionId) + .forEach( + patch -> { + FeatureProperties p = + patchBuilder.addFeature( + patch.toPolygonFeature()); + patchBuilder.setPolygonColour(p, "#d5f024"); + }); + geojsons.add(patchBuilder.toJSON()); + }); + + BufferedWriter out = new BufferedWriter(new FileWriter(outputFile)); + out.write("["); + out.write(String.join(",\n", geojsons)); + out.write("]"); + out.close(); + } + + public void writeDebugMappings(String baseOutputPath) throws IOException { + writeMappingsToCsv( + baseOutputPath + "sectionToPatches.csv", baseOutputPath + "patchToSections.csv"); + + // patches debug files + writeMappingsToFile( + baseOutputPath + "mappingsCrustal.geojson", + section -> + !section.getSectionName().startsWith("Hikurangi") + && !section.getSectionName().startsWith("Hikurangi")); + + writeMappingsToFile( + baseOutputPath + "mappingsPuysegur.geojson", + section -> section.getSectionName().startsWith("Puysegur")); + + writeMappingsToFile( + baseOutputPath + "mappingsHikurangi.geojson", + section -> section.getSectionName().startsWith("Hikurangi")); + + SimpleGeoJsonBuilder builder = new SimpleGeoJsonBuilder(); + for (Patch patch : patches) { + if (patch.locations.get(0).lat < -44) { + 
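// keep only patches whose first vertex lies south of latitude -44 in the debug output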
builder.addFeature(patch.toPolygonFeature()); + } + } + builder.toJSON(baseOutputPath + "patches.geojson"); + } + + /** + * Processes Bruce's rundir5883 + * + * @throws IOException + * @throws FactoryException + */ + public static RsqSimPatchLoader getBrucePatches(String basePath) + throws IOException, FactoryException { + + String fileName = basePath + "zfault_Deepen.in"; + String namesFileName = basePath + "znames_Deepen.in"; + String rupSetFileName = "C:\\Users\\user\\GNS\\rupture sets\\nzshm22_complete_merged.zip"; + + PatchesFile patchesFile = new PatchesFile(fileName, new CoordinateConverter.UTM(59, false)); + RsqSimPatchLoader patchLoader = + new RsqSimPatchLoader( + new File(fileName), + patchesFile, + new File(namesFileName), + new File(rupSetFileName)); + patchLoader.loadPatches(); + patchLoader.loadNames(); + patchLoader.loadRupSetNewBruce(); + + return patchLoader; + } + + public static RsqSimPatchLoader getCanterburyPatches(String basePath) + throws IOException, FactoryException { + String fileName = basePath + "whole_nz_faults_2500_tapered_slip.flt"; + String namesFileName = basePath + "znames_Deepen.in"; + + String rupSetFileName = "C:\\Users\\user\\GNS\\rupture sets\\nzshm22_complete_merged.zip"; + + PatchesFile patchesFile = new PatchesFile(fileName, new CoordinateConverter.NZTM()); + RsqSimPatchLoader patchLoader = + new RsqSimPatchLoader( + new File(fileName), + patchesFile, + new File(namesFileName), + new File(rupSetFileName)); + patchLoader.loadPatches(); + + patchLoader.loadRupSetCanterbury(basePath); + + return patchLoader; + } + + public static void checkSectionEquality( + List superSet, + List subSet, + int startIndex) { + for (int i = 0; i < subSet.size(); i++) { + FaultSection a = superSet.get(i + startIndex); + FaultSection b = subSet.get(i); + boolean matches = a.getSectionName().equals(b.getSectionName()); + Preconditions.checkState( + matches, + i + + ": " + + (i + startIndex) + + " " + + a.getSectionName() + + " : " + + b.getSectionName()); + } + } + + /** + * This is to verify that we can use our combined rupset with the Canterbury data. 
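+     * It loads the Puysegur, Hikurangi, and crustal rupture sets and checks that each one's
+     * sections appear as a contiguous block, with matching names, at the expected offset in the
+     * merged rupture set.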
+ * + * @throws IOException + */ + public static void checkRupSetMatches() throws IOException { + FaultSystemRupSet rupSetPuy = + FaultSystemRupSet.load( + new File( + "C:\\Users\\user\\GNS\\rupture sets\\RupSet_Sub_FM(SBD_0_2_PUY_15)_mnSbS(2)_mnSSPP(2)_mxSSL(0.5)_ddAsRa(2.0,5.0,5)_ddMnFl(0.1)_ddPsCo(0.0)_ddSzCo(0.0)_thFc(0.0)(1).zip")); + FaultSystemRupSet rupSetHik = + FaultSystemRupSet.load( + new File( + "C:\\Users\\user\\GNS\\rupture sets\\RupSet_Sub_FM(SBD_0_3_HKR_LR_30)_mnSbS(2)_mnSSPP(2)_mxSSL(0.5)_ddAsRa(2.0,5.0,5)_ddMnFl(0.1)_ddPsCo(0.0)_ddSzCo(0.0)_thFc(0.0).zip")); + FaultSystemRupSet rupSetCru = + FaultSystemRupSet.load( + new File( + "C:\\Users\\user\\GNS\\rupture sets\\NZSHM22_RuptureSet-UnVwdHVyZUdlbmVyYXRpb25UYXNrOjEwMDAzOA==.zip")); + FaultSystemRupSet rupSetCombined = + FaultSystemRupSet.load( + new File( + "C:\\Users\\user\\GNS\\rupture sets\\nzshm22_complete_merged.zip")); + + Preconditions.checkState( + rupSetPuy.getNumSections() + rupSetHik.getNumSections() + rupSetCru.getNumSections() + == rupSetCombined.getNumSections()); + + int cruStart = 0; + int hikStart = 2596; + int puyStart = 2325; + + checkSectionEquality( + rupSetCombined.getFaultSectionDataList(), + rupSetCru.getFaultSectionDataList(), + cruStart); + checkSectionEquality( + rupSetCombined.getFaultSectionDataList(), + rupSetPuy.getFaultSectionDataList(), + puyStart); + checkSectionEquality( + rupSetCombined.getFaultSectionDataList(), + rupSetHik.getFaultSectionDataList(), + hikStart); + } + + public static void main(String[] args) throws FactoryException, IOException { + + // checkRupSetMatches(); + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/SelfStiffnessCoulombFilter.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/SelfStiffnessCoulombFilter.java new file mode 100644 index 00000000..93286be0 --- /dev/null +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/SelfStiffnessCoulombFilter.java @@ -0,0 +1,67 @@ +package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims; + +import java.text.DecimalFormat; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.StiffnessCalcModule; +import org.opensha.sha.faultSurface.FaultSection; +import org.opensha.sha.simulators.stiffness.AggregatedStiffnessCalculator; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator; + +/** not a proper filter yet, for gathering data only */ +public class SelfStiffnessCoulombFilter { + + StiffnessCalcModule stiffnessCalcModule; + AggregatedStiffnessCalculator stiffnessCalculator; + + DecimalFormat fmt3 = new DecimalFormat("0.000"); + + public SelfStiffnessCoulombFilter(StiffnessCalcModule stiffnessCalcModule) { + this.stiffnessCalcModule = stiffnessCalcModule; + this.stiffnessCalculator = + new AggregatedStiffnessCalculator( + SubSectStiffnessCalculator.StiffnessType.CFF, + stiffnessCalcModule.stiffnessCalc, + true, + AggregatedStiffnessCalculator.AggregationMethod.FLATTEN, + AggregatedStiffnessCalculator.AggregationMethod.SUM, + AggregatedStiffnessCalculator.AggregationMethod.SUM, + AggregatedStiffnessCalculator.AggregationMethod.SUM); + } + + public String statsHeader() { + return "all->sub, all->cru, all->all"; + } + + public String stats(RsqSimEventLoader.Event event) { + double[] stats = statsData(event); + return Arrays.stream(stats).mapToObj(fmt3::format).collect(Collectors.joining(", ")); + } + + public 
double calc(FaultSection a, FaultSection b) {
+        return stiffnessCalculator.calc(List.of(a), List.of(b));
+    }
+
+    public double[] statsData(RsqSimEventLoader.Event event) {
+        List<FaultSection> crustal =
+                event.sections.stream()
+                        .filter(s -> !s.getSectionName().contains("row:"))
+                        .collect(Collectors.toList());
+        double crustalArea = crustal.stream().mapToDouble(s -> s.getArea(false)).sum();
+        if (crustalArea < 100000000) {
+            return new double[] {0, 0, 0};
+        }
+        List<FaultSection> subduction =
+                event.sections.stream()
+                        .filter(s -> s.getSectionName().contains("row:"))
+                        .collect(Collectors.toList());
+        List<FaultSection> allSections = event.sections;
+
+        double from = stiffnessCalculator.calc(allSections, subduction);
+        double to = stiffnessCalculator.calc(allSections, crustal);
+        double self = stiffnessCalculator.calc(allSections, allSections);
+
+        return new double[] {from, to, self};
+    }
+}
diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/UCMappingsFile.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/UCMappingsFile.java
new file mode 100644
index 00000000..5443de27
--- /dev/null
+++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/UCMappingsFile.java
@@ -0,0 +1,40 @@
+package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims;
+
+import com.google.gson.Gson;
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Reads a U Canterbury mapping file from section id to patch id. Section ids refer to the crustal,
+ * Puysegur, or Hikurangi rupture set. An index offset can be used to move them into the combined
+ * rupture set id space.
+ */
+public class UCMappingsFile {
+
+    static Map<Integer, List<Integer>> read(String fileName, int sectionOffset)
+            throws FileNotFoundException {
+        BufferedReader bufferedReader = new BufferedReader(new FileReader(fileName));
+
+        Gson gson = new Gson();
+        Map<String, Object> json =
+                (Map<String, Object>) gson.fromJson(bufferedReader, Object.class);
+
+        Map<Integer, List<Integer>> result = new HashMap<>();
+
+        json.keySet()
+                .forEach(
+                        k -> {
+                            int key = Integer.parseInt(k);
+                            List<Double> ps = (List<Double>) json.get(k);
+                            List<Integer> patches =
+                                    ps.stream().map(Double::intValue).collect(Collectors.toList());
+                            result.put(key + sectionOffset, patches);
+                        });
+        return result;
+    }
+}
diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/UCRuptureTester.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/UCRuptureTester.java
new file mode 100644
index 00000000..b0b332eb
--- /dev/null
+++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/ruptures/experimental/rsqsims/UCRuptureTester.java
@@ -0,0 +1,246 @@
+package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims;
+
+import com.google.common.base.Preconditions;
+import java.io.*;
+import java.util.*;
+import nz.cri.gns.NZSHM22.opensha.util.SimpleGeoJsonBuilder;
+import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.MultiRuptureJump;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.StiffnessCalcModule;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.impl.MultiRuptureCoulombFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.impl.MultiRuptureFractCoulombPositiveFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.impl.MultiRuptureNetCoulombPositiveFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.PlausibilityResult;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.impl.coulomb.ParentCoulombCompatibilityFilter;
+import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.SectionDistanceAzimuthCalculator;
+import org.opensha.sha.faultSurface.FaultSection;
+
+public class UCRuptureTester {
+
+    final String rupFileName;
+    final FaultSystemRupSet rupSet;
+    final SectionDistanceAzimuthCalculator disAzCalc;
+
+    final double maxInternalJumpDist = 5;
+
+    StiffnessCalcModule stiffness;
+
+    Map<String, MultiRuptureJump> ruptures = new HashMap<>();
+    int crustalEnd = 0;
+    int hikurangiStart = -1;
+    int hikurangiEnd = 0;
+    int puysegurStart = -1;
+    int puysegurEnd = 0;
+
+    List<MultiRuptureCoulombFilter> filters = new ArrayList<>();
+
+    public UCRuptureTester(String rupFileName) throws IOException {
+        this.rupFileName = rupFileName;
+        this.rupSet = FaultSystemRupSet.load(new File(rupFileName));
+        this.disAzCalc = new SectionDistanceAzimuthCalculator(rupSet.getFaultSectionDataList());
+        for (int i = 0; i < rupSet.getNumSections(); i++) {
+            FaultSection section = rupSet.getFaultSectionData(i);
+            if (section.getSectionName().startsWith("Puysegur")
+                    && section.getSectionName().contains("row:")
+                    && puysegurStart == -1) {
+                puysegurStart = i;
+            }
+
+            if (section.getSectionName().startsWith("Hikurangi")
+                    && section.getSectionName().contains("row:")
+                    && hikurangiStart == -1) {
+                hikurangiStart = i;
+            }
+        }
+        if (hikurangiStart < puysegurStart) {
+            crustalEnd = hikurangiStart - 1;
+            hikurangiEnd = puysegurStart - 1;
+            puysegurEnd = rupSet.getNumSections() - 1;
+        } else {
+            crustalEnd = puysegurStart - 1;
+            puysegurEnd = hikurangiStart - 1;
+            hikurangiEnd = rupSet.getNumSections() - 1;
+        }
+    }
+
+    Integer getSectionId(String ruptureId, String source, String id) {
+        String key = source.toLowerCase().trim();
+        int sectionId = Integer.parseInt(id);
+        if (key.startsWith("crustal")) {
+            Preconditions.checkArgument(
+                    sectionId <= crustalEnd,
+                    "rupture: "
+                            + ruptureId
+                            + " source: "
+                            + source
+                            + " id: "
+                            + id
+                            + " was over "
+                            + crustalEnd);
+            return sectionId;
+        }
+        if (key.startsWith("puysegur")) {
+            Preconditions.checkArgument(sectionId <= puysegurEnd);
+            return sectionId + puysegurStart;
+        }
+        if (key.startsWith("hikurangi")) {
+            Preconditions.checkArgument(sectionId <= hikurangiEnd);
+            return sectionId + hikurangiStart;
+        }
+        throw new RuntimeException("Unexpected source " + source);
+    }
+
+    // note: only the first target/nucleation section pair is visited, so the jump distance is
+    // taken from that single pair
+    protected MultiRuptureJump makeJump(ClusterRupture nucleation, ClusterRupture target) {
+        for (FaultSection targetSection : target.buildOrderedSectionList()) {
+            for (FaultSection nucleationSection : nucleation.clusters[0].subSects) {
+                double distance = disAzCalc.getDistance(targetSection, nucleationSection);
+                System.out.println(distance);
+                return new MultiRuptureJump(
+                        nucleation.clusters[0].startSect,
+                        nucleation,
+                        target.clusters[0].startSect,
+                        target,
+                        distance);
+            }
+        }
+        return null;
+    }
+
+    void loadCSV(String fileName) throws IOException {
+        ClusterAggregator aggregator = new ClusterAggregator(disAzCalc, 5);
+        BufferedReader reader = new BufferedReader(new FileReader(fileName));
+        String currentRuptureId = null;
+        List<ClusterRupture> currentRuptures = new ArrayList<>();
+        int totalCount = 0;
+        int jointCount = 0;
+        while (true) {
+            String line = reader.readLine();
+            String[] components = line != null ?
+                    line.trim().split("\\h") : new String[] {null};
+            String ruptureId = components[0];
+            if (!Objects.equals(ruptureId, currentRuptureId)) {
+                if (currentRuptureId != null) {
+                    totalCount++;
+                }
+                if (currentRuptureId != null && currentRuptures.size() > 1) {
+                    jointCount++;
+                }
+
+                if (currentRuptureId != null
+                        && currentRuptures.size() > 1
+                        && aggregator.allConnected(currentRuptures)) {
+                    Preconditions.checkState(currentRuptures.size() == 2);
+                    ruptures.put(
+                            currentRuptureId,
+                            makeJump(currentRuptures.get(0), currentRuptures.get(1)));
+                }
+                currentRuptureId = ruptureId;
+                currentRuptures = new ArrayList<>();
+            }
+            if (currentRuptureId == null) {
+                break;
+            }
+            String source = components[1];
+            List<FaultSection> clusterSections = new ArrayList<>();
+            for (int i = 2; i < components.length; i++) {
+                clusterSections.add(
+                        rupSet.getFaultSectionData(
+                                getSectionId(currentRuptureId, source, components[i])));
+            }
+            ClusterRupture rupture =
+                    ClusterRupture.forOrderedSingleStrandRupture(clusterSections, disAzCalc);
+            currentRuptures.add(rupture);
+        }
+
+        System.out.println("total " + totalCount + " joint count " + jointCount);
+        reader.close();
+    }
+
+    void setupStiffness() {
+
+        stiffness =
+                new StiffnessCalcModule(
+                        rupSet,
+                        2,
+                        new File(
+                                "C:\\Users\\user\\GNS\\rupture sets\\stiffnessCache-nzshm22_complete_merged\\"));
+
+        // what fraction of interactions should be positive? this number will take some tuning
+        float fractThreshold = 0.75f;
+        MultiRuptureFractCoulombPositiveFilter fractCoulombFilter =
+                new MultiRuptureFractCoulombPositiveFilter(
+                        stiffness.stiffnessCalc,
+                        fractThreshold,
+                        ParentCoulombCompatibilityFilter.Directionality.BOTH);
+        filters.add(fractCoulombFilter);
+
+        // force the net Coulomb from one rupture to the other to be positive; this more heavily
+        // weights nearby interactions
+        ParentCoulombCompatibilityFilter.Directionality netDirectionality =
+                ParentCoulombCompatibilityFilter.Directionality
+                        .BOTH; // require it to be positive from subduction to crustal AND from
+        // crustal to subduction
+        // Directionality netDirectionality = Directionality.EITHER; // require it to be
+        // positive from subduction to crustal OR from crustal to subduction
+        MultiRuptureNetCoulombPositiveFilter netCoulombFilter =
+                new MultiRuptureNetCoulombPositiveFilter(
+                        stiffness.stiffnessCalc, netDirectionality);
+        filters.add(netCoulombFilter);
+    }
+
+    public void saveCache() {
+        stiffness.checkUpdateStiffnessCache();
+    }
+
+    public static void main(String[] args) throws IOException {
+        UCRuptureTester ruptureTester =
+                new UCRuptureTester(
+                        "C:\\Users\\user\\GNS\\rupture sets\\nzshm22_complete_merged.zip");
+        ruptureTester.loadCSV(
+                "C:\\Users\\user\\GNS\\RSQSim\\AndyHowellAugust24\\biggest_events.txt");
+        System.out.println(ruptureTester.ruptures.size());
+        ruptureTester.setupStiffness();
+
+        BufferedWriter writer = new BufferedWriter(new FileWriter("/tmp/UCRSQSimout.csv"));
+
+        int count = 0;
+        int passCount = 0;
+        for (String ruptureId : ruptureTester.ruptures.keySet()) {
+            MultiRuptureJump jump = ruptureTester.ruptures.get(ruptureId);
+            PlausibilityResult r0 = ruptureTester.filters.get(0).apply(jump, false);
+            PlausibilityResult r1 = ruptureTester.filters.get(1).apply(jump, false);
+            PlausibilityResult result = r0.logicalAnd(r1);
+            if (result.canContinue()) {
+                passCount++;
+            }
+            writer.write(ruptureId + ", " + result + ", " + r0 + ", " + r1 + "\n");
+            count++;
+            System.out.println(count + " of " + ruptureTester.ruptures.size());
+        }
+        writer.close();
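+
+        // summary count: joint RSQSim ruptures that pass both Coulomb filters
+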
System.out.println(passCount); + + MultiRuptureJump[] jumps = ruptureTester.ruptures.values().toArray(new MultiRuptureJump[0]); + PlausibilityResult r0 = ruptureTester.filters.get(0).apply(jumps[10], true); + PlausibilityResult r1 = ruptureTester.filters.get(1).apply(jumps[10], true); + + ruptureTester.saveCache(); + + System.out.println("--------------"); + System.out.println(jumps[10].distance); + System.out.println(r0); + System.out.println(r1); + + MultiRuptureJump jump = ruptureTester.ruptures.get("2254817"); + + SimpleGeoJsonBuilder geoJson = new SimpleGeoJsonBuilder(); + for (FaultSection section : jump.fromRupture.buildOrderedSectionList()) { + geoJson.addFaultSectionPolygon(section); + } + for (FaultSection section : jump.toRupture.buildOrderedSectionList()) { + geoJson.addFaultSectionPolygon(section); + } + geoJson.toJSON("/tmp/rsqsimRup2254817.geojson"); + } +} diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/util/NZSHM22_PythonGateway.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/util/NZSHM22_PythonGateway.java index c2f126b4..7a799032 100644 --- a/src/main/java/nz/cri/gns/NZSHM22/opensha/util/NZSHM22_PythonGateway.java +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/util/NZSHM22_PythonGateway.java @@ -14,6 +14,7 @@ import nz.cri.gns.NZSHM22.opensha.ruptures.NZSHM22_AbstractRuptureSetBuilder; import nz.cri.gns.NZSHM22.opensha.ruptures.NZSHM22_CoulombRuptureSetBuilder; import nz.cri.gns.NZSHM22.opensha.ruptures.NZSHM22_SubductionRuptureSetBuilder; +import nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims.CoulombTester; import nz.cri.gns.NZSHM22.opensha.timeDependent.TimeDependentRatesGenerator; import nz.cri.gns.NZSHM22.util.GitVersion; import nz.cri.gns.NZSHM22.util.NZSHM22_ReportPageGen; @@ -22,6 +23,7 @@ import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution; import org.opensha.sha.earthquake.faultSysSolution.RupSetScalingRelationship; +import org.opensha.sha.earthquake.faultSysSolution.modules.ClusterRuptures; import py4j.GatewayServer; /** A py4j gateway for building ruptures and running inversions. 
*/ @@ -274,4 +276,17 @@ public static RupSetScalingRelationship getScalingRelationship(String name) { public static void initJupyterLogger(String basePath) { JupyterLogger.initialise(basePath); } + + public static FaultSystemRupSet loadRupSet(String fileName) throws IOException { + return FaultSystemRupSet.load(new File(fileName)); + } + + public static ClusterRuptures getRuptures(FaultSystemRupSet rupSet) { + return rupSet.getModule(ClusterRuptures.class); + } + + public static CoulombTester getCoulombTester(FaultSystemRupSet rupSet, String cacheFile) + throws IOException { + return new CoulombTester(rupSet, cacheFile); + } } diff --git a/src/main/java/nz/cri/gns/NZSHM22/opensha/util/SimpleGeoJsonBuilder.java b/src/main/java/nz/cri/gns/NZSHM22/opensha/util/SimpleGeoJsonBuilder.java index 7c4d1c1b..75339140 100644 --- a/src/main/java/nz/cri/gns/NZSHM22/opensha/util/SimpleGeoJsonBuilder.java +++ b/src/main/java/nz/cri/gns/NZSHM22/opensha/util/SimpleGeoJsonBuilder.java @@ -21,6 +21,11 @@ public class SimpleGeoJsonBuilder { List features = new ArrayList<>(); + public FeatureProperties addFeature(Feature feature) { + features.add(feature); + return feature.properties; + } + public FeatureProperties addLocation(Location location) { FeatureProperties properties = new FeatureProperties(); features.add(new Feature(new Geometry.Point(location), properties)); @@ -59,6 +64,21 @@ public FeatureProperties addFaultSection(FaultSection section) { return feature.properties; } + public FeatureProperties addFaultSectionPerimeter(FaultSection section) { + LocationList locations = section.getFaultSurface(1, false, false).getPerimeter(); + FeatureProperties props = new FeatureProperties(); + features.add(new Feature(new Geometry.LineString(locations), props)); + return props; + } + + public FeatureProperties addFaultSectionPolygon(FaultSection section) { + LocationList locations = section.getFaultSurface(1, false, false).getPerimeter(); + FeatureProperties props = + new FeatureProperties(GeoJSONFaultSection.toFeature(section).properties); + features.add(new Feature(new Geometry.Polygon(locations), props)); + return props; + } + public FeatureProperties addLine(Location... locations) { LocationList locs = new LocationList(); locs.addAll(Arrays.asList(locations)); @@ -94,6 +114,11 @@ public FeatureProperties setLineColour( return props; } + public FeatureProperties setPolygonColour(FeatureProperties props, String cssColour) { + props.set(FeatureProperties.FILL_COLOR_PROP, cssColour); + return props; + } + public FeatureProperties setLineColour(FeatureProperties props, String cssColour) { props.set(FeatureProperties.STROKE_COLOR_PROP, cssColour); return props;