Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ compileJava.options.encoding = "UTF-8"
apply from: '../opensha/build-git.gradle'

dependencies {
implementation 'org.apache.sis.core:sis-referencing:1.4'
implementation 'org.apache.sis.non-free:sis-embedded-data:1.4'
implementation 'org.apache.sis.non-free:sis-epsg:1.4'
/* no remote repo */
implementation files('python/share/py4j/py4j0.10.9.1.jar', //Py4j jar installed locally via `pip install py4j`
'lib/openmap.jar')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,30 @@ protected NZSHM22_CrustalInversionRunner configure() throws DocumentException, I
initialSolution = variablePerturbationBasis.clone();
}

/* FIXME oakley, this is something for RSQSim
rupSet.removeRuptures();
// rupSet.getFaultSectionDataList().forEach(s ->
// s.setSectionName(s.getSectionName().replace("Subsection ", "")));
Set<String> names = new HashSet<>();
for (FaultSection section : rupSet.getFaultSectionDataList()) {

section.setSectionName(section.getSectionId() + " " + section.getSectionName());
}
// Preconditions.checkState(names.size() == rupSet.getNumSections());
U3FaultSystemIO.writeRupSet(rupSet, new File("/tmp/NZSHM_crustal_u3_use_this_instead.zip"));

List<String> metaData = new ArrayList<>();
metaData.add("NZSHM22 crustal fault sub sections");
metaData.add("fault_model: CFM_1_0A_DOM_SANSTVZ");
metaData.add("depth_scaling_tvz: 0.667");
metaData.add("depth_scaling_sans: 0.8");

FaultSectionDataWriter.writeSectionsToFile(
rupSet.getFaultSectionDataList(), metaData, new File("/tmp/asciitest.txt"), false);


*/

InversionModels inversionModel = branch.getValue(InversionModels.class);

// this contains all inversion weights
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,13 @@
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.*;
import java.util.stream.Collectors;
import nz.cri.gns.NZSHM22.opensha.ruptures.downDip.DownDipFaultSubSectionCluster;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.Jump;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.multiRupture.MultiRuptureJump;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.SectionDistanceAzimuthCalculator;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.UniqueRupture;
import org.opensha.sha.faultSurface.FaultSection;
Expand Down Expand Up @@ -59,6 +57,16 @@ static FaultSection last(FaultSubsectionCluster cluster) {
return cluster.subSects.get(cluster.subSects.size() - 1);
}

// FIXME use properties instead of the section-name heuristic
/**
 * Heuristic: a section is crustal iff it is not a subduction section.
 * Delegates to {@link #isSubduction(FaultSection)} so the "row:" name
 * convention is encoded in exactly one place.
 *
 * @param section the fault section to classify
 * @return true if the section is crustal
 */
static boolean isCrustal(FaultSection section) {
    return !isSubduction(section);
}

// FIXME use properties instead of the section-name heuristic
/**
 * Heuristic: subduction sections carry a "row:" token in their section name.
 *
 * @param section the fault section to classify
 * @return true if the section is a subduction section
 */
static boolean isSubduction(FaultSection section) {
    String name = section.getSectionName();
    return name.contains("row:");
}

static ImmutableList<Jump> makeInternalJumps(
ClusterRupture ruptureA,
ClusterRupture ruptureB,
Expand Down Expand Up @@ -130,6 +138,18 @@ public static ManipulatedClusterRupture splay(
ruptureA.internalUnique);
}

/**
 * Splays {@code splayRupture} off {@code rupture}, jumping from the start section of the
 * main rupture's first cluster to the start section of the splay's first cluster.
 *
 * <p>NOTE(review): the jump distance is a hard-coded placeholder of 5km — confirm callers
 * do not rely on real distances here.
 *
 * @param rupture the main rupture
 * @param splayRupture the rupture to attach as a splay
 * @return the combined rupture
 */
public static ManipulatedClusterRupture splay(
        ClusterRupture rupture, ClusterRupture splayRupture) {
    FaultSubsectionCluster mainCluster = rupture.clusters[0];
    FaultSubsectionCluster splayCluster = splayRupture.clusters[0];
    Jump jump =
            new Jump(
                    mainCluster.startSect,
                    mainCluster,
                    splayCluster.startSect,
                    splayCluster,
                    5);
    return splay(rupture, splayRupture, jump);
}

/**
* Safely reverses ruptures that may have a subduction cluster.
*
Expand Down Expand Up @@ -163,4 +183,75 @@ public static ClusterRupture reverse(ClusterRupture rupture) {
ImmutableList.copyOf(jumps),
rupture.unique);
}

/**
 * Builds a rupture from pre-built clusters, chaining them in list order with placeholder
 * jumps from the last section of each cluster to the first section of the next.
 *
 * <p>NOTE(review): jump distances are a hard-coded placeholder of 5km — confirm callers do
 * not rely on real distances here.
 *
 * @param clusters the clusters in rupture order; must not be empty
 * @return a rupture spanning all clusters
 * @throws IllegalArgumentException if {@code clusters} is empty
 */
public static ManipulatedClusterRupture makeFromClusters(
        List<FaultSubsectionCluster> clusters) {
    // guard: an empty list would otherwise surface as an opaque NoSuchElementException
    // from Optional.get() below
    Preconditions.checkArgument(!clusters.isEmpty(), "clusters must not be empty");
    FaultSubsectionCluster[] clusterArray = clusters.toArray(new FaultSubsectionCluster[] {});
    List<Jump> jumps = new ArrayList<>();
    UniqueRupture uniqueRupture =
            clusters.stream().map(UniqueRupture::forClusters).reduce(UniqueRupture::add).get();

    for (int c = 1; c < clusterArray.length; c++) {
        FaultSubsectionCluster fromCluster = clusterArray[c - 1];
        FaultSection from = last(fromCluster);
        FaultSubsectionCluster toCluster = clusterArray[c];
        FaultSection to = first(toCluster);
        double distance = 5; // placeholder, not a measured distance
        jumps.add(new Jump(from, fromCluster, to, toCluster, distance));
    }

    return new ManipulatedClusterRupture(
            clusterArray, ImmutableList.copyOf(jumps), uniqueRupture);
}

/**
 * Groups sections by parent section id into clusters (sections within a cluster ordered by
 * section id) and chains the clusters into a single rupture.
 *
 * <p>Uses a {@code TreeMap} so that cluster order is deterministic (ascending parent id);
 * plain {@code groupingBy} backs onto a {@code HashMap}, making the resulting jump
 * structure dependent on hash iteration order.
 *
 * @param sections the sections, in any order; must not be empty
 * @return a rupture over all sections
 */
public static ManipulatedClusterRupture makeFromSections(List<FaultSection> sections) {
    List<FaultSubsectionCluster> clusters =
            sections.stream()
                    .collect(
                            Collectors.groupingBy(
                                    FaultSection::getParentSectionId,
                                    TreeMap::new,
                                    Collectors.toList()))
                    .values()
                    .stream()
                    .peek(list -> list.sort(Comparator.comparing(FaultSection::getSectionId)))
                    .map(FaultSubsectionCluster::new)
                    .collect(Collectors.toList());
    return makeFromClusters(clusters);
}

/**
 * Can be used if we get a list of jumbled FaultSections from RSQSim data. Builds one
 * crustal and one subduction rupture and splays the crustal rupture off the subduction one.
 *
 * @param sections the mixed crustal/subduction sections
 * @return the combined rupture
 */
public static ClusterRupture makeRupture(List<FaultSection> sections) {
    // single pass: isCrustal is exactly !isSubduction, so a partition is equivalent
    // to the two separate filters
    Map<Boolean, List<FaultSection>> bySubduction =
            sections.stream()
                    .collect(
                            Collectors.partitioningBy(
                                    ManipulatedClusterRupture::isSubduction));
    ManipulatedClusterRupture subduction = makeFromSections(bySubduction.get(true));
    ManipulatedClusterRupture crustal = makeFromSections(bySubduction.get(false));
    return splay(subduction, crustal);
}

/**
 * Reconstructs a MultiRuptureJump from a rupture that carries a splay, splitting it back
 * into two separate ruptures so that we can apply Coulomb filters.
 *
 * <p>Only the first splay is used — assumes the rupture has exactly one splay; TODO confirm.
 * The jump distance is a hard-coded placeholder of 10km, and the jump endpoints are the
 * first section of each rebuilt rupture.
 *
 * @param rupture a rupture with a splay
 * @return the jump from the main rupture to its (first) splay
 */
public static MultiRuptureJump reconstructJump(ClusterRupture rupture) {
List<FaultSection> fromSections =
Arrays.stream(rupture.clusters)
.flatMap(c -> c.subSects.stream())
.collect(Collectors.toList());
// only the first splay is considered
List<FaultSection> toSections =
rupture.splays.values().asList().get(0).buildOrderedSectionList();
ClusterRupture fromRupture = ManipulatedClusterRupture.makeFromSections(fromSections);
ClusterRupture toRupture = ManipulatedClusterRupture.makeFromSections(toSections);
return new MultiRuptureJump(
fromSections.get(0), fromRupture, toSections.get(0), toRupture, 10);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 32,
"id": "a98437a0-d942-45ca-9f40-a8aa03edb6d8",
"metadata": {},
"outputs": [],
"source": [
"import pickle\n",
"import json\n",
"\n",
"mapping = pickle.load(open(\"hikkerm_discretized_trimmed_dict.pkl\", \"rb\"))\n",
"\n",
"indices = {k: [int(i) for i in mapping[k][\"triangle_indices\"]] for k in mapping}\n",
"\n",
"jsonString = json.dumps(indices, indent=2)\n",
"\n",
"f = open(\"hikkerm_discretized_trimmed_dict.json\", \"w\")\n",
"f.write(jsonString)\n",
"f.close()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2838bc29-cb92-4e53-bdf4-5c0db46c7883",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
package nz.cri.gns.NZSHM22.opensha.ruptures.experimental.rsqsims;

import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.SectionDistanceAzimuthCalculator;
import org.opensha.sha.faultSurface.FaultSection;

public class ClusterAggregator {

final SectionDistanceAzimuthCalculator disAzCalc;

final double maxInternalJumpDist;

public ClusterAggregator(
SectionDistanceAzimuthCalculator disAzCalc, double maxInternalJumpDist) {
this.disAzCalc = disAzCalc;
this.maxInternalJumpDist = maxInternalJumpDist;
}

/**
* Turns an Event into two ruptures, one subduction, one crustal
*
* @param event
* @return
*/
public List<ClusterRupture> makeRuptures(RsqSimEventLoader.Event event) {
List<FaultSection> subductionSections =
event.sections.stream()
.filter(s -> s.getSectionName().contains("row:"))
.collect(Collectors.toList());
List<FaultSection> crustalSections =
event.sections.stream()
.filter(s -> !s.getSectionName().contains("row:"))
.collect(Collectors.toList());
Preconditions.checkState(!subductionSections.isEmpty());
Preconditions.checkState(!crustalSections.isEmpty());
List<ClusterRupture> ruptures = new ArrayList<>();
ruptures.add(ClusterRupture.forOrderedSingleStrandRupture(subductionSections, disAzCalc));
ruptures.add(ClusterRupture.forOrderedSingleStrandRupture(crustalSections, disAzCalc));
return ruptures;
}

class ClusterData {
FaultSubsectionCluster cluster;
List<FaultSection> endPoints = new ArrayList<>();
public boolean connected = false;

ClusterData(FaultSubsectionCluster cluster) {
this.cluster = cluster;
endPoints.add(cluster.subSects.get(0));
endPoints.add(cluster.subSects.get(cluster.subSects.size() - 1));
}

boolean isNear(FaultSection section) {
for (FaultSection candidate : endPoints) {
if (section == candidate
|| disAzCalc.getDistance(candidate, section) <= maxInternalJumpDist) {
return true;
}
}
return false;
}
}

/**
* Returns true if all clusters can transitively be connected through maxInternalJumpDist jumps.
*
* @param clusters
* @return
*/
public boolean allConnected(FaultSubsectionCluster[] clusters) {
if (clusters.length == 1) {
return true;
}

// wrap clusters in ClusterData
List<ClusterData> groups =
Arrays.stream(clusters).map(ClusterData::new).collect(Collectors.toList());

ClusterData first = groups.get(0);
List<FaultSection> edge = new ArrayList<>(first.endPoints);
first.connected = true;

// Go through all fault sections at the edge of the connected cluster.
// The edge may grow as we add more clusters into the connected cluster.
// See if we can jump from the selected fault section to a cluster that's so far
// unconnected.
// If so, add the cluster to the connected cluster, and expand the edge accordingly
for (int e = 0; e < edge.size(); e++) {
FaultSection section = edge.get(e);
for (ClusterData cluster : groups) {
if (cluster.connected) {
continue;
}
if (cluster.isNear(section)) {
cluster.connected = true;
edge.addAll(cluster.endPoints);
}
}
}

// return true if all clusters are connected
for (ClusterData data : groups) {
if (!data.connected) {
return false;
}
}
return true;
}

/**
* Returns true if all clusters in the rupture can be connected by maxInternalJumpDist jumps
*
* @param rupture
* @return
*/
public boolean allConnected(ClusterRupture rupture) {
return allConnected(rupture.clusters);
}

/**
* Returns true if all clusters in each rupture can be connected by maxInternalJumpDist jumps
* Assumes that exactly two ruptures are passed in.
*
* @param ruptures
* @return
*/
public boolean allConnected(List<ClusterRupture> ruptures) {
return allConnected(ruptures.get(0)) && allConnected(ruptures.get(1));
}
}
Loading
Loading