Skip to content
This repository was archived by the owner on Sep 9, 2025. It is now read-only.

Commit 88f41d6

Browse files
author
Hendrik van Antwerpen
committed
Make similar path detection optional during path stitching
1 parent 7a30932 commit 88f41d6

File tree

3 files changed

+105
-29
lines changed

3 files changed

+105
-29
lines changed

stack-graphs/include/stack-graphs.h

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1024,6 +1024,13 @@ struct sg_forward_path_stitcher *sg_forward_path_stitcher_new(const struct sg_st
10241024
size_t count,
10251025
const sg_node_handle *starting_nodes);
10261026

1027+
// Sets whether similar path detection should be enabled during path stitching. Paths are similar
1028+
// if start and end node, and pre- and postconditions are the same. The presence of similar paths
1029+
// can lead to exponential blow up during path stitching. Similar path detection is disabled by
1030+
// default because of the associated performance cost.
1031+
void sg_forward_path_stitcher_set_similar_path_detection(struct sg_forward_path_stitcher *stitcher,
1032+
bool detect_similar_paths);
1033+
10271034
// Sets the maximum amount of work that can be performed during each phase of the algorithm. By
10281035
// bounding our work this way, you can ensure that it's not possible for our CPU-bound algorithm
10291036
// to starve any worker threads or processes that you might be using. If you don't call this
@@ -1078,6 +1085,13 @@ struct sg_forward_partial_path_stitcher *sg_forward_partial_path_stitcher_from_p
10781085
size_t count,
10791086
const struct sg_partial_path *initial_partial_paths);
10801087

1088+
// Sets whether similar path detection should be enabled during path stitching. Paths are similar
1089+
// if start and end node, and pre- and postconditions are the same. The presence of similar paths
1090+
// can lead to exponential blow up during path stitching. Similar path detection is disabled by
1091+
// default because of the associated performance cost.
1092+
void sg_forward_partial_path_stitcher_set_similar_path_detection(struct sg_forward_partial_path_stitcher *stitcher,
1093+
bool detect_similar_paths);
1094+
10811095
// Sets the maximum amount of work that can be performed during each phase of the algorithm. By
10821096
// bounding our work this way, you can ensure that it's not possible for our CPU-bound algorithm
10831097
// to starve any worker threads or processes that you might be using. If you don't call this

stack-graphs/src/c.rs

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1825,6 +1825,21 @@ pub extern "C" fn sg_forward_path_stitcher_new(
18251825
Box::into_raw(Box::new(ForwardPathStitcher::new(stitcher, paths))) as *mut _
18261826
}
18271827

1828+
/// Sets whether similar path detection should be enabled during path stitching. Paths are similar
1829+
/// if start and end node, and pre- and postconditions are the same. The presence of similar paths
1830+
/// can lead to exponential blow up during path stitching. Similar path detection is disabled by
1831+
/// default because of the associated performance cost.
1832+
#[no_mangle]
1833+
pub extern "C" fn sg_forward_path_stitcher_set_similar_path_detection(
1834+
stitcher: *mut sg_forward_path_stitcher,
1835+
detect_similar_paths: bool,
1836+
) {
1837+
let stitcher = unsafe { &mut *(stitcher as *mut ForwardPathStitcher) };
1838+
stitcher
1839+
.stitcher
1840+
.set_similar_path_detection(detect_similar_paths);
1841+
}
1842+
18281843
/// Sets the maximum amount of work that can be performed during each phase of the algorithm. By
18291844
/// bounding our work this way, you can ensure that it's not possible for our CPU-bound algorithm
18301845
/// to starve any worker threads or processes that you might be using. If you don't call this
@@ -2011,6 +2026,21 @@ pub extern "C" fn sg_forward_partial_path_stitcher_from_partial_paths(
20112026
))) as *mut _
20122027
}
20132028

2029+
/// Sets whether similar path detection should be enabled during path stitching. Paths are similar
2030+
/// if start and end node, and pre- and postconditions are the same. The presence of similar paths
2031+
/// can lead to exponential blow up during path stitching. Similar path detection is disabled by
2032+
/// default because of the associated performance cost.
2033+
#[no_mangle]
2034+
pub extern "C" fn sg_forward_partial_path_stitcher_set_similar_path_detection(
2035+
stitcher: *mut sg_forward_partial_path_stitcher,
2036+
detect_similar_paths: bool,
2037+
) {
2038+
let stitcher = unsafe { &mut *(stitcher as *mut InternalForwardPartialPathStitcher) };
2039+
stitcher
2040+
.stitcher
2041+
.set_similar_path_detection(detect_similar_paths);
2042+
}
2043+
20142044
/// Sets the maximum amount of work that can be performed during each phase of the algorithm. By
20152045
/// bounding our work this way, you can ensure that it's not possible for our CPU-bound algorithm
20162046
/// to starve any worker threads or processes that you might be using. If you don't call this

stack-graphs/src/stitching.rs

Lines changed: 61 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -485,7 +485,7 @@ pub struct PathStitcher {
485485
VecDeque<AppendingCycleDetector<OwnedOrDatabasePath>>,
486486
),
487487
appended_paths: Appendables<OwnedOrDatabasePath>,
488-
similar_path_detector: SimilarPathDetector<Path>,
488+
similar_path_detector: Option<SimilarPathDetector<Path>>,
489489
max_work_per_phase: usize,
490490
#[cfg(feature = "copious-debugging")]
491491
phase_number: usize,
@@ -537,7 +537,7 @@ impl PathStitcher {
537537
queue: VecDeque::new(),
538538
next_iteration,
539539
appended_paths,
540-
similar_path_detector: SimilarPathDetector::new(),
540+
similar_path_detector: None,
541541
// By default, there's no artificial bound on the amount of work done per phase
542542
max_work_per_phase: usize::MAX,
543543
#[cfg(feature = "copious-debugging")]
@@ -565,6 +565,18 @@ impl PathStitcher {
565565
self.next_iteration.0.as_mut_slices().0
566566
}
567567

568+
/// Sets whether similar path detection should be enabled during path stitching. Paths are similar
569+
/// if start and end node, and pre- and postconditions are the same. The presence of similar paths
570+
/// can lead to exponential blow up during path stitching. Similar path detection is disabled by
571+
/// default because of the associated performance cost.
572+
pub fn set_similar_path_detection(&mut self, detect_similar_paths: bool) {
573+
if detect_similar_paths {
574+
self.similar_path_detector = Some(SimilarPathDetector::new());
575+
} else {
576+
self.similar_path_detector = None;
577+
}
578+
}
579+
568580
/// Sets the maximum amount of work that can be performed during each phase of the algorithm.
569581
/// By bounding our work this way, you can ensure that it's not possible for our CPU-bound
570582
/// algorithm to starve any worker threads or processes that you might be using. If you don't
@@ -635,14 +647,16 @@ impl PathStitcher {
635647
copious_debugging!(" is invalid: cyclic");
636648
continue;
637649
}
638-
if self.similar_path_detector.has_similar_path(
639-
graph,
640-
paths,
641-
&new_path,
642-
|ps, left, right| left.equals(ps, right),
643-
) {
644-
copious_debugging!(" is invalid: too many similar");
645-
continue;
650+
if let Some(similar_path_detector) = &mut self.similar_path_detector {
651+
if similar_path_detector.has_similar_path(
652+
graph,
653+
paths,
654+
&new_path,
655+
|ps, left, right| left.equals(ps, right),
656+
) {
657+
copious_debugging!(" is invalid: too many similar");
658+
continue;
659+
}
646660
}
647661
self.next_iteration.0.push_back(new_path);
648662
self.next_iteration.1.push_back(new_cycle_detector);
@@ -689,10 +703,12 @@ impl PathStitcher {
689703

690704
#[cfg(feature = "copious-debugging")]
691705
{
692-
copious_debugging!(
693-
" Max similar path bucket size: {}",
694-
self.similar_path_detector.max_bucket_size()
695-
);
706+
if let Some(similar_path_detector) = &self.similar_path_detector {
707+
copious_debugging!(
708+
" Max similar path bucket size: {}",
709+
similar_path_detector.max_bucket_size()
710+
);
711+
}
696712
copious_debugging!("==> End phase {}", self.phase_number);
697713
self.phase_number += 1;
698714
}
@@ -772,7 +788,7 @@ pub struct ForwardPartialPathStitcher {
772788
VecDeque<AppendingCycleDetector<OwnedOrDatabasePath>>,
773789
),
774790
appended_paths: Appendables<OwnedOrDatabasePath>,
775-
similar_path_detector: SimilarPathDetector<PartialPath>,
791+
similar_path_detector: Option<SimilarPathDetector<PartialPath>>,
776792
max_work_per_phase: usize,
777793
#[cfg(feature = "copious-debugging")]
778794
phase_number: usize,
@@ -837,7 +853,7 @@ impl ForwardPartialPathStitcher {
837853
queue: VecDeque::new(),
838854
next_iteration,
839855
appended_paths,
840-
similar_path_detector: SimilarPathDetector::new(),
856+
similar_path_detector: None,
841857
// By default, there's no artificial bound on the amount of work done per phase
842858
max_work_per_phase: usize::MAX,
843859
#[cfg(feature = "copious-debugging")]
@@ -874,7 +890,7 @@ impl ForwardPartialPathStitcher {
874890
queue: VecDeque::new(),
875891
next_iteration,
876892
appended_paths,
877-
similar_path_detector: SimilarPathDetector::new(),
893+
similar_path_detector: None,
878894
// By default, there's no artificial bound on the amount of work done per phase
879895
max_work_per_phase: usize::MAX,
880896
#[cfg(feature = "copious-debugging")]
@@ -902,6 +918,18 @@ impl ForwardPartialPathStitcher {
902918
self.next_iteration.0.as_mut_slices().0
903919
}
904920

921+
/// Sets whether similar path detection should be enabled during path stitching. Paths are similar
922+
/// if start and end node, and pre- and postconditions are the same. The presence of similar paths
923+
/// can lead to exponential blow up during path stitching. Similar path detection is disabled by
924+
/// default because of the associated performance cost.
925+
pub fn set_similar_path_detection(&mut self, detect_similar_paths: bool) {
926+
if detect_similar_paths {
927+
self.similar_path_detector = Some(SimilarPathDetector::new());
928+
} else {
929+
self.similar_path_detector = None;
930+
}
931+
}
932+
905933
/// Sets the maximum amount of work that can be performed during each phase of the algorithm.
906934
/// By bounding our work this way, you can ensure that it's not possible for our CPU-bound
907935
/// algorithm to starve any worker threads or processes that you might be using. If you don't
@@ -978,14 +1006,16 @@ impl ForwardPartialPathStitcher {
9781006
copious_debugging!(" is invalid: cyclic");
9791007
continue;
9801008
}
981-
if self.similar_path_detector.has_similar_path(
982-
graph,
983-
partials,
984-
&new_partial_path,
985-
|ps, left, right| left.equals(ps, right),
986-
) {
987-
copious_debugging!(" is invalid: too many similar");
988-
continue;
1009+
if let Some(similar_path_detector) = &mut self.similar_path_detector {
1010+
if similar_path_detector.has_similar_path(
1011+
graph,
1012+
partials,
1013+
&new_partial_path,
1014+
|ps, left, right| left.equals(ps, right),
1015+
) {
1016+
copious_debugging!(" is invalid: too many similar");
1017+
continue;
1018+
}
9891019
}
9901020
}
9911021
self.next_iteration.0.push_back(new_partial_path);
@@ -1037,10 +1067,12 @@ impl ForwardPartialPathStitcher {
10371067

10381068
#[cfg(feature = "copious-debugging")]
10391069
{
1040-
copious_debugging!(
1041-
" Max similar path bucket size: {}",
1042-
self.similar_path_detector.max_bucket_size()
1043-
);
1070+
if let Some(similar_path_detector) = &self.similar_path_detector {
1071+
copious_debugging!(
1072+
" Max similar path bucket size: {}",
1073+
similar_path_detector.max_bucket_size()
1074+
);
1075+
}
10441076
copious_debugging!("==> End phase {}", self.phase_number);
10451077
self.phase_number += 1;
10461078
}

0 commit comments

Comments
 (0)