Skip to content
Merged
Show file tree
Hide file tree
Changes from 20 commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
e98d94e
add `par_` and `debug!`
yamilbknsu Mar 3, 2026
cac83cd
change debug! to info!
yamilbknsu Mar 3, 2026
2a26ff9
optimized extent application and changed to `intersects`
yamilbknsu Mar 4, 2026
ae5927f
change predicate back to contains following errors where connectors w…
yamilbknsu Mar 5, 2026
c89d1f0
implemented intersects + ids optimization
yamilbknsu Mar 6, 2026
93ccb0a
fixed edge-ids
yamilbknsu Mar 10, 2026
30b154a
cleaning island vertices
yamilbknsu Mar 12, 2026
791985b
Merge branch 'main' into yep/lakewood_tests
yamilbknsu Mar 12, 2026
f355487
Update rust/bambam-omf/src/graph/serialize_ops.rs
yamilbknsu Mar 12, 2026
7ab2c2c
Initial plan
Copilot Mar 12, 2026
c991d4a
remove `par_` that breaks order
yamilbknsu Mar 12, 2026
555a9d2
Fix stale vertex_lookup after island detection vertex remapping
Copilot Mar 12, 2026
9ead189
Remove `unwrap` use
yamilbknsu Mar 13, 2026
a090e96
clippy
yamilbknsu Mar 13, 2026
6e8ad67
Merge pull request #119 from NatLabRockies/copilot/sub-pr-117-again
yamilbknsu Mar 13, 2026
9e105c5
remove `par_` from `iter().enumerate()`
yamilbknsu Mar 13, 2026
11304cb
Merge branch 'main' into yep/lakewood_tests
yamilbknsu Mar 13, 2026
75dea2b
Merge branch 'main' into yep/lakewood_tests
yamilbknsu Mar 17, 2026
cadb167
patch `iter` operations
yamilbknsu Mar 17, 2026
ec98da8
bugfix (sinks and sources in island algorithm)
yamilbknsu Mar 18, 2026
d7ebc42
refactor `visit_edge` operation
yamilbknsu Mar 19, 2026
a108dcc
comment fix
yamilbknsu Mar 19, 2026
0cc5025
Merge branch 'main' into yep/lakewood_tests
yamilbknsu Mar 19, 2026
9dbae3f
Merge branch 'main' into yep/lakewood_tests
robfitzgerald Mar 19, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
47 changes: 33 additions & 14 deletions rust/bambam-omf/src/app/network.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::path::Path;
use std::{collections::HashSet, path::Path, sync::Arc};

use geo::{Contains, Geometry};
use geo::{BoundingRect, Geometry, Intersects};
use rayon::prelude::*;
use routee_compass_core::model::unit::DistanceUnit;
use serde::{Deserialize, Serialize};
Expand Down Expand Up @@ -140,26 +140,45 @@ fn apply_extent_to_collection(
collection: TransportationCollection,
extent: Geometry<f32>,
) -> TransportationCollection {
let filtered_segments = collection
log::info!("Started applying extent to segments");
let extent_arc = Arc::new(extent);
let filtered_segments: Vec<crate::collection::TransportationSegmentRecord> = collection
.segments
.into_par_iter()
.filter(|segment| {
segment
.get_linestring()
.map(|linestring| extent.contains(linestring))
.unwrap_or(false)
.filter(|segment| match segment.get_linestring() {
Ok(ls) => {
let Some(bbox) = ls.bounding_rect() else {
return false;
};

// Short-circuit condition for bbox
extent_arc.intersects(&bbox) && extent_arc.intersects(ls)
}
Err(_) => false,
})
.collect();

log::info!("Collecting all connector IDs");
let connector_ids = filtered_segments
.par_iter()
.flat_map(|segment| {
segment
.connectors
.as_ref()
.unwrap_or(&vec![])
.iter()
.map(|con_ref| con_ref.connector_id.clone())
.collect::<Vec<String>>()
})
.collect::<HashSet<String>>();

let arc_ids = Arc::new(connector_ids);

log::info!("Started applying extent to connectors");
let filtered_connectors = collection
.connectors
.into_par_iter()
.filter(|connector| {
connector
.get_geometry()
.map(|geometry| extent.contains(geometry))
.unwrap_or(false)
})
.filter(|connector| arc_ids.contains(&connector.id))
.collect();

TransportationCollection {
Expand Down
32 changes: 32 additions & 0 deletions rust/bambam-omf/src/graph/component_algorithm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,16 +149,30 @@ fn is_component_island_sequential(
Haversine.length(&line_string![initial_midpoint.0, current_midpoint.0]);

// get all neighbors, add them to queue
// forward_adjacency[dst]: edges leaving dst (v → *)
let outward_edges: Vec<&(EdgeListId, EdgeId)> =
forward_adjacency[edge.dst_vertex_id.0].keys().collect();
for (edge_list_id, edge_id) in outward_edges {
queue.push_back((*edge_list_id, *edge_id));
}
// backward_adjacency[src]: edges entering src (* → u)
let inward_edges: Vec<&(EdgeListId, EdgeId)> =
backward_adjacency[edge.src_vertex_id.0].keys().collect();
for (edge_list_id, edge_id) in inward_edges {
queue.push_back((*edge_list_id, *edge_id));
}
// forward_adjacency[src]: other edges leaving src (u → *) — catches pure source vertices
let sibling_outward_edges: Vec<&(EdgeListId, EdgeId)> =
forward_adjacency[edge.src_vertex_id.0].keys().collect();
for (edge_list_id, edge_id) in sibling_outward_edges {
queue.push_back((*edge_list_id, *edge_id));
}
// backward_adjacency[dst]: other edges entering dst (* → v) — catches pure sink vertices
let sibling_inward_edges: Vec<&(EdgeListId, EdgeId)> =
backward_adjacency[edge.dst_vertex_id.0].keys().collect();
for (edge_list_id, edge_id) in sibling_inward_edges {
queue.push_back((*edge_list_id, *edge_id));
}

// mark as visited
visited.insert((edge_list_id, edge_id));
Expand Down Expand Up @@ -201,20 +215,38 @@ fn is_component_island_parallel(
.ok_or(OvertureMapsCollectionError::InternalError(format!("edge list {current_edge_list_id:?} or edge {current_edge_id:?} not found during island detection starting at edge {edge:?}")))?;

// Expand queue
// forward_adjacency[dst]: edges leaving dst (v → *)
let outward_edges: Vec<&(EdgeListId, EdgeId)> = forward_adjacency
[current_edge.dst_vertex_id.0]
.keys()
.collect();
for (edge_list_id, edge_id) in outward_edges {
visit_queue.push_back((edge_list_id, edge_id));
}
// backward_adjacency[src]: edges entering src (* → u)
let inward_edges: Vec<&(EdgeListId, EdgeId)> = backward_adjacency
[current_edge.src_vertex_id.0]
.keys()
.collect();
for (edge_list_id, edge_id) in inward_edges {
visit_queue.push_back((edge_list_id, edge_id));
}
// forward_adjacency[src]: other edges leaving src (u → *) — catches pure source vertices
let sibling_outward_edges: Vec<&(EdgeListId, EdgeId)> = forward_adjacency
[current_edge.src_vertex_id.0]
.keys()
.collect();
for (edge_list_id, edge_id) in sibling_outward_edges {
visit_queue.push_back((edge_list_id, edge_id));
}
// backward_adjacency[dst]: other edges entering dst (* → v) — catches pure sink vertices
let sibling_inward_edges: Vec<&(EdgeListId, EdgeId)> = backward_adjacency
[current_edge.dst_vertex_id.0]
.keys()
.collect();
for (edge_list_id, edge_id) in sibling_inward_edges {
visit_queue.push_back((edge_list_id, edge_id));
}

// Update counter
let current_midpoint = compute_midpoint(current_edge, vertices);
Expand Down
49 changes: 43 additions & 6 deletions rust/bambam-omf/src/graph/omf_graph.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,10 @@ use crate::{
SegmentFullType, TransportationCollection, TransportationSegmentRecord,
},
graph::{
component_algorithm::island_detection_algorithm, segment_ops,
serialize_ops::clean_omf_edge_list, vertex_serializable::VertexSerializable,
component_algorithm::island_detection_algorithm,
segment_ops,
serialize_ops::{clean_omf_edge_list, compute_vertex_remapping},
vertex_serializable::VertexSerializable,
OmfGraphSummary,
},
};
Expand Down Expand Up @@ -61,9 +63,11 @@ impl OmfGraphVectorized {
island_detection_configuration: Option<IslandDetectionAlgorithmConfiguration>,
) -> Result<Self, OvertureMapsCollectionError> {
// process all connectors into vertices
log::info!("Creating vertex lookup");
let (mut vertices, mut vertex_lookup) =
ops::create_vertices_and_lookup(&collection.connectors, None)?;

log::info!("Processing edge lists");
// for each mode configuration, create an edge list
let mut edge_lists: Vec<OmfEdgeList> = vec![];
for (index, edge_list_config) in configuration.iter().enumerate() {
Expand All @@ -73,6 +77,7 @@ impl OmfGraphVectorized {
let mut filter = edge_list_config.filter.clone();
filter.sort(); // sort for performance

log::info!("Filtering edge list {edge_list_id}");
// filter to the segments that match our travel mode filter(s)
let segments: Vec<&TransportationSegmentRecord> = collection
.segments
Expand All @@ -84,6 +89,7 @@ impl OmfGraphVectorized {
// the splits are locations in each segment record where we want to define a vertex
// which may not yet exist on the graph. this is where we begin to impose directivity
// in our records.
log::info!("Creating splits");
let mut splits = vec![];
for heading in [SegmentHeading::Forward, SegmentHeading::Backward] {
let mut when: SegmentAccessRestrictionWhen = edge_list_config.into();
Expand All @@ -99,6 +105,7 @@ impl OmfGraphVectorized {

// depending on the split method, we may need to create additional vertices at locations
// which are not OvertureMaps-defined connector types.
log::info!("Extending vertices");
ops::extend_vertices(
&splits,
&segments,
Expand All @@ -108,6 +115,7 @@ impl OmfGraphVectorized {
)?;

// create all edges based on the above split points using all vertices.
log::info!("Creating edges");
let edges = ops::create_edges(
&segments,
&segment_lookup,
Expand All @@ -116,11 +124,16 @@ impl OmfGraphVectorized {
&vertex_lookup,
edge_list_id,
)?;
log::info!("Creating geometries");
let geometries = ops::create_geometries(&segments, &segment_lookup, &splits)?;
log::info!("Creating bearings");
let bearings = ops::bearing_deg_from_geometries(&geometries)?;
log::info!("Creating classes");
let classes = ops::create_segment_full_types(&segments, &segment_lookup, &splits)?;

log::info!("Creating speeds");
let speeds = ops::create_speeds(&segments, &segment_lookup, &splits)?;
log::info!("Creating speed lookup");
let speed_lookup = ops::create_speed_by_segment_type_lookup(
&speeds,
&segments,
Expand All @@ -130,13 +143,16 @@ impl OmfGraphVectorized {
)?;

// insert global speed value for reference
log::info!("Computing global speed");
let global_speed =
ops::get_global_average_speed(&speeds, &segments, &segment_lookup, &splits)?;

// omf ids
log::info!("Computing omf_ids");
let omf_segment_ids = ops::get_segment_omf_ids(&segments, &segment_lookup, &splits)?;

// match speeds according to classes
log::info!("Completing speeds vector with default global");
let speeds = speeds
.into_par_iter()
.zip(&classes)
Expand Down Expand Up @@ -172,6 +188,7 @@ impl OmfGraphVectorized {
}

// Compute islands in resulting edge lists and remove island edges
log::info!("Compute islands");
if let Some(algorithm_config) = island_detection_configuration {
let ref_edge_lists = edge_lists
.iter()
Expand All @@ -187,11 +204,31 @@ impl OmfGraphVectorized {

// Refactor Vec into Hashmap
let mut edges_lookup: HashMap<EdgeListId, Vec<EdgeId>> = HashMap::new();
for (a, b) in island_edges {
edges_lookup.entry(a).or_default().push(b);
for (a, b) in &island_edges {
edges_lookup.entry(*a).or_default().push(*b);
}

// Compute and apply vertex remapping
let vertex_remapping = compute_vertex_remapping(&vertices, &edge_lists, &island_edges)?;
Copy link
Collaborator

@robfitzgerald robfitzgerald Mar 19, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We have loaded all segments and connectors and built Edges and Vertices from them. Here we drop island vertices and reassign VertexIds sequentially. This invalidates the previous list of vertex ids, which was used to assign src/dst VertexIds in each Edge — but I don't see a spot after this where those identifiers are updated accordingly.

I have been wondering: couldn't all of this island generation + pre-processing take place over the collection of SegmentSplits? At that stage, we haven't yet invented the VertexIds/EdgeIds/EdgeListIds. If we could remove SegmentSplits during island detection and only then build the final Vertices/Edges (and their associated ids), we wouldn't need to apply this kind of correction. Edit: don't let this part of the comment send you down a rabbit hole; this could be a tech-debt issue instead.

vertices = vertices
.into_iter()
.filter_map(|vertex| {
vertex_remapping[vertex.vertex_id.0].map(|vertex_id| Vertex {
vertex_id,
..vertex
})
})
.collect();

// dropping entries for removed vertices.
vertex_lookup.retain(|_, v| vertex_remapping[*v].is_some());
// Update vertex_lookup to reflect the remapped vertex indices,
for v in vertex_lookup.values_mut() {
*v = vertex_remapping[*v].ok_or(OvertureMapsCollectionError::InternalError(format!("vertex index {v} expected after island computation but was flagged for deletion")))?.0;
}

// Clean the edge lists
log::info!("Apply islands algorithm result");
edge_lists = edge_lists
.into_iter()
.map(|omf_list| {
Expand All @@ -209,9 +246,9 @@ impl OmfGraphVectorized {
.map(|edge| !edges_to_remove.contains(&edge.edge_id))
.collect::<Vec<bool>>();

clean_omf_edge_list(omf_list, mask)
clean_omf_edge_list(omf_list, mask, &vertex_remapping)
})
.collect::<Vec<OmfEdgeList>>();
.collect::<Result<Vec<OmfEdgeList>, OvertureMapsCollectionError>>()?;
};

let result = Self {
Expand Down
Loading
Loading