Skip to content
Merged
Show file tree
Hide file tree
Changes from 22 commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/rust-publish.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@ jobs:
with:
toolchain: stable

- name: Install test assets
run: |
./script/setup_test_bambam_osm.sh
Copy link

Copilot AI Sep 10, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The script name setup_bambam_osm_test.sh doesn't match any of the actual script files created in this PR. It should be ./script/setup_test_bambam_osm.sh to match the new file structure.

Copilot uses AI. Check for mistakes.

- name: Run tests
run: cargo test --all --manifest-path=rust/Cargo.toml

Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/rust-release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ jobs:

- name: Install test assets
run: |
./setup.sh
./script/setup_test_bambam_osm.sh
Copy link

Copilot AI Sep 10, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The script name setup_bambam_osm_test.sh doesn't match any of the actual script files created in this PR. It should be ./script/setup_test_bambam_osm.sh to match the new file structure.

Copilot uses AI. Check for mistakes.

- name: Test
run: |
Expand Down
8 changes: 6 additions & 2 deletions .github/workflows/test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,14 @@ jobs:
run: |
rustup component add rustfmt
cargo fmt --all --manifest-path rust/Cargo.toml -- --check


- name: Install uv
uses: astral-sh/setup-uv@v6

- name: Install test assets
run: |
./setup.sh
./script/setup_test_bambam.sh
./script/setup_test_bambam_osm.sh

- name: Test
run: |
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ Cargo.lock
target
output
cache
.venv
.venv
denver_co/
239 changes: 239 additions & 0 deletions configuration/denver_test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,239 @@
# RouteE Compass configuration for a Denver, CO test scenario: a multimodal
# (walk / bike / drive) accessibility search with census population inputs and
# MEP opportunity data outputs.
# NOTE(review): all "../denver_co/" paths are relative — confirm they resolve
# correctly from the working directory used by the setup/test scripts.
parallelism = 6
response_persistence_policy = "persist_response_from_memory"

# road network inputs
[graph]
# NOTE(review): key styles differ here — `edge_list.input_file` (dotted table
# key) vs `vertex_list_input_file` (flat key). Confirm both spellings are what
# the graph loader expects.
edge_list.input_file = "../denver_co/edges-compass.csv.gz"
vertex_list_input_file = "../denver_co/vertices-compass.csv.gz"

# map-matching of query origin/destination points onto the network
[mapping]
type = "edge"
geometry = { input_file = "../denver_co/edges-geometries-enumerated.txt.gz" }
# snap points at most 15 meters from an edge
tolerance.distance = 15.0
tolerance.unit = "meters"
# destination-less queries are rejected (isochrone-style searches still supply origins)
queries_without_destinations = false
matching_type = ["point", "vertex_id", "edge_id"]

[algorithm]
type = "a*"

# cut off searches that exceed these termination policies.
[termination]
type = "combined"
[[termination.models]]
type = "query_runtime"
# wall-clock limit per query (HH:MM:SS), checked every `frequency` iterations
limit = "00:01:00"
frequency = 100_000
[[termination.models]]
type = "solution_size"
# abort a search once its solution reaches this many entries
limit = 2_000_000

# traversal models compute per-edge costs; "combined" runs each listed model
[search.traversal]
type = "combined"

[[search.traversal.models]]
type = "distance"
distance_unit = "miles"

# fixed walking speed (~3.1 mph)
[[search.traversal.models]]
type = "fixed_speed"
name = "walk"
speed = 5.0
speed_unit = "kph"

# fixed cycling speed (~10 mph)
[[search.traversal.models]]
type = "fixed_speed"
name = "bike"
speed = 16.0
speed_unit = "kph"

# posted speeds per edge, used for driving
[[search.traversal.models]]
type = "speed"
speed_table_input_file = "../denver_co/edges-posted-speed-enumerated.txt.gz"
speed_unit = "kph"

[[search.traversal.models]]
type = "multimodal"

# per-mode feature wiring: which speed feature feeds edge/trip time for each mode
[search.traversal.models.dependencies]
walk = [{ input_feature = { name = "walk_speed", type = "speed" }, destination_features = ["edge_time", "trip_time"] }]
bike = [{ input_feature = { name = "bike_speed", type = "speed" }, destination_features = ["edge_time", "trip_time"] }]
drive = [
{ input_feature = { name = "edge_speed", type = "speed" }, destination_features = ["edge_time", "trip_time"] },
# { time_feature = "trip_enroute_delay", destinations = [
# "edge_time",
# "trip_time",
# ] },
]

# trip_time accumulates along the route; edge_time is per-edge only
[search.traversal.models.output_features]
trip_time = { type = "time", unit = "minutes", initial = 0.0, accumulator = true }
edge_time = { type = "time", unit = "minutes", initial = 0.0, accumulator = false }
# penalty_factor = { name = "factor", unit = "none", accumulator = false, format = { FloatingPoint = { initial = 1.0 } } }

# use a time-optimal routing strategy
[cost]
weights = { trip_time = 1.0 }
vehicle_rates = { trip_time = { type = "raw" } }
# weights = { trip_time = 1.0, penalty_factor = 1.0 }
# vehicle_rates = { trip_time = { type = "raw" }, penalty_factor = { type = "raw" } }
cost_aggregation = "sum"

# edge-to-edge access costs: turn delays derived from edge headings
[search.access]
type = "turn_delay"
edge_heading_input_file = "../denver_co/edges-headings-enumerated.csv.gz"
[search.access.turn_delay_model]
type = "tabular_discrete"
time_unit = "seconds"
# delay (seconds) per turn category; left turns cost more than right turns
[search.access.turn_delay_model.table]
no_turn = 0.0
slight_right = 0.5
right = 1.0
sharp_right = 1.5
slight_left = 1.0
left = 2.5
sharp_left = 3.5
u_turn = 9.5

# frontier models prune which edges a search may expand
[search.frontier]
type = "combined"

[[search.frontier.models]]
type = "road_class"
road_class_input_file = "../denver_co/edges-road-class-enumerated.txt.gz"

# [[search.frontier.models]]
# type = "turn_restriction"
# turn_restriction_input_file = "../denver_co/edges-turn-restrictions.csv.gz"

# hard cap on trip time; should cover the largest isochrone bin below (40 min)
[[search.frontier.models]]
type = "time_limit"
time_limit = { time = 40.0, time_unit = "minutes" }

# generate query origins on an H3 hex grid over the study extent, weighted by
# ACS 5-year total population (B01001_001E) at the census-tract level
[[plugin.input_plugins]]
type = "grid"
extent_format = "wkt"
grid = { type = "h3", resolution = 8 }
[plugin.input_plugins.population_source]
type = "acs"
acs_type = "five_year"
acs_year = 2022
acs_resolution = "census_tract"
acs_categories = ["B01001_001E"]

# inject the mode list expanded by the grid_search plugin below
[[plugin.input_plugins]]
type = "inject"
format = "key_value"
write_mode = "overwrite"
key = "grid_search"
[plugin.input_plugins.value]
mode = ["walk", "bike", "drive"]
# modes = [{ mode = "walk", road_classes = [4, 5, 6, 7] }, { mode = "bike", road_classes = [4, 5, 6, 7] }, { mode = "drive", road_classes = [1, 2, 3, 4, 5, 6] }]

# fixed departure time for every query
[[plugin.input_plugins]]
type = "inject"
key = "start_time"
value = "08:00:00"
format = "key_value"
write_mode = "overwrite"

# fixed departure weekday for every query
[[plugin.input_plugins]]
type = "inject"
key = "start_weekday"
value = "monday"
format = "key_value"
write_mode = "overwrite"

# expand each query into one query per injected grid_search value (per mode)
[[plugin.input_plugins]]
type = "grid_search"

# balance work across threads; driving queries are weighted as far more
# expensive than biking, which is more expensive than walking
[[plugin.input_plugins]]
type = "load_balancer"
[plugin.input_plugins.weight_heuristic]
type = "custom"
[plugin.input_plugins.weight_heuristic.custom_weight_type]
type = "categorical"
column_name = "mode"
default = 1
mapping = { "walk" = 1, "bike" = 10, "drive" = 100 }

[[plugin.output_plugins]]
type = "summary"

# emit isochrones at 10/20/30/40 minutes as WKB concave hulls
[[plugin.output_plugins]]
type = "isochrone"
time_bin = { type = "list", times = [10, 20, 30, 40] }
isochrone_algorithm = { type = "k_nearest_concave_hull", k = 3 }
destination_point_generator = { type = "destination_point" }
isochrone_output_format = "wkb"

### MEP OPPORTUNITY DATA CONFIGURATION #################################
# assigns opportunities to search results based on a file or api data source
# and a taxonomy for MEP activity types.
# this example shows data loaded from the census LODES online file repository
# assigning activity types by NAICS sector id.
[[plugin.output_plugins]]
type = "opportunity"
collect_format = "aggregate"

[plugin.output_plugins.model]
type = "combined"

[[plugin.output_plugins.model.models]]
type = "api"
vertex_input_file = "../denver_co/vertices-compass.csv.gz"
activity_column_names = ["entertainment", "food", "retail", "healthcare", "services", "jobs"]
table_orientation = "destination_vertex_oriented"

# LODES jobs data for Colorado (state FIPS "08")
[plugin.output_plugins.model.models.opportunity_source]
type = "lodes"
study_region = { type = "census", geoids = ["08"] }

# collect LODES data at the tract level. while it is available at the block,
# the download + processing time is 10x that of census tracts. other possible values
# are `block`, `county` or `state`.
data_granularity = "census_tract"
# different editions of the dataset; we are choosing LODES v 8.0.
edition = "LODES8"
# Job Type: "JT00" for All Jobs, "JT01" for Primary Jobs, "JT02" for All
# Private Jobs, "JT03" for Private Primary Jobs, "JT04" for All Federal Jobs,
# or "JT05" for Federal Primary Jobs
job_type = "JT00"
# Segment of the workforce, can have the values of:
# - S000: Total number of jobs (default)
# - SA01: Number of jobs of workers age 29 or younger
# - SA02: Number of jobs for workers age 30 to 54
# - SA03: Number of jobs for workers age 55 or older
# - SE01: Number of jobs with earnings $1250/month or less
# - SE02: Number of jobs with earnings $1251/month to $3333/month
# - SE03: Number of jobs with earnings greater than $3333/month
# - SI01: Number of jobs in Goods Producing industry sectors
# - SI02: Number of jobs in Trade, Transportation, and Utilities industry sectors
# - SI03: Number of jobs in All Other Services industry sectors
segment = "S000"

# most recent year with all states. Alaska has no coverage from 2017-2022. we may
# want a "continental" variant which could support 2022.
year = 2022

# map LODES NAICS-sector job counts (CNS01-CNS20) onto MEP activity types;
# every sector counts as "jobs", and some additionally count toward a
# specific activity category (retail, services, healthcare, etc.)
[plugin.output_plugins.model.models.opportunity_source.activity_mapping]
# see https://lehd.ces.census.gov/data/lodes/LODES8/LODESTechDoc8.0.pdf
CNS01 = ["jobs"] # 11 (Agriculture, Forestry, Fishing and Hunting)
CNS02 = ["jobs"] # 21 (Mining, Quarrying, and Oil and Gas Extraction)
CNS03 = ["jobs"] # 22 (Utilities)
CNS04 = ["jobs"] # 23 (Construction)
CNS05 = ["jobs"] # 31-33 (Manufacturing)
CNS06 = ["jobs"] # 42 (Wholesale Trade)
CNS07 = ["jobs", "retail"] # 44-45 (Retail Trade)
CNS08 = ["jobs"] # 48-49 (Transportation and Warehousing)
CNS09 = ["jobs"] # 51 (Information)
CNS10 = ["jobs"] # 52 (Finance and Insurance)
CNS11 = ["jobs"] # 53 (Real Estate and Rental and Leasing)
CNS12 = ["jobs", "services"] # 54 (Professional, Scientific, and Technical Services)
CNS13 = ["jobs"] # 55 (Management of Companies and Enterprises)
CNS14 = ["jobs"] # 56 (Admin/Support/Waste Mgmt/Remediation Services)
CNS15 = ["jobs"] # 61 (Educational Services)
CNS16 = ["jobs", "healthcare"] # 62 (Health Care and Social Assistance)
CNS17 = ["jobs", "entertainment"] # 71 (Arts, Entertainment, and Recreation)
CNS18 = ["jobs", "food"] # 72 (Accommodation and Food Services)
CNS19 = ["jobs"] # 81 (Other Services [except Public Administration])
CNS20 = ["jobs"] # 92 (Public Administration)
13 changes: 9 additions & 4 deletions rust/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@ bamcensus-core = { version = "0.1.0" }
bamcensus-acs = { version = "0.1.0" }
bamcensus = { version = "0.1.0" }
bamcensus-lehd = { version = "0.1.0" }
routee-compass = { version = "0.14.0" }
routee-compass-core = { version = "0.14.0" }
routee-compass-powertrain = { version = "0.14.0" }
routee-compass = { git = "https://github.com/NREL/routee-compass.git", branch = "main" }
routee-compass-core = { git = "https://github.com/NREL/routee-compass.git", branch = "main" }
routee-compass-powertrain = { git = "https://github.com/NREL/routee-compass.git", branch = "main" }

# third party
arrow = { version = "55.0.0" }
Expand All @@ -25,11 +25,14 @@ flate2 = "1.0"
futures = { version = "0.3.31", features = ["executor"] }
geo = { version = "0.30.0", features = ["use-serde"] }
geo-buffer = "0.2.0"
geo-traits = "0.3.0"
geo-types = "0.7.16"
geojson = { version = "0.24.1" }
geozero = { version = "0.14.0", features = ["with-wkb"] }
gtfs-structures = "0.43.0"
h3o = { version = "0.8.0", features = ["serde", "geo"] }
hex = "0.4.3"
inventory = { version = "0.3.21" }
itertools = { version = "0.14.0" }
kdam = "0.6.2"
log = "0.4.19"
Expand All @@ -38,17 +41,19 @@ object_store = { "version" = "0.12.0", features = ["aws"] }
osmio = "0.14.0"
osmpbf = "0.3.4"
parquet = { version = "55.0.0", features = ["snap", "async", "object_store"] }
rand = "0.9.1"
rayon = "1.10.0"
regex = { version = "1.11.1" }
reqwest = { version = "0.12.12", features = ["blocking"] }
rstar = { version = "0.12.0" }
serde = { version = "1.0.160", features = ["derive"] }
serde_json = { version = "1.0" }
shapefile = { version = "0.7.0", features = ["geo-types"] }
skiplist = "0.5.1"
thiserror = "2.0.12"
tokio = "1.39.2"
toml = { version = "0.8.20" }
uom = { version = "0.37.0", features = ["serde"]}
wkb = "0.7.1"
wkb = "0.9.0"
wkt = { version = "0.13.0", features = ["serde"] }
zip = "2.1.3"
2 changes: 1 addition & 1 deletion rust/bambam-osm/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ mod tests {
Err(_) => true, // default
};
let temp_directory = "src/test/tmp";
let pbf_file = "src/test/liechtenstein-latest.osm.pbf";
let pbf_file = "src/test/liechtenstein-250908.osm.pbf";
let extent_file = "src/test/schaan_liechtenstein.txt";
let config_file = "src/test/test_osm_import.toml";
let conf = crate::App::Pbf {
Expand Down
14 changes: 7 additions & 7 deletions rust/bambam-osm/src/model/osm/graph/compass_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use std::{fs::File, path::Path};
use csv::QuoteStyle;
use flate2::{write::GzEncoder, Compression};
use kdam::tqdm;
use routee_compass_core::model::network::Edge;
use routee_compass_core::model::network::{EdgeConfig, EdgeId};
use wkt::ToWkt;

use crate::model::osm::{
Expand Down Expand Up @@ -144,12 +144,12 @@ impl CompassWriter for OsmGraphVectorized {
}
// COMPASS
if let Some(ref mut writer) = edge_writer {
let edge = Edge::new(
edge_id,
row.src_vertex_id.0,
row.dst_vertex_id.0,
uom::si::f64::Length::new::<uom::si::length::meter>(row.length_meters),
);
let edge = EdgeConfig {
edge_id: EdgeId(edge_id),
src_vertex_id: row.src_vertex_id,
dst_vertex_id: row.dst_vertex_id,
distance: row.length_meters,
};
writer.serialize(edge).map_err(|e| {
OsmError::CsvWriteError(String::from(filenames::EDGES_COMPASS), e)
})?;
Expand Down
3 changes: 0 additions & 3 deletions rust/bambam-osm/src/test/get_test.sh

This file was deleted.

Loading
Loading