From 792017de55670f1909b2acc765c5b2c1006f1d27 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Tue, 3 Feb 2026 15:40:47 -0700 Subject: [PATCH 01/15] template omf configuration --- .../bambam-omf/bambam-config-omf.toml | 269 ++++++++++++++++++ 1 file changed, 269 insertions(+) create mode 100644 configuration/bambam-omf/bambam-config-omf.toml diff --git a/configuration/bambam-omf/bambam-config-omf.toml b/configuration/bambam-omf/bambam-config-omf.toml new file mode 100644 index 00000000..edbdcb27 --- /dev/null +++ b/configuration/bambam-omf/bambam-config-omf.toml @@ -0,0 +1,269 @@ +[graph] +edge_list = [ + { input_file = "data/walk/edges-compass.csv.gz" }, + { input_file = "data/bike/edges-compass.csv.gz" }, + { input_file = "data/drive/edges-compass.csv.gz" }, +] +vertex_list_input_file = "data/vertices-compass.csv.gz" + +[mapping] +type = "edge" +geometry = [ + { type = "from_linestrings", geometry_input_file = "data/walk/edges-geometries-enumerated.txt.gz" }, + { type = "from_linestrings", geometry_input_file = "data/bike/edges-geometries-enumerated.txt.gz" }, + { type = "from_linestrings", geometry_input_file = "data/drive/edges-geometries-enumerated.txt.gz" }, +] +tolerance.distance = 15.0 +tolerance.unit = "meters" +queries_without_destinations = false +matching_type = ["point", "vertex_id", "edge_id"] + +[algorithm] +type = "a*" + +# cut off searches that exceed these termination policies. 
+[termination] +type = "solution_size" +limit = 1_000_000 + +# use a time-optimal routing strategy +[cost] +weights.trip_time = 1.0 +vehicle_rates.trip_time.type = "raw" + +### +### WALK TOPOLOGY +### +[[search]] +traversal.type = "combined" +traversal.models = [ + { type = "distance", distance_unit = "miles" }, + { type = "fixed_speed", name = "walk", speed = 5.0, speed_unit = "kph" }, + { type = "time", time_unit = "minutes" }, + { type = "multimodal", this_mode = "walk", available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +frontier.type = "combined" +frontier.models = [ + { type = "time_limit", time_limit = { time = 40.0, time_unit = "minutes" }}, + { type = "multimodal", mode = "walk", constraints = [], available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +### +### BIKE TOPOLOGY +### +[[search]] +traversal.type = "combined" +traversal.models = [ + { type = "distance", distance_unit = "miles" }, + { type = "fixed_speed", name = "bike", speed = 16.0, speed_unit = "kph" }, + { type = "time", time_unit = "minutes" }, + { type = "multimodal", this_mode = "bike", available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +frontier.type = "combined" +frontier.models = [ + { type = "time_limit", time_limit = { time = 40.0, time_unit = "minutes" }}, + { type = "multimodal", mode = "bike", constraints = [], available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +### +### DRIVE TOPOLOGY +### +[[search]] +traversal.type = "combined" +traversal.models = [ + { type = "distance", distance_unit = "miles" }, + { type = "speed", name = "drive", speed_unit = "kph", speed_table_input_file = "data/drive/edges-speeds-mph-enumerated.txt.gz" }, + { type = "time", time_unit = "minutes" }, + { type = "multimodal", this_mode = "drive", 
available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +frontier.type = "combined" +frontier.models = [ + { type = "time_limit", time_limit = { time = 40.0, time_unit = "minutes" }}, + { type = "multimodal", mode = "drive", constraints = [], available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + + +[[plugin.input_plugins]] +type = "grid" +extent_format = "wkt" +grid = { type = "h3", resolution = 8 } +[plugin.input_plugins.population_source] +type = "acs" +acs_type = "five_year" +acs_year = 2022 +acs_resolution = "census_tract" +acs_categories = ["B01001_001E"] + +[[plugin.input_plugins]] +type = "inject" +format = "key_value" +write_mode = "overwrite" +key = "grid_search" +value.mode = ["walk", "bike", "drive"] + +[[plugin.output_plugins]] +type = "traversal" +tree = "geo_json" + +[[plugin.output_plugins]] +type = "summary" + +[[plugin.output_plugins]] +type = "isochrone" +time_bin = { type = "list", times = [10, 20, 30, 40] } +isochrone_algorithm = { type = "k_nearest_concave_hull", k = 3 } +destination_point_generator = { type = "destination_point" } +isochrone_output_format = "wkb" + +### MEP OPPORTUNITY DATA CONFIGURATION ################################# +# assigns opportunities to search results based on a file or api data source +# and a taxonomy for MEP activity types. +# this example shows data loaded from the census LODES online file repository +# assigning activity types by NAICS sector id. 
+[[plugin.output_plugins]] +type = "opportunity" +collect_format = "aggregate" + +[plugin.output_plugins.model] +type = "combined" + +[[plugin.output_plugins.model.models]] +type = "api" +vertex_input_file = "data/vertices-compass.csv.gz" +activity_column_names = ["entertainment", "food", "retail", "healthcare", "services", "jobs"] +table_orientation = "destination_vertex_oriented" + +[plugin.output_plugins.model.models.opportunity_source] +type = "lodes" + +# denver metro region coverage +study_region = { type = "census", geoids = [ + "08001", # Adams County + "08005", # Arapahoe County + "08013", # Boulder County + "08014", # Broomfield County + "08031", # Denver County + "08035", # Douglas County + "08039", # Elbert County + "08059", # Jefferson County + "08123", # Weld County +] } + +# collect LODES data at the tract level. while it is available at the block, +# the download + processing time is 10x that of census tracts. other possible values +# are `block`, `county` or `state`. +data_granularity = "census_tract" +# different editions of the dataset, we are choosing LODES v 8.0. 
+edition = "LODES8" +# Job Type, can have a value of “JT00” for All Jobs, “JT01” for Primary Jobs, “JT02” for All +# Private Jobs, “JT03” for Private Primary Jobs, “JT04” for All Federal Jobs, or “JT05” for +# Federal Primary Jobs +job_type = "JT00" +# Segment of the workforce, can have the values of: +# - S000: Total number of jobs (default) +# - SA01: Number of jobs of workers age 29 or younger +# - SA02: Number of jobs for workers age 30 to 54 +# - SA03: Number of jobs for workers age 55 or older +# - SE01: Number of jobs with earnings $1250/month or less +# - SE02: Number of jobs with earnings $1251/month to $3333/month +# - SE03: Number of jobs with earnings greater than $3333/month +# - SI01: Number of jobs in Goods Producing industry sectors +# - SI02: Number of jobs in Trade, Transportation, and Utilities industry sectors +# - SI03: Number of jobs in All Other Services industry sectors +segment = "S000" + +# most recent year with all states. Alaska has no coverage from 2017-2022. we may +# want a "continental" variant which could support 2022. 
+year = 2022 + +[plugin.output_plugins.model.models.opportunity_source.activity_mapping] +# see https://lehd.ces.census.gov/data/lodes/LODES8/LODESTechDoc8.0.pdf +CNS01 = ["jobs"] # 11 (Agriculture, Forestry, Fishing and Hunting) +CNS02 = ["jobs"] # 21 (Mining, Quarrying, and Oil and Gas Extraction) +CNS03 = ["jobs"] # 22 (Utilities) +CNS04 = ["jobs"] # 23 (Construction) +CNS05 = ["jobs"] # 31-33 (Manufacturing) +CNS06 = ["jobs"] # 42 (Wholesale Trade) +CNS07 = ["jobs", "retail"] # 44-45 (Retail Trade) +CNS08 = ["jobs"] # 48-49 (Transportation and Warehousing) +CNS09 = ["jobs"] # 51 (Information) +CNS10 = ["jobs"] # 52 (Finance and Insurance) +CNS11 = ["jobs"] # 53 (Real Estate and Rental and Leasing) +CNS12 = ["jobs", "services"] # 54 (Professional, Scientific, and Technical Services) +CNS13 = ["jobs"] # 55 (Management of Companies and Enterprises) +CNS14 = ["jobs"] # 56 (Admin/Support/Waste Mgmt/Remediation Services) +CNS15 = ["jobs"] # 61 (Educational Services) +CNS16 = ["jobs", "healthcare"] # 62 (Health Care and Social Assistance) +CNS17 = ["jobs", "entertainment"] # 71 (Arts, Entertainment, and Recreation) +CNS18 = ["jobs", "food"] # 72 (Accommodation and Food Services) +CNS19 = ["jobs"] # 81 (Other Services [except Public Administration]) +CNS20 = ["jobs"] # 92 (Public Administration) + +[system] +parallelism = 8 +response_persistence_policy = "persist_response_in_memory" + +[system.response_output_policy] +type = "combined" + +[[system.response_output_policy.policies]] +type = "file" +filename = "result.json" +[system.response_output_policy.policies.format] +type = "json" +newline_delimited = false + +[[system.response_output_policy.policies]] +type = "file" +filename = "result.csv" +[system.response_output_policy.policies.format] +type = "csv" +sorted = true +[system.response_output_policy.policies.format.mapping] +grid_id = "request.grid_id" +lon = "request.origin_x" +lat = "request.origin_y" +mode = "request.mode" +runtime = "search_runtime" +error = 
"error" + +opps_entertainment_total = "opportunity_totals.entertainment" +opps_retail_total = "opportunity_totals.retail" +opps_healthcare_total = "opportunity_totals.healthcare" +opps_services_total = "opportunity_totals.services" +opps_food_total = "opportunity_totals.food" +opps_jobs_total = "opportunity_totals.jobs" + +opps_entertainment_10 = "bin.10.opportunities.entertainment" +opps_retail_10 = "bin.10.opportunities.retail" +opps_healthcare_10 = "bin.10.opportunities.healthcare" +opps_services_10 = "bin.10.opportunities.services" +opps_food_10 = "bin.10.opportunities.food" +opps_jobs_10 = "bin.10.opportunities.jobs" +opps_entertainment_20 = "bin.20.opportunities.entertainment" +opps_retail_20 = "bin.20.opportunities.retail" +opps_healthcare_20 = "bin.20.opportunities.healthcare" +opps_services_20 = "bin.20.opportunities.services" +opps_food_20 = "bin.20.opportunities.food" +opps_jobs_20 = "bin.20.opportunities.jobs" +opps_entertainment_30 = "bin.30.opportunities.entertainment" +opps_retail_30 = "bin.30.opportunities.retail" +opps_healthcare_30 = "bin.30.opportunities.healthcare" +opps_services_30 = "bin.30.opportunities.services" +opps_food_30 = "bin.30.opportunities.food" +opps_jobs_30 = "bin.30.opportunities.jobs" +opps_entertainment_40 = "bin.40.opportunities.entertainment" +opps_retail_40 = "bin.40.opportunities.retail" +opps_healthcare_40 = "bin.40.opportunities.healthcare" +opps_services_40 = "bin.40.opportunities.services" +opps_food_40 = "bin.40.opportunities.food" +opps_jobs_40 = "bin.40.opportunities.jobs" + +isochrone_10 = "bin.10.isochrone" +isochrone_20 = "bin.20.isochrone" +isochrone_30 = "bin.30.isochrone" +isochrone_40 = "bin.40.isochrone" + From 8641d6c07d364fcfa9d7defbd60c03574bc41ed2 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 3 Feb 2026 23:12:24 +0000 Subject: [PATCH 02/15] Initial plan From 3eaad851f420faf283662c77d4fc0a7bb71d719a Mon Sep 17 00:00:00 2001 From: 
"copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 3 Feb 2026 23:21:03 +0000 Subject: [PATCH 03/15] Fix MaxTripLegs constraint bug causing infinite search expansion Co-authored-by: robfitzgerald <7003022+robfitzgerald@users.noreply.github.com> --- .../model/frontier/multimodal/constraint.rs | 2 +- .../frontier/multimodal/constraint.rs.fixed | 173 ++++++++++++++++++ .../src/model/frontier/multimodal/model.rs | 34 ++++ 3 files changed, 208 insertions(+), 1 deletion(-) create mode 100644 rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed diff --git a/rust/bambam/src/model/frontier/multimodal/constraint.rs b/rust/bambam/src/model/frontier/multimodal/constraint.rs index fe6594b9..e6b93718 100644 --- a/rust/bambam/src/model/frontier/multimodal/constraint.rs +++ b/rust/bambam/src/model/frontier/multimodal/constraint.rs @@ -89,7 +89,7 @@ impl MultimodalFrontierConstraint { })?; let n_legs = match active_mode { Some(active_mode) if active_mode != edge_mode => n_existing_legs + 1, - _ => 0, + _ => n_existing_legs, }; let is_valid = n_legs <= *max_legs; Ok(is_valid) diff --git a/rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed b/rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed new file mode 100644 index 00000000..e6b93718 --- /dev/null +++ b/rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed @@ -0,0 +1,173 @@ +use crate::model::frontier::multimodal::sequence_trie::SubSequenceTrie; +use crate::model::frontier::multimodal::{ + multimodal_frontier_ops as ops, MultimodalFrontierConstraintConfig, +}; +use crate::model::state::{ + multimodal_state_ops as state_ops, MultimodalMapping, MultimodalStateMapping, +}; +use routee_compass_core::model::{ + frontier::FrontierModelError, + network::Edge, + state::{StateModel, StateVariable}, + unit::TimeUnit, +}; +use std::collections::{HashMap, HashSet}; +use uom::si::f64::Time; + +#[derive(Debug)] +/// types of constraints to limit exponential search expansion in 
multimodal scenarios. +/// +/// only deals with constraints associated with multimodal metadata, since metric-based +/// constraints must be applied _after_ access + traversal metrics have been run. +pub enum MultimodalFrontierConstraint { + AllowedModes(HashSet), + ModeCounts(HashMap), + MaxTripLegs(usize), + ExactSequences(SubSequenceTrie), // MaxTime(HashMap), +} + +impl MultimodalFrontierConstraint { + /// validates an edge for traversal in a multimodal traversal + pub fn valid_frontier( + &self, + edge_mode: &str, + edge: &Edge, + state: &[StateVariable], + state_model: &StateModel, + mode_to_state: &MultimodalStateMapping, + max_trip_legs: u64, + ) -> Result { + use MultimodalFrontierConstraint as MFC; + + match self { + MFC::AllowedModes(items) => { + let result = items.contains(edge_mode); + Ok(result) + } + MFC::ModeCounts(limits) => { + let mut counts = + ops::get_mode_counts(state, state_model, max_trip_legs, mode_to_state)?; + + // simulate a mode transition if the incoming edge has a different mode than the trip's active mode + let active_mode = state_ops::get_active_leg_mode( + state, + state_model, + max_trip_legs, + mode_to_state, + ) + .map_err(|e| { + FrontierModelError::FrontierModelError(format!( + "while applying mode count frontier model constraint, {e}" + )) + })?; + if Some(edge_mode) != active_mode { + counts + .entry(edge_mode.to_string()) + .and_modify(|cnt| *cnt += 1) + .or_insert(1); + } + + Ok(ops::valid_mode_counts(&counts, limits)) + } + MFC::MaxTripLegs(max_legs) => { + // simulate a mode transition if the incoming edge has a different mode than the trip's active mode + let active_mode = state_ops::get_active_leg_mode( + state, + state_model, + max_trip_legs, + mode_to_state, + ) + .map_err(|e| { + FrontierModelError::FrontierModelError(format!( + "while applying mode count frontier model constraint, {e}" + )) + })?; + let n_existing_legs = state_ops::get_n_legs(state, state_model).map_err(|e| { + 
FrontierModelError::FrontierModelError( + (format!("while getting number of trip legs for this trip: {e}")), + ) + })?; + let n_legs = match active_mode { + Some(active_mode) if active_mode != edge_mode => n_existing_legs + 1, + _ => n_existing_legs, + }; + let is_valid = n_legs <= *max_legs; + Ok(is_valid) + } + MFC::ExactSequences(trie) => { + let mut modes = + state_ops::get_mode_sequence(state, state_model, max_trip_legs, mode_to_state) + .map_err(|e| { + FrontierModelError::FrontierModelError(format!( + "while testing for matching mode sub-sequence, had error: {e}" + )) + })?; + + // simulate a mode transition if the incoming edge has a different mode than the trip's active mode + let active_mode = state_ops::get_active_leg_mode( + state, + state_model, + max_trip_legs, + mode_to_state, + ) + .map_err(|e| { + FrontierModelError::FrontierModelError(format!( + "while applying mode count frontier model constraint, {e}" + )) + })?; + if Some(edge_mode) != active_mode { + modes.push(edge_mode.to_string()); + } + let is_match = trie.contains(&modes); + Ok(is_match) + } + } + } +} + +impl TryFrom<&MultimodalFrontierConstraintConfig> for MultimodalFrontierConstraint { + type Error = FrontierModelError; + + fn try_from(value: &MultimodalFrontierConstraintConfig) -> Result { + use MultimodalFrontierConstraintConfig as MFCC; + match value { + MFCC::AllowedModes { allowed_modes } => { + let modes = allowed_modes.iter().cloned().collect::>(); + Ok(Self::AllowedModes(modes)) + } + MFCC::ModeCounts { mode_counts } => { + let counts = mode_counts + .iter() + .map(|(k, v)| { + let v_usize: usize = v.get().try_into().map_err(|e| { + FrontierModelError::FrontierModelError(format!( + "while reading mode count limit: {e}" + )) + })?; + Ok((k.clone(), v_usize)) + }) + .collect::, _>>()?; + Ok(Self::ModeCounts(counts)) + } + MFCC::TripLegCount { trip_leg_count } => { + let max_usize: usize = trip_leg_count.get().try_into().map_err(|e| { + 
FrontierModelError::FrontierModelError(format!( + "while reading max trip leg limit: {e}" + )) + })?; + Ok(Self::MaxTripLegs(max_usize)) + } + MFCC::ExactSequences { exact_sequences } => { + let mut trie = SubSequenceTrie::new(); + for seq in exact_sequences.iter() { + trie.insert_sequence(seq.clone()); + } + Ok(Self::ExactSequences(trie)) + } + } + } +} + +// MultimodalFrontierConstraint::MaxTime(limits) => { +// ops::valid_mode_time(state, state_model, limits) +// } diff --git a/rust/bambam/src/model/frontier/multimodal/model.rs b/rust/bambam/src/model/frontier/multimodal/model.rs index e1b4aab2..e1f952e2 100644 --- a/rust/bambam/src/model/frontier/multimodal/model.rs +++ b/rust/bambam/src/model/frontier/multimodal/model.rs @@ -871,4 +871,38 @@ mod test { .expect("test failed"); assert!(!is_valid); // Should be invalid as this would create a second leg } + + #[test] + fn test_max_trip_legs_same_mode_continuation_at_limit() { + // Test that continuing with the same mode when at the limit is still invalid + // This tests the bug fix where same-mode continuation was always returning 0 legs + let max_trip_legs = 2; + let (bike_mtm, bike_mfm, state_model, mut state) = test_setup( + vec![MultimodalFrontierConstraint::MaxTripLegs(1)], + "bike", // FrontierModel for bike edges + &["walk", "bike"], + &[], + max_trip_legs, + ); + + // Set up state with 2 legs: walk then bike (exceeds limit of 1) + inject_trip_legs( + &["walk", "bike"], + &mut state, + &state_model, + &bike_mtm.mode_to_state, + max_trip_legs, + ); + + // Test continuing with bike-mode edge (same as active mode) + // edge.edge_list_id doesn't matter since we're just checking constraints, not traversal + // The important thing is that bike_mfm has mode="bike" which matches active_mode="bike" + // Before the fix, this would incorrectly return n_legs=0 and be valid + // After the fix, this should correctly use n_existing_legs=2 and be invalid + let bike_edge = Edge::new(0, 0, 0, 1, Length::new::(1000.0)); + let 
is_valid = bike_mfm + .valid_frontier(&bike_edge, None, &state, &state_model) + .expect("test failed"); + assert!(!is_valid); // Should be invalid as we already have 2 legs, which exceeds the limit + } } From a015ab0846fba294da8797b45c981658e44247a1 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 3 Feb 2026 23:21:13 +0000 Subject: [PATCH 04/15] Remove accidentally committed backup file Co-authored-by: robfitzgerald <7003022+robfitzgerald@users.noreply.github.com> --- .../frontier/multimodal/constraint.rs.fixed | 173 ------------------ 1 file changed, 173 deletions(-) delete mode 100644 rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed diff --git a/rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed b/rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed deleted file mode 100644 index e6b93718..00000000 --- a/rust/bambam/src/model/frontier/multimodal/constraint.rs.fixed +++ /dev/null @@ -1,173 +0,0 @@ -use crate::model::frontier::multimodal::sequence_trie::SubSequenceTrie; -use crate::model::frontier::multimodal::{ - multimodal_frontier_ops as ops, MultimodalFrontierConstraintConfig, -}; -use crate::model::state::{ - multimodal_state_ops as state_ops, MultimodalMapping, MultimodalStateMapping, -}; -use routee_compass_core::model::{ - frontier::FrontierModelError, - network::Edge, - state::{StateModel, StateVariable}, - unit::TimeUnit, -}; -use std::collections::{HashMap, HashSet}; -use uom::si::f64::Time; - -#[derive(Debug)] -/// types of constraints to limit exponential search expansion in multimodal scenarios. -/// -/// only deals with constraints associated with multimodal metadata, since metric-based -/// constraints must be applied _after_ access + traversal metrics have been run. 
-pub enum MultimodalFrontierConstraint { - AllowedModes(HashSet), - ModeCounts(HashMap), - MaxTripLegs(usize), - ExactSequences(SubSequenceTrie), // MaxTime(HashMap), -} - -impl MultimodalFrontierConstraint { - /// validates an edge for traversal in a multimodal traversal - pub fn valid_frontier( - &self, - edge_mode: &str, - edge: &Edge, - state: &[StateVariable], - state_model: &StateModel, - mode_to_state: &MultimodalStateMapping, - max_trip_legs: u64, - ) -> Result { - use MultimodalFrontierConstraint as MFC; - - match self { - MFC::AllowedModes(items) => { - let result = items.contains(edge_mode); - Ok(result) - } - MFC::ModeCounts(limits) => { - let mut counts = - ops::get_mode_counts(state, state_model, max_trip_legs, mode_to_state)?; - - // simulate a mode transition if the incoming edge has a different mode than the trip's active mode - let active_mode = state_ops::get_active_leg_mode( - state, - state_model, - max_trip_legs, - mode_to_state, - ) - .map_err(|e| { - FrontierModelError::FrontierModelError(format!( - "while applying mode count frontier model constraint, {e}" - )) - })?; - if Some(edge_mode) != active_mode { - counts - .entry(edge_mode.to_string()) - .and_modify(|cnt| *cnt += 1) - .or_insert(1); - } - - Ok(ops::valid_mode_counts(&counts, limits)) - } - MFC::MaxTripLegs(max_legs) => { - // simulate a mode transition if the incoming edge has a different mode than the trip's active mode - let active_mode = state_ops::get_active_leg_mode( - state, - state_model, - max_trip_legs, - mode_to_state, - ) - .map_err(|e| { - FrontierModelError::FrontierModelError(format!( - "while applying mode count frontier model constraint, {e}" - )) - })?; - let n_existing_legs = state_ops::get_n_legs(state, state_model).map_err(|e| { - FrontierModelError::FrontierModelError( - (format!("while getting number of trip legs for this trip: {e}")), - ) - })?; - let n_legs = match active_mode { - Some(active_mode) if active_mode != edge_mode => n_existing_legs + 1, - _ => 
n_existing_legs, - }; - let is_valid = n_legs <= *max_legs; - Ok(is_valid) - } - MFC::ExactSequences(trie) => { - let mut modes = - state_ops::get_mode_sequence(state, state_model, max_trip_legs, mode_to_state) - .map_err(|e| { - FrontierModelError::FrontierModelError(format!( - "while testing for matching mode sub-sequence, had error: {e}" - )) - })?; - - // simulate a mode transition if the incoming edge has a different mode than the trip's active mode - let active_mode = state_ops::get_active_leg_mode( - state, - state_model, - max_trip_legs, - mode_to_state, - ) - .map_err(|e| { - FrontierModelError::FrontierModelError(format!( - "while applying mode count frontier model constraint, {e}" - )) - })?; - if Some(edge_mode) != active_mode { - modes.push(edge_mode.to_string()); - } - let is_match = trie.contains(&modes); - Ok(is_match) - } - } - } -} - -impl TryFrom<&MultimodalFrontierConstraintConfig> for MultimodalFrontierConstraint { - type Error = FrontierModelError; - - fn try_from(value: &MultimodalFrontierConstraintConfig) -> Result { - use MultimodalFrontierConstraintConfig as MFCC; - match value { - MFCC::AllowedModes { allowed_modes } => { - let modes = allowed_modes.iter().cloned().collect::>(); - Ok(Self::AllowedModes(modes)) - } - MFCC::ModeCounts { mode_counts } => { - let counts = mode_counts - .iter() - .map(|(k, v)| { - let v_usize: usize = v.get().try_into().map_err(|e| { - FrontierModelError::FrontierModelError(format!( - "while reading mode count limit: {e}" - )) - })?; - Ok((k.clone(), v_usize)) - }) - .collect::, _>>()?; - Ok(Self::ModeCounts(counts)) - } - MFCC::TripLegCount { trip_leg_count } => { - let max_usize: usize = trip_leg_count.get().try_into().map_err(|e| { - FrontierModelError::FrontierModelError(format!( - "while reading max trip leg limit: {e}" - )) - })?; - Ok(Self::MaxTripLegs(max_usize)) - } - MFCC::ExactSequences { exact_sequences } => { - let mut trie = SubSequenceTrie::new(); - for seq in exact_sequences.iter() { - 
trie.insert_sequence(seq.clone()); - } - Ok(Self::ExactSequences(trie)) - } - } - } -} - -// MultimodalFrontierConstraint::MaxTime(limits) => { -// ops::valid_mode_time(state, state_model, limits) -// } From 63f708e09a39c9885475b7eff0b5348f791361e5 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 3 Feb 2026 23:22:21 +0000 Subject: [PATCH 05/15] Clarify test comments to distinguish buffer size from constraint limit Co-authored-by: robfitzgerald <7003022+robfitzgerald@users.noreply.github.com> --- rust/bambam/src/model/frontier/multimodal/model.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/rust/bambam/src/model/frontier/multimodal/model.rs b/rust/bambam/src/model/frontier/multimodal/model.rs index e1f952e2..eddf1f1f 100644 --- a/rust/bambam/src/model/frontier/multimodal/model.rs +++ b/rust/bambam/src/model/frontier/multimodal/model.rs @@ -876,16 +876,20 @@ mod test { fn test_max_trip_legs_same_mode_continuation_at_limit() { // Test that continuing with the same mode when at the limit is still invalid // This tests the bug fix where same-mode continuation was always returning 0 legs - let max_trip_legs = 2; + + // max_trip_legs is the state buffer size, constraint is the actual limit + let max_trip_legs = 2; // State buffer can hold 2 legs + let constraint_limit = 1; // But we only allow 1 leg + let (bike_mtm, bike_mfm, state_model, mut state) = test_setup( - vec![MultimodalFrontierConstraint::MaxTripLegs(1)], + vec![MultimodalFrontierConstraint::MaxTripLegs(constraint_limit)], "bike", // FrontierModel for bike edges &["walk", "bike"], &[], max_trip_legs, ); - // Set up state with 2 legs: walk then bike (exceeds limit of 1) + // Set up state with 2 legs: walk then bike (exceeds constraint_limit of 1) inject_trip_legs( &["walk", "bike"], &mut state, @@ -903,6 +907,6 @@ mod test { let is_valid = bike_mfm .valid_frontier(&bike_edge, None, &state, &state_model) .expect("test 
failed"); - assert!(!is_valid); // Should be invalid as we already have 2 legs, which exceeds the limit + assert!(!is_valid); // Should be invalid as we already have 2 legs, which exceeds constraint_limit of 1 } } From 5e20039251e1dec45e729b9880fd24fedcbf8749 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Tue, 3 Feb 2026 16:59:13 -0700 Subject: [PATCH 06/15] fmt --- rust/bambam/src/model/frontier/multimodal/model.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/rust/bambam/src/model/frontier/multimodal/model.rs b/rust/bambam/src/model/frontier/multimodal/model.rs index eddf1f1f..43fdfff1 100644 --- a/rust/bambam/src/model/frontier/multimodal/model.rs +++ b/rust/bambam/src/model/frontier/multimodal/model.rs @@ -876,14 +876,14 @@ mod test { fn test_max_trip_legs_same_mode_continuation_at_limit() { // Test that continuing with the same mode when at the limit is still invalid // This tests the bug fix where same-mode continuation was always returning 0 legs - + // max_trip_legs is the state buffer size, constraint is the actual limit - let max_trip_legs = 2; // State buffer can hold 2 legs - let constraint_limit = 1; // But we only allow 1 leg - + let max_trip_legs = 2; // State buffer can hold 2 legs + let constraint_limit = 1; // But we only allow 1 leg + let (bike_mtm, bike_mfm, state_model, mut state) = test_setup( vec![MultimodalFrontierConstraint::MaxTripLegs(constraint_limit)], - "bike", // FrontierModel for bike edges + "bike", // FrontierModel for bike edges &["walk", "bike"], &[], max_trip_legs, From aac3261fe2163ea4b05cdf1d8a10d36b5cf7b28d Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Fri, 6 Feb 2026 15:29:09 -0700 Subject: [PATCH 07/15] debug logging in multimodal components --- .../src/model/frontier/multimodal/model.rs | 7 +++++++ .../bambam/src/model/label/multimodal/model.rs | 11 +++++++++++ .../src/model/traversal/multimodal/model.rs | 18 ++++++++++++++++++ 3 files changed, 36 insertions(+) diff --git 
a/rust/bambam/src/model/frontier/multimodal/model.rs b/rust/bambam/src/model/frontier/multimodal/model.rs index 43fdfff1..66a3236c 100644 --- a/rust/bambam/src/model/frontier/multimodal/model.rs +++ b/rust/bambam/src/model/frontier/multimodal/model.rs @@ -93,6 +93,13 @@ impl FrontierModel for MultimodalFrontierModel { &self.engine.mode_to_state, self.engine.max_trip_legs, )?; + log::debug!( + "multimodal frontier is valid? '{valid}' for state at time: {:.2} minutes", + state_model + .get_time(state, "trip_time") + .unwrap_or_default() + .get::() + ); if !valid { return Ok(false); } diff --git a/rust/bambam/src/model/label/multimodal/model.rs b/rust/bambam/src/model/label/multimodal/model.rs index 2f12f57f..9d21d9b4 100644 --- a/rust/bambam/src/model/label/multimodal/model.rs +++ b/rust/bambam/src/model/label/multimodal/model.rs @@ -49,6 +49,17 @@ impl LabelModel for MultimodalLabelModel { .collect::, _>>()?; let label = Label::new_u8_state(vertex_id, &mode_labels)?; + + log::debug!( + "multimodal label model at vertex {} produced label {} for state at time: {:.2} minutes", + vertex_id, + label, + state_model + .get_time(state, "trip_time") + .unwrap_or_default() + .get::() + ); + Ok(label) } } diff --git a/rust/bambam/src/model/traversal/multimodal/model.rs b/rust/bambam/src/model/traversal/multimodal/model.rs index 0bdfa9e9..c49db601 100644 --- a/rust/bambam/src/model/traversal/multimodal/model.rs +++ b/rust/bambam/src/model/traversal/multimodal/model.rs @@ -118,6 +118,15 @@ impl TraversalModel for MultimodalTraversalModel { state_model: &StateModel, ) -> Result<(), TraversalModelError> { let (_, edge, _) = trajectory; + log::debug!( + "begin multimodal traversal along edge {:?} for state at time: {:.2} minutes with tree size {}", + (edge.edge_list_id, edge.edge_id), + state_model + .get_time(state, "trip_time") + .unwrap_or_default() + .get::(), + tree.len() + ); // first, apply any mode switching for using this edge ops::mode_switch( @@ -149,6 +158,15 @@ impl 
TraversalModel for MultimodalTraversalModel { self.max_trip_legs, )?; } + log::debug!( + "finish multimodal traversal along edge {:?} for state at time: {:.2} minutes with tree size {}", + (edge.edge_list_id, edge.edge_id), + state_model + .get_time(state, "trip_time") + .unwrap_or_default() + .get::(), + tree.len() + ); Ok(()) } From a1842c3abfaa10221224799df6037ad9fe69ef47 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Mon, 9 Feb 2026 09:20:12 -0700 Subject: [PATCH 08/15] debug logging in multimodal components --- rust/bambam/src/model/label/multimodal/model.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/rust/bambam/src/model/label/multimodal/model.rs b/rust/bambam/src/model/label/multimodal/model.rs index 9d21d9b4..3d380af9 100644 --- a/rust/bambam/src/model/label/multimodal/model.rs +++ b/rust/bambam/src/model/label/multimodal/model.rs @@ -1,5 +1,6 @@ //! builds labels that include enumerations for leg modes. //! +use itertools::Itertools; use routee_compass_core::model::{ label::{label_model_error::LabelModelError, Label, LabelModel}, network::VertexId, @@ -51,9 +52,9 @@ impl LabelModel for MultimodalLabelModel { let label = Label::new_u8_state(vertex_id, &mode_labels)?; log::debug!( - "multimodal label model at vertex {} produced label {} for state at time: {:.2} minutes", + "multimodal label model at vertex {} produced label [{}] for state at time: {:.2} minutes", vertex_id, - label, + mode_labels.iter().map(|l| self.mode_to_state.get_categorical(*l as i64).unwrap_or_default().cloned().unwrap_or_default()).join("->"), state_model .get_time(state, "trip_time") .unwrap_or_default() From 3665096d0c65aebfdcf040e35642532d2d2398d6 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Tue, 10 Feb 2026 10:51:25 -0700 Subject: [PATCH 09/15] checkpoint: statistical summary feature for OMF --- rust/bambam-omf/src/app/network.rs | 23 ++- rust/bambam-omf/src/collection/collector.rs | 31 ++- .../src/collection/record/record_type.rs | 2 +- 
.../record/transportation_collection.rs | 10 +- .../record/transportation_segment.rs | 2 +- rust/bambam-omf/src/graph/mod.rs | 2 + rust/bambam-omf/src/graph/omf_graph.rs | 6 +- rust/bambam-omf/src/graph/summary.rs | 178 ++++++++++++++++++ 8 files changed, 224 insertions(+), 30 deletions(-) create mode 100644 rust/bambam-omf/src/graph/summary.rs diff --git a/rust/bambam-omf/src/app/network.rs b/rust/bambam-omf/src/app/network.rs index 375c0b94..f0b64158 100644 --- a/rust/bambam-omf/src/app/network.rs +++ b/rust/bambam-omf/src/app/network.rs @@ -5,11 +5,9 @@ use serde::{Deserialize, Serialize}; use crate::{ app::CliBoundingBox, collection::{ - filter::TravelModeFilter, ObjectStoreSource, OvertureMapsCollectionError, - OvertureMapsCollectorConfig, ReleaseVersion, SegmentAccessRestrictionWhen, - TransportationCollection, + ObjectStoreSource, OvertureMapsCollectionError, OvertureMapsCollectorConfig, ReleaseVersion, SegmentAccessRestrictionWhen, TransportationCollection, filter::TravelModeFilter }, - graph::OmfGraphVectorized, + graph::{OmfGraphSource, OmfGraphStats, OmfGraphSummary, OmfGraphVectorized}, util, }; @@ -52,7 +50,22 @@ pub fn run( } let vectorized_graph = OmfGraphVectorized::new(&collection, modes)?; - vectorized_graph.write_compass(output_directory, true)?; + let stats = OmfGraphStats::try_from(&vectorized_graph)?; + let uri = match local_source { + Some(local) => format!("file://{}", local.to_str().unwrap_or_default()), + None => collection.uri.clone(), + }; + let source = OmfGraphSource::new( + &uri, + , + bbox.as_ref() + ); + let summary = OmfGraphSummary { + source, + stats, + }; + + vectorized_graph.write_compass(&summary, output_directory, true)?; Ok(()) } diff --git a/rust/bambam-omf/src/collection/collector.rs b/rust/bambam-omf/src/collection/collector.rs index 8a7bf368..a485a884 100644 --- a/rust/bambam-omf/src/collection/collector.rs +++ b/rust/bambam-omf/src/collection/collector.rs @@ -52,7 +52,7 @@ impl OvertureMapsCollector { } } - fn 
get_latest_release(&self) -> Result { + pub fn get_latest_release(&self) -> Result { // Get runtime to consume async functions let runtime = tokio::runtime::Builder::new_current_thread() .enable_all() @@ -212,16 +212,12 @@ impl OvertureMapsCollector { pub fn collect_from_release( &self, - release: ReleaseVersion, + release_uri: &str, record_type: &OvertureRecordType, row_filter_config: Option, ) -> Result, OvertureMapsCollectionError> { - let release_str = match release { - ReleaseVersion::Latest => self.get_latest_release()?, - other => String::from(other), - }; - log::info!("Collecting OvertureMaps {record_type} records from release {release_str}"); - let path = Path::from(record_type.format_url(release_str)); + log::info!("Collecting OvertureMaps {record_type} records from release {release_uri}"); + let path = Path::from(record_type.format_url(release_uri)); self.collect_from_path(path, record_type, row_filter_config) } } @@ -230,10 +226,8 @@ impl OvertureMapsCollector { mod test { use crate::collection::{ ObjectStoreSource, OvertureMapsCollector, OvertureMapsCollectorConfig, OvertureRecord, - OvertureRecordType, ReleaseVersion, RowFilterConfig, + OvertureRecordType, RowFilterConfig, }; - use chrono::NaiveDate; - use std::str::FromStr; fn get_collector() -> OvertureMapsCollector { OvertureMapsCollectorConfig::new(ObjectStoreSource::AmazonS3, Some(4), Some(64)) @@ -254,13 +248,15 @@ mod test { ymax: 39.784, }; + // this retrieval and external depenency seems too brittle for a unit test + let latest_release = collector + .get_latest_release() + .expect("failed to retrieve latest OMF release"); + // Connectors let connector_records = collector .collect_from_release( - ReleaseVersion::Monthly { - datetime: NaiveDate::from_str("2025-12-17").unwrap(), - version: Some(0), - }, + &latest_release, &OvertureRecordType::Connector, Some(row_filter.clone()), ) @@ -277,10 +273,7 @@ mod test { // Segment let segment_records = collector .collect_from_release( - 
ReleaseVersion::Monthly { - datetime: NaiveDate::from_str("2025-12-17").unwrap(), - version: Some(0), - }, + &latest_release, &OvertureRecordType::Segment, Some(row_filter), ) diff --git a/rust/bambam-omf/src/collection/record/record_type.rs b/rust/bambam-omf/src/collection/record/record_type.rs index 9b0f3864..6d0cdd77 100644 --- a/rust/bambam-omf/src/collection/record/record_type.rs +++ b/rust/bambam-omf/src/collection/record/record_type.rs @@ -11,7 +11,7 @@ pub enum OvertureRecordType { } impl OvertureRecordType { - pub fn format_url(&self, release_str: String) -> String { + pub fn format_url(&self, release_str: &str) -> String { match self { OvertureRecordType::Places => { format!("release/{release_str}/theme=places/type=place/").to_owned() diff --git a/rust/bambam-omf/src/collection/record/transportation_collection.rs b/rust/bambam-omf/src/collection/record/transportation_collection.rs index 6abe4a1b..3f068e40 100644 --- a/rust/bambam-omf/src/collection/record/transportation_collection.rs +++ b/rust/bambam-omf/src/collection/record/transportation_collection.rs @@ -9,6 +9,7 @@ use crate::collection::{ #[derive(Serialize, Deserialize, Clone, Debug)] pub struct TransportationCollection { + pub uri: String, pub connectors: Vec, pub segments: Vec, } @@ -21,9 +22,13 @@ impl TransportationCollection { release: ReleaseVersion, row_filter_config: Option, ) -> Result { + let uri = match &release { + ReleaseVersion::Latest => collector.get_latest_release()?, + other => String::from(other), + }; let connectors = collector .collect_from_release( - release.clone(), + &uri, &OvertureRecordType::Connector, row_filter_config.clone(), )? @@ -40,7 +45,7 @@ impl TransportationCollection { let segments = collector .collect_from_release( - release.clone(), + &uri, &OvertureRecordType::Segment, row_filter_config.clone(), )? 
@@ -56,6 +61,7 @@ impl TransportationCollection { .collect::, OvertureMapsCollectionError>>()?; Ok(Self { + uri, connectors, segments, }) diff --git a/rust/bambam-omf/src/collection/record/transportation_segment.rs b/rust/bambam-omf/src/collection/record/transportation_segment.rs index 06a40c63..e84db943 100644 --- a/rust/bambam-omf/src/collection/record/transportation_segment.rs +++ b/rust/bambam-omf/src/collection/record/transportation_segment.rs @@ -259,7 +259,7 @@ impl fmt::Display for SegmentSubclass { } /// Fully qualified segment type including type, class and subclass. E.g. road-service-driveway -#[derive(Eq, PartialEq, Hash)] +#[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct SegmentFullType(SegmentSubtype, SegmentClass, Option); impl SegmentFullType { diff --git a/rust/bambam-omf/src/graph/mod.rs b/rust/bambam-omf/src/graph/mod.rs index a1bcc55c..15dea514 100644 --- a/rust/bambam-omf/src/graph/mod.rs +++ b/rust/bambam-omf/src/graph/mod.rs @@ -2,9 +2,11 @@ mod connector_in_segment; mod omf_graph; mod segment_split; mod serialize_ops; +mod summary; mod vertex_serializable; pub mod segment_ops; pub use connector_in_segment::ConnectorInSegment; pub use omf_graph::OmfGraphVectorized; pub use segment_split::SegmentSplit; +pub use summary::{ClassStats, EdgeListStats, OmfGraphSource, OmfGraphStats, OmfGraphSummary}; diff --git a/rust/bambam-omf/src/graph/omf_graph.rs b/rust/bambam-omf/src/graph/omf_graph.rs index 3bd98ab9..c53cef04 100644 --- a/rust/bambam-omf/src/graph/omf_graph.rs +++ b/rust/bambam-omf/src/graph/omf_graph.rs @@ -7,7 +7,7 @@ use crate::{ record::SegmentHeading, OvertureMapsCollectionError, SegmentAccessRestrictionWhen, SegmentFullType, TransportationCollection, TransportationSegmentRecord, }, - graph::{segment_ops, vertex_serializable::VertexSerializable}, + graph::{segment_ops, vertex_serializable::VertexSerializable, OmfGraphSummary}, }; use geo::LineString; use kdam::tqdm; @@ -22,6 +22,7 @@ pub const SPEEDS_FILENAME: &str = 
"edges-speeds-mph-enumerated.txt.gz"; pub const CLASSES_FILENAME: &str = "edges-classes-enumerated.txt.gz"; pub const SPEED_MAPPING_FILENAME: &str = "edges-classes-speed-mapping.csv.gz"; pub const BEARINGS_FILENAME: &str = "edges-bearings-enumerated.txt.gz"; +pub const GLOBAL_AVG_SPEED_KEY: &str = "_global_"; pub struct OmfGraphVectorized { pub vertices: Vec, @@ -139,7 +140,7 @@ impl OmfGraphVectorized { .iter() .map(|(&k, v)| (k.as_str(), *v)) .collect::>(); - speed_lookup.insert(String::from("_global_"), global_speed); + speed_lookup.insert(String::from(GLOBAL_AVG_SPEED_KEY), global_speed); let edge_list = OmfEdgeList { edges: EdgeList(edges.into_boxed_slice()), @@ -165,6 +166,7 @@ impl OmfGraphVectorized { /// write the graph to disk in vectorized Compass format. pub fn write_compass( &self, + stats: &OmfGraphSummary, output_directory: &Path, overwrite: bool, ) -> Result<(), OvertureMapsCollectionError> { diff --git a/rust/bambam-omf/src/graph/summary.rs b/rust/bambam-omf/src/graph/summary.rs new file mode 100644 index 00000000..45856723 --- /dev/null +++ b/rust/bambam-omf/src/graph/summary.rs @@ -0,0 +1,178 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; + +use crate::{ + app::CliBoundingBox, + collection::OvertureMapsCollectionError, + graph::{ + omf_graph::{OmfEdgeList, GLOBAL_AVG_SPEED_KEY}, + OmfGraphVectorized, + }, +}; + +/// summarizes an OMF import of a network. +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct OmfGraphSummary { + /// information describing how this dataset was generated + pub source: OmfGraphSource, + /// + pub stats: OmfGraphStats, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case")] +pub struct OmfGraphSource { + /// location of imported OMF dataset. this should either be + /// an official OMF storage location or a local file path. 
+ pub uri: String, + /// user-provided name for the network + pub study_region: String, + /// date and time this network was created + pub created: String, + /// bounding box query used when run + pub bbox: Option, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case")] +pub struct OmfGraphStats { + /// number of vertices in the network + pub vertices: usize, + /// details for each edge list + pub edge_list: HashMap, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case")] +pub struct EdgeListStats { + /// number of edges in the network + pub edges: usize, + /// sum of all miles of roadways + pub miles: f64, + /// average speed of all segments in this edge list + pub avg_speed_mph: Option, + /// count and mileage of roadways by road class + pub road_class_stats: HashMap, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case")] +pub struct ClassStats { + /// number of segments + pub count: usize, + /// total miles of counted segments + pub distance_miles: f64, + /// average speed observed over the counted segments + pub avg_speed_mph: Option, +} + +struct ClassStatsAcc { + /// number of segments + pub count: usize, + /// total miles of counted segments + pub sum_distance: uom::si::f64::Length, +} + +impl OmfGraphSource { + pub fn new(uri: &str, study_region: &str, bbox: Option<&CliBoundingBox>) -> Self { + let created = chrono::Utc::now().to_rfc3339(); + Self { + uri: uri.to_string(), + study_region: study_region.to_string(), + created, + bbox: bbox.cloned(), + } + } +} + +impl TryFrom<&OmfGraphVectorized> for OmfGraphStats { + type Error = OvertureMapsCollectionError; + + fn try_from(value: &OmfGraphVectorized) -> Result { + let edge_list_iter = value.edge_list_config.iter().zip(value.edge_lists.iter()); + let mut edge_list = HashMap::new(); + for (c, e) in edge_list_iter { + let key = c.mode.clone(); + let value = EdgeListStats::try_from(e)?; + let _ = 
edge_list.insert(key, value); + } + Ok(OmfGraphStats { + vertices: value.vertices.len(), + edge_list, + }) + } +} + +impl TryFrom<&OmfEdgeList> for EdgeListStats { + type Error = OvertureMapsCollectionError; + + fn try_from(value: &OmfEdgeList) -> Result { + let edges = value.edges.len(); + let miles = if edges == 0 { + 0.0 + } else { + value + .edges + .0 + .iter() + .map(|e| e.distance.get::()) + .sum() + }; + + let mut class_stats_accumulators: HashMap = HashMap::new(); + let edge_iter = value.edges.0.iter().zip(value.classes.iter()); + for (edge, class_full_type) in edge_iter { + let road_class = class_full_type.as_str().to_string(); + match class_stats_accumulators.get_mut(&road_class) { + Some(cnt) => { + cnt.add(edge.distance); + } + None => { + let acc = ClassStatsAcc::new(edge.distance); + class_stats_accumulators.insert(road_class.clone(), acc); + } + } + } + let road_class_stats: HashMap = class_stats_accumulators + .into_iter() + .map(|(k, v)| { + // this fully-qualified road class label may or may not be represented in the + // collected speed lookup table. 
+ let avg_speed = value.speed_lookup.get(&k).cloned(); + (k, ClassStats::new(v, avg_speed)) + }) + .collect(); + let avg_speed_mph = value.speed_lookup.get(GLOBAL_AVG_SPEED_KEY).cloned(); + + Ok(Self { + edges, + miles, + road_class_stats, + avg_speed_mph, + }) + } +} + +impl ClassStats { + fn new(acc: ClassStatsAcc, avg_speed_mph: Option) -> Self { + Self { + count: acc.count, + distance_miles: acc.sum_distance.get::(), + avg_speed_mph, + } + } +} + +impl ClassStatsAcc { + pub fn new(distance: uom::si::f64::Length) -> Self { + Self { + count: 1, + sum_distance: distance, + } + } + pub fn add(&mut self, distance: uom::si::f64::Length) { + self.count += 1; + self.sum_distance += distance; + } +} From a61ff767e816aa594820a97f4754921d89ca847a Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Tue, 10 Feb 2026 13:25:54 -0700 Subject: [PATCH 10/15] pass release URI as string resolved from version --- .../source/overture_opportunity_collection_model.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/rust/bambam/src/model/output_plugin/opportunity/source/overture_opportunity_collection_model.rs b/rust/bambam/src/model/output_plugin/opportunity/source/overture_opportunity_collection_model.rs index 46ee9d34..5ed39859 100644 --- a/rust/bambam/src/model/output_plugin/opportunity/source/overture_opportunity_collection_model.rs +++ b/rust/bambam/src/model/output_plugin/opportunity/source/overture_opportunity_collection_model.rs @@ -167,10 +167,11 @@ impl OvertureOpportunityCollectionModel { &self, activity_types: &[String], ) -> Result, Vec)>, OvertureMapsCollectionError> { + let uri = self.release_version.to_string(); let places_records = self .collector .collect_from_release( - self.release_version.clone(), + &uri, &OvertureRecordType::Places, self.places_row_filter_config.clone(), )? 
@@ -237,10 +238,11 @@ impl OvertureOpportunityCollectionModel { let arc_buildings_taxonomy = Arc::new(buildings_taxonomy_model); // Use the collector to retrieve buildings data + let uri = self.release_version.to_string(); let buildings_records = self .collector .collect_from_release( - self.release_version.clone(), + &uri, &OvertureRecordType::Buildings, self.buildings_row_filter_config.clone(), )? From 56d623a5838f69360e1de3f76ba9b6e42e4f2929 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Tue, 10 Feb 2026 13:26:17 -0700 Subject: [PATCH 11/15] wire in summary output --- rust/bambam-omf/Cargo.toml | 1 + rust/bambam-omf/src/app/network.rs | 37 +++++++++++++++++++++--------- rust/bambam-omf/src/app/omf_app.rs | 14 ++++++++++- 3 files changed, 40 insertions(+), 12 deletions(-) diff --git a/rust/bambam-omf/Cargo.toml b/rust/bambam-omf/Cargo.toml index b3da1932..288b270c 100644 --- a/rust/bambam-omf/Cargo.toml +++ b/rust/bambam-omf/Cargo.toml @@ -52,6 +52,7 @@ serde_bytes = { workspace = true } serde_json = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } +toml = { workspace = true } uom = { workspace = true } wkb = { workspace = true } wkt = { workspace = true } diff --git a/rust/bambam-omf/src/app/network.rs b/rust/bambam-omf/src/app/network.rs index f0b64158..20c9270c 100644 --- a/rust/bambam-omf/src/app/network.rs +++ b/rust/bambam-omf/src/app/network.rs @@ -5,7 +5,9 @@ use serde::{Deserialize, Serialize}; use crate::{ app::CliBoundingBox, collection::{ - ObjectStoreSource, OvertureMapsCollectionError, OvertureMapsCollectorConfig, ReleaseVersion, SegmentAccessRestrictionWhen, TransportationCollection, filter::TravelModeFilter + filter::TravelModeFilter, ObjectStoreSource, OvertureMapsCollectionError, + OvertureMapsCollectorConfig, ReleaseVersion, SegmentAccessRestrictionWhen, + TransportationCollection, }, graph::{OmfGraphSource, OmfGraphStats, OmfGraphSummary, OmfGraphVectorized}, util, @@ -33,6 +35,7 @@ impl 
From<&NetworkEdgeListConfiguration> for SegmentAccessRestrictionWhen { /// runs an OMF network import using the provided configuration. pub fn run( + name: &str, bbox: Option<&CliBoundingBox>, modes: &[NetworkEdgeListConfiguration], output_directory: &Path, @@ -50,26 +53,38 @@ pub fn run( } let vectorized_graph = OmfGraphVectorized::new(&collection, modes)?; - let stats = OmfGraphStats::try_from(&vectorized_graph)?; + + // summarize imported graph let uri = match local_source { Some(local) => format!("file://{}", local.to_str().unwrap_or_default()), None => collection.uri.clone(), }; - let source = OmfGraphSource::new( - &uri, - , - bbox.as_ref() - ); - let summary = OmfGraphSummary { - source, - stats, - }; + let stats = OmfGraphStats::try_from(&vectorized_graph)?; + let source = OmfGraphSource::new(&uri, name, bbox); + let summary = OmfGraphSummary { source, stats }; + write_summary(output_directory, &summary)?; vectorized_graph.write_compass(&summary, output_directory, true)?; Ok(()) } +fn write_summary( + output_directory: &Path, + summary: &OmfGraphSummary, +) -> Result<(), OvertureMapsCollectionError> { + let summary_toml = toml::to_string_pretty(&summary).map_err(|e| { + OvertureMapsCollectionError::InternalError(format!("failure serializing summary TOML: {e}")) + })?; + let summary_path = output_directory.join("summary.toml"); + std::fs::write(&summary_path, &summary_toml).map_err(|e| { + OvertureMapsCollectionError::WriteError { + path: summary_path, + message: e.to_string(), + } + }) +} + fn read_local(path: &Path) -> Result { let contents = std::fs::read(path).map_err(|e| OvertureMapsCollectionError::ReadError { path: path.to_owned(), diff --git a/rust/bambam-omf/src/app/omf_app.rs b/rust/bambam-omf/src/app/omf_app.rs index 8657f6d0..643e697c 100644 --- a/rust/bambam-omf/src/app/omf_app.rs +++ b/rust/bambam-omf/src/app/omf_app.rs @@ -22,6 +22,10 @@ pub struct OmfApp { pub enum OmfOperation { /// download all of the OMF transportation data Network { + /// 
descriptive user-provided name for this import region. + #[arg(short, long)] + name: String, + /// configuration file defining how the network is imported and separated /// into mode-specific edge lists. #[arg(short, long)] @@ -51,6 +55,7 @@ impl OmfOperation { pub fn run(&self) -> Result<(), OvertureMapsCollectionError> { match self { OmfOperation::Network { + name, configuration_file, output_directory, local_source, @@ -78,7 +83,14 @@ impl OmfOperation { None => Path::new(""), }; let local = local_source.as_ref().map(Path::new); - crate::app::network::run(bbox.as_ref(), &network_config, outdir, local, *store_raw) + crate::app::network::run( + &name, + bbox.as_ref(), + &network_config, + outdir, + local, + *store_raw, + ) } } } From f2e7613f36c3708b28cec6fac35940bac6935082 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Tue, 10 Feb 2026 13:26:29 -0700 Subject: [PATCH 12/15] clippy --- rust/bambam-omf/src/app/omf_app.rs | 2 +- rust/bambam-omf/src/collection/collector.rs | 1 - rust/bambam-omf/src/graph/omf_graph.rs | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/rust/bambam-omf/src/app/omf_app.rs b/rust/bambam-omf/src/app/omf_app.rs index 643e697c..3add3494 100644 --- a/rust/bambam-omf/src/app/omf_app.rs +++ b/rust/bambam-omf/src/app/omf_app.rs @@ -84,7 +84,7 @@ impl OmfOperation { }; let local = local_source.as_ref().map(Path::new); crate::app::network::run( - &name, + name, bbox.as_ref(), &network_config, outdir, diff --git a/rust/bambam-omf/src/collection/collector.rs b/rust/bambam-omf/src/collection/collector.rs index a485a884..e2263699 100644 --- a/rust/bambam-omf/src/collection/collector.rs +++ b/rust/bambam-omf/src/collection/collector.rs @@ -18,7 +18,6 @@ use super::record::OvertureRecord; use super::record::OvertureRecordType; use super::OvertureMapsCollectionError; use super::OvertureMapsCollectorConfig; -use super::ReleaseVersion; use super::RowFilter; use super::RowFilterConfig; diff --git a/rust/bambam-omf/src/graph/omf_graph.rs 
b/rust/bambam-omf/src/graph/omf_graph.rs index c53cef04..c9c53c3c 100644 --- a/rust/bambam-omf/src/graph/omf_graph.rs +++ b/rust/bambam-omf/src/graph/omf_graph.rs @@ -166,7 +166,7 @@ impl OmfGraphVectorized { /// write the graph to disk in vectorized Compass format. pub fn write_compass( &self, - stats: &OmfGraphSummary, + _stats: &OmfGraphSummary, output_directory: &Path, overwrite: bool, ) -> Result<(), OvertureMapsCollectionError> { From 4bb89899dc7730bf38ee38f5690ac1c24850ce31 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Tue, 10 Feb 2026 13:39:02 -0700 Subject: [PATCH 13/15] fix names --- rust/bambam-omf/src/app/network.rs | 23 +++---------------- .../record/transportation_collection.rs | 4 ++-- rust/bambam-omf/src/graph/omf_graph.rs | 21 ++++++++++++++++- rust/bambam-omf/src/graph/summary.rs | 8 +++---- 4 files changed, 29 insertions(+), 27 deletions(-) diff --git a/rust/bambam-omf/src/app/network.rs b/rust/bambam-omf/src/app/network.rs index 20c9270c..1f8af9e2 100644 --- a/rust/bambam-omf/src/app/network.rs +++ b/rust/bambam-omf/src/app/network.rs @@ -55,36 +55,19 @@ pub fn run( let vectorized_graph = OmfGraphVectorized::new(&collection, modes)?; // summarize imported graph - let uri = match local_source { + let release = match local_source { Some(local) => format!("file://{}", local.to_str().unwrap_or_default()), - None => collection.uri.clone(), + None => collection.release.clone(), }; let stats = OmfGraphStats::try_from(&vectorized_graph)?; - let source = OmfGraphSource::new(&uri, name, bbox); + let source = OmfGraphSource::new(&release, name, bbox); let summary = OmfGraphSummary { source, stats }; - write_summary(output_directory, &summary)?; vectorized_graph.write_compass(&summary, output_directory, true)?; Ok(()) } -fn write_summary( - output_directory: &Path, - summary: &OmfGraphSummary, -) -> Result<(), OvertureMapsCollectionError> { - let summary_toml = toml::to_string_pretty(&summary).map_err(|e| { - 
OvertureMapsCollectionError::InternalError(format!("failure serializing summary TOML: {e}")) - })?; - let summary_path = output_directory.join("summary.toml"); - std::fs::write(&summary_path, &summary_toml).map_err(|e| { - OvertureMapsCollectionError::WriteError { - path: summary_path, - message: e.to_string(), - } - }) -} - fn read_local(path: &Path) -> Result { let contents = std::fs::read(path).map_err(|e| OvertureMapsCollectionError::ReadError { path: path.to_owned(), diff --git a/rust/bambam-omf/src/collection/record/transportation_collection.rs b/rust/bambam-omf/src/collection/record/transportation_collection.rs index 3f068e40..4ee2f99a 100644 --- a/rust/bambam-omf/src/collection/record/transportation_collection.rs +++ b/rust/bambam-omf/src/collection/record/transportation_collection.rs @@ -9,7 +9,7 @@ use crate::collection::{ #[derive(Serialize, Deserialize, Clone, Debug)] pub struct TransportationCollection { - pub uri: String, + pub release: String, pub connectors: Vec, pub segments: Vec, } @@ -61,7 +61,7 @@ impl TransportationCollection { .collect::, OvertureMapsCollectionError>>()?; Ok(Self { - uri, + release: uri, connectors, segments, }) diff --git a/rust/bambam-omf/src/graph/omf_graph.rs b/rust/bambam-omf/src/graph/omf_graph.rs index c9c53c3c..aff20451 100644 --- a/rust/bambam-omf/src/graph/omf_graph.rs +++ b/rust/bambam-omf/src/graph/omf_graph.rs @@ -166,7 +166,7 @@ impl OmfGraphVectorized { /// write the graph to disk in vectorized Compass format. 
pub fn write_compass( &self, - _stats: &OmfGraphSummary, + summary: &OmfGraphSummary, output_directory: &Path, overwrite: bool, ) -> Result<(), OvertureMapsCollectionError> { @@ -179,6 +179,9 @@ impl OmfGraphVectorized { use crate::util::fs::serialize_into_csv; use crate::util::fs::serialize_into_enumerated_txt; + // write the TOML summary file + write_summary(output_directory, summary)?; + // write vertices serialize_into_csv( self.vertices.iter().map(|v| VertexSerializable::from(*v)), @@ -271,3 +274,19 @@ impl OmfGraphVectorized { Ok(()) } } + +fn write_summary( + output_directory: &Path, + summary: &OmfGraphSummary, +) -> Result<(), OvertureMapsCollectionError> { + let summary_toml = toml::to_string_pretty(&summary).map_err(|e| { + OvertureMapsCollectionError::InternalError(format!("failure serializing summary TOML: {e}")) + })?; + let summary_path = output_directory.join("summary.toml"); + std::fs::write(&summary_path, &summary_toml).map_err(|e| { + OvertureMapsCollectionError::WriteError { + path: summary_path, + message: e.to_string(), + } + }) +} diff --git a/rust/bambam-omf/src/graph/summary.rs b/rust/bambam-omf/src/graph/summary.rs index 45856723..9f60d1eb 100644 --- a/rust/bambam-omf/src/graph/summary.rs +++ b/rust/bambam-omf/src/graph/summary.rs @@ -24,8 +24,8 @@ pub struct OmfGraphSummary { #[serde(rename_all = "snake_case")] pub struct OmfGraphSource { /// location of imported OMF dataset. this should either be - /// an official OMF storage location or a local file path. - pub uri: String, + /// an official OMF release identifier or a local file path. 
+ pub release: String, /// user-provided name for the network pub study_region: String, /// date and time this network was created @@ -75,10 +75,10 @@ struct ClassStatsAcc { } impl OmfGraphSource { - pub fn new(uri: &str, study_region: &str, bbox: Option<&CliBoundingBox>) -> Self { + pub fn new(release: &str, study_region: &str, bbox: Option<&CliBoundingBox>) -> Self { let created = chrono::Utc::now().to_rfc3339(); Self { - uri: uri.to_string(), + release: release.to_string(), study_region: study_region.to_string(), created, bbox: bbox.cloned(), From 89ee90b49ad9fb05c5a9da49cb55dabacf9db82c Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Wed, 11 Feb 2026 12:27:57 -0700 Subject: [PATCH 14/15] default configuration during OMF import --- rust/bambam-omf/src/graph/omf_graph.rs | 3 + .../src/util/bambam-config-omf.toml | 271 ++++++++++++++++++ rust/bambam-omf/src/util/fs.rs | 22 +- 3 files changed, 295 insertions(+), 1 deletion(-) create mode 100644 rust/bambam-omf/src/util/bambam-config-omf.toml diff --git a/rust/bambam-omf/src/graph/omf_graph.rs b/rust/bambam-omf/src/graph/omf_graph.rs index aff20451..a91e4194 100644 --- a/rust/bambam-omf/src/graph/omf_graph.rs +++ b/rust/bambam-omf/src/graph/omf_graph.rs @@ -182,6 +182,9 @@ impl OmfGraphVectorized { // write the TOML summary file write_summary(output_directory, summary)?; + // copy default configuration file into the output directory + crate::util::fs::copy_default_config(output_directory)?; + // write vertices serialize_into_csv( self.vertices.iter().map(|v| VertexSerializable::from(*v)), diff --git a/rust/bambam-omf/src/util/bambam-config-omf.toml b/rust/bambam-omf/src/util/bambam-config-omf.toml new file mode 100644 index 00000000..8ef73fb4 --- /dev/null +++ b/rust/bambam-omf/src/util/bambam-config-omf.toml @@ -0,0 +1,271 @@ +[graph] +edge_list = [ + { input_file = "walk/edges-compass.csv.gz" }, + { input_file = "bike/edges-compass.csv.gz" }, + { input_file = "drive/edges-compass.csv.gz" }, +] 
+vertex_list_input_file = "vertices-compass.csv.gz" + +[mapping] +type = "edge" +geometry = [ + { type = "from_linestrings", geometry_input_file = "walk/edges-geometries-enumerated.txt.gz" }, + { type = "from_linestrings", geometry_input_file = "bike/edges-geometries-enumerated.txt.gz" }, + { type = "from_linestrings", geometry_input_file = "drive/edges-geometries-enumerated.txt.gz" }, +] +tolerance.distance = 15.0 +tolerance.unit = "meters" +queries_without_destinations = false +matching_type = ["point", "vertex_id", "edge_id"] + +[algorithm] +type = "a*" + +# cut off searches that exceed these termination policies. +[termination] +type = "solution_size" +limit = 1_000_000 + +# use a time-optimal routing strategy +[cost] +weights.trip_time = 1.0 +vehicle_rates.trip_time.type = "raw" + +### +### WALK TOPOLOGY +### +[[search]] +traversal.type = "combined" +traversal.models = [ + { type = "distance", distance_unit = "miles" }, + { type = "fixed_speed", name = "walk", speed = 5.0, speed_unit = "kph" }, + { type = "time", time_unit = "minutes" }, + { type = "multimodal", this_mode = "walk", available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +frontier.type = "combined" +frontier.models = [ + { type = "time_limit", time_limit = { time = 40.0, time_unit = "minutes" }}, + { type = "multimodal", this_mode = "walk", constraints = [], available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +### +### BIKE TOPOLOGY +### +[[search]] +traversal.type = "combined" +traversal.models = [ + { type = "distance", distance_unit = "miles" }, + { type = "fixed_speed", name = "bike", speed = 16.0, speed_unit = "kph" }, + { type = "time", time_unit = "minutes" }, + { type = "multimodal", this_mode = "bike", available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +frontier.type = "combined" 
+frontier.models = [ + { type = "time_limit", time_limit = { time = 40.0, time_unit = "minutes" }}, + { type = "multimodal", this_mode = "bike", constraints = [], available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +### +### DRIVE TOPOLOGY +### +[[search]] +traversal.type = "combined" +traversal.models = [ + { type = "distance", distance_unit = "miles" }, + { type = "speed", name = "drive", speed_unit = "kph", speed_table_input_file = "drive/edges-speeds-mph-enumerated.txt.gz" }, + { type = "time", time_unit = "minutes" }, + { type = "multimodal", this_mode = "drive", available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + +frontier.type = "combined" +frontier.models = [ + { type = "time_limit", time_limit = { time = 40.0, time_unit = "minutes" }}, + { type = "multimodal", this_mode = "drive", constraints = [], available_modes = ["walk", "bike", "drive"], available_route_ids = [], use_route_ids = false, max_trip_legs = 5 } +] + + +[[plugin.input_plugins]] +type = "grid" +extent_format = "wkt" +grid = { type = "h3", resolution = 8 } +[plugin.input_plugins.population_source] +type = "acs" +acs_type = "five_year" +acs_year = 2022 +acs_resolution = "census_tract" +acs_categories = ["B01001_001E"] + +[[plugin.input_plugins]] +type = "inject" +format = "key_value" +write_mode = "overwrite" +key = "grid_search" +value.mode = ["walk", "bike", "drive"] + +[[plugin.output_plugins]] +type = "traversal" +tree = "geo_json" + +[[plugin.output_plugins]] +type = "summary" + +[[plugin.output_plugins]] +type = "isochrone" +time_bin = { type = "list", times = [10, 20, 30, 40] } +isochrone_algorithm = { type = "k_nearest_concave_hull", k = 3 } +destination_point_generator = { type = "destination_point" } +isochrone_output_format = "wkb" + +### MEP OPPORTUNITY DATA CONFIGURATION ################################# +# assigns opportunities to search results 
based on a file or api data source +# and a taxonomy for MEP activity types. +# this example shows data loaded from the census LODES online file repository +# assigning activity types by NAICS sector id. +[[plugin.output_plugins]] +type = "opportunity" +collect_format = "aggregate" + +[plugin.output_plugins.model] +type = "combined" + +[[plugin.output_plugins.model.models]] +type = "api" +vertex_input_file = "vertices-compass.csv.gz" +activity_column_names = ["entertainment", "food", "retail", "healthcare", "services", "jobs"] +table_orientation = "destination_vertex_oriented" + +[plugin.output_plugins.model.models.opportunity_source] +type = "lodes" + +# denver metro region coverage. replace this with a list of geoids for your study area. +# alternatively, study_region = national for job coverage across the US. +# for information on year availability of data, see . +study_region = { type = "census", geoids = [ + "08001", # Adams County + "08005", # Arapahoe County + "08013", # Boulder County + "08014", # Broomfield County + "08031", # Denver County + "08035", # Douglas County + "08039", # Elbert County + "08059", # Jefferson County + "08123", # Weld County +] } + +# collect LODES data at the tract level. while it is available at the block, +# the download + processing time is 10x that of census tracts. other possible values +# are `block`, `county` or `state`. +data_granularity = "census_tract" +# different editions of the dataset, we are choosing LODES v 8.0. 
+edition = "LODES8"
+# Job Type, can have a value of "JT00" for All Jobs, "JT01" for Primary Jobs, "JT02" for All
+# Private Jobs, "JT03" for Private Primary Jobs, "JT04" for All Federal Jobs, or "JT05" for
+# Federal Primary Jobs
+job_type = "JT00"
+# Segment of the workforce, can have the values of:
+# - S000: Total number of jobs (default)
+# - SA01: Number of jobs of workers age 29 or younger
+# - SA02: Number of jobs for workers age 30 to 54
+# - SA03: Number of jobs for workers age 55 or older
+# - SE01: Number of jobs with earnings $1250/month or less
+# - SE02: Number of jobs with earnings $1251/month to $3333/month
+# - SE03: Number of jobs with earnings greater than $3333/month
+# - SI01: Number of jobs in Goods Producing industry sectors
+# - SI02: Number of jobs in Trade, Transportation, and Utilities industry sectors
+# - SI03: Number of jobs in All Other Services industry sectors
+segment = "S000"
+
+# most recent year of LODES data. NOTE: Alaska has no coverage from 2017-2022, so this is
+# not full 50-state coverage; a "continental" variant could make that explicit. 
+year = 2022 + +[plugin.output_plugins.model.models.opportunity_source.activity_mapping] +# see https://lehd.ces.census.gov/lodes/LODES8/LODESTechDoc8.0.pdf +CNS01 = ["jobs"] # 11 (Agriculture, Forestry, Fishing and Hunting) +CNS02 = ["jobs"] # 21 (Mining, Quarrying, and Oil and Gas Extraction) +CNS03 = ["jobs"] # 22 (Utilities) +CNS04 = ["jobs"] # 23 (Construction) +CNS05 = ["jobs"] # 31-33 (Manufacturing) +CNS06 = ["jobs"] # 42 (Wholesale Trade) +CNS07 = ["jobs", "retail"] # 44-45 (Retail Trade) +CNS08 = ["jobs"] # 48-49 (Transportation and Warehousing) +CNS09 = ["jobs"] # 51 (Information) +CNS10 = ["jobs"] # 52 (Finance and Insurance) +CNS11 = ["jobs"] # 53 (Real Estate and Rental and Leasing) +CNS12 = ["jobs", "services"] # 54 (Professional, Scientific, and Technical Services) +CNS13 = ["jobs"] # 55 (Management of Companies and Enterprises) +CNS14 = ["jobs"] # 56 (Admin/Support/Waste Mgmt/Remediation Services) +CNS15 = ["jobs"] # 61 (Educational Services) +CNS16 = ["jobs", "healthcare"] # 62 (Health Care and Social Assistance) +CNS17 = ["jobs", "entertainment"] # 71 (Arts, Entertainment, and Recreation) +CNS18 = ["jobs", "food"] # 72 (Accommodation and Food Services) +CNS19 = ["jobs"] # 81 (Other Services [except Public Administration]) +CNS20 = ["jobs"] # 92 (Public Administration) + +[system] +parallelism = 8 +response_persistence_policy = "persist_response_in_memory" + +[system.response_output_policy] +type = "combined" + +[[system.response_output_policy.policies]] +type = "file" +filename = "result.json" +[system.response_output_policy.policies.format] +type = "json" +newline_delimited = false + +[[system.response_output_policy.policies]] +type = "file" +filename = "result.csv" +[system.response_output_policy.policies.format] +type = "csv" +sorted = true +[system.response_output_policy.policies.format.mapping] +grid_id = "request.grid_id" +lon = "request.origin_x" +lat = "request.origin_y" +mode = "request.mode" +runtime = "search_runtime" +error = "error" 
+ +opps_entertainment_total = "opportunity_totals.entertainment" +opps_retail_total = "opportunity_totals.retail" +opps_healthcare_total = "opportunity_totals.healthcare" +opps_services_total = "opportunity_totals.services" +opps_food_total = "opportunity_totals.food" +opps_jobs_total = "opportunity_totals.jobs" + +opps_entertainment_10 = "bin.10.opportunities.entertainment" +opps_retail_10 = "bin.10.opportunities.retail" +opps_healthcare_10 = "bin.10.opportunities.healthcare" +opps_services_10 = "bin.10.opportunities.services" +opps_food_10 = "bin.10.opportunities.food" +opps_jobs_10 = "bin.10.opportunities.jobs" +opps_entertainment_20 = "bin.20.opportunities.entertainment" +opps_retail_20 = "bin.20.opportunities.retail" +opps_healthcare_20 = "bin.20.opportunities.healthcare" +opps_services_20 = "bin.20.opportunities.services" +opps_food_20 = "bin.20.opportunities.food" +opps_jobs_20 = "bin.20.opportunities.jobs" +opps_entertainment_30 = "bin.30.opportunities.entertainment" +opps_retail_30 = "bin.30.opportunities.retail" +opps_healthcare_30 = "bin.30.opportunities.healthcare" +opps_services_30 = "bin.30.opportunities.services" +opps_food_30 = "bin.30.opportunities.food" +opps_jobs_30 = "bin.30.opportunities.jobs" +opps_entertainment_40 = "bin.40.opportunities.entertainment" +opps_retail_40 = "bin.40.opportunities.retail" +opps_healthcare_40 = "bin.40.opportunities.healthcare" +opps_services_40 = "bin.40.opportunities.services" +opps_food_40 = "bin.40.opportunities.food" +opps_jobs_40 = "bin.40.opportunities.jobs" + +isochrone_10 = "bin.10.isochrone" +isochrone_20 = "bin.20.isochrone" +isochrone_30 = "bin.30.isochrone" +isochrone_40 = "bin.40.isochrone" + diff --git a/rust/bambam-omf/src/util/fs.rs b/rust/bambam-omf/src/util/fs.rs index 640e35fe..992483b6 100644 --- a/rust/bambam-omf/src/util/fs.rs +++ b/rust/bambam-omf/src/util/fs.rs @@ -1,4 +1,7 @@ -use std::{fs::File, path::Path}; +use std::{ + fs::File, + path::{Path, PathBuf}, +}; use csv::QuoteStyle; use 
flate2::{write::GzEncoder, Compression}; @@ -7,6 +10,23 @@ use serde::Serialize; use crate::collection::OvertureMapsCollectionError; +/// copies bambam-config-omf.toml to the directory of an OMF import. +pub fn copy_default_config(output_directory: &Path) -> Result<(), OvertureMapsCollectionError> { + let src = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src") + .join("util") + .join("bambam-config-omf.toml"); + let dst = output_directory.join("bambam.toml"); + std::fs::copy(&src, &dst).map_err(|e| OvertureMapsCollectionError::WriteError { + path: dst, + message: format!( + "unable to copy default TOML from '{}': {e}", + src.to_str().unwrap_or_else(|| "?") + ), + })?; + Ok(()) +} + /// helper function to "mkdir -p path" - make all directories along a path pub fn create_dirs

(path: P) -> Result<(), OvertureMapsCollectionError> where From 1f5e8fd3194a181217e5ec1f1f1a69e8c83d6ac7 Mon Sep 17 00:00:00 2001 From: Rob Fitzgerald Date: Wed, 11 Feb 2026 12:28:10 -0700 Subject: [PATCH 15/15] clippy --- rust/bambam-omf/src/util/fs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/bambam-omf/src/util/fs.rs b/rust/bambam-omf/src/util/fs.rs index 992483b6..0c1d782a 100644 --- a/rust/bambam-omf/src/util/fs.rs +++ b/rust/bambam-omf/src/util/fs.rs @@ -21,7 +21,7 @@ pub fn copy_default_config(output_directory: &Path) -> Result<(), OvertureMapsCo path: dst, message: format!( "unable to copy default TOML from '{}': {e}", - src.to_str().unwrap_or_else(|| "?") + src.to_str().unwrap_or("?") ), })?; Ok(())