From 71100f711aadff95a92fedef8e7d727ee321e5dd Mon Sep 17 00:00:00 2001 From: "Eric Q. Mooring" Date: Sat, 28 Feb 2026 08:05:30 +0000 Subject: [PATCH 01/28] initial commit symptom status manager --- src/lib.rs | 1 + src/symptom_status_manager.rs | 89 +++++++++++++++++++++++++++++++++++ 2 files changed, 90 insertions(+) create mode 100644 src/symptom_status_manager.rs diff --git a/src/lib.rs b/src/lib.rs index c79cdde..4f7cb38 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,7 @@ pub mod infection_importation; pub mod infection_propagation_loop; pub mod infectiousness_manager; +pub mod symptom_status_manager; pub mod model; pub mod natural_history_parameter_manager; pub mod parameters; diff --git a/src/symptom_status_manager.rs b/src/symptom_status_manager.rs new file mode 100644 index 0000000..e4f1acb --- /dev/null +++ b/src/symptom_status_manager.rs @@ -0,0 +1,89 @@ +use ixa::{Context, ContextEntitiesExt, ContextRandomExt, IxaError, define_rng, impl_property, prelude::PropertyChangeEvent}; +use rand_distr::{LogNormal}; +use serde::{Deserialize, Serialize}; + +use crate::{ + infectiousness_manager::InfectionStatus, population_loader::Person +}; + +define_rng!(SymptomsRng); + +#[derive(Serialize, Deserialize, PartialEq, Debug, Copy, Clone)] +pub enum SymptomStatus { + NoSymptoms, + Mild, + Severe, + Critical, + Resolved, + Dead +} + +impl_property!( + SymptomStatus, + Person, + default_const = SymptomStatus::NoSymptoms +); + +pub fn init(context: &mut Context) -> Result<(), IxaError> { + context.subscribe_to_event( + move|context, event: PropertyChangeEvent|{ + if event.current == InfectionStatus::Infectious { + if context.sample_bool(SymptomsRng, probability_mild_given_infect){ + let infect_to_mild = LogNormal::new(infect_to_mild_mu, infect_to_mild_sigma).unwrap(); + let mild_time = context.get_current_time() + context.sample_distr(SymptomsRng, infect_to_mild); + context.add_plan(mild_time, move|context|{ + 
context.set_property(event.entity_id,SymptomStatus::Mild); + }); + if context.sample_bool(SymptomsRng, probability_severe_given_mild){ + let mild_to_severe = LogNormal::new(mild_to_severe_mu, mild_to_severe_sigma).unwrap(); + let severe_time = mild_time + context.sample_distr(SymptomsRng, mild_to_severe); + context.add_plan(severe_time, |context|{ + context.set_property(event.entity_id, SymptomStatus::Severe); + }); + if context.sample_bool(SymptomsRng, probability_critical_given_severe){ + let severe_to_critical = LogNormal::new(severe_to_critical_mu, severe_to_critical_sigma).unwrap(); + let critical_time = severe_time + context.sample_distr(SymptomsRng, severe_to_critical); + context.add_plan(critical_time, |context|{ + context.set_property(event.entity_id, SymptomStatus::Critical); + }); + if context.sample_bool(SymptomsRng, probability_dead_given_critical){ + let critical_to_dead = LogNormal::new(critical_to_dead_mu, critical_to_dead_sigma).unwrap(); + let dead_time = critical_time + context.sample_distr(SymptomsRng, critical_to_dead); + context.add_plan(dead_time, |context|{ + context.set_property(event.entity_id, SymptomStatus::Dead); + }); + } else { + let critical_to_resolved = LogNormal::new(critical_to_resolved_mu, critical_to_resolved_sigma).unwrap(); + let resolved_time = critical_time + context.sample_distr(SymptomsRng, critical_to_resolved); + context.add_plan(critical_time, |context|{ + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } + } else { + let severe_to_resolved = LogNormal::new(severe_to_resolved_mu, severe_to_resolved_sigma).unwrap(); + let resolved_time = severe_time + context.sample_distr(SymptomsRng, severe_to_resolved); + context.add_plan(resolved_time, |context|{ + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } + } else { + let mild_to_resolved = LogNormal::new(mild_to_resolved_mu, mild_to_resolved_sigma).unwrap(); + let resolved_time = mild_time + context.sample_distr(SymptomsRng, 
mild_to_resolved); + context.add_plan(resolved_time, |context|{ + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } + } + } + } + ); + Ok(()) +} + +// All persons should end up with a SymptomStatus of NoSymptoms, Dead, or Resolved. + +// TODO: +// -set up parameters (essential to make it run) +// -generate reports about symptom status (both incidence and prevalence are of interest, but could start with incidence) +// -add SymptomStatusData or some other mechanism to track what most symptom statuses a person had, even after resolved (and maybe track timing, too) +// -make probabilities age (category) specific From ed06e88c4d9c7563e56f9ef59a63d222a2c41d01 Mon Sep 17 00:00:00 2001 From: "Eric Q. Mooring" Date: Tue, 3 Mar 2026 09:02:11 +0000 Subject: [PATCH 02/28] revise symptom manager and make reports --- input/input.json | 19 ++++++ src/model.rs | 3 +- src/parameters.rs | 81 ++++++++++++++++++++++- src/reports/incidence_report.rs | 43 ++++++++++++ src/reports/prevalence_report.rs | 80 ++++++++++++++++++----- src/symptom_status_manager.rs | 109 ++++++++++++++++++++----------- 6 files changed, 279 insertions(+), 56 deletions(-) diff --git a/input/input.json b/input/input.json index a39d56b..7ce21eb 100644 --- a/input/input.json +++ b/input/input.json @@ -13,6 +13,25 @@ "duration": 5.0 } }, + "probability_mild_given_infect": 0.7, + "infect_to_mild_mu": 0.1, + "infect_to_mild_sigma": 0.0, + "probability_severe_given_mild": 0.2, + "mild_to_severe_mu": 0.1, + "mild_to_severe_sigma": 0.1, + "mild_to_resolved_mu": 0.1, + "mild_to_resolved_sigma": 0.1, + "probability_critical_given_severe": 0.2, + "severe_to_critical_mu": 0.1, + "severe_to_critical_sigma": 0.1, + "severe_to_resolved_mu": 0.1, + "severe_to_resolved_sigma": 0.1, + "probability_dead_given_critical": 0.2, + "critical_to_dead_mu": 0.1, + "critical_to_dead_sigma": 0.1, + "critical_to_resolved_mu": 0.1, + "critical_to_resolved_sigma": 0.1, + "settings_properties": {"Home": {"alpha": 0.0}, 
"Workplace": {"alpha": 0.0}, "School": {"alpha": 0.0}, diff --git a/src/model.rs b/src/model.rs index a687b5d..75c460a 100644 --- a/src/model.rs +++ b/src/model.rs @@ -1,7 +1,7 @@ use ixa::{ExecutionPhase, prelude::*}; use crate::{ - infection_importation, infection_propagation_loop, population_loader, reports, settings, + infection_importation, infection_propagation_loop, population_loader, reports, settings, symptom_status_manager, }; pub fn initialize_model(context: &mut Context, seed: u64, max_time: f64) -> Result<(), IxaError> { @@ -19,6 +19,7 @@ pub fn initialize_model(context: &mut Context, seed: u64, max_time: f64) -> Resu context.set_start_time(-1000.); settings::init(context); population_loader::init(context)?; + symptom_status_manager::init(context)?; infection_propagation_loop::init(context)?; infection_importation::init(context)?; reports::init(context)?; diff --git a/src/parameters.rs b/src/parameters.rs index cc21607..e33dc85 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -36,9 +36,45 @@ pub struct Params { pub imported_cases_timeseries: ImportCasesFromFile, /// A library of infection rates to assign to infected people. 
pub infectiousness_rate_fn: RateFnType, + /// Probability an infected person develops mild illness + pub probability_mild_given_infect: f64, + /// Mu parameter for log normal delay distribution from infection to mild illness + pub infect_to_mild_mu: f64, + /// Sigma parameter for log normal delay distribution from infection to mild illness + pub infect_to_mild_sigma: f64, + /// Probability a person with mild illness develops severe illness + pub probability_severe_given_mild: f64, + /// Mu parameter for log normal delay distribution from mild to severe illness + pub mild_to_severe_mu: f64, + /// Sigma parameter for log normal delay distribution from mild to severe illness + pub mild_to_severe_sigma: f64, + /// Mu parameter for log normal delay distribution from mild illness to resolution + pub mild_to_resolved_mu: f64, + /// Sigma parameter for log normal delay distribution from mild illness to resolution + pub mild_to_resolved_sigma: f64, + /// Probability a person with severe illness develops critical illness + pub probability_critical_given_severe: f64, + /// Mu parameter for log normal delay distribution from severe to critical illness + pub severe_to_critical_mu: f64, + /// Sigma parameter for log normal delay distribution from severe to critical illness + pub severe_to_critical_sigma: f64, + /// Mu parameter for log normal delay distribution from severe illness to resolution + pub severe_to_resolved_mu: f64, + /// Sigma parameter for log normal delay distribution from severe illness to resolution + pub severe_to_resolved_sigma: f64, + /// Probability a person with critical illness dies + pub probability_dead_given_critical: f64, + /// Mu parameter for log normal delay distribution from critical illness to death + pub critical_to_dead_mu: f64, + /// Sigma parameter for log normal delay distribution from critical illness to death + pub critical_to_dead_sigma: f64, + /// Mu parameter for log normal delay distribution from critical illness to resolution + pub 
critical_to_resolved_mu: f64, + /// Sigma parameter for log normal delay distribution from critical illness to resolution + pub critical_to_resolved_sigma: f64, /// Setting properties by setting type pub settings_properties: HashMap, - /// ratios used to initialize indiviiduals itineraries by setting type. + /// ratios used to initialize individuals itineraries by setting type. pub itinerary_ratios: HashMap, /// Prevalence report with a period and name required pub prevalence_report: ReportParams, @@ -77,6 +113,31 @@ fn validate_inputs(parameters: &Params) -> Result<(), IxaError> { } } + // Validate the symptom status parameters + if !(0.0..=1.0).contains(¶meters.probability_mild_given_infect) { + return Err(IxaError::IxaError( + "The probability of mild illness given infection must be between 0 and 1, inclusive.".to_string(), + )); + } + + if !(0.0..=1.0).contains(¶meters.probability_severe_given_mild) { + return Err(IxaError::IxaError( + "The probability of severe illness given mild illness must be between 0 and 1, inclusive.".to_string(), + )); + } + + if !(0.0..=1.0).contains(¶meters.probability_critical_given_severe) { + return Err(IxaError::IxaError( + "The probability of critical illness given severe illness must be between 0 and 1, inclusive.".to_string(), + )); + } + + if !(0.0..=1.0).contains(¶meters.probability_dead_given_critical) { + return Err(IxaError::IxaError( + "The probability of dying given critical illness must be between 0 and 1, inclusive.".to_string(), + )); + } + // We only want to fail when all itinerary ratios are 0. // Instead of holding the itinerary ratios in a vector, we sum them because we error if they // are negative, so if their sum is 0.0, they must all be 0.0. 
@@ -161,6 +222,24 @@ impl Default for Params { rate: 1.0, duration: 5.0, }, + probability_mild_given_infect: 0.0, + infect_to_mild_mu: 0.0, + infect_to_mild_sigma: 0.0, + probability_severe_given_mild: 0.0, + mild_to_severe_mu: 0.0, + mild_to_severe_sigma: 0.0, + mild_to_resolved_mu: 0.0, + mild_to_resolved_sigma: 0.0, + probability_critical_given_severe: 0.0, + severe_to_critical_mu: 0.0, + severe_to_critical_sigma: 0.0, + severe_to_resolved_mu: 0.0, + severe_to_resolved_sigma: 0.0, + probability_dead_given_critical: 0.0, + critical_to_dead_mu: 0.0, + critical_to_dead_sigma: 0.0, + critical_to_resolved_mu: 0.0, + critical_to_resolved_sigma: 0.0, settings_properties: HashMap::new(), itinerary_ratios: HashMap::new(), prevalence_report: ReportParams { diff --git a/src/reports/incidence_report.rs b/src/reports/incidence_report.rs index 9278406..084954c 100644 --- a/src/reports/incidence_report.rs +++ b/src/reports/incidence_report.rs @@ -1,5 +1,6 @@ use crate::{ infectiousness_manager::InfectionStatus, + symptom_status_manager::SymptomStatus, population_loader::{Age, Person}, }; use ixa::{ExecutionPhase, HashMap, prelude::*}; @@ -17,6 +18,7 @@ define_report!(PersonPropertyIncidenceReport); struct PropertyReportDataContainer { infection_status_change: HashMap<(u8, InfectionStatus), u32>, + symptom_status_change: HashMap<(u8, SymptomStatus), u32> } define_data_plugin!( @@ -24,6 +26,7 @@ define_data_plugin!( PropertyReportDataContainer, PropertyReportDataContainer { infection_status_change: HashMap::default(), + symptom_status_change: HashMap::default() } ); @@ -42,12 +45,31 @@ fn update_infection_incidence( } } +fn update_symptom_incidence( + context: &mut Context, + event: PropertyChangeEvent, +) { + if event.current != SymptomStatus::NoSymptoms { + let age: Age = context.get_property(event.entity_id); + let report_container_mut = context.get_data_mut(PropertyReportDataPlugin); + report_container_mut + .symptom_status_change + .entry((age.0, event.current)) + 
.and_modify(|v| *v += 1) + .or_insert(1); + } +} + fn reset_incidence_map(context: &mut Context) { let report_container = context.get_data_mut(PropertyReportDataPlugin); report_container .infection_status_change .values_mut() .for_each(|v| *v = 0); + report_container + .symptom_status_change + .values_mut() + .for_each(|v| *v = 0); } fn send_incidence_counts(context: &mut Context) { @@ -63,6 +85,15 @@ fn send_incidence_counts(context: &mut Context) { count: *count, }); } + // Symptom status + for ((age, symptom_status), count) in &report_container.symptom_status_change { + context.send_report(PersonPropertyIncidenceReport { + t_upper, + age: *age, + event: format!("{symptom_status:?}"), + count: *count, + }); + } reset_incidence_map(context); } @@ -100,12 +131,24 @@ pub fn init(context: &mut Context, file_name: &str, period: f64) -> Result<(), I .infection_status_change .insert((age, inf_value), 0); } + + let symp_vec = [SymptomStatus::Mild, SymptomStatus::Severe, SymptomStatus::Critical, SymptomStatus::Dead, SymptomStatus::Resolved]; + + for symp_value in symp_vec { + report_container + .symptom_status_change + .insert((age, symp_value), 0); + } } context.subscribe_to_event::>(|context, event| { update_infection_incidence(context, event); }); + context.subscribe_to_event::>(|context, event| { + update_symptom_incidence(context, event); + }); + context.add_periodic_plan_with_phase( period, move |context: &mut Context| { diff --git a/src/reports/prevalence_report.rs b/src/reports/prevalence_report.rs index 4d31504..8f839c3 100644 --- a/src/reports/prevalence_report.rs +++ b/src/reports/prevalence_report.rs @@ -1,5 +1,6 @@ use crate::{ infectiousness_manager::InfectionStatus, + symptom_status_manager::SymptomStatus, population_loader::{Age, Alive, Person}, }; use ixa::prelude::*; @@ -11,13 +12,14 @@ use serde::{Deserialize, Serialize}; struct PersonPropertyReport { t: f64, age: u8, - infection_status: InfectionStatus, + status: String, count: usize, } 
define_report!(PersonPropertyReport); define_multi_property!((Age, InfectionStatus), Person); +define_multi_property!((Age, SymptomStatus), Person); // #[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Copy, Clone, Debug)] // pub struct PersonReportProperties { @@ -28,30 +30,50 @@ define_multi_property!((Age, InfectionStatus), Person); // impl_derived_property!(PersonReportProperties, Person, ((Age, InfectionStatus))); struct PropertyReportDataContainer { - report_map_container: HashMap<(Age, InfectionStatus), usize>, + infection_map_container: HashMap<(Age, InfectionStatus), usize>, + symptom_map_container: HashMap<(Age, SymptomStatus), usize> } define_data_plugin!( PropertyReportDataPlugin, PropertyReportDataContainer, PropertyReportDataContainer { - report_map_container: HashMap::default(), + infection_map_container: HashMap::default(), + symptom_map_container: HashMap::default() } ); -type ReportEvent = PropertyChangeEvent; +type InfectionReportEvent = PropertyChangeEvent; -fn update_property_change_counts(context: &mut Context, event: ReportEvent) { +fn update_infection_change_counts(context: &mut Context, event: InfectionReportEvent) { let report_container_mut = context.get_data_mut(PropertyReportDataPlugin); let _ = *report_container_mut - .report_map_container + .infection_map_container .entry(event.current) .and_modify(|n| *n += 1) .or_insert(1); let _ = *report_container_mut - .report_map_container + .infection_map_container + .entry(event.previous) + .and_modify(|n| *n -= 1) + .or_insert(0); +} + +type SymptomReportEvent = PropertyChangeEvent; + +fn update_symptom_change_counts(context: &mut Context, event: SymptomReportEvent) { + let report_container_mut = context.get_data_mut(PropertyReportDataPlugin); + + let _ = *report_container_mut + .symptom_map_container + .entry(event.current) + .and_modify(|n| *n += 1) + .or_insert(1); + + let _ = *report_container_mut + .symptom_map_container .entry(event.previous) .and_modify(|n| *n -= 1) .or_insert(0); 
@@ -60,17 +82,26 @@ fn update_property_change_counts(context: &mut Context, event: ReportEvent) { fn send_property_counts(context: &mut Context) { let report_container = context.get_data(PropertyReportDataPlugin); - for (values, count_property) in &report_container.report_map_container { + for (values, count_property) in &report_container.infection_map_container { context.send_report(PersonPropertyReport { t: context.get_current_time(), age: values.0.0, - infection_status: values.1, + status: format!("{:?}", values.1), + count: *count_property, + }); + } + + for (values, count_property) in &report_container.symptom_map_container { + context.send_report(PersonPropertyReport { + t: context.get_current_time(), + age: values.0.0, + status: format!("{:?}", values.1), count: *count_property, }); } } -/// Count initial number of people per property status and subscribe to cahnges +/// Count initial number of people per property status and subscribe to changes /// # Errors /// /// Will return `IxaError` if the report cannot be added @@ -81,23 +112,38 @@ fn send_property_counts(context: &mut Context) { pub fn init(context: &mut Context, file_name: &str, period: f64) -> Result<(), IxaError> { context.add_report::(file_name)?; - let mut map_counts = HashMap::default(); + let mut infection_map_counts = HashMap::default(); + let mut symptom_map_counts = HashMap::default(); + + context.with_query_results::((Alive(true),), &mut |current_people| { //current_people = results.to_owned_vec(); for person in current_people { - let value: (Age, InfectionStatus) = context.get_property(*person); - map_counts - .entry(value) + let infection_value: (Age, InfectionStatus) = context.get_property(*person); + infection_map_counts + .entry(infection_value) + .and_modify(|count| *count += 1) + .or_insert(1); + let symptom_value: (Age, SymptomStatus) = context.get_property(*person); + symptom_map_counts + .entry(symptom_value) .and_modify(|count| *count += 1) .or_insert(1); } }); let 
report_container = context.get_data_mut(PropertyReportDataPlugin); - report_container.report_map_container = map_counts; + report_container.infection_map_container = infection_map_counts; + + let report_container = context.get_data_mut(PropertyReportDataPlugin); + report_container.symptom_map_container = symptom_map_counts; + + context.subscribe_to_event::(|context, event| { + update_infection_change_counts(context, event); + }); - context.subscribe_to_event::(|context, event| { - update_property_change_counts(context, event); + context.subscribe_to_event::(|context, event| { + update_symptom_change_counts(context, event); }); context.add_periodic_plan_with_phase( diff --git a/src/symptom_status_manager.rs b/src/symptom_status_manager.rs index e4f1acb..8ba781f 100644 --- a/src/symptom_status_manager.rs +++ b/src/symptom_status_manager.rs @@ -3,12 +3,12 @@ use rand_distr::{LogNormal}; use serde::{Deserialize, Serialize}; use crate::{ - infectiousness_manager::InfectionStatus, population_loader::Person + ContextParametersExt, Params, infectiousness_manager::InfectionStatus, population_loader::Person }; define_rng!(SymptomsRng); -#[derive(Serialize, Deserialize, PartialEq, Debug, Copy, Clone)] +#[derive(Serialize, Deserialize, PartialEq, Debug, Copy, Clone, Eq, Hash)] pub enum SymptomStatus { NoSymptoms, Mild, @@ -25,6 +25,28 @@ impl_property!( ); pub fn init(context: &mut Context) -> Result<(), IxaError> { + let &Params{ + probability_mild_given_infect, + infect_to_mild_mu, + infect_to_mild_sigma, + probability_severe_given_mild, + mild_to_severe_mu, + mild_to_severe_sigma, + mild_to_resolved_mu, + mild_to_resolved_sigma, + probability_critical_given_severe, + severe_to_critical_mu, + severe_to_critical_sigma, + severe_to_resolved_mu, + severe_to_resolved_sigma, + probability_dead_given_critical, + critical_to_dead_mu, + critical_to_dead_sigma, + critical_to_resolved_mu, + critical_to_resolved_sigma, + .. 
+ } = context.get_params(); + context.subscribe_to_event( move|context, event: PropertyChangeEvent|{ if event.current == InfectionStatus::Infectious { @@ -34,56 +56,69 @@ pub fn init(context: &mut Context) -> Result<(), IxaError> { context.add_plan(mild_time, move|context|{ context.set_property(event.entity_id,SymptomStatus::Mild); }); + } + } + } + ); + + context.subscribe_to_event( + move|context, event: PropertyChangeEvent|{ + match event.current { + SymptomStatus::Mild => { if context.sample_bool(SymptomsRng, probability_severe_given_mild){ let mild_to_severe = LogNormal::new(mild_to_severe_mu, mild_to_severe_sigma).unwrap(); - let severe_time = mild_time + context.sample_distr(SymptomsRng, mild_to_severe); - context.add_plan(severe_time, |context|{ + let severe_time = context.get_current_time() + context.sample_distr(SymptomsRng, mild_to_severe); + context.add_plan(severe_time, move|context|{ context.set_property(event.entity_id, SymptomStatus::Severe); }); - if context.sample_bool(SymptomsRng, probability_critical_given_severe){ - let severe_to_critical = LogNormal::new(severe_to_critical_mu, severe_to_critical_sigma).unwrap(); - let critical_time = severe_time + context.sample_distr(SymptomsRng, severe_to_critical); - context.add_plan(critical_time, |context|{ - context.set_property(event.entity_id, SymptomStatus::Critical); - }); - if context.sample_bool(SymptomsRng, probability_dead_given_critical){ - let critical_to_dead = LogNormal::new(critical_to_dead_mu, critical_to_dead_sigma).unwrap(); - let dead_time = critical_time + context.sample_distr(SymptomsRng, critical_to_dead); - context.add_plan(dead_time, |context|{ - context.set_property(event.entity_id, SymptomStatus::Dead); - }); - } else { - let critical_to_resolved = LogNormal::new(critical_to_resolved_mu, critical_to_resolved_sigma).unwrap(); - let resolved_time = critical_time + context.sample_distr(SymptomsRng, critical_to_resolved); - context.add_plan(critical_time, |context|{ - 
context.set_property(event.entity_id, SymptomStatus::Resolved); - }); - } - } else { - let severe_to_resolved = LogNormal::new(severe_to_resolved_mu, severe_to_resolved_sigma).unwrap(); - let resolved_time = severe_time + context.sample_distr(SymptomsRng, severe_to_resolved); - context.add_plan(resolved_time, |context|{ - context.set_property(event.entity_id, SymptomStatus::Resolved); - }); - } } else { let mild_to_resolved = LogNormal::new(mild_to_resolved_mu, mild_to_resolved_sigma).unwrap(); - let resolved_time = mild_time + context.sample_distr(SymptomsRng, mild_to_resolved); - context.add_plan(resolved_time, |context|{ + let resolved_time = context.get_current_time() + context.sample_distr(SymptomsRng, mild_to_resolved); + context.add_plan(resolved_time, move|context|{ context.set_property(event.entity_id, SymptomStatus::Resolved); }); } - } + }, + SymptomStatus::Severe => { + if context.sample_bool(SymptomsRng, probability_critical_given_severe){ + let severe_to_critical = LogNormal::new(severe_to_critical_mu, severe_to_critical_sigma).unwrap(); + let critical_time = context.get_current_time() + context.sample_distr(SymptomsRng, severe_to_critical); + context.add_plan(critical_time, move|context|{ + context.set_property(event.entity_id, SymptomStatus::Critical); + }); + } else { + let severe_to_resolved = LogNormal::new(severe_to_resolved_mu, severe_to_resolved_sigma).unwrap(); + let resolved_time = context.get_current_time() + context.sample_distr(SymptomsRng, severe_to_resolved); + context.add_plan(resolved_time, move|context|{ + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } + }, + SymptomStatus::Critical => { + if context.sample_bool(SymptomsRng, probability_dead_given_critical){ + let critical_to_dead = LogNormal::new(critical_to_dead_mu, critical_to_dead_sigma).unwrap(); + let dead_time = context.get_current_time() + context.sample_distr(SymptomsRng, critical_to_dead); + context.add_plan(dead_time, move|context|{ + 
context.set_property(event.entity_id, SymptomStatus::Dead); + }); + } else { + let critical_to_resolved = LogNormal::new(critical_to_resolved_mu, critical_to_resolved_sigma).unwrap(); + let resolved_time = context.get_current_time() + context.sample_distr(SymptomsRng, critical_to_resolved); + context.add_plan(resolved_time, move|context|{ + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } + }, + _ => () } - } - ); + }); Ok(()) } // All persons should end up with a SymptomStatus of NoSymptoms, Dead, or Resolved. // TODO: -// -set up parameters (essential to make it run) -// -generate reports about symptom status (both incidence and prevalence are of interest, but could start with incidence) +// -set up parameters (essential to make it run) [DONE] // -add SymptomStatusData or some other mechanism to track what most symptom statuses a person had, even after resolved (and maybe track timing, too) // -make probabilities age (category) specific +// -finish validation for parameters \ No newline at end of file From dad1fc0703ceeb8289fc391a40206bf55afb5937 Mon Sep 17 00:00:00 2001 From: "Eric Q. 
Mooring" Date: Tue, 3 Mar 2026 19:26:33 +0000 Subject: [PATCH 03/28] modify prevalence report to show combos --- src/reports/prevalence_report.rs | 91 ++++++++------------------------ 1 file changed, 21 insertions(+), 70 deletions(-) diff --git a/src/reports/prevalence_report.rs b/src/reports/prevalence_report.rs index 8f839c3..d8b679c 100644 --- a/src/reports/prevalence_report.rs +++ b/src/reports/prevalence_report.rs @@ -12,68 +12,40 @@ use serde::{Deserialize, Serialize}; struct PersonPropertyReport { t: f64, age: u8, - status: String, + infection_status: InfectionStatus, + symptom_status: SymptomStatus, count: usize, } define_report!(PersonPropertyReport); -define_multi_property!((Age, InfectionStatus), Person); -define_multi_property!((Age, SymptomStatus), Person); - -// #[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Copy, Clone, Debug)] -// pub struct PersonReportProperties { -// age: u8, -// infection_status: InfectionStatus, -// } - -// impl_derived_property!(PersonReportProperties, Person, ((Age, InfectionStatus))); +define_multi_property!((Age, InfectionStatus, SymptomStatus), Person); struct PropertyReportDataContainer { - infection_map_container: HashMap<(Age, InfectionStatus), usize>, - symptom_map_container: HashMap<(Age, SymptomStatus), usize> + report_map_container: HashMap<(Age, InfectionStatus, SymptomStatus), usize> } define_data_plugin!( PropertyReportDataPlugin, PropertyReportDataContainer, PropertyReportDataContainer { - infection_map_container: HashMap::default(), - symptom_map_container: HashMap::default() + report_map_container: HashMap::default() } ); -type InfectionReportEvent = PropertyChangeEvent; - -fn update_infection_change_counts(context: &mut Context, event: InfectionReportEvent) { - let report_container_mut = context.get_data_mut(PropertyReportDataPlugin); - - let _ = *report_container_mut - .infection_map_container - .entry(event.current) - .and_modify(|n| *n += 1) - .or_insert(1); - - let _ = *report_container_mut - 
.infection_map_container - .entry(event.previous) - .and_modify(|n| *n -= 1) - .or_insert(0); -} - -type SymptomReportEvent = PropertyChangeEvent; +type ReportEvent = PropertyChangeEvent; -fn update_symptom_change_counts(context: &mut Context, event: SymptomReportEvent) { +fn update_change_counts(context: &mut Context, event: ReportEvent) { let report_container_mut = context.get_data_mut(PropertyReportDataPlugin); let _ = *report_container_mut - .symptom_map_container + .report_map_container .entry(event.current) .and_modify(|n| *n += 1) .or_insert(1); let _ = *report_container_mut - .symptom_map_container + .report_map_container .entry(event.previous) .and_modify(|n| *n -= 1) .or_insert(0); @@ -82,20 +54,12 @@ fn update_symptom_change_counts(context: &mut Context, event: SymptomReportEvent fn send_property_counts(context: &mut Context) { let report_container = context.get_data(PropertyReportDataPlugin); - for (values, count_property) in &report_container.infection_map_container { + for (values, count_property) in &report_container.report_map_container { context.send_report(PersonPropertyReport { t: context.get_current_time(), age: values.0.0, - status: format!("{:?}", values.1), - count: *count_property, - }); - } - - for (values, count_property) in &report_container.symptom_map_container { - context.send_report(PersonPropertyReport { - t: context.get_current_time(), - age: values.0.0, - status: format!("{:?}", values.1), + infection_status: values.1, + symptom_status: values.2, count: *count_property, }); } @@ -112,38 +76,25 @@ fn send_property_counts(context: &mut Context) { pub fn init(context: &mut Context, file_name: &str, period: f64) -> Result<(), IxaError> { context.add_report::(file_name)?; - let mut infection_map_counts = HashMap::default(); - let mut symptom_map_counts = HashMap::default(); + let mut map_counts = HashMap::default(); context.with_query_results::((Alive(true),), &mut |current_people| { //current_people = results.to_owned_vec(); for person 
in current_people { - let infection_value: (Age, InfectionStatus) = context.get_property(*person); - infection_map_counts - .entry(infection_value) + let value: (Age, InfectionStatus, SymptomStatus) = context.get_property(*person); + map_counts + .entry(value) .and_modify(|count| *count += 1) .or_insert(1); - let symptom_value: (Age, SymptomStatus) = context.get_property(*person); - symptom_map_counts - .entry(symptom_value) - .and_modify(|count| *count += 1) - .or_insert(1); - } - }); - - let report_container = context.get_data_mut(PropertyReportDataPlugin); - report_container.infection_map_container = infection_map_counts; + } + }); let report_container = context.get_data_mut(PropertyReportDataPlugin); - report_container.symptom_map_container = symptom_map_counts; - - context.subscribe_to_event::(|context, event| { - update_infection_change_counts(context, event); - }); + report_container.report_map_container = map_counts; - context.subscribe_to_event::(|context, event| { - update_symptom_change_counts(context, event); + context.subscribe_to_event::(|context, event| { + update_change_counts(context, event); }); context.add_periodic_plan_with_phase( From 2312e54e9e89881e3cccbade0630e80be2966eed Mon Sep 17 00:00:00 2001 From: "Eric Q. 
Mooring" Date: Thu, 5 Mar 2026 09:28:06 +0000 Subject: [PATCH 04/28] revise tests for report generation --- src/reports/incidence_report.rs | 14 +++++---- src/reports/prevalence_report.rs | 49 +++++++++++++++++++------------- src/symptom_status_manager.rs | 4 +-- 3 files changed, 40 insertions(+), 27 deletions(-) diff --git a/src/reports/incidence_report.rs b/src/reports/incidence_report.rs index 084954c..789bdbc 100644 --- a/src/reports/incidence_report.rs +++ b/src/reports/incidence_report.rs @@ -231,6 +231,7 @@ mod test { std::mem::drop(context); let mut reader = csv::Reader::from_path(file_path).unwrap(); + let mut event_count = 0; let mut line_count = 0; for result in reader.deserialize() { let record: crate::reports::incidence_report::PersonPropertyIncidenceReport = @@ -238,15 +239,15 @@ mod test { line_count += 1; if record.t_upper == 2.0 && record.event == *"Infectious" && record.age == 43 { assert_eq!(record.count, 1); + event_count += 1; } else { assert_eq!(record.count, 0); } } - // 2 event types: Infectious + Recovered - // 2 time points - // 2 ages - assert_eq!(line_count, 8); + assert!(line_count > event_count); + assert_eq!(event_count, 1); + } #[test] @@ -294,12 +295,14 @@ mod test { let mut reader = csv::Reader::from_path(file_path).unwrap(); let mut line_count = 0; + let mut event_count = 0; for result in reader.deserialize() { let record: crate::reports::incidence_report::PersonPropertyIncidenceReport = result.unwrap(); line_count += 1; if record.t_upper == 2.0 && record.event == *"Infectious" && record.age == 44 { assert_eq!(record.count, 1); + event_count += 1; } else { assert_eq!(record.count, 0); } @@ -308,6 +311,7 @@ mod test { // 2 event types: Infectious + Recovered // 2 time points // 2 ages at first timepoint, 3 ages at second timepoint for only one event (2x2x2 + 1 = 9) - assert_eq!(line_count, 9); + assert!(line_count > event_count); + assert_eq!(event_count, 1); } } diff --git a/src/reports/prevalence_report.rs 
b/src/reports/prevalence_report.rs index d8b679c..ab3325d 100644 --- a/src/reports/prevalence_report.rs +++ b/src/reports/prevalence_report.rs @@ -111,7 +111,7 @@ pub fn init(context: &mut Context, file_name: &str, period: f64) -> Result<(), I mod test { use crate::{ Age, - infectiousness_manager::InfectionContextExt, + infectiousness_manager::{InfectionContextExt, InfectionStatus}, parameters::{ContextParametersExt, GlobalParams, Params}, population_loader::PersonId, rate_fns::load_rate_fns, @@ -179,24 +179,33 @@ mod test { assert!(file_path.exists()); let mut reader = csv::Reader::from_path(file_path).unwrap(); - - let mut actual: Vec> = reader - .records() - .map(|result| result.unwrap().iter().map(String::from).collect()) - .collect(); - let mut expected = vec![ - // t | age | inf status | count - vec!["0.0", "42", "Infectious", "1"], - vec!["0.0", "43", "Susceptible", "1"], - vec!["2.0", "42", "Infectious", "1"], - vec!["2.0", "43", "Infectious", "1"], - // Only an initialized combination can have a zero count - vec!["2.0", "43", "Susceptible", "0"], - ]; - - actual.sort(); - expected.sort(); - - assert_eq!(actual, expected, "CSV file should contain the correct data"); + let mut line_count = 0; + for result in reader.deserialize() { + let record:crate::reports::prevalence_report::PersonPropertyReport = result.unwrap(); + line_count += 1; + if record.t == 0.0 { + if record.age == 42 { + assert_eq!(record.infection_status, InfectionStatus::Infectious); + assert_eq!(record.count, 1); + } else if record.age == 43 { + assert_eq!(record.infection_status, InfectionStatus::Susceptible); + assert_eq!(record.count, 1); + } else { panic!("invalid age at t == 0.0") } + } + else if record.t == 2.0 { + if record.age == 42 { + assert_eq!(record.infection_status, InfectionStatus::Infectious); + assert_eq!(record.count, 1); + } else if record.age == 43 { + match record.infection_status { + InfectionStatus::Susceptible => assert_eq!(record.count, 0), + 
InfectionStatus::Infectious => assert_eq!(record.count, 1), + _ => panic!("All InfectionStatus should be susceptible or infectious") + } + } else { panic!("invalid age at t == 2.0") } + } else { panic!("record times other than 0.0 and 2.0 are invalid")} + } + + assert_eq!(line_count, 5); } } diff --git a/src/symptom_status_manager.rs b/src/symptom_status_manager.rs index 8ba781f..2ad105d 100644 --- a/src/symptom_status_manager.rs +++ b/src/symptom_status_manager.rs @@ -44,7 +44,7 @@ pub fn init(context: &mut Context) -> Result<(), IxaError> { critical_to_dead_sigma, critical_to_resolved_mu, critical_to_resolved_sigma, - .. + .. } = context.get_params(); context.subscribe_to_event( @@ -121,4 +121,4 @@ pub fn init(context: &mut Context) -> Result<(), IxaError> { // -set up parameters (essential to make it run) [DONE] // -add SymptomStatusData or some other mechanism to track what most symptom statuses a person had, even after resolved (and maybe track timing, too) // -make probabilities age (category) specific -// -finish validation for parameters \ No newline at end of file +// -finish validation for parameters From 9a5233a101c3131b34228a5b88a8cbb3f5ab6b4a Mon Sep 17 00:00:00 2001 From: "Eric Q. 
Mooring" Date: Thu, 5 Mar 2026 19:45:25 +0000 Subject: [PATCH 05/28] fix additional test and pre-commit issues --- input/input.json | 1 - src/lib.rs | 2 +- src/model.rs | 3 +- src/parameters.rs | 6 +- src/reports/incidence_report.rs | 13 ++- src/reports/mod.rs | 18 ++++ src/reports/prevalence_report.rs | 28 +++--- src/symptom_status_manager.rs | 148 +++++++++++++++++-------------- 8 files changed, 133 insertions(+), 86 deletions(-) diff --git a/input/input.json b/input/input.json index 7ce21eb..e80ec54 100644 --- a/input/input.json +++ b/input/input.json @@ -31,7 +31,6 @@ "critical_to_dead_sigma": 0.1, "critical_to_resolved_mu": 0.1, "critical_to_resolved_sigma": 0.1, - "settings_properties": {"Home": {"alpha": 0.0}, "Workplace": {"alpha": 0.0}, "School": {"alpha": 0.0}, diff --git a/src/lib.rs b/src/lib.rs index 4f7cb38..c293f5a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,7 +1,6 @@ pub mod infection_importation; pub mod infection_propagation_loop; pub mod infectiousness_manager; -pub mod symptom_status_manager; pub mod model; pub mod natural_history_parameter_manager; pub mod parameters; @@ -9,6 +8,7 @@ pub mod population_loader; pub mod rate_fns; pub mod reports; pub mod settings; +pub mod symptom_status_manager; pub use model::initialize_model; pub use parameters::ContextParametersExt; diff --git a/src/model.rs b/src/model.rs index 75c460a..7bf8d08 100644 --- a/src/model.rs +++ b/src/model.rs @@ -1,7 +1,8 @@ use ixa::{ExecutionPhase, prelude::*}; use crate::{ - infection_importation, infection_propagation_loop, population_loader, reports, settings, symptom_status_manager, + infection_importation, infection_propagation_loop, population_loader, reports, settings, + symptom_status_manager, }; pub fn initialize_model(context: &mut Context, seed: u64, max_time: f64) -> Result<(), IxaError> { diff --git a/src/parameters.rs b/src/parameters.rs index e33dc85..a408643 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -116,7 +116,8 @@ fn 
validate_inputs(parameters: &Params) -> Result<(), IxaError> { // Validate the symptom status parameters if !(0.0..=1.0).contains(¶meters.probability_mild_given_infect) { return Err(IxaError::IxaError( - "The probability of mild illness given infection must be between 0 and 1, inclusive.".to_string(), + "The probability of mild illness given infection must be between 0 and 1, inclusive." + .to_string(), )); } @@ -134,7 +135,8 @@ fn validate_inputs(parameters: &Params) -> Result<(), IxaError> { if !(0.0..=1.0).contains(¶meters.probability_dead_given_critical) { return Err(IxaError::IxaError( - "The probability of dying given critical illness must be between 0 and 1, inclusive.".to_string(), + "The probability of dying given critical illness must be between 0 and 1, inclusive." + .to_string(), )); } diff --git a/src/reports/incidence_report.rs b/src/reports/incidence_report.rs index 789bdbc..127d3db 100644 --- a/src/reports/incidence_report.rs +++ b/src/reports/incidence_report.rs @@ -1,7 +1,7 @@ use crate::{ infectiousness_manager::InfectionStatus, - symptom_status_manager::SymptomStatus, population_loader::{Age, Person}, + symptom_status_manager::SymptomStatus, }; use ixa::{ExecutionPhase, HashMap, prelude::*}; use serde::{Deserialize, Serialize}; @@ -18,7 +18,7 @@ define_report!(PersonPropertyIncidenceReport); struct PropertyReportDataContainer { infection_status_change: HashMap<(u8, InfectionStatus), u32>, - symptom_status_change: HashMap<(u8, SymptomStatus), u32> + symptom_status_change: HashMap<(u8, SymptomStatus), u32>, } define_data_plugin!( @@ -132,7 +132,13 @@ pub fn init(context: &mut Context, file_name: &str, period: f64) -> Result<(), I .insert((age, inf_value), 0); } - let symp_vec = [SymptomStatus::Mild, SymptomStatus::Severe, SymptomStatus::Critical, SymptomStatus::Dead, SymptomStatus::Resolved]; + let symp_vec = [ + SymptomStatus::Mild, + SymptomStatus::Severe, + SymptomStatus::Critical, + SymptomStatus::Dead, + SymptomStatus::Resolved, + ]; for 
symp_value in symp_vec { report_container @@ -247,7 +253,6 @@ mod test { assert!(line_count > event_count); assert_eq!(event_count, 1); - } #[test] diff --git a/src/reports/mod.rs b/src/reports/mod.rs index 2420431..7a8828c 100644 --- a/src/reports/mod.rs +++ b/src/reports/mod.rs @@ -125,6 +125,24 @@ mod test { "include": false }, "infectiousness_rate_fn": {"Constant": {"rate": 1.0, "duration": 5.0}}, + "probability_mild_given_infect": 0.7, + "infect_to_mild_mu": 0.1, + "infect_to_mild_sigma": 0.0, + "probability_severe_given_mild": 0.2, + "mild_to_severe_mu": 0.1, + "mild_to_severe_sigma": 0.1, + "mild_to_resolved_mu": 0.1, + "mild_to_resolved_sigma": 0.1, + "probability_critical_given_severe": 0.2, + "severe_to_critical_mu": 0.1, + "severe_to_critical_sigma": 0.1, + "severe_to_resolved_mu": 0.1, + "severe_to_resolved_sigma": 0.1, + "probability_dead_given_critical": 0.2, + "critical_to_dead_mu": 0.1, + "critical_to_dead_sigma": 0.1, + "critical_to_resolved_mu": 0.1, + "critical_to_resolved_sigma": 0.1, "settings_properties": {}, "itinerary_ratios": {}, "prevalence_report": { diff --git a/src/reports/prevalence_report.rs b/src/reports/prevalence_report.rs index ab3325d..46250ba 100644 --- a/src/reports/prevalence_report.rs +++ b/src/reports/prevalence_report.rs @@ -1,7 +1,7 @@ use crate::{ infectiousness_manager::InfectionStatus, - symptom_status_manager::SymptomStatus, population_loader::{Age, Alive, Person}, + symptom_status_manager::SymptomStatus, }; use ixa::prelude::*; use ixa::{ExecutionPhase, HashMap}; @@ -22,7 +22,7 @@ define_report!(PersonPropertyReport); define_multi_property!((Age, InfectionStatus, SymptomStatus), Person); struct PropertyReportDataContainer { - report_map_container: HashMap<(Age, InfectionStatus, SymptomStatus), usize> + report_map_container: HashMap<(Age, InfectionStatus, SymptomStatus), usize>, } define_data_plugin!( @@ -78,7 +78,6 @@ pub fn init(context: &mut Context, file_name: &str, period: f64) -> Result<(), I let mut map_counts = 
HashMap::default(); - context.with_query_results::((Alive(true),), &mut |current_people| { //current_people = results.to_owned_vec(); for person in current_people { @@ -87,8 +86,8 @@ pub fn init(context: &mut Context, file_name: &str, period: f64) -> Result<(), I .entry(value) .and_modify(|count| *count += 1) .or_insert(1); - } - }); + } + }); let report_container = context.get_data_mut(PropertyReportDataPlugin); report_container.report_map_container = map_counts; @@ -181,7 +180,7 @@ mod test { let mut reader = csv::Reader::from_path(file_path).unwrap(); let mut line_count = 0; for result in reader.deserialize() { - let record:crate::reports::prevalence_report::PersonPropertyReport = result.unwrap(); + let record: crate::reports::prevalence_report::PersonPropertyReport = result.unwrap(); line_count += 1; if record.t == 0.0 { if record.age == 42 { @@ -190,9 +189,10 @@ mod test { } else if record.age == 43 { assert_eq!(record.infection_status, InfectionStatus::Susceptible); assert_eq!(record.count, 1); - } else { panic!("invalid age at t == 0.0") } - } - else if record.t == 2.0 { + } else { + panic!("invalid age at t == 0.0") + } + } else if record.t == 2.0 { if record.age == 42 { assert_eq!(record.infection_status, InfectionStatus::Infectious); assert_eq!(record.count, 1); @@ -200,10 +200,14 @@ mod test { match record.infection_status { InfectionStatus::Susceptible => assert_eq!(record.count, 0), InfectionStatus::Infectious => assert_eq!(record.count, 1), - _ => panic!("All InfectionStatus should be susceptible or infectious") + _ => panic!("All InfectionStatus should be susceptible or infectious"), } - } else { panic!("invalid age at t == 2.0") } - } else { panic!("record times other than 0.0 and 2.0 are invalid")} + } else { + panic!("invalid age at t == 2.0") + } + } else { + panic!("record times other than 0.0 and 2.0 are invalid") + } } assert_eq!(line_count, 5); diff --git a/src/symptom_status_manager.rs b/src/symptom_status_manager.rs index 2ad105d..82ea2f8 
100644 --- a/src/symptom_status_manager.rs +++ b/src/symptom_status_manager.rs @@ -1,9 +1,13 @@ -use ixa::{Context, ContextEntitiesExt, ContextRandomExt, IxaError, define_rng, impl_property, prelude::PropertyChangeEvent}; -use rand_distr::{LogNormal}; +use ixa::{ + Context, ContextEntitiesExt, ContextRandomExt, IxaError, define_rng, impl_property, + prelude::PropertyChangeEvent, +}; +use rand_distr::LogNormal; use serde::{Deserialize, Serialize}; use crate::{ - ContextParametersExt, Params, infectiousness_manager::InfectionStatus, population_loader::Person + ContextParametersExt, Params, infectiousness_manager::InfectionStatus, + population_loader::Person, }; define_rng!(SymptomsRng); @@ -15,7 +19,7 @@ pub enum SymptomStatus { Severe, Critical, Resolved, - Dead + Dead, } impl_property!( @@ -25,7 +29,7 @@ impl_property!( ); pub fn init(context: &mut Context) -> Result<(), IxaError> { - let &Params{ + let &Params { probability_mild_given_infect, infect_to_mild_mu, infect_to_mild_sigma, @@ -45,73 +49,87 @@ pub fn init(context: &mut Context) -> Result<(), IxaError> { critical_to_resolved_mu, critical_to_resolved_sigma, .. 
- } = context.get_params(); + } = context.get_params(); context.subscribe_to_event( - move|context, event: PropertyChangeEvent|{ - if event.current == InfectionStatus::Infectious { - if context.sample_bool(SymptomsRng, probability_mild_given_infect){ - let infect_to_mild = LogNormal::new(infect_to_mild_mu, infect_to_mild_sigma).unwrap(); - let mild_time = context.get_current_time() + context.sample_distr(SymptomsRng, infect_to_mild); - context.add_plan(mild_time, move|context|{ - context.set_property(event.entity_id,SymptomStatus::Mild); - }); - } + move |context, event: PropertyChangeEvent| { + if event.current == InfectionStatus::Infectious + && context.sample_bool(SymptomsRng, probability_mild_given_infect) + { + let infect_to_mild = + LogNormal::new(infect_to_mild_mu, infect_to_mild_sigma).unwrap(); + let mild_time = + context.get_current_time() + context.sample_distr(SymptomsRng, infect_to_mild); + context.add_plan(mild_time, move |context| { + context.set_property(event.entity_id, SymptomStatus::Mild); + }); } - } + }, ); context.subscribe_to_event( - move|context, event: PropertyChangeEvent|{ - match event.current { - SymptomStatus::Mild => { - if context.sample_bool(SymptomsRng, probability_severe_given_mild){ - let mild_to_severe = LogNormal::new(mild_to_severe_mu, mild_to_severe_sigma).unwrap(); - let severe_time = context.get_current_time() + context.sample_distr(SymptomsRng, mild_to_severe); - context.add_plan(severe_time, move|context|{ - context.set_property(event.entity_id, SymptomStatus::Severe); - }); - } else { - let mild_to_resolved = LogNormal::new(mild_to_resolved_mu, mild_to_resolved_sigma).unwrap(); - let resolved_time = context.get_current_time() + context.sample_distr(SymptomsRng, mild_to_resolved); - context.add_plan(resolved_time, move|context|{ - context.set_property(event.entity_id, SymptomStatus::Resolved); - }); - } - }, - SymptomStatus::Severe => { - if context.sample_bool(SymptomsRng, probability_critical_given_severe){ - let 
severe_to_critical = LogNormal::new(severe_to_critical_mu, severe_to_critical_sigma).unwrap(); - let critical_time = context.get_current_time() + context.sample_distr(SymptomsRng, severe_to_critical); - context.add_plan(critical_time, move|context|{ - context.set_property(event.entity_id, SymptomStatus::Critical); - }); - } else { - let severe_to_resolved = LogNormal::new(severe_to_resolved_mu, severe_to_resolved_sigma).unwrap(); - let resolved_time = context.get_current_time() + context.sample_distr(SymptomsRng, severe_to_resolved); - context.add_plan(resolved_time, move|context|{ - context.set_property(event.entity_id, SymptomStatus::Resolved); - }); - } - }, - SymptomStatus::Critical => { - if context.sample_bool(SymptomsRng, probability_dead_given_critical){ - let critical_to_dead = LogNormal::new(critical_to_dead_mu, critical_to_dead_sigma).unwrap(); - let dead_time = context.get_current_time() + context.sample_distr(SymptomsRng, critical_to_dead); - context.add_plan(dead_time, move|context|{ - context.set_property(event.entity_id, SymptomStatus::Dead); - }); - } else { - let critical_to_resolved = LogNormal::new(critical_to_resolved_mu, critical_to_resolved_sigma).unwrap(); - let resolved_time = context.get_current_time() + context.sample_distr(SymptomsRng, critical_to_resolved); - context.add_plan(resolved_time, move|context|{ - context.set_property(event.entity_id, SymptomStatus::Resolved); - }); - } - }, - _ => () + move |context, event: PropertyChangeEvent| match event.current { + SymptomStatus::Mild => { + if context.sample_bool(SymptomsRng, probability_severe_given_mild) { + let mild_to_severe = + LogNormal::new(mild_to_severe_mu, mild_to_severe_sigma).unwrap(); + let severe_time = context.get_current_time() + + context.sample_distr(SymptomsRng, mild_to_severe); + context.add_plan(severe_time, move |context| { + context.set_property(event.entity_id, SymptomStatus::Severe); + }); + } else { + let mild_to_resolved = + LogNormal::new(mild_to_resolved_mu, 
mild_to_resolved_sigma).unwrap(); + let resolved_time = context.get_current_time() + + context.sample_distr(SymptomsRng, mild_to_resolved); + context.add_plan(resolved_time, move |context| { + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } } - }); + SymptomStatus::Severe => { + if context.sample_bool(SymptomsRng, probability_critical_given_severe) { + let severe_to_critical = + LogNormal::new(severe_to_critical_mu, severe_to_critical_sigma).unwrap(); + let critical_time = context.get_current_time() + + context.sample_distr(SymptomsRng, severe_to_critical); + context.add_plan(critical_time, move |context| { + context.set_property(event.entity_id, SymptomStatus::Critical); + }); + } else { + let severe_to_resolved = + LogNormal::new(severe_to_resolved_mu, severe_to_resolved_sigma).unwrap(); + let resolved_time = context.get_current_time() + + context.sample_distr(SymptomsRng, severe_to_resolved); + context.add_plan(resolved_time, move |context| { + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } + } + SymptomStatus::Critical => { + if context.sample_bool(SymptomsRng, probability_dead_given_critical) { + let critical_to_dead = + LogNormal::new(critical_to_dead_mu, critical_to_dead_sigma).unwrap(); + let dead_time = context.get_current_time() + + context.sample_distr(SymptomsRng, critical_to_dead); + context.add_plan(dead_time, move |context| { + context.set_property(event.entity_id, SymptomStatus::Dead); + }); + } else { + let critical_to_resolved = + LogNormal::new(critical_to_resolved_mu, critical_to_resolved_sigma) + .unwrap(); + let resolved_time = context.get_current_time() + + context.sample_distr(SymptomsRng, critical_to_resolved); + context.add_plan(resolved_time, move |context| { + context.set_property(event.entity_id, SymptomStatus::Resolved); + }); + } + } + _ => (), + }, + ); Ok(()) } From 1b2dd4a80557bd7065494d054d3c84c398871306 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 6 Mar 2026 18:51:07 +0000 
Subject: [PATCH 06/28] working calibration routine --- experiments/phase1/input/default_params.json | 55 ++++++ experiments/phase1/input/priors.json | 32 ++++ packages/ixa-epi-utils/README.md | 3 - packages/ixa-epi-utils/pyproject.toml | 14 -- .../src/ixa_epi_utils/__init__.py | 7 - .../src/ixa_epi_utils/parameters.py | 36 ---- pyproject.toml | 13 +- scripts/phase_1_calibration.py | 72 ++++++++ src/ixa_epi_covid/__init__.py | 5 + src/ixa_epi_covid/covid_model.py | 88 ++++++++++ uv.lock | 160 ++++++++++-------- 11 files changed, 347 insertions(+), 138 deletions(-) create mode 100644 experiments/phase1/input/default_params.json create mode 100644 experiments/phase1/input/priors.json delete mode 100644 packages/ixa-epi-utils/README.md delete mode 100644 packages/ixa-epi-utils/pyproject.toml delete mode 100644 packages/ixa-epi-utils/src/ixa_epi_utils/__init__.py delete mode 100644 packages/ixa-epi-utils/src/ixa_epi_utils/parameters.py create mode 100644 scripts/phase_1_calibration.py create mode 100644 src/ixa_epi_covid/__init__.py create mode 100644 src/ixa_epi_covid/covid_model.py diff --git a/experiments/phase1/input/default_params.json b/experiments/phase1/input/default_params.json new file mode 100644 index 0000000..d996cb7 --- /dev/null +++ b/experiments/phase1/input/default_params.json @@ -0,0 +1,55 @@ +{ + "epimodel.GlobalParams": { + "seed": 1234, + "max_time": 100.0, + "synth_population_file": "input/people_test.csv", + "symptomatic_reporting_prob": 0.5, + "initial_prevalence": 0.0, + "imported_cases_timeseries": { + "include": true, + "filename": "./experiments/phase1/calibration/output/importation_timeseries.csv" + }, + "infectiousness_rate_fn": {"Constant": { + "rate": 1.0, + "duration": 5.0 + } + }, + "probability_mild_given_infect": 0.7, + "infect_to_mild_mu": 0.1, + "infect_to_mild_sigma": 0.0, + "probability_severe_given_mild": 0.2, + "mild_to_severe_mu": 0.1, + "mild_to_severe_sigma": 0.1, + "mild_to_resolved_mu": 0.1, + "mild_to_resolved_sigma": 0.1, 
+ "probability_critical_given_severe": 0.2, + "severe_to_critical_mu": 0.1, + "severe_to_critical_sigma": 0.1, + "severe_to_resolved_mu": 0.1, + "severe_to_resolved_sigma": 0.1, + "probability_dead_given_critical": 0.2, + "critical_to_dead_mu": 0.1, + "critical_to_dead_sigma": 0.1, + "critical_to_resolved_mu": 0.1, + "critical_to_resolved_sigma": 0.1, + "settings_properties": {"Home": {"alpha": 0.0}, + "Workplace": {"alpha": 0.0}, + "School": {"alpha": 0.0}, + "CensusTract": {"alpha": 0.0}}, + "itinerary_ratios": {"Home": 0.25, "Workplace": 0.25, "School": 0.25, "CensusTract": 0.25}, + "prevalence_report": { + "write": true, + "filename": "person_property_count.csv", + "period": 1.0 + }, + "incidence_report": { + "write": true, + "filename": "incidence_report.csv", + "period": 1.0 + }, + "transmission_report": { + "write": false, + "filename": "transmission_report.csv" + } + } +} diff --git a/experiments/phase1/input/priors.json b/experiments/phase1/input/priors.json new file mode 100644 index 0000000..8b41919 --- /dev/null +++ b/experiments/phase1/input/priors.json @@ -0,0 +1,32 @@ +{ + "priors": { + "symptomatic_reporting_prob": { + "distribution": "beta", + "parameters": { + "alpha": 5, + "beta": 5 + } + }, + "settings_properties.Home.alpha": { + "distribution": "uniform", + "parameters": { + "min": 0.0, + "max": 1.0 + } + }, + "probability_mild_given_infect": { + "distribution": "beta", + "parameters": { + "alpha": 7, + "beta": 3 + } + }, + "infectiousness_rate_fn.Constant.rate": { + "distribution": "uniform", + "parameters": { + "min": 0.1, + "max": 2.0 + } + } + } +} diff --git a/packages/ixa-epi-utils/README.md b/packages/ixa-epi-utils/README.md deleted file mode 100644 index 57ff253..0000000 --- a/packages/ixa-epi-utils/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# ixa-epi-utils - -Utility functions for organizing inputs and outputs diff --git a/packages/ixa-epi-utils/pyproject.toml b/packages/ixa-epi-utils/pyproject.toml deleted file mode 100644 index 
126c66d..0000000 --- a/packages/ixa-epi-utils/pyproject.toml +++ /dev/null @@ -1,14 +0,0 @@ -[project] -name = "ixa-epi-utils" -version = "0.1.0" -description = "Library of utility functions for handling inputs and outputs of the ixa-epi-covid model" -readme = "README.md" -requires-python = ">=3.12" -dependencies = [ - "numpy>=2.3.4", - "polars>=1.35.1", -] - -[build-system] -requires = ["uv_build>=0.9.21,<0.10.0"] -build-backend = "uv_build" diff --git a/packages/ixa-epi-utils/src/ixa_epi_utils/__init__.py b/packages/ixa-epi-utils/src/ixa_epi_utils/__init__.py deleted file mode 100644 index 8e68612..0000000 --- a/packages/ixa-epi-utils/src/ixa_epi_utils/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from .parameters import ( - sample_from_distributions, -) - -__all__ = [ - "sample_from_distributions", -] diff --git a/packages/ixa-epi-utils/src/ixa_epi_utils/parameters.py b/packages/ixa-epi-utils/src/ixa_epi_utils/parameters.py deleted file mode 100644 index 57acef2..0000000 --- a/packages/ixa-epi-utils/src/ixa_epi_utils/parameters.py +++ /dev/null @@ -1,36 +0,0 @@ -import numpy as np - - -def sample_from_distributions( - distributions: dict, n_samples: int = None, seed: int = None -) -> dict: - """ - Sample parameters from given distributions. - Eventually this should allow for correlated samples and other sampling techniques like sobol. - For now, only independent sampling from uniform, normal, and beta distributions is supported. 
- """ - - if seed is not None: - np.random.seed(seed) - - sampled_parameters = {} - for param, dist_info in distributions.items(): - dist_type = dist_info["type"] - match dist_type: - case "uniform": - sampled_parameters[param] = np.random.uniform( - low=dist_info["min"], high=dist_info["max"], size=n_samples - ) - case "normal": - sampled_parameters[param] = np.random.normal( - loc=dist_info["mean"], - scale=dist_info["std"], - size=n_samples, - ) - case "beta": - sampled_parameters[param] = np.random.beta( - a=dist_info["alpha"], b=dist_info["beta"], size=n_samples - ) - case _: - raise ValueError(f"Unknown distribution type: {dist_type}") - return sampled_parameters diff --git a/pyproject.toml b/pyproject.toml index 51e9642..2896ea9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "ixa-epi-covid" +name = "ixa_epi_covid" version = "0.1.0" description = "Epidemiological models for COVID-19 developed in ixa" readme = "README.md" @@ -15,14 +15,19 @@ dependencies = [ "scipy>=1.16.3", "seaborn>=0.13.2", "us>=3.2.0", + "cfa-mrp", + "calibrationtools" ] [build-system] -requires = ["uv-build>=0.8.13,<0.9.0"] -build-backend = "uv-build" +requires = ["uv_build>=0.8.13,<0.9.0"] +build-backend = "uv_build" + +[tool.uv.sources] +calibrationtools = { git = "https://github.com/CDCgov/cfa-calibration-tools.git", branch = "wtk-dev" } [tool.uv] -package = false +package = true [tool.uv.workspace] members = [ diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py new file mode 100644 index 0000000..ddffe11 --- /dev/null +++ b/scripts/phase_1_calibration.py @@ -0,0 +1,72 @@ +from calibrationtools import ABCSampler, AdaptMultivariateNormalVariance, IndependentKernels, MultivariateNormalKernel, SeedKernel +from ixa_epi_covid import CovidModel +from pathlib import Path +import json +import polars as pl +import os +import shutil + + +with open(Path("experiments", "phase1", "input", "priors.json"), "r") as f: + priors = json.load(f) 
+ +with open(Path("experiments", "phase1", "input", "default_params.json"), "r") as f: + default_params = json.load(f) + +mrp_defaults = { + "ixa_inputs": default_params, + "config_inputs": { + "exe_file": "./target/release/ixa-epi-covid", + "output_dir": "./experiments/phase1/calibration/output", + "force_overwrite": True + } +} + +output_dir = Path(mrp_defaults["config_inputs"]["output_dir"]) +if os.path.exists(output_dir) and mrp_defaults["config_inputs"]["force_overwrite"]: + shutil.rmtree(str(output_dir)) + +output_dir.mkdir(parents=True, exist_ok=False) + +P = priors +K = IndependentKernels( + [ + MultivariateNormalKernel( + [p for p in P["priors"].keys()], + ), + SeedKernel("seed"), + ] +) + +model = CovidModel() + +def outputs_to_distance(model_output: pl.DataFrame, target_data: int): + first_death_observed = ( + model_output + .filter( + (pl.col("event") == "Dead") & (pl.col("count") > 0) + ) + .filter(pl.col("t_upper") == pl.min("t_upper")) + ) + if first_death_observed.height > 0: + return abs(target_data - first_death_observed.item(0, "t_upper")) + else: + return 1000 + +sampler = ABCSampler( + generation_particle_count=500, + tolerance_values= [30.0, 20.0, 10.0, 5.0, 2.0, 0.0], + priors=P, + perturbation_kernel=K, + variance_adapter=AdaptMultivariateNormalVariance(), + outputs_to_distance=outputs_to_distance, + target_data=65, + model_runner=model, + seed=123, +) + +results = sampler.run( + default_params=mrp_defaults, + parameter_headers = ["ixa_inputs","epimodel.GlobalParams"] +) +print(results) \ No newline at end of file diff --git a/src/ixa_epi_covid/__init__.py b/src/ixa_epi_covid/__init__.py new file mode 100644 index 0000000..a8057cf --- /dev/null +++ b/src/ixa_epi_covid/__init__.py @@ -0,0 +1,5 @@ +from .covid_model import CovidModel + +__all__ = [ + "CovidModel" +] diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py new file mode 100644 index 0000000..bc5a511 --- /dev/null +++ b/src/ixa_epi_covid/covid_model.py @@ 
-0,0 +1,88 @@ +from pathlib import Path +from typing import Any + +import subprocess +import polars as pl +import json +from mrp import MRPModel +from importation import ImportationModel, get_linelist_data + +class CovidModel(MRPModel): + def run(self): + pass + + @staticmethod + def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: + ixa_inputs = model_inputs["ixa_inputs"] + + ## Generate the importation time series from relevant ixa parameters -------------- + # Calculate the probability that an inidivdual will die given that they are symptomatic + case_fatality_ratio = ( + ixa_inputs["epimodel.GlobalParams"]["probability_severe_given_mild"] * + ixa_inputs["epimodel.GlobalParams"]["probability_critical_given_severe"] * + ixa_inputs["epimodel.GlobalParams"]["probability_dead_given_critical"] + ) + + proportion_asymptomatic = ixa_inputs["epimodel.GlobalParams"]["probability_mild_given_infect"] + symptomatic_reporting_prob = ixa_inputs["epimodel.GlobalParams"]["symptomatic_reporting_prob"] + + importation_params = { + "symptomatic_reporting_prob": symptomatic_reporting_prob, + "case_fatality_ratio": case_fatality_ratio, + "proportion_asymptomatic": proportion_asymptomatic, + } + + importation_filename = ixa_inputs["epimodel.GlobalParams"]["imported_cases_timeseries"]["filename"] + + # Create the model object + importation_model = ImportationModel( + data=get_linelist_data(), + parameters=importation_params, + national_model="multinomial", + state_model="proportional", + seed=ixa_inputs["epimodel.GlobalParams"]["seed"], # Optional argument to set the model seed + ) + + # Generate timeseries data from the model object for Indiana in 2020 + timeseries_data = importation_model.sample_state_importation_incidence( + state="Indiana", + year=2020 + ) + + # Store timeseries at appropriate location accessible to ixa + timeseries_data.write_csv(importation_filename) + + ## Run the ixa transmission model ------------------------ + # Write the ixa inputs to the specified 
file location + config_inputs = model_inputs["config_inputs"] + input_file_path = Path(config_inputs["output_dir"], "input.json") + ixa_inputs["epimodel.GlobalParams"]["seed"] = int(ixa_inputs["epimodel.GlobalParams"]["seed"]) + with open (input_file_path, "w") as f: + json.dump(ixa_inputs, f) + + # Write command to call the ixa model binaries + cmd = [ + config_inputs["exe_file"], + "--config", + input_file_path, + "--output", + config_inputs["output_dir"], + "-f", + "--no-stats", + ] + + try: + subprocess.run(cmd, capture_output=True, check=True) + except subprocess.CalledProcessError as e: + print("Error running the ixa model:") + print("Command:", " ".join(cmd)) + print("Return code:", e.returncode) + print("Standard error:", e.stderr) + raise e + + # Read the model incidence report from the specified location and return as a DataFrame + incidence_report_filename = ixa_inputs["epimodel.GlobalParams"]["incidence_report"]["filename"] + incidence_report_path = Path(config_inputs["output_dir"], incidence_report_filename) + + return pl.read_csv(incidence_report_path) + \ No newline at end of file diff --git a/uv.lock b/uv.lock index db18757..dfc6cec 100644 --- a/uv.lock +++ b/uv.lock @@ -14,7 +14,6 @@ resolution-markers = [ members = [ "importation", "ixa-epi-covid", - "ixa-epi-utils", ] [[package]] @@ -161,6 +160,18 @@ css = [ { name = "tinycss2" }, ] +[[package]] +name = "calibrationtools" +version = "0.1.1" +source = { git = "https://github.com/CDCgov/cfa-calibration-tools.git?branch=wtk-dev#95c00f9a19421af549e3fc9a7fdfb3e186f83713" } +dependencies = [ + { name = "cfa-mrp" }, + { name = "jsonschema" }, + { name = "numpy" }, + { name = "pyarrow" }, + { name = "scipy" }, +] + [[package]] name = "census" version = "0.8.25" @@ -182,6 +193,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = 
"sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, ] +[[package]] +name = "cfa-mrp" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/fb/3ac10fe3d102f7042af0d79c6573b792135d11e0737afb2d91969ce70dab/cfa_mrp-0.0.4.tar.gz", hash = "sha256:dd62717720d1b7f62c7f86917f4e1457adcbaf79ef5672e374ce8907ba8398ae", size = 118116, upload-time = "2026-03-04T15:31:01.032Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/64/d0a1ab7ef7e5b9eded57ea0e3b763c117568135a1c59e2c723e1c03889b4/cfa_mrp-0.0.4-py3-none-any.whl", hash = "sha256:e208abdb802ab239b9d5012aa389e2f4e1e113a76d3cf70c0976bbe3a61a84b4", size = 20206, upload-time = "2026-03-04T15:30:59.576Z" }, +] + [[package]] name = "cffi" version = "2.0.0" @@ -665,9 +688,11 @@ wheels = [ [[package]] name = "ixa-epi-covid" version = "0.1.0" -source = { virtual = "." } +source = { editable = "." 
} dependencies = [ + { name = "calibrationtools" }, { name = "census" }, + { name = "cfa-mrp" }, { name = "dotenv" }, { name = "jupyter" }, { name = "matplotlib" }, @@ -681,7 +706,9 @@ dependencies = [ [package.metadata] requires-dist = [ + { name = "calibrationtools", git = "https://github.com/CDCgov/cfa-calibration-tools.git?branch=wtk-dev" }, { name = "census", specifier = ">=0.8.25" }, + { name = "cfa-mrp" }, { name = "dotenv", specifier = ">=0.9.9" }, { name = "jupyter", specifier = ">=1.1.1" }, { name = "matplotlib", specifier = ">=3.10.8" }, @@ -693,21 +720,6 @@ requires-dist = [ { name = "us", specifier = ">=3.2.0" }, ] -[[package]] -name = "ixa-epi-utils" -version = "0.1.0" -source = { editable = "packages/ixa-epi-utils" } -dependencies = [ - { name = "numpy" }, - { name = "polars" }, -] - -[package.metadata] -requires-dist = [ - { name = "numpy", specifier = ">=2.3.4" }, - { name = "polars", specifier = ">=1.35.1" }, -] - [[package]] name = "jedi" version = "0.19.2" @@ -1339,63 +1351,63 @@ wheels = [ [[package]] name = "numpy" -version = "2.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, 
upload-time = "2026-01-10T06:42:43.091Z" }, - { url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" }, - { url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" }, - { url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" }, - { url = "https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" }, - { url = 
"https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" }, - { url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = "2026-01-10T06:43:01.627Z" }, - { url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" }, - { url = "https://files.pythonhosted.org/packages/04/68/732d4b7811c00775f3bd522a21e8dd5a23f77eb11acdeb663e4a4ebf0ef4/numpy-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d797454e37570cfd61143b73b8debd623c3c0952959adb817dd310a483d58a1b", size = 16652495, upload-time = "2026-01-10T06:43:06.283Z" }, - { url = "https://files.pythonhosted.org/packages/20/ca/857722353421a27f1465652b2c66813eeeccea9d76d5f7b74b99f298e60e/numpy-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c55962006156aeef1629b953fd359064aa47e4d82cfc8e67f0918f7da3344f", size = 12368657, upload-time = "2026-01-10T06:43:09.094Z" }, - { url = "https://files.pythonhosted.org/packages/81/0d/2377c917513449cc6240031a79d30eb9a163d32a91e79e0da47c43f2c0c8/numpy-2.4.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:71abbea030f2cfc3092a0ff9f8c8fdefdc5e0bf7d9d9c99663538bb0ecdac0b9", size = 5197256, upload-time = "2026-01-10T06:43:13.634Z" }, - { url = "https://files.pythonhosted.org/packages/17/39/569452228de3f5de9064ac75137082c6214be1f5c532016549a7923ab4b5/numpy-2.4.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = 
"sha256:5b55aa56165b17aaf15520beb9cbd33c9039810e0d9643dd4379e44294c7303e", size = 6545212, upload-time = "2026-01-10T06:43:15.661Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/77333f4d1e4dac4395385482557aeecf4826e6ff517e32ca48e1dafbe42a/numpy-2.4.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0faba4a331195bfa96f93dd9dfaa10b2c7aa8cda3a02b7fd635e588fe821bf5", size = 14402871, upload-time = "2026-01-10T06:43:17.324Z" }, - { url = "https://files.pythonhosted.org/packages/ba/87/d341e519956273b39d8d47969dd1eaa1af740615394fe67d06f1efa68773/numpy-2.4.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e3087f53e2b4428766b54932644d148613c5a595150533ae7f00dab2f319a8", size = 16359305, upload-time = "2026-01-10T06:43:19.376Z" }, - { url = "https://files.pythonhosted.org/packages/32/91/789132c6666288eaa20ae8066bb99eba1939362e8f1a534949a215246e97/numpy-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:49e792ec351315e16da54b543db06ca8a86985ab682602d90c60ef4ff4db2a9c", size = 16181909, upload-time = "2026-01-10T06:43:21.808Z" }, - { url = "https://files.pythonhosted.org/packages/cf/b8/090b8bd27b82a844bb22ff8fdf7935cb1980b48d6e439ae116f53cdc2143/numpy-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79e9e06c4c2379db47f3f6fc7a8652e7498251789bf8ff5bd43bf478ef314ca2", size = 18284380, upload-time = "2026-01-10T06:43:23.957Z" }, - { url = "https://files.pythonhosted.org/packages/67/78/722b62bd31842ff029412271556a1a27a98f45359dea78b1548a3a9996aa/numpy-2.4.1-cp313-cp313-win32.whl", hash = "sha256:3d1a100e48cb266090a031397863ff8a30050ceefd798f686ff92c67a486753d", size = 5957089, upload-time = "2026-01-10T06:43:27.535Z" }, - { url = "https://files.pythonhosted.org/packages/da/a6/cf32198b0b6e18d4fbfa9a21a992a7fca535b9bb2b0cdd217d4a3445b5ca/numpy-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:92a0e65272fd60bfa0d9278e0484c2f52fe03b97aedc02b357f33fe752c52ffb", size = 12307230, upload-time = 
"2026-01-10T06:43:29.298Z" }, - { url = "https://files.pythonhosted.org/packages/44/6c/534d692bfb7d0afe30611320c5fb713659dcb5104d7cc182aff2aea092f5/numpy-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:20d4649c773f66cc2fc36f663e091f57c3b7655f936a4c681b4250855d1da8f5", size = 10313125, upload-time = "2026-01-10T06:43:31.782Z" }, - { url = "https://files.pythonhosted.org/packages/da/a1/354583ac5c4caa566de6ddfbc42744409b515039e085fab6e0ff942e0df5/numpy-2.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f93bc6892fe7b0663e5ffa83b61aab510aacffd58c16e012bb9352d489d90cb7", size = 12496156, upload-time = "2026-01-10T06:43:34.237Z" }, - { url = "https://files.pythonhosted.org/packages/51/b0/42807c6e8cce58c00127b1dc24d365305189991f2a7917aa694a109c8d7d/numpy-2.4.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:178de8f87948163d98a4c9ab5bee4ce6519ca918926ec8df195af582de28544d", size = 5324663, upload-time = "2026-01-10T06:43:36.211Z" }, - { url = "https://files.pythonhosted.org/packages/fe/55/7a621694010d92375ed82f312b2f28017694ed784775269115323e37f5e2/numpy-2.4.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:98b35775e03ab7f868908b524fc0a84d38932d8daf7b7e1c3c3a1b6c7a2c9f15", size = 6645224, upload-time = "2026-01-10T06:43:37.884Z" }, - { url = "https://files.pythonhosted.org/packages/50/96/9fa8635ed9d7c847d87e30c834f7109fac5e88549d79ef3324ab5c20919f/numpy-2.4.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:941c2a93313d030f219f3a71fd3d91a728b82979a5e8034eb2e60d394a2b83f9", size = 14462352, upload-time = "2026-01-10T06:43:39.479Z" }, - { url = "https://files.pythonhosted.org/packages/03/d1/8cf62d8bb2062da4fb82dd5d49e47c923f9c0738032f054e0a75342faba7/numpy-2.4.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:529050522e983e00a6c1c6b67411083630de8b57f65e853d7b03d9281b8694d2", size = 16407279, upload-time = "2026-01-10T06:43:41.93Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/1c/95c86e17c6b0b31ce6ef219da00f71113b220bcb14938c8d9a05cee0ff53/numpy-2.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2302dc0224c1cbc49bb94f7064f3f923a971bfae45c33870dcbff63a2a550505", size = 16248316, upload-time = "2026-01-10T06:43:44.121Z" }, - { url = "https://files.pythonhosted.org/packages/30/b4/e7f5ff8697274c9d0fa82398b6a372a27e5cef069b37df6355ccb1f1db1a/numpy-2.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9171a42fcad32dcf3fa86f0a4faa5e9f8facefdb276f54b8b390d90447cff4e2", size = 18329884, upload-time = "2026-01-10T06:43:46.613Z" }, - { url = "https://files.pythonhosted.org/packages/37/a4/b073f3e9d77f9aec8debe8ca7f9f6a09e888ad1ba7488f0c3b36a94c03ac/numpy-2.4.1-cp313-cp313t-win32.whl", hash = "sha256:382ad67d99ef49024f11d1ce5dcb5ad8432446e4246a4b014418ba3a1175a1f4", size = 6081138, upload-time = "2026-01-10T06:43:48.854Z" }, - { url = "https://files.pythonhosted.org/packages/16/16/af42337b53844e67752a092481ab869c0523bc95c4e5c98e4dac4e9581ac/numpy-2.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:62fea415f83ad8fdb6c20840578e5fbaf5ddd65e0ec6c3c47eda0f69da172510", size = 12447478, upload-time = "2026-01-10T06:43:50.476Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f8/fa85b2eac68ec631d0b631abc448552cb17d39afd17ec53dcbcc3537681a/numpy-2.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a7870e8c5fc11aef57d6fea4b4085e537a3a60ad2cdd14322ed531fdca68d261", size = 10382981, upload-time = "2026-01-10T06:43:52.575Z" }, - { url = "https://files.pythonhosted.org/packages/1b/a7/ef08d25698e0e4b4efbad8d55251d20fe2a15f6d9aa7c9b30cd03c165e6f/numpy-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3869ea1ee1a1edc16c29bbe3a2f2a4e515cc3a44d43903ad41e0cacdbaf733dc", size = 16652046, upload-time = "2026-01-10T06:43:54.797Z" }, - { url = "https://files.pythonhosted.org/packages/8f/39/e378b3e3ca13477e5ac70293ec027c438d1927f18637e396fe90b1addd72/numpy-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash 
= "sha256:e867df947d427cdd7a60e3e271729090b0f0df80f5f10ab7dd436f40811699c3", size = 12378858, upload-time = "2026-01-10T06:43:57.099Z" }, - { url = "https://files.pythonhosted.org/packages/c3/74/7ec6154f0006910ed1fdbb7591cf4432307033102b8a22041599935f8969/numpy-2.4.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:e3bd2cb07841166420d2fa7146c96ce00cb3410664cbc1a6be028e456c4ee220", size = 5207417, upload-time = "2026-01-10T06:43:59.037Z" }, - { url = "https://files.pythonhosted.org/packages/f7/b7/053ac11820d84e42f8feea5cb81cc4fcd1091499b45b1ed8c7415b1bf831/numpy-2.4.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:f0a90aba7d521e6954670550e561a4cb925713bd944445dbe9e729b71f6cabee", size = 6542643, upload-time = "2026-01-10T06:44:01.852Z" }, - { url = "https://files.pythonhosted.org/packages/c0/c4/2e7908915c0e32ca636b92e4e4a3bdec4cb1e7eb0f8aedf1ed3c68a0d8cd/numpy-2.4.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d558123217a83b2d1ba316b986e9248a1ed1971ad495963d555ccd75dcb1556", size = 14418963, upload-time = "2026-01-10T06:44:04.047Z" }, - { url = "https://files.pythonhosted.org/packages/eb/c0/3ed5083d94e7ffd7c404e54619c088e11f2e1939a9544f5397f4adb1b8ba/numpy-2.4.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f44de05659b67d20499cbc96d49f2650769afcb398b79b324bb6e297bfe3844", size = 16363811, upload-time = "2026-01-10T06:44:06.207Z" }, - { url = "https://files.pythonhosted.org/packages/0e/68/42b66f1852bf525050a67315a4fb94586ab7e9eaa541b1bef530fab0c5dd/numpy-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:69e7419c9012c4aaf695109564e3387f1259f001b4326dfa55907b098af082d3", size = 16197643, upload-time = "2026-01-10T06:44:08.33Z" }, - { url = "https://files.pythonhosted.org/packages/d2/40/e8714fc933d85f82c6bfc7b998a0649ad9769a32f3494ba86598aaf18a48/numpy-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2ffd257026eb1b34352e749d7cc1678b5eeec3e329ad8c9965a797e08ccba205", size = 
18289601, upload-time = "2026-01-10T06:44:10.841Z" }, - { url = "https://files.pythonhosted.org/packages/80/9a/0d44b468cad50315127e884802351723daca7cf1c98d102929468c81d439/numpy-2.4.1-cp314-cp314-win32.whl", hash = "sha256:727c6c3275ddefa0dc078524a85e064c057b4f4e71ca5ca29a19163c607be745", size = 6005722, upload-time = "2026-01-10T06:44:13.332Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bb/c6513edcce5a831810e2dddc0d3452ce84d208af92405a0c2e58fd8e7881/numpy-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:7d5d7999df434a038d75a748275cd6c0094b0ecdb0837342b332a82defc4dc4d", size = 12438590, upload-time = "2026-01-10T06:44:15.006Z" }, - { url = "https://files.pythonhosted.org/packages/e9/da/a598d5cb260780cf4d255102deba35c1d072dc028c4547832f45dd3323a8/numpy-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:ce9ce141a505053b3c7bce3216071f3bf5c182b8b28930f14cd24d43932cd2df", size = 10596180, upload-time = "2026-01-10T06:44:17.386Z" }, - { url = "https://files.pythonhosted.org/packages/de/bc/ea3f2c96fcb382311827231f911723aeff596364eb6e1b6d1d91128aa29b/numpy-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4e53170557d37ae404bf8d542ca5b7c629d6efa1117dac6a83e394142ea0a43f", size = 12498774, upload-time = "2026-01-10T06:44:19.467Z" }, - { url = "https://files.pythonhosted.org/packages/aa/ab/ef9d939fe4a812648c7a712610b2ca6140b0853c5efea361301006c02ae5/numpy-2.4.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:a73044b752f5d34d4232f25f18160a1cc418ea4507f5f11e299d8ac36875f8a0", size = 5327274, upload-time = "2026-01-10T06:44:23.189Z" }, - { url = "https://files.pythonhosted.org/packages/bd/31/d381368e2a95c3b08b8cf7faac6004849e960f4a042d920337f71cef0cae/numpy-2.4.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:fb1461c99de4d040666ca0444057b06541e5642f800b71c56e6ea92d6a853a0c", size = 6648306, upload-time = "2026-01-10T06:44:25.012Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/e5/0989b44ade47430be6323d05c23207636d67d7362a1796ccbccac6773dd2/numpy-2.4.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423797bdab2eeefbe608d7c1ec7b2b4fd3c58d51460f1ee26c7500a1d9c9ee93", size = 14464653, upload-time = "2026-01-10T06:44:26.706Z" }, - { url = "https://files.pythonhosted.org/packages/10/a7/cfbe475c35371cae1358e61f20c5f075badc18c4797ab4354140e1d283cf/numpy-2.4.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52b5f61bdb323b566b528899cc7db2ba5d1015bda7ea811a8bcf3c89c331fa42", size = 16405144, upload-time = "2026-01-10T06:44:29.378Z" }, - { url = "https://files.pythonhosted.org/packages/f8/a3/0c63fe66b534888fa5177cc7cef061541064dbe2b4b60dcc60ffaf0d2157/numpy-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42d7dd5fa36d16d52a84f821eb96031836fd405ee6955dd732f2023724d0aa01", size = 16247425, upload-time = "2026-01-10T06:44:31.721Z" }, - { url = "https://files.pythonhosted.org/packages/6b/2b/55d980cfa2c93bd40ff4c290bf824d792bd41d2fe3487b07707559071760/numpy-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7b6b5e28bbd47b7532698e5db2fe1db693d84b58c254e4389d99a27bb9b8f6b", size = 18330053, upload-time = "2026-01-10T06:44:34.617Z" }, - { url = "https://files.pythonhosted.org/packages/23/12/8b5fc6b9c487a09a7957188e0943c9ff08432c65e34567cabc1623b03a51/numpy-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:5de60946f14ebe15e713a6f22850c2372fa72f4ff9a432ab44aa90edcadaa65a", size = 6152482, upload-time = "2026-01-10T06:44:36.798Z" }, - { url = "https://files.pythonhosted.org/packages/00/a5/9f8ca5856b8940492fc24fbe13c1bc34d65ddf4079097cf9e53164d094e1/numpy-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8f085da926c0d491ffff3096f91078cc97ea67e7e6b65e490bc8dcda65663be2", size = 12627117, upload-time = "2026-01-10T06:44:38.828Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/0d/eca3d962f9eef265f01a8e0d20085c6dd1f443cbffc11b6dede81fd82356/numpy-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:6436cffb4f2bf26c974344439439c95e152c9a527013f26b3577be6c2ca64295", size = 10667121, upload-time = "2026-01-10T06:44:41.644Z" }, +version = "2.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/fd/0005efbd0af48e55eb3c7208af93f2862d4b1a56cd78e84309a2d959208d/numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae", size = 20723651, upload-time = "2026-01-31T23:13:10.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6e/6f394c9c77668153e14d4da83bcc247beb5952f6ead7699a1a2992613bea/numpy-2.4.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21982668592194c609de53ba4933a7471880ccbaadcc52352694a59ecc860b3a", size = 16667963, upload-time = "2026-01-31T23:10:52.147Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/55483431f2b2fd015ae6ed4fe62288823ce908437ed49db5a03d15151678/numpy-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40397bda92382fcec844066efb11f13e1c9a3e2a8e8f318fb72ed8b6db9f60f1", size = 14693571, upload-time = "2026-01-31T23:10:54.789Z" }, + { url = "https://files.pythonhosted.org/packages/2f/20/18026832b1845cdc82248208dd929ca14c9d8f2bac391f67440707fff27c/numpy-2.4.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b3a24467af63c67829bfaa61eecf18d5432d4f11992688537be59ecd6ad32f5e", size = 5203469, upload-time = "2026-01-31T23:10:57.343Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/2eb97c8a77daaba34eaa3fa7241a14ac5f51c46a6bd5911361b644c4a1e2/numpy-2.4.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:805cc8de9fd6e7a22da5aed858e0ab16be5a4db6c873dde1d7451c541553aa27", size = 6550820, upload-time = "2026-01-31T23:10:59.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/91/b97fdfd12dc75b02c44e26c6638241cc004d4079a0321a69c62f51470c4c/numpy-2.4.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d82351358ffbcdcd7b686b90742a9b86632d6c1c051016484fa0b326a0a1548", size = 15663067, upload-time = "2026-01-31T23:11:01.291Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c6/a18e59f3f0b8071cc85cbc8d80cd02d68aa9710170b2553a117203d46936/numpy-2.4.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e35d3e0144137d9fdae62912e869136164534d64a169f86438bc9561b6ad49f", size = 16619782, upload-time = "2026-01-31T23:11:03.669Z" }, + { url = "https://files.pythonhosted.org/packages/b7/83/9751502164601a79e18847309f5ceec0b1446d7b6aa12305759b72cf98b2/numpy-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adb6ed2ad29b9e15321d167d152ee909ec73395901b70936f029c3bc6d7f4460", size = 17013128, upload-time = "2026-01-31T23:11:05.913Z" }, + { url = "https://files.pythonhosted.org/packages/61/c4/c4066322256ec740acc1c8923a10047818691d2f8aec254798f3dd90f5f2/numpy-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8906e71fd8afcb76580404e2a950caef2685df3d2a57fe82a86ac8d33cc007ba", size = 18345324, upload-time = "2026-01-31T23:11:08.248Z" }, + { url = "https://files.pythonhosted.org/packages/ab/af/6157aa6da728fa4525a755bfad486ae7e3f76d4c1864138003eb84328497/numpy-2.4.2-cp312-cp312-win32.whl", hash = "sha256:ec055f6dae239a6299cace477b479cca2fc125c5675482daf1dd886933a1076f", size = 5960282, upload-time = "2026-01-31T23:11:10.497Z" }, + { url = "https://files.pythonhosted.org/packages/92/0f/7ceaaeaacb40567071e94dbf2c9480c0ae453d5bb4f52bea3892c39dc83c/numpy-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:209fae046e62d0ce6435fcfe3b1a10537e858249b3d9b05829e2a05218296a85", size = 12314210, upload-time = "2026-01-31T23:11:12.176Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/a3/56c5c604fae6dd40fa2ed3040d005fca97e91bd320d232ac9931d77ba13c/numpy-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:fbde1b0c6e81d56f5dccd95dd4a711d9b95df1ae4009a60887e56b27e8d903fa", size = 10220171, upload-time = "2026-01-31T23:11:14.684Z" }, + { url = "https://files.pythonhosted.org/packages/a1/22/815b9fe25d1d7ae7d492152adbc7226d3eff731dffc38fe970589fcaaa38/numpy-2.4.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25f2059807faea4b077a2b6837391b5d830864b3543627f381821c646f31a63c", size = 16663696, upload-time = "2026-01-31T23:11:17.516Z" }, + { url = "https://files.pythonhosted.org/packages/09/f0/817d03a03f93ba9c6c8993de509277d84e69f9453601915e4a69554102a1/numpy-2.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd3a7a9f5847d2fb8c2c6d1c862fa109c31a9abeca1a3c2bd5a64572955b2979", size = 14688322, upload-time = "2026-01-31T23:11:19.883Z" }, + { url = "https://files.pythonhosted.org/packages/da/b4/f805ab79293c728b9a99438775ce51885fd4f31b76178767cfc718701a39/numpy-2.4.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8e4549f8a3c6d13d55041925e912bfd834285ef1dd64d6bc7d542583355e2e98", size = 5198157, upload-time = "2026-01-31T23:11:22.375Z" }, + { url = "https://files.pythonhosted.org/packages/74/09/826e4289844eccdcd64aac27d13b0fd3f32039915dd5b9ba01baae1f436c/numpy-2.4.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:aea4f66ff44dfddf8c2cffd66ba6538c5ec67d389285292fe428cb2c738c8aef", size = 6546330, upload-time = "2026-01-31T23:11:23.958Z" }, + { url = "https://files.pythonhosted.org/packages/19/fb/cbfdbfa3057a10aea5422c558ac57538e6acc87ec1669e666d32ac198da7/numpy-2.4.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3cd545784805de05aafe1dde61752ea49a359ccba9760c1e5d1c88a93bbf2b7", size = 15660968, upload-time = "2026-01-31T23:11:25.713Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/dc/46066ce18d01645541f0186877377b9371b8fa8017fa8262002b4ef22612/numpy-2.4.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0d9b7c93578baafcbc5f0b83eaf17b79d345c6f36917ba0c67f45226911d499", size = 16607311, upload-time = "2026-01-31T23:11:28.117Z" }, + { url = "https://files.pythonhosted.org/packages/14/d9/4b5adfc39a43fa6bf918c6d544bc60c05236cc2f6339847fc5b35e6cb5b0/numpy-2.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f74f0f7779cc7ae07d1810aab8ac6b1464c3eafb9e283a40da7309d5e6e48fbb", size = 17012850, upload-time = "2026-01-31T23:11:30.888Z" }, + { url = "https://files.pythonhosted.org/packages/b7/20/adb6e6adde6d0130046e6fdfb7675cc62bc2f6b7b02239a09eb58435753d/numpy-2.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7ac672d699bf36275c035e16b65539931347d68b70667d28984c9fb34e07fa7", size = 18334210, upload-time = "2026-01-31T23:11:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/78/0e/0a73b3dff26803a8c02baa76398015ea2a5434d9b8265a7898a6028c1591/numpy-2.4.2-cp313-cp313-win32.whl", hash = "sha256:8e9afaeb0beff068b4d9cd20d322ba0ee1cecfb0b08db145e4ab4dd44a6b5110", size = 5958199, upload-time = "2026-01-31T23:11:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/43/bc/6352f343522fcb2c04dbaf94cb30cca6fd32c1a750c06ad6231b4293708c/numpy-2.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:7df2de1e4fba69a51c06c28f5a3de36731eb9639feb8e1cf7e4a7b0daf4cf622", size = 12310848, upload-time = "2026-01-31T23:11:38.001Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8d/6da186483e308da5da1cc6918ce913dcfe14ffde98e710bfeff2a6158d4e/numpy-2.4.2-cp313-cp313-win_arm64.whl", hash = "sha256:0fece1d1f0a89c16b03442eae5c56dc0be0c7883b5d388e0c03f53019a4bfd71", size = 10221082, upload-time = "2026-01-31T23:11:40.392Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/a1/9510aa43555b44781968935c7548a8926274f815de42ad3997e9e83680dd/numpy-2.4.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5633c0da313330fd20c484c78cdd3f9b175b55e1a766c4a174230c6b70ad8262", size = 14815866, upload-time = "2026-01-31T23:11:42.495Z" }, + { url = "https://files.pythonhosted.org/packages/36/30/6bbb5e76631a5ae46e7923dd16ca9d3f1c93cfa8d4ed79a129814a9d8db3/numpy-2.4.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d9f64d786b3b1dd742c946c42d15b07497ed14af1a1f3ce840cce27daa0ce913", size = 5325631, upload-time = "2026-01-31T23:11:44.7Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/3a490938800c1923b567b3a15cd17896e68052e2145d8662aaf3e1ffc58f/numpy-2.4.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:b21041e8cb6a1eb5312dd1d2f80a94d91efffb7a06b70597d44f1bd2dfc315ab", size = 6646254, upload-time = "2026-01-31T23:11:46.341Z" }, + { url = "https://files.pythonhosted.org/packages/d3/e9/fac0890149898a9b609caa5af7455a948b544746e4b8fe7c212c8edd71f8/numpy-2.4.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00ab83c56211a1d7c07c25e3217ea6695e50a3e2f255053686b081dc0b091a82", size = 15720138, upload-time = "2026-01-31T23:11:48.082Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5c/08887c54e68e1e28df53709f1893ce92932cc6f01f7c3d4dc952f61ffd4e/numpy-2.4.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fb882da679409066b4603579619341c6d6898fc83a8995199d5249f986e8e8f", size = 16655398, upload-time = "2026-01-31T23:11:50.293Z" }, + { url = "https://files.pythonhosted.org/packages/4d/89/253db0fa0e66e9129c745e4ef25631dc37d5f1314dad2b53e907b8538e6d/numpy-2.4.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66cb9422236317f9d44b67b4d18f44efe6e9c7f8794ac0462978513359461554", size = 17079064, upload-time = "2026-01-31T23:11:52.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/d5/cbade46ce97c59c6c3da525e8d95b7abe8a42974a1dc5c1d489c10433e88/numpy-2.4.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0f01dcf33e73d80bd8dc0f20a71303abbafa26a19e23f6b68d1aa9990af90257", size = 18379680, upload-time = "2026-01-31T23:11:55.22Z" }, + { url = "https://files.pythonhosted.org/packages/40/62/48f99ae172a4b63d981babe683685030e8a3df4f246c893ea5c6ef99f018/numpy-2.4.2-cp313-cp313t-win32.whl", hash = "sha256:52b913ec40ff7ae845687b0b34d8d93b60cb66dcee06996dd5c99f2fc9328657", size = 6082433, upload-time = "2026-01-31T23:11:58.096Z" }, + { url = "https://files.pythonhosted.org/packages/07/38/e054a61cfe48ad9f1ed0d188e78b7e26859d0b60ef21cd9de4897cdb5326/numpy-2.4.2-cp313-cp313t-win_amd64.whl", hash = "sha256:5eea80d908b2c1f91486eb95b3fb6fab187e569ec9752ab7d9333d2e66bf2d6b", size = 12451181, upload-time = "2026-01-31T23:11:59.782Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a4/a05c3a6418575e185dd84d0b9680b6bb2e2dc3e4202f036b7b4e22d6e9dc/numpy-2.4.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fd49860271d52127d61197bb50b64f58454e9f578cb4b2c001a6de8b1f50b0b1", size = 10290756, upload-time = "2026-01-31T23:12:02.438Z" }, + { url = "https://files.pythonhosted.org/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b", size = 16663092, upload-time = "2026-01-31T23:12:04.521Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000", size = 14698770, upload-time = "2026-01-31T23:12:06.96Z" }, + { url = "https://files.pythonhosted.org/packages/fb/0b/f9e49ba6c923678ad5bc38181c08ac5e53b7a5754dbca8e581aa1a56b1ff/numpy-2.4.2-cp314-cp314-macosx_14_0_arm64.whl", hash = 
"sha256:7cdde6de52fb6664b00b056341265441192d1291c130e99183ec0d4b110ff8b1", size = 5208562, upload-time = "2026-01-31T23:12:09.632Z" }, + { url = "https://files.pythonhosted.org/packages/7d/12/d7de8f6f53f9bb76997e5e4c069eda2051e3fe134e9181671c4391677bb2/numpy-2.4.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:cda077c2e5b780200b6b3e09d0b42205a3d1c68f30c6dceb90401c13bff8fe74", size = 6543710, upload-time = "2026-01-31T23:12:11.969Z" }, + { url = "https://files.pythonhosted.org/packages/09/63/c66418c2e0268a31a4cf8a8b512685748200f8e8e8ec6c507ce14e773529/numpy-2.4.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d30291931c915b2ab5717c2974bb95ee891a1cf22ebc16a8006bd59cd210d40a", size = 15677205, upload-time = "2026-01-31T23:12:14.33Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325", size = 16611738, upload-time = "2026-01-31T23:12:16.525Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a7/39c4cdda9f019b609b5c473899d87abff092fc908cfe4d1ecb2fcff453b0/numpy-2.4.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b2f0073ed0868db1dcd86e052d37279eef185b9c8db5bf61f30f46adac63c909", size = 17028888, upload-time = "2026-01-31T23:12:19.306Z" }, + { url = "https://files.pythonhosted.org/packages/da/b3/e84bb64bdfea967cc10950d71090ec2d84b49bc691df0025dddb7c26e8e3/numpy-2.4.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7f54844851cdb630ceb623dcec4db3240d1ac13d4990532446761baede94996a", size = 18339556, upload-time = "2026-01-31T23:12:21.816Z" }, + { url = "https://files.pythonhosted.org/packages/88/f5/954a291bc1192a27081706862ac62bb5920fbecfbaa302f64682aa90beed/numpy-2.4.2-cp314-cp314-win32.whl", hash = "sha256:12e26134a0331d8dbd9351620f037ec470b7c75929cb8a1537f6bfe411152a1a", size = 6006899, 
upload-time = "2026-01-31T23:12:24.14Z" }, + { url = "https://files.pythonhosted.org/packages/05/cb/eff72a91b2efdd1bc98b3b8759f6a1654aa87612fc86e3d87d6fe4f948c4/numpy-2.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:068cdb2d0d644cdb45670810894f6a0600797a69c05f1ac478e8d31670b8ee75", size = 12443072, upload-time = "2026-01-31T23:12:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/37/75/62726948db36a56428fce4ba80a115716dc4fad6a3a4352487f8bb950966/numpy-2.4.2-cp314-cp314-win_arm64.whl", hash = "sha256:6ed0be1ee58eef41231a5c943d7d1375f093142702d5723ca2eb07db9b934b05", size = 10494886, upload-time = "2026-01-31T23:12:28.488Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/ee93744f1e0661dc267e4b21940870cabfae187c092e1433b77b09b50ac4/numpy-2.4.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:98f16a80e917003a12c0580f97b5f875853ebc33e2eaa4bccfc8201ac6869308", size = 14818567, upload-time = "2026-01-31T23:12:30.709Z" }, + { url = "https://files.pythonhosted.org/packages/a7/24/6535212add7d76ff938d8bdc654f53f88d35cddedf807a599e180dcb8e66/numpy-2.4.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:20abd069b9cda45874498b245c8015b18ace6de8546bf50dfa8cea1696ed06ef", size = 5328372, upload-time = "2026-01-31T23:12:32.962Z" }, + { url = "https://files.pythonhosted.org/packages/5e/9d/c48f0a035725f925634bf6b8994253b43f2047f6778a54147d7e213bc5a7/numpy-2.4.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e98c97502435b53741540a5717a6749ac2ada901056c7db951d33e11c885cc7d", size = 6649306, upload-time = "2026-01-31T23:12:34.797Z" }, + { url = "https://files.pythonhosted.org/packages/81/05/7c73a9574cd4a53a25907bad38b59ac83919c0ddc8234ec157f344d57d9a/numpy-2.4.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6cad4e82cb893db4b69105c604d805e0c3ce11501a55b5e9f9083b47d2ffe8", size = 15722394, upload-time = "2026-01-31T23:12:36.565Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/fa/4de10089f21fc7d18442c4a767ab156b25c2a6eaf187c0db6d9ecdaeb43f/numpy-2.4.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e4424677ce4b47fe73c8b5556d876571f7c6945d264201180db2dc34f676ab5", size = 16653343, upload-time = "2026-01-31T23:12:39.188Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f9/d33e4ffc857f3763a57aa85650f2e82486832d7492280ac21ba9efda80da/numpy-2.4.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b8f157c8a6f20eb657e240f8985cc135598b2b46985c5bccbde7616dc9c6b1e", size = 17078045, upload-time = "2026-01-31T23:12:42.041Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b8/54bdb43b6225badbea6389fa038c4ef868c44f5890f95dd530a218706da3/numpy-2.4.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5daf6f3914a733336dab21a05cdec343144600e964d2fcdabaac0c0269874b2a", size = 18380024, upload-time = "2026-01-31T23:12:44.331Z" }, + { url = "https://files.pythonhosted.org/packages/a5/55/6e1a61ded7af8df04016d81b5b02daa59f2ea9252ee0397cb9f631efe9e5/numpy-2.4.2-cp314-cp314t-win32.whl", hash = "sha256:8c50dd1fc8826f5b26a5ee4d77ca55d88a895f4e4819c7ecc2a9f5905047a443", size = 6153937, upload-time = "2026-01-31T23:12:47.229Z" }, + { url = "https://files.pythonhosted.org/packages/45/aa/fa6118d1ed6d776b0983f3ceac9b1a5558e80df9365b1c3aa6d42bf9eee4/numpy-2.4.2-cp314-cp314t-win_amd64.whl", hash = "sha256:fcf92bee92742edd401ba41135185866f7026c502617f422eb432cfeca4fe236", size = 12631844, upload-time = "2026-01-31T23:12:48.997Z" }, + { url = "https://files.pythonhosted.org/packages/32/0a/2ec5deea6dcd158f254a7b372fb09cfba5719419c8d66343bab35237b3fb/numpy-2.4.2-cp314-cp314t-win_arm64.whl", hash = "sha256:1f92f53998a17265194018d1cc321b2e96e900ca52d54c7c77837b71b9465181", size = 10565379, upload-time = "2026-01-31T23:12:51.345Z" }, ] [[package]] From f1bbf68eafc50e69bd2b714ba39801b05dbd0099 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 6 Mar 2026 20:01:05 +0000 Subject: 
[PATCH 07/28] repair zero probability nan occurence importation --- .../src/importation/perkins_et_al_methods.py | 13 ++++--- .../importation/tests/test_perkins_methods.py | 35 +++++++++++++++++++ src/ixa_epi_covid/covid_model.py | 2 +- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/packages/importation/src/importation/perkins_et_al_methods.py b/packages/importation/src/importation/perkins_et_al_methods.py index 159995a..8f436d5 100644 --- a/packages/importation/src/importation/perkins_et_al_methods.py +++ b/packages/importation/src/importation/perkins_et_al_methods.py @@ -197,17 +197,20 @@ def prob_undetected_infections( "n_undetected_infections": n_undetected, "weight": pmf_prob, } - ).with_columns( - (pl.col("weight") / pl.col("weight").sum()).alias("probability") ) + if prob_data.select(pl.sum('weight').eq(0)).item(): + return prob_data.with_columns( + pl.lit(1.0 / prob_data.height).alias("probability") + ) + else: + return prob_data.with_columns( + (pl.col("weight") / pl.col("weight").sum()).alias("probability") + ) else: raise ValueError( "Calculating the probability of observing n undetected infections given known cases and deaths requires one parameter set in prop_ascf." 
) - return prob_data - - def sample_undetected_infections( known_cases: int, known_deaths: int, diff --git a/packages/importation/tests/test_perkins_methods.py b/packages/importation/tests/test_perkins_methods.py index e9c2af9..73d8797 100644 --- a/packages/importation/tests/test_perkins_methods.py +++ b/packages/importation/tests/test_perkins_methods.py @@ -159,6 +159,25 @@ def test_prob_undetected_infections_list(dummy_values): for col in ["n_undetected_infections", "probability"] ) +def test_prob_undetected_infections_zero_probability_value(dummy_values): + prop_ascf = get_prop_ascf(importation_parameters=dummy_values) + known_cases = 500 + known_deaths = 2 + n_undetected = 1 + max_infections = 502 + seed = 42 + + prob_data = prob_undetected_infections( + n_undetected, known_cases, known_deaths, prop_ascf + ) + + assert prob_data.shape[0] == 1 + assert all( + col in prob_data.columns + for col in ["n_undetected_infections", "probability"] + ) + assert prob_data.item(0, "probability") == 1.0 + assert prob_data.item(0, "weight") == 0.0 def test_sample_undetected_infections(dummy_values): prop_ascf = get_prop_ascf(importation_parameters=dummy_values) @@ -176,6 +195,22 @@ def test_sample_undetected_infections(dummy_values): col in sampled_data.columns for col in ["n_undetected_infections"] ) +def test_sample_undetected_infections_zero_handling(dummy_values): + prop_ascf = get_prop_ascf(importation_parameters=dummy_values) + known_cases = 500 + known_deaths = 2 + max_infections = 502 + seed = 42 + + sampled_data = sample_undetected_infections( + known_cases, known_deaths, prop_ascf, max_infections, seed + ) + + assert sampled_data.shape[0] == 1 + assert all( + col in sampled_data.columns for col in ["n_undetected_infections"] + ) + @pytest.fixture def mock_sample_undetected_infections(dummy_values): diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py index bc5a511..207dc79 100644 --- a/src/ixa_epi_covid/covid_model.py +++ 
b/src/ixa_epi_covid/covid_model.py @@ -58,7 +58,7 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: input_file_path = Path(config_inputs["output_dir"], "input.json") ixa_inputs["epimodel.GlobalParams"]["seed"] = int(ixa_inputs["epimodel.GlobalParams"]["seed"]) with open (input_file_path, "w") as f: - json.dump(ixa_inputs, f) + json.dump(ixa_inputs, f, indent=4) # Write command to call the ixa model binaries cmd = [ From 2d0a7c669ae3632118871791ea047f7203c9c490 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 6 Mar 2026 20:02:13 +0000 Subject: [PATCH 08/28] precommit --- .../src/importation/perkins_et_al_methods.py | 11 ++-- .../importation/tests/test_perkins_methods.py | 5 +- scripts/phase_1_calibration.py | 48 ++++++++++------- src/ixa_epi_covid/__init__.py | 4 +- src/ixa_epi_covid/covid_model.py | 53 +++++++++++++------ 5 files changed, 76 insertions(+), 45 deletions(-) diff --git a/packages/importation/src/importation/perkins_et_al_methods.py b/packages/importation/src/importation/perkins_et_al_methods.py index 8f436d5..6377836 100644 --- a/packages/importation/src/importation/perkins_et_al_methods.py +++ b/packages/importation/src/importation/perkins_et_al_methods.py @@ -198,19 +198,22 @@ def prob_undetected_infections( "weight": pmf_prob, } ) - if prob_data.select(pl.sum('weight').eq(0)).item(): + if prob_data.select(pl.sum("weight").eq(0)).item(): return prob_data.with_columns( pl.lit(1.0 / prob_data.height).alias("probability") - ) + ) else: - return prob_data.with_columns( - (pl.col("weight") / pl.col("weight").sum()).alias("probability") + return prob_data.with_columns( + (pl.col("weight") / pl.col("weight").sum()).alias( + "probability" + ) ) else: raise ValueError( "Calculating the probability of observing n undetected infections given known cases and deaths requires one parameter set in prop_ascf." 
) + def sample_undetected_infections( known_cases: int, known_deaths: int, diff --git a/packages/importation/tests/test_perkins_methods.py b/packages/importation/tests/test_perkins_methods.py index 73d8797..379df1a 100644 --- a/packages/importation/tests/test_perkins_methods.py +++ b/packages/importation/tests/test_perkins_methods.py @@ -159,13 +159,12 @@ def test_prob_undetected_infections_list(dummy_values): for col in ["n_undetected_infections", "probability"] ) + def test_prob_undetected_infections_zero_probability_value(dummy_values): prop_ascf = get_prop_ascf(importation_parameters=dummy_values) known_cases = 500 known_deaths = 2 n_undetected = 1 - max_infections = 502 - seed = 42 prob_data = prob_undetected_infections( n_undetected, known_cases, known_deaths, prop_ascf @@ -179,6 +178,7 @@ def test_prob_undetected_infections_zero_probability_value(dummy_values): assert prob_data.item(0, "probability") == 1.0 assert prob_data.item(0, "weight") == 0.0 + def test_sample_undetected_infections(dummy_values): prop_ascf = get_prop_ascf(importation_parameters=dummy_values) known_cases = 5 @@ -195,6 +195,7 @@ def test_sample_undetected_infections(dummy_values): col in sampled_data.columns for col in ["n_undetected_infections"] ) + def test_sample_undetected_infections_zero_handling(dummy_values): prop_ascf = get_prop_ascf(importation_parameters=dummy_values) known_cases = 500 diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index ddffe11..4436a7b 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -1,16 +1,25 @@ -from calibrationtools import ABCSampler, AdaptMultivariateNormalVariance, IndependentKernels, MultivariateNormalKernel, SeedKernel -from ixa_epi_covid import CovidModel -from pathlib import Path import json -import polars as pl import os import shutil +from pathlib import Path + +import polars as pl +from calibrationtools import ( + ABCSampler, + AdaptMultivariateNormalVariance, + IndependentKernels, 
+ MultivariateNormalKernel, + SeedKernel, +) +from ixa_epi_covid import CovidModel with open(Path("experiments", "phase1", "input", "priors.json"), "r") as f: priors = json.load(f) -with open(Path("experiments", "phase1", "input", "default_params.json"), "r") as f: +with open( + Path("experiments", "phase1", "input", "default_params.json"), "r" +) as f: default_params = json.load(f) mrp_defaults = { @@ -18,15 +27,18 @@ "config_inputs": { "exe_file": "./target/release/ixa-epi-covid", "output_dir": "./experiments/phase1/calibration/output", - "force_overwrite": True - } + "force_overwrite": True, + }, } output_dir = Path(mrp_defaults["config_inputs"]["output_dir"]) -if os.path.exists(output_dir) and mrp_defaults["config_inputs"]["force_overwrite"]: +if ( + os.path.exists(output_dir) + and mrp_defaults["config_inputs"]["force_overwrite"] +): shutil.rmtree(str(output_dir)) -output_dir.mkdir(parents=True, exist_ok=False) +output_dir.mkdir(parents=True, exist_ok=False) P = priors K = IndependentKernels( @@ -40,22 +52,20 @@ model = CovidModel() + def outputs_to_distance(model_output: pl.DataFrame, target_data: int): - first_death_observed = ( - model_output - .filter( - (pl.col("event") == "Dead") & (pl.col("count") > 0) - ) - .filter(pl.col("t_upper") == pl.min("t_upper")) - ) + first_death_observed = model_output.filter( + (pl.col("event") == "Dead") & (pl.col("count") > 0) + ).filter(pl.col("t_upper") == pl.min("t_upper")) if first_death_observed.height > 0: return abs(target_data - first_death_observed.item(0, "t_upper")) else: return 1000 + sampler = ABCSampler( generation_particle_count=500, - tolerance_values= [30.0, 20.0, 10.0, 5.0, 2.0, 0.0], + tolerance_values=[30.0, 20.0, 10.0, 5.0, 2.0, 0.0], priors=P, perturbation_kernel=K, variance_adapter=AdaptMultivariateNormalVariance(), @@ -67,6 +77,6 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): results = sampler.run( default_params=mrp_defaults, - parameter_headers = 
["ixa_inputs","epimodel.GlobalParams"] + parameter_headers=["ixa_inputs", "epimodel.GlobalParams"], ) -print(results) \ No newline at end of file +print(results) diff --git a/src/ixa_epi_covid/__init__.py b/src/ixa_epi_covid/__init__.py index a8057cf..4846c73 100644 --- a/src/ixa_epi_covid/__init__.py +++ b/src/ixa_epi_covid/__init__.py @@ -1,5 +1,3 @@ from .covid_model import CovidModel -__all__ = [ - "CovidModel" -] +__all__ = ["CovidModel"] diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py index 207dc79..acc4b0e 100644 --- a/src/ixa_epi_covid/covid_model.py +++ b/src/ixa_epi_covid/covid_model.py @@ -1,11 +1,12 @@ +import json +import subprocess from pathlib import Path from typing import Any -import subprocess import polars as pl -import json -from mrp import MRPModel from importation import ImportationModel, get_linelist_data +from mrp import MRPModel + class CovidModel(MRPModel): def run(self): @@ -18,13 +19,23 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: ## Generate the importation time series from relevant ixa parameters -------------- # Calculate the probability that an inidivdual will die given that they are symptomatic case_fatality_ratio = ( - ixa_inputs["epimodel.GlobalParams"]["probability_severe_given_mild"] * - ixa_inputs["epimodel.GlobalParams"]["probability_critical_given_severe"] * - ixa_inputs["epimodel.GlobalParams"]["probability_dead_given_critical"] + ixa_inputs["epimodel.GlobalParams"][ + "probability_severe_given_mild" + ] + * ixa_inputs["epimodel.GlobalParams"][ + "probability_critical_given_severe" + ] + * ixa_inputs["epimodel.GlobalParams"][ + "probability_dead_given_critical" + ] ) - proportion_asymptomatic = ixa_inputs["epimodel.GlobalParams"]["probability_mild_given_infect"] - symptomatic_reporting_prob = ixa_inputs["epimodel.GlobalParams"]["symptomatic_reporting_prob"] + proportion_asymptomatic = ixa_inputs["epimodel.GlobalParams"][ + "probability_mild_given_infect" + ] + 
symptomatic_reporting_prob = ixa_inputs["epimodel.GlobalParams"][ + "symptomatic_reporting_prob" + ] importation_params = { "symptomatic_reporting_prob": symptomatic_reporting_prob, @@ -32,7 +43,9 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: "proportion_asymptomatic": proportion_asymptomatic, } - importation_filename = ixa_inputs["epimodel.GlobalParams"]["imported_cases_timeseries"]["filename"] + importation_filename = ixa_inputs["epimodel.GlobalParams"][ + "imported_cases_timeseries" + ]["filename"] # Create the model object importation_model = ImportationModel( @@ -40,13 +53,14 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: parameters=importation_params, national_model="multinomial", state_model="proportional", - seed=ixa_inputs["epimodel.GlobalParams"]["seed"], # Optional argument to set the model seed + seed=ixa_inputs["epimodel.GlobalParams"][ + "seed" + ], # Optional argument to set the model seed ) # Generate timeseries data from the model object for Indiana in 2020 timeseries_data = importation_model.sample_state_importation_incidence( - state="Indiana", - year=2020 + state="Indiana", year=2020 ) # Store timeseries at appropriate location accessible to ixa @@ -56,8 +70,10 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: # Write the ixa inputs to the specified file location config_inputs = model_inputs["config_inputs"] input_file_path = Path(config_inputs["output_dir"], "input.json") - ixa_inputs["epimodel.GlobalParams"]["seed"] = int(ixa_inputs["epimodel.GlobalParams"]["seed"]) - with open (input_file_path, "w") as f: + ixa_inputs["epimodel.GlobalParams"]["seed"] = int( + ixa_inputs["epimodel.GlobalParams"]["seed"] + ) + with open(input_file_path, "w") as f: json.dump(ixa_inputs, f, indent=4) # Write command to call the ixa model binaries @@ -81,8 +97,11 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: raise e # Read the model incidence report from the specified location and return as a DataFrame 
- incidence_report_filename = ixa_inputs["epimodel.GlobalParams"]["incidence_report"]["filename"] - incidence_report_path = Path(config_inputs["output_dir"], incidence_report_filename) + incidence_report_filename = ixa_inputs["epimodel.GlobalParams"][ + "incidence_report" + ]["filename"] + incidence_report_path = Path( + config_inputs["output_dir"], incidence_report_filename + ) return pl.read_csv(incidence_report_path) - \ No newline at end of file From 6c7a5afac1c1c2c00c9f9f2777d481799a214134 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 6 Mar 2026 20:06:31 +0000 Subject: [PATCH 09/28] Store results and print select diagnostics --- scripts/phase_1_calibration.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index 4436a7b..4ac9c19 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -1,5 +1,6 @@ import json import os +import pickle import shutil from pathlib import Path @@ -79,4 +80,22 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): default_params=mrp_defaults, parameter_headers=["ixa_inputs", "epimodel.GlobalParams"], ) + print(results) + +diagnostics = results.get_diagnostics() + +print("\nAvailable diagnostics metrics:") +print(diagnostics.keys()) + +print("\nQuantiles for each parameter:") +print(diagnostics["quantiles"]) + +print("\nCorrelation matrix:") +print(diagnostics["correlation_matrix"]) + +with ( + Path("experiments", "phase1", "calibration", "output", "results.pkl"), + "w", +) as fp: + pickle.dump(results, fp) From 96b88c045fd76f07d65fe52c813f27a09255a7a7 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 6 Mar 2026 20:15:24 +0000 Subject: [PATCH 10/28] update gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index a0d8f6f..52feec8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ **/profiling.json +experiments/**/output/ # Data *.csv From 
28a7000bf52d8b6f6e17fd159163b11ce61f5a49 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 6 Mar 2026 20:45:25 +0000 Subject: [PATCH 11/28] updated printing --- scripts/phase_1_calibration.py | 20 +++++++++++++------- src/ixa_epi_covid/covid_model.py | 24 +++++++++++++----------- 2 files changed, 26 insertions(+), 18 deletions(-) diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index 4ac9c19..94aa959 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -30,6 +30,7 @@ "output_dir": "./experiments/phase1/calibration/output", "force_overwrite": True, }, + "importation_inputs": {"state": "Indiana", "year": 2020}, } output_dir = Path(mrp_defaults["config_inputs"]["output_dir"]) @@ -65,7 +66,7 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): sampler = ABCSampler( - generation_particle_count=500, + generation_particle_count=1000, tolerance_values=[30.0, 20.0, 10.0, 5.0, 2.0, 0.0], priors=P, perturbation_kernel=K, @@ -85,17 +86,22 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): diagnostics = results.get_diagnostics() -print("\nAvailable diagnostics metrics:") -print(diagnostics.keys()) - print("\nQuantiles for each parameter:") -print(diagnostics["quantiles"]) +print( + json.dumps( + { + k1: {k2: float(v2) for k2, v2 in v1.items()} + for k1, v1 in diagnostics["quantiles"].items() + }, + indent=4, + ) +) print("\nCorrelation matrix:") print(diagnostics["correlation_matrix"]) -with ( +with open( Path("experiments", "phase1", "calibration", "output", "results.pkl"), - "w", + "wb", ) as fp: pickle.dump(results, fp) diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py index acc4b0e..5ec4ad6 100644 --- a/src/ixa_epi_covid/covid_model.py +++ b/src/ixa_epi_covid/covid_model.py @@ -15,6 +15,16 @@ def run(self): @staticmethod def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: ixa_inputs = model_inputs["ixa_inputs"] + config_inputs = 
model_inputs["config_inputs"] + importation_inputs = model_inputs["importation_inputs"] + + # Write the ixa inputs to the specified file location so that downstream errors can be re-tried + input_file_path = Path(config_inputs["output_dir"], "input.json") + ixa_inputs["epimodel.GlobalParams"]["seed"] = int( + ixa_inputs["epimodel.GlobalParams"]["seed"] + ) + with open(input_file_path, "w") as f: + json.dump(ixa_inputs, f, indent=4) ## Generate the importation time series from relevant ixa parameters -------------- # Calculate the probability that an inidivdual will die given that they are symptomatic @@ -58,24 +68,16 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: ], # Optional argument to set the model seed ) - # Generate timeseries data from the model object for Indiana in 2020 + # Generate timeseries data from the model object for state and optional year timeseries_data = importation_model.sample_state_importation_incidence( - state="Indiana", year=2020 + state=importation_inputs["state"], + year=importation_inputs.get("year"), ) # Store timeseries at appropriate location accessible to ixa timeseries_data.write_csv(importation_filename) ## Run the ixa transmission model ------------------------ - # Write the ixa inputs to the specified file location - config_inputs = model_inputs["config_inputs"] - input_file_path = Path(config_inputs["output_dir"], "input.json") - ixa_inputs["epimodel.GlobalParams"]["seed"] = int( - ixa_inputs["epimodel.GlobalParams"]["seed"] - ) - with open(input_file_path, "w") as f: - json.dump(ixa_inputs, f, indent=4) - # Write command to call the ixa model binaries cmd = [ config_inputs["exe_file"], From c56240b8beca601c64845baa7147ae07843e6e5e Mon Sep 17 00:00:00 2001 From: KOVALW Date: Mon, 9 Mar 2026 14:24:10 +0000 Subject: [PATCH 12/28] report and minor importation fix --- experiments/phase1/reports/calibration.qmd | 22 ++++ .../src/importation/perkins_et_al_methods.py | 5 +- scripts/phase_1_calibration.py | 6 +- 
src/ixa_epi_covid/run.py | 108 ++++++++++++++++++ 4 files changed, 136 insertions(+), 5 deletions(-) create mode 100644 experiments/phase1/reports/calibration.qmd create mode 100644 src/ixa_epi_covid/run.py diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd new file mode 100644 index 0000000..d387d9e --- /dev/null +++ b/experiments/phase1/reports/calibration.qmd @@ -0,0 +1,22 @@ +--- +title: "Phase I calibration" +date: "2026-03-09" +format: pdf +--- + +# Overview +In this phase, we aim to calibrate the model to the timing of the first reported death due to COVID-19 in the state of Indiana. The first reported death in Indiana occurred on March 16th, 2020, 11 days after the first confirmed case. + +# Results + +```{python} +#| echo: false +import pickle +from calibrationtools.calibration_results import CalibrationResults +from pathlib import Path + +with open(Path("..","calibration","output","results.pkl"), "rb") as fp: + results: CalibrationResults = pickle.load(fp) + +print(results) +``` \ No newline at end of file diff --git a/packages/importation/src/importation/perkins_et_al_methods.py b/packages/importation/src/importation/perkins_et_al_methods.py index 6377836..d8591b6 100644 --- a/packages/importation/src/importation/perkins_et_al_methods.py +++ b/packages/importation/src/importation/perkins_et_al_methods.py @@ -198,13 +198,14 @@ def prob_undetected_infections( "weight": pmf_prob, } ) + print(prob_data.select(pl.sum('weight'))) if prob_data.select(pl.sum("weight").eq(0)).item(): return prob_data.with_columns( pl.lit(1.0 / prob_data.height).alias("probability") ) else: return prob_data.with_columns( - (pl.col("weight") / pl.col("weight").sum()).alias( + ((pl.col("weight").log() - pl.sum("weight").log()).exp()).alias( "probability" ) ) @@ -262,7 +263,7 @@ def sample_undetected_infections( prop_ascf=prop_ascf, ) - prob_data = prob_data + print(prob_data) sampled_undetected = np.random.choice( 
prob_data["n_undetected_infections"].to_list(), size=1, diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index 94aa959..a883de0 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -66,13 +66,13 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): sampler = ABCSampler( - generation_particle_count=1000, - tolerance_values=[30.0, 20.0, 10.0, 5.0, 2.0, 0.0], + generation_particle_count=500, + tolerance_values=[30.0, 20.0, 10.0], priors=P, perturbation_kernel=K, variance_adapter=AdaptMultivariateNormalVariance(), outputs_to_distance=outputs_to_distance, - target_data=65, + target_data=75, model_runner=model, seed=123, ) diff --git a/src/ixa_epi_covid/run.py b/src/ixa_epi_covid/run.py new file mode 100644 index 0000000..1a3c286 --- /dev/null +++ b/src/ixa_epi_covid/run.py @@ -0,0 +1,108 @@ +from .covid_model import CovidModel +import argparse +from importation import ImportationModel, get_linelist_data +from pathlib import Path +import subprocess +import json + +def run(ixa_config: Path, output_dir: Path): + with open(ixa_config, "r") as f: + ixa_inputs = json.load(f) + + ## Generate the importation time series from relevant ixa parameters -------------- + # Calculate the probability that an inidivdual will die given that they are symptomatic + case_fatality_ratio = ( + ixa_inputs["epimodel.GlobalParams"][ + "probability_severe_given_mild" + ] + * ixa_inputs["epimodel.GlobalParams"][ + "probability_critical_given_severe" + ] + * ixa_inputs["epimodel.GlobalParams"][ + "probability_dead_given_critical" + ] + ) + + proportion_asymptomatic = ixa_inputs["epimodel.GlobalParams"][ + "probability_mild_given_infect" + ] + symptomatic_reporting_prob = ixa_inputs["epimodel.GlobalParams"][ + "symptomatic_reporting_prob" + ] + + importation_params = { + "symptomatic_reporting_prob": symptomatic_reporting_prob, + "case_fatality_ratio": case_fatality_ratio, + "proportion_asymptomatic": 
proportion_asymptomatic, + } + + print(importation_params) + + importation_filename = ixa_inputs["epimodel.GlobalParams"][ + "imported_cases_timeseries" + ]["filename"] + model = CovidModel() + + # Create the model object + importation_model = ImportationModel( + data=get_linelist_data(), + parameters=importation_params, + national_model="multinomial", + state_model="proportional", + seed=ixa_inputs["epimodel.GlobalParams"][ + "seed" + ], # Optional argument to set the model seed + ) + + # Generate timeseries data from the model object for state and optional year + timeseries_data = importation_model.sample_state_importation_incidence( + state="IN", + year=2020, + ) + + # Store timeseries at appropriate location accessible to ixa + timeseries_data.write_csv(importation_filename) + + ## Run the ixa transmission model ------------------------ + # Write command to call the ixa model binaries + cmd = [ + "./target/release/ixa-epi-covid", + "--config", + ixa_config, + "--output", + output_dir, + "-f", + "--no-stats", + ] + + try: + subprocess.run(cmd, capture_output=True, check=True) + except subprocess.CalledProcessError as e: + print("Error running the ixa model:") + print("Command:", " ".join(cmd)) + print("Return code:", e.returncode) + print("Standard error:", e.stderr) + raise e + + +argparser = argparse.ArgumentParser(description="Run the ixa-epi-covid model with the specified configuration file and output directory.") + +argparser.add_argument( + "-c", + "--ixa_config", + type=Path, + required=True, + help="Path to the ixa configuration file (input.json) containing the model parameters.", +) + +argparser.add_argument( + "-o", + "--output_dir", + type=Path, + required=True, + help="Path to the output directory where model results will be saved.", +) + +if __name__ == "__main__": + args = argparser.parse_args() + run(args.ixa_config, args.output_dir) \ No newline at end of file From c30556f5b317ba7cda647f6bb9f10c72abea6774 Mon Sep 17 00:00:00 2001 From: KOVALW 
Date: Mon, 9 Mar 2026 17:37:29 +0000 Subject: [PATCH 13/28] remove unnecessary run file and create visualizations --- experiments/phase1/reports/calibration.qmd | 106 ++++++++++++++++- .../src/importation/perkins_et_al_methods.py | 3 +- scripts/phase_1_calibration.py | 4 +- scripts/plot_calibration.py | 99 ++++++++++++++++ src/ixa_epi_covid/covid_model.py | 4 +- src/ixa_epi_covid/run.py | 108 ------------------ 6 files changed, 206 insertions(+), 118 deletions(-) create mode 100644 scripts/plot_calibration.py delete mode 100644 src/ixa_epi_covid/run.py diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index d387d9e..40ec6dd 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -5,10 +5,9 @@ format: pdf --- # Overview -In this phase, we aim to calibrate the model to the timing of the first reported death due to COVID-19 in the state of Indiana. The first reported death in Indiana occurred on March 16th, 2020, 11 days after the first confirmed case. +In this phase, we aim to calibrate the model to the timing of the first reported death due to COVID-19 in the state of Indiana. The first reported death in Indiana occurred on March 16th, 2020, 10 days after the first confirmed case. 
# Results - ```{python} #| echo: false import pickle @@ -17,6 +16,105 @@ from pathlib import Path with open(Path("..","calibration","output","results.pkl"), "rb") as fp: results: CalibrationResults = pickle.load(fp) +``` + + +Quantiles for each fitted parameter +```{python} +#| echo: false +import json +diagnostics = results.get_diagnostics() +print( + json.dumps( + { + k1: {k2: float(v2) for k2, v2 in v1.items()} + for k1, v1 in diagnostics["quantiles"].items() + }, + indent=4, + ) +) +``` + +```{python} +#| echo: false + +# Re-generating a random sample of parameter sets from posterior +from ixa_epi_covid.run import run +from calibrationtools import default_particle_reader +import tempfile +import polars as pl +import os +from ixa_epi_covid import CovidModel + +os.chdir('../') +particles = results.sample_posterior_particles(n=5) +default_params_file = Path("input","default_params.json") + +with open(default_params_file, "rb") as fp: + default_params = json.load(fp) + +mrp_defaults = { + 'ixa_inputs': default_params, + "config_inputs": { + "exe_file": "./../../target/release/ixa-epi-covid", + "output_dir": "./experiments/phase1/calibration/output", + "force_overwrite": True, + }, + "importation_inputs": {"state": "Indiana", "year": 2020}, +} + +uniq_id = 0 +model = CovidModel() +for p in particles: + model_inputs = default_particle_reader( + p, + default_params=mrp_defaults, + headers=["ixa_inputs","epimodel.GlobalParams"] + ) + importation_curves = [] + prevalence_data = [] + with tempfile.TemporaryDirectory(delete=False) as tmpdir: + model_inputs["config_inputs"]["output_dir"] = str(tmpdir) + importation_path = Path(tmpdir, "importation_timeseries.csv") + model_inputs["ixa_inputs"]["epimodel.GlobalParams"]["imported_cases_timeseries"]["filename"] = str(importation_path) + model.simulate(model_inputs) + prevalence_data.append( + pl.read_csv(Path(tmpdir, model_inputs['epimodel.GlobalParams']["prevalence_report"]["filename"])).with_columns( + 
pl.lit(uniq_id).alias("id") + ) + ) + importation_curves.append( + pl.read_csv(importation_path).with_columns( + pl.lit(uniq_id).alias("id") + ) + ) + uniq_id += 1 +``` + +Plot the importation time series and death time series for the 50 sampled parameter sets +```{python} +#| echo: false +#| eval: false +import matplotlib.pyplot as plt +import seaborn as sns + +importations = pl.vstack(importation_curves) +deaths = pl.vstack(prevalence_data).filter(pl.col('symptom_status') == 'Dead') + +sns.lineplot( + data = importations, + x="time", + y="imported_infections", + units="id", + estimator=None, +) +plt.show() -print(results) -``` \ No newline at end of file +sns.lineplot( + data = deaths, + x="time", + y="count", + units="id", + estimator=None, +) +``` diff --git a/packages/importation/src/importation/perkins_et_al_methods.py b/packages/importation/src/importation/perkins_et_al_methods.py index d8591b6..7cfafb6 100644 --- a/packages/importation/src/importation/perkins_et_al_methods.py +++ b/packages/importation/src/importation/perkins_et_al_methods.py @@ -198,7 +198,7 @@ def prob_undetected_infections( "weight": pmf_prob, } ) - print(prob_data.select(pl.sum('weight'))) + if prob_data.select(pl.sum("weight").eq(0)).item(): return prob_data.with_columns( pl.lit(1.0 / prob_data.height).alias("probability") @@ -263,7 +263,6 @@ def sample_undetected_infections( prop_ascf=prop_ascf, ) - print(prob_data) sampled_undetected = np.random.choice( prob_data["n_undetected_infections"].to_list(), size=1, diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index a883de0..a94cfaf 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -66,8 +66,8 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): sampler = ABCSampler( - generation_particle_count=500, - tolerance_values=[30.0, 20.0, 10.0], + generation_particle_count=750, + tolerance_values=[30.0, 20.0, 10.0, 5.0], priors=P, perturbation_kernel=K, 
variance_adapter=AdaptMultivariateNormalVariance(), diff --git a/scripts/plot_calibration.py b/scripts/plot_calibration.py new file mode 100644 index 0000000..99e58f9 --- /dev/null +++ b/scripts/plot_calibration.py @@ -0,0 +1,99 @@ +import pickle +from calibrationtools.calibration_results import CalibrationResults +from pathlib import Path +import json +from calibrationtools import default_particle_reader +import tempfile +import polars as pl +from ixa_epi_covid import CovidModel +import matplotlib.pyplot as plt +import seaborn as sns +import os + + +with open(Path("experiments","phase1","calibration","output","results.pkl"), "rb") as fp: + results: CalibrationResults = pickle.load(fp) + +diagnostics = results.get_diagnostics() +# print( +# json.dumps( +# { +# k1: {k2: float(v2) for k2, v2 in v1.items()} +# for k1, v1 in diagnostics["quantiles"].items() +# }, +# indent=4, +# ) +# ) +# Re-generating a random sample of parameter sets from posterior + +particles = results.sample_posterior_particles(n=100) +default_params_file = Path("experiments", "phase1","input","default_params.json") + +with open(default_params_file, "rb") as fp: + default_params = json.load(fp) + +mrp_defaults = { + 'ixa_inputs': default_params, + "config_inputs": { + "exe_file": "./target/release/ixa-epi-covid", + "output_dir": "./experiments/phase1/calibration/output", + "force_overwrite": True, + }, + "importation_inputs": {"state": "Indiana", "year": 2020}, +} + +uniq_id = 0 +model = CovidModel() +importation_curves = [] +prevalence_data = [] + +with tempfile.TemporaryDirectory() as tmpdir: + for p in particles: + model_inputs = default_particle_reader( + p, + default_params=mrp_defaults, + parameter_headers=["ixa_inputs","epimodel.GlobalParams"] + ) + + model_inputs["config_inputs"]["output_dir"] = str(Path(tmpdir, f'{uniq_id}')) + os.makedirs(model_inputs["config_inputs"]["output_dir"], exist_ok=True) + importation_path = Path(tmpdir, f'{uniq_id}', "importation_timeseries.csv") + + 
model_inputs["ixa_inputs"]["epimodel.GlobalParams"]["imported_cases_timeseries"]["filename"] = str(importation_path) + model.simulate(model_inputs) + prevalence_data.append( + pl.read_csv(Path(tmpdir, f'{uniq_id}', model_inputs["ixa_inputs"]['epimodel.GlobalParams']["prevalence_report"]["filename"])).with_columns( + pl.lit(uniq_id).alias("id") + ) + ) + importation_curves.append( + pl.read_csv(importation_path).with_columns( + pl.lit(uniq_id).alias("id") + ) + ) + uniq_id += 1 +print(len(importation_curves)) +importations = pl.concat(importation_curves) +deaths = pl.concat(prevalence_data).filter(pl.col('symptom_status') == 'Dead').group_by('t', 'id').agg(pl.sum('count')) + +sns.lineplot( + data = importations, + x="time", + y="imported_infections", + units="id", + estimator=None, + alpha=0.05 +) +plt.show() + +sns.lineplot( + data = deaths, + x="t", + y="count", +) +plt.show() +sns.histplot( + data = deaths.filter(pl.col('count') > 0).group_by('id').agg(pl.min('t')), + x = 't', +) +plt.show() \ No newline at end of file diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py index 5ec4ad6..f347b5d 100644 --- a/src/ixa_epi_covid/covid_model.py +++ b/src/ixa_epi_covid/covid_model.py @@ -19,7 +19,7 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: importation_inputs = model_inputs["importation_inputs"] # Write the ixa inputs to the specified file location so that downstream errors can be re-tried - input_file_path = Path(config_inputs["output_dir"], "input.json") + input_file_path = Path(config_inputs["output_dir"],"input.json") ixa_inputs["epimodel.GlobalParams"]["seed"] = int( ixa_inputs["epimodel.GlobalParams"]["seed"] ) @@ -82,7 +82,7 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: cmd = [ config_inputs["exe_file"], "--config", - input_file_path, + str(input_file_path), "--output", config_inputs["output_dir"], "-f", diff --git a/src/ixa_epi_covid/run.py b/src/ixa_epi_covid/run.py deleted file mode 100644 index 
1a3c286..0000000 --- a/src/ixa_epi_covid/run.py +++ /dev/null @@ -1,108 +0,0 @@ -from .covid_model import CovidModel -import argparse -from importation import ImportationModel, get_linelist_data -from pathlib import Path -import subprocess -import json - -def run(ixa_config: Path, output_dir: Path): - with open(ixa_config, "r") as f: - ixa_inputs = json.load(f) - - ## Generate the importation time series from relevant ixa parameters -------------- - # Calculate the probability that an inidivdual will die given that they are symptomatic - case_fatality_ratio = ( - ixa_inputs["epimodel.GlobalParams"][ - "probability_severe_given_mild" - ] - * ixa_inputs["epimodel.GlobalParams"][ - "probability_critical_given_severe" - ] - * ixa_inputs["epimodel.GlobalParams"][ - "probability_dead_given_critical" - ] - ) - - proportion_asymptomatic = ixa_inputs["epimodel.GlobalParams"][ - "probability_mild_given_infect" - ] - symptomatic_reporting_prob = ixa_inputs["epimodel.GlobalParams"][ - "symptomatic_reporting_prob" - ] - - importation_params = { - "symptomatic_reporting_prob": symptomatic_reporting_prob, - "case_fatality_ratio": case_fatality_ratio, - "proportion_asymptomatic": proportion_asymptomatic, - } - - print(importation_params) - - importation_filename = ixa_inputs["epimodel.GlobalParams"][ - "imported_cases_timeseries" - ]["filename"] - model = CovidModel() - - # Create the model object - importation_model = ImportationModel( - data=get_linelist_data(), - parameters=importation_params, - national_model="multinomial", - state_model="proportional", - seed=ixa_inputs["epimodel.GlobalParams"][ - "seed" - ], # Optional argument to set the model seed - ) - - # Generate timeseries data from the model object for state and optional year - timeseries_data = importation_model.sample_state_importation_incidence( - state="IN", - year=2020, - ) - - # Store timeseries at appropriate location accessible to ixa - timeseries_data.write_csv(importation_filename) - - ## Run the ixa 
transmission model ------------------------ - # Write command to call the ixa model binaries - cmd = [ - "./target/release/ixa-epi-covid", - "--config", - ixa_config, - "--output", - output_dir, - "-f", - "--no-stats", - ] - - try: - subprocess.run(cmd, capture_output=True, check=True) - except subprocess.CalledProcessError as e: - print("Error running the ixa model:") - print("Command:", " ".join(cmd)) - print("Return code:", e.returncode) - print("Standard error:", e.stderr) - raise e - - -argparser = argparse.ArgumentParser(description="Run the ixa-epi-covid model with the specified configuration file and output directory.") - -argparser.add_argument( - "-c", - "--ixa_config", - type=Path, - required=True, - help="Path to the ixa configuration file (input.json) containing the model parameters.", -) - -argparser.add_argument( - "-o", - "--output_dir", - type=Path, - required=True, - help="Path to the output directory where model results will be saved.", -) - -if __name__ == "__main__": - args = argparser.parse_args() - run(args.ixa_config, args.output_dir) \ No newline at end of file From 1b5f0ca2eda297ec796a04b06ac79b4b35ad1a0f Mon Sep 17 00:00:00 2001 From: KOVALW Date: Mon, 9 Mar 2026 18:46:33 +0000 Subject: [PATCH 14/28] add prior distribution figs to plot calibration script --- experiments/phase1/reports/calibration.qmd | 4 +- .../src/importation/perkins_et_al_methods.py | 6 +- scripts/plot_calibration.py | 133 +++++++++++++----- src/ixa_epi_covid/covid_model.py | 2 +- 4 files changed, 102 insertions(+), 43 deletions(-) diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index 40ec6dd..1730d18 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -67,8 +67,8 @@ uniq_id = 0 model = CovidModel() for p in particles: model_inputs = default_particle_reader( - p, - default_params=mrp_defaults, + p, + default_params=mrp_defaults, 
headers=["ixa_inputs","epimodel.GlobalParams"] ) importation_curves = [] diff --git a/packages/importation/src/importation/perkins_et_al_methods.py b/packages/importation/src/importation/perkins_et_al_methods.py index 7cfafb6..6009716 100644 --- a/packages/importation/src/importation/perkins_et_al_methods.py +++ b/packages/importation/src/importation/perkins_et_al_methods.py @@ -205,9 +205,9 @@ def prob_undetected_infections( ) else: return prob_data.with_columns( - ((pl.col("weight").log() - pl.sum("weight").log()).exp()).alias( - "probability" - ) + ( + (pl.col("weight").log() - pl.sum("weight").log()).exp() + ).alias("probability") ) else: raise ValueError( diff --git a/scripts/plot_calibration.py b/scripts/plot_calibration.py index 99e58f9..acd9389 100644 --- a/scripts/plot_calibration.py +++ b/scripts/plot_calibration.py @@ -1,39 +1,81 @@ -import pickle -from calibrationtools.calibration_results import CalibrationResults -from pathlib import Path import json -from calibrationtools import default_particle_reader +import os +import pickle import tempfile -import polars as pl -from ixa_epi_covid import CovidModel +from pathlib import Path + import matplotlib.pyplot as plt +import numpy as np +import polars as pl import seaborn as sns -import os +from calibrationtools import ( + Particle, + default_particle_reader, +) +from calibrationtools.calibration_results import CalibrationResults +from ixa_epi_covid import CovidModel -with open(Path("experiments","phase1","calibration","output","results.pkl"), "rb") as fp: +with open( + Path("experiments", "phase1", "calibration", "output", "results.pkl"), "rb" +) as fp: results: CalibrationResults = pickle.load(fp) diagnostics = results.get_diagnostics() -# print( -# json.dumps( -# { -# k1: {k2: float(v2) for k2, v2 in v1.items()} -# for k1, v1 in diagnostics["quantiles"].items() -# }, -# indent=4, -# ) -# ) +print( + json.dumps( + { + k1: {k2: float(v2) for k2, v2 in v1.items()} + for k1, v1 in 
diagnostics["quantiles"].items() + }, + indent=4, + ) +) + +posterior_samples = results.sample_posterior_particles(n=int(results.ess)) + + +for param in diagnostics["quantiles"].keys(): + vals = [p[param] for p in posterior_samples] + min_val = min(vals) + max_val = max(vals) + print(param, min_val, max_val) + sns.histplot(x=vals, stat="density") + eval_points = np.arange( + min_val - np.var(vals), max_val + np.var(vals), 0.01 + ) + param_prior = None + for prior in results.priors.priors: + if prior.param == param: + param_prior = prior + break + if not param_prior: + raise (ValueError, f"Could not find prior {param}") + + density_vals = [ + param_prior.probability_density(Particle({param: v})) + for v in eval_points + ] + + sns.lineplot( + data=pl.DataFrame({param: list(eval_points), "density": density_vals}), + x=param, + y="density", + ) + plt.show() + # Re-generating a random sample of parameter sets from posterior particles = results.sample_posterior_particles(n=100) -default_params_file = Path("experiments", "phase1","input","default_params.json") +default_params_file = Path( + "experiments", "phase1", "input", "default_params.json" +) with open(default_params_file, "rb") as fp: default_params = json.load(fp) mrp_defaults = { - 'ixa_inputs': default_params, + "ixa_inputs": default_params, "config_inputs": { "exe_file": "./target/release/ixa-epi-covid", "output_dir": "./experiments/phase1/calibration/output", @@ -50,21 +92,33 @@ with tempfile.TemporaryDirectory() as tmpdir: for p in particles: model_inputs = default_particle_reader( - p, - default_params=mrp_defaults, - parameter_headers=["ixa_inputs","epimodel.GlobalParams"] + p, + default_params=mrp_defaults, + parameter_headers=["ixa_inputs", "epimodel.GlobalParams"], + ) + + model_inputs["config_inputs"]["output_dir"] = str( + Path(tmpdir, f"{uniq_id}") ) - - model_inputs["config_inputs"]["output_dir"] = str(Path(tmpdir, f'{uniq_id}')) os.makedirs(model_inputs["config_inputs"]["output_dir"], exist_ok=True) 
- importation_path = Path(tmpdir, f'{uniq_id}', "importation_timeseries.csv") - - model_inputs["ixa_inputs"]["epimodel.GlobalParams"]["imported_cases_timeseries"]["filename"] = str(importation_path) + importation_path = Path( + tmpdir, f"{uniq_id}", "importation_timeseries.csv" + ) + + model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ + "imported_cases_timeseries" + ]["filename"] = str(importation_path) model.simulate(model_inputs) prevalence_data.append( - pl.read_csv(Path(tmpdir, f'{uniq_id}', model_inputs["ixa_inputs"]['epimodel.GlobalParams']["prevalence_report"]["filename"])).with_columns( - pl.lit(uniq_id).alias("id") - ) + pl.read_csv( + Path( + tmpdir, + f"{uniq_id}", + model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ + "prevalence_report" + ]["filename"], + ) + ).with_columns(pl.lit(uniq_id).alias("id")) ) importation_curves.append( pl.read_csv(importation_path).with_columns( @@ -74,26 +128,31 @@ uniq_id += 1 print(len(importation_curves)) importations = pl.concat(importation_curves) -deaths = pl.concat(prevalence_data).filter(pl.col('symptom_status') == 'Dead').group_by('t', 'id').agg(pl.sum('count')) +deaths = ( + pl.concat(prevalence_data) + .filter(pl.col("symptom_status") == "Dead") + .group_by("t", "id") + .agg(pl.sum("count")) +) sns.lineplot( - data = importations, + data=importations, x="time", y="imported_infections", units="id", estimator=None, - alpha=0.05 + alpha=0.05, ) plt.show() sns.lineplot( - data = deaths, + data=deaths, x="t", y="count", ) plt.show() sns.histplot( - data = deaths.filter(pl.col('count') > 0).group_by('id').agg(pl.min('t')), - x = 't', + data=deaths.filter(pl.col("count") > 0).group_by("id").agg(pl.min("t")), + x="t", ) -plt.show() \ No newline at end of file +plt.show() diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py index f347b5d..738663c 100644 --- a/src/ixa_epi_covid/covid_model.py +++ b/src/ixa_epi_covid/covid_model.py @@ -19,7 +19,7 @@ def simulate(model_inputs: dict[str, 
Any]) -> pl.DataFrame: importation_inputs = model_inputs["importation_inputs"] # Write the ixa inputs to the specified file location so that downstream errors can be re-tried - input_file_path = Path(config_inputs["output_dir"],"input.json") + input_file_path = Path(config_inputs["output_dir"], "input.json") ixa_inputs["epimodel.GlobalParams"]["seed"] = int( ixa_inputs["epimodel.GlobalParams"]["seed"] ) From c1fcf784e01e36b6e93c120324356e0182b94c73 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Tue, 10 Mar 2026 14:22:08 +0000 Subject: [PATCH 15/28] Port plotting from script into qmd report --- experiments/phase1/reports/calibration.qmd | 212 +++++++++++++++++---- scripts/plot_calibration.py | 158 --------------- 2 files changed, 171 insertions(+), 199 deletions(-) delete mode 100644 scripts/plot_calibration.py diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index 1730d18..e4dfb32 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -11,10 +11,27 @@ In this phase, we aim to calibrate the model to the timing of the first reported ```{python} #| echo: false import pickle -from calibrationtools.calibration_results import CalibrationResults +from calibrationtools.calibration_results import CalibrationResults, Particle from pathlib import Path +import os +import seaborn as sns +import matplotlib.pyplot as plt +import numpy as np +import polars as pl +import json +from calibrationtools import default_particle_reader +import tempfile +import polars as pl +import os +from ixa_epi_covid import CovidModel + + +os.chdir('../../../') + +wd = Path("experiments", "phase1") -with open(Path("..","calibration","output","results.pkl"), "rb") as fp: +# Load results object from the calibration directory +with open(wd / "calibration" / "output" / "results.pkl", "rb") as fp: results: CalibrationResults = pickle.load(fp) ``` @@ -22,12 +39,11 @@ with 
open(Path("..","calibration","output","results.pkl"), "rb") as fp: Quantiles for each fitted parameter ```{python} #| echo: false -import json diagnostics = results.get_diagnostics() print( json.dumps( { - k1: {k2: float(v2) for k2, v2 in v1.items()} + k1: {k2: np.format_float_positional(v2, precision=3) for k2, v2 in v1.items()} for k1, v1 in diagnostics["quantiles"].items() }, indent=4, @@ -35,28 +51,63 @@ print( ) ``` +Histograms of posterior samples compared to the probability density of each prior. To generate the histogram, we sample $n$ particles from the posterior population, where $n$ is the effective sample size of the population, in order to reflect the weight distribution of the posterior. For each plot, the prior is plotted as a blue line and the histogram of the posterior samples is plotted in orange with a kernel density estimator overlay. + ```{python} #| echo: false +posterior_samples = results.sample_posterior_particles(n=int(results.ess)) -# Re-generating a random sample of parameter sets from posterior -from ixa_epi_covid.run import run -from calibrationtools import default_particle_reader -import tempfile -import polars as pl -import os -from ixa_epi_covid import CovidModel +for param in diagnostics["quantiles"].keys(): + vals = [p[param] for p in posterior_samples] + min_val = min(vals) + max_val = max(vals) + print(param, min_val, max_val) + sns.histplot(x=vals, stat="density", kde=True, color='orange', edgecolor='black') + eval_points = np.arange( + min_val - np.var(vals), max_val + np.var(vals), 0.01 + ) + param_prior = None + for prior in results.priors.priors: + if prior.param == param: + param_prior = prior + break + if not param_prior: + raise (ValueError, f"Could not find prior {param}") + + density_vals = [ + param_prior.probability_density(Particle({param: v})) + for v in eval_points + ] + + sns.lineplot( + data=pl.DataFrame({param: list(eval_points), "density": density_vals}), + x=param, + y="density", + ) + plt.title(f"Posterior 
versus prior distribution") + plt.xlabel(" ".join(param.split("."))) + plt.ylabel("Density") + plt.tight_layout() + plt.show() -os.chdir('../') -particles = results.sample_posterior_particles(n=5) -default_params_file = Path("input","default_params.json") +``` +```{python} +#| echo: false +## Obtaining the importation time series and death incidence data frmaes for a random smaple form the posterior +# Re-generating a random sample of parameter sets from posterior +particle_count = 100 +particles = results.sample_posterior_particles(n=particle_count) +default_params_file = wd / 'input' / 'default_params.json' with open(default_params_file, "rb") as fp: default_params = json.load(fp) +default_params['epimodel.GlobalParams']['max_time'] = 200 + mrp_defaults = { 'ixa_inputs': default_params, "config_inputs": { - "exe_file": "./../../target/release/ixa-epi-covid", + "exe_file": "./target/release/ixa-epi-covid", "output_dir": "./experiments/phase1/calibration/output", "force_overwrite": True, }, @@ -65,56 +116,135 @@ mrp_defaults = { uniq_id = 0 model = CovidModel() -for p in particles: - model_inputs = default_particle_reader( - p, - default_params=mrp_defaults, - headers=["ixa_inputs","epimodel.GlobalParams"] - ) - importation_curves = [] - prevalence_data = [] - with tempfile.TemporaryDirectory(delete=False) as tmpdir: - model_inputs["config_inputs"]["output_dir"] = str(tmpdir) - importation_path = Path(tmpdir, "importation_timeseries.csv") - model_inputs["ixa_inputs"]["epimodel.GlobalParams"]["imported_cases_timeseries"]["filename"] = str(importation_path) +importation_curves = [] +prevalence_data = [] + +with tempfile.TemporaryDirectory() as tmpdir: + for p in particles: + model_inputs = default_particle_reader( + p, + default_params=mrp_defaults, + parameter_headers=["ixa_inputs", "epimodel.GlobalParams"], + ) + + model_inputs["config_inputs"]["output_dir"] = str( + Path(tmpdir, f"{uniq_id}") + ) + os.makedirs(model_inputs["config_inputs"]["output_dir"], 
exist_ok=True) + importation_path = Path( + tmpdir, f"{uniq_id}", "importation_timeseries.csv" + ) + + model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ + "imported_cases_timeseries" + ]["filename"] = str(importation_path) model.simulate(model_inputs) prevalence_data.append( - pl.read_csv(Path(tmpdir, model_inputs['epimodel.GlobalParams']["prevalence_report"]["filename"])).with_columns( - pl.lit(uniq_id).alias("id") - ) + pl.read_csv( + Path( + tmpdir, + f"{uniq_id}", + model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ + "prevalence_report" + ]["filename"], + ) + ).with_columns(pl.lit(uniq_id).alias("id")) ) importation_curves.append( pl.read_csv(importation_path).with_columns( pl.lit(uniq_id).alias("id") ) ) - uniq_id += 1 + uniq_id += 1 + +importations = pl.concat(importation_curves) +all_prevalence_data = pl.concat(prevalence_data) +deaths = ( + all_prevalence_data + .filter(pl.col("symptom_status") == "Dead") + .group_by("t", "id") + .agg(pl.sum("count")) +) ``` -Plot the importation time series and death time series for the 50 sampled parameter sets +For a final SMC step with threshold toelrance above zero, we will observe some variance in the date of first reported death. Here we show a sampled histogram of the timing of the first death across the posterior simulations. The vertical dashed line indicates the observed timing of the first death in Indiana (March 16, 2020, or 75 days after the start of the simulation on January 1, 2020). We can see that while although the distribution is centered around the observed timing, there is a higher weight on earlier first reported deaths, indicating that imported infections occur faster in the simulated model than they occured in real life. This may be due to the proprotional sampling of Indiana importations from the domestic level imported infections, when in fact they may have been less probable due to mobility flow patterns. 
+ ```{python} #| echo: false -#| eval: false -import matplotlib.pyplot as plt -import seaborn as sns -importations = pl.vstack(importation_curves) -deaths = pl.vstack(prevalence_data).filter(pl.col('symptom_status') == 'Dead') +first_deaths = deaths.filter(pl.col("count") > 0).group_by("id").agg(pl.min("t")) +sns.histplot( + data=first_deaths, + x="t", +) +plt.axvline(x=75, color="black", linestyle="--", label="Observed (March 16, 2020)") +plt.title(f"Distribution of first death times ({particle_count} posterior samples)") +plt.xlabel("Time of first death (days since simulation start)") +plt.ylabel("Number of simulations") +plt.legend() +plt.tight_layout() +plt.show() + +``` +We can show the posterior variance in the imported infections time series by overlaying samples from the simulated particles. Each line in the plot below corresponds to the importation time series for a single particle. +```{python} +#| echo: false sns.lineplot( - data = importations, + data=importations, x="time", y="imported_infections", units="id", estimator=None, + alpha=0.05, ) +plt.title(f"Imported infections over time ({particle_count} posterior samples)") +plt.xlabel("Time (days since simulation start)") +plt.ylabel("Number of imported infections") +plt.tight_layout() plt.show() +``` +We can project the cumulative number of deaths observed in the model up to the evaluation timepoint. Because the last SMC step has a distance tolerance threshold greater than zero, we see some simulations accrue deaths before the first reported death in the data. 
+```{python} +#| echo: false sns.lineplot( - data = deaths, - x="time", + data=deaths.filter(pl.col('t') <= 75), + x="t", y="count", - units="id", +) +plt.title(f"Total observed deaths over time ({particle_count} posterior samples)") +plt.xlabel("Time (days since simulation start)") +plt.ylabel("Number of deaths") +plt.axvline(x=75, color="black", linestyle="--", label="First death reported (March 16, 2020)") +plt.tight_layout() +plt.show() +``` + +Finally, we can plot the same trajectories for number of infections observed in the model over time, which includes both imported infections and locally acquired infections. This figure shows the issue with calibratin the model with a small population size (1,000 people). The susceptible pool is rapidly depleted by local transmission, and a high proportion of the population is already recovered by the timepoint of evaluation comparing the model results to the first reported death in the data. + +```{python} +#| echo: false +sir_data = all_prevalence_data.group_by( + 't', 'infection_status', 'id' +).agg( + pl.sum('count') +) + +sns.lineplot( + data = sir_data, + x='t', + y='count', + hue='infection_status', + units='id', estimator=None, + alpha=0.05 ) +plt.title(f"Individuals by infection status over time ({particle_count} posterior samples)") +plt.xlabel("Time (days since simulation start)") +plt.axvline(x=65, color="black", linestyle="--", label="First case reported (March 6, 2020)") +plt.axvline(x=75, color="red", linestyle="--", label="First death reported (March 16, 2020)") +plt.ylabel("Number of people") +plt.tight_layout() +plt.show() ``` diff --git a/scripts/plot_calibration.py b/scripts/plot_calibration.py deleted file mode 100644 index acd9389..0000000 --- a/scripts/plot_calibration.py +++ /dev/null @@ -1,158 +0,0 @@ -import json -import os -import pickle -import tempfile -from pathlib import Path - -import matplotlib.pyplot as plt -import numpy as np -import polars as pl -import seaborn as sns -from 
calibrationtools import ( - Particle, - default_particle_reader, -) -from calibrationtools.calibration_results import CalibrationResults - -from ixa_epi_covid import CovidModel - -with open( - Path("experiments", "phase1", "calibration", "output", "results.pkl"), "rb" -) as fp: - results: CalibrationResults = pickle.load(fp) - -diagnostics = results.get_diagnostics() -print( - json.dumps( - { - k1: {k2: float(v2) for k2, v2 in v1.items()} - for k1, v1 in diagnostics["quantiles"].items() - }, - indent=4, - ) -) - -posterior_samples = results.sample_posterior_particles(n=int(results.ess)) - - -for param in diagnostics["quantiles"].keys(): - vals = [p[param] for p in posterior_samples] - min_val = min(vals) - max_val = max(vals) - print(param, min_val, max_val) - sns.histplot(x=vals, stat="density") - eval_points = np.arange( - min_val - np.var(vals), max_val + np.var(vals), 0.01 - ) - param_prior = None - for prior in results.priors.priors: - if prior.param == param: - param_prior = prior - break - if not param_prior: - raise (ValueError, f"Could not find prior {param}") - - density_vals = [ - param_prior.probability_density(Particle({param: v})) - for v in eval_points - ] - - sns.lineplot( - data=pl.DataFrame({param: list(eval_points), "density": density_vals}), - x=param, - y="density", - ) - plt.show() - -# Re-generating a random sample of parameter sets from posterior - -particles = results.sample_posterior_particles(n=100) -default_params_file = Path( - "experiments", "phase1", "input", "default_params.json" -) - -with open(default_params_file, "rb") as fp: - default_params = json.load(fp) - -mrp_defaults = { - "ixa_inputs": default_params, - "config_inputs": { - "exe_file": "./target/release/ixa-epi-covid", - "output_dir": "./experiments/phase1/calibration/output", - "force_overwrite": True, - }, - "importation_inputs": {"state": "Indiana", "year": 2020}, -} - -uniq_id = 0 -model = CovidModel() -importation_curves = [] -prevalence_data = [] - -with 
tempfile.TemporaryDirectory() as tmpdir: - for p in particles: - model_inputs = default_particle_reader( - p, - default_params=mrp_defaults, - parameter_headers=["ixa_inputs", "epimodel.GlobalParams"], - ) - - model_inputs["config_inputs"]["output_dir"] = str( - Path(tmpdir, f"{uniq_id}") - ) - os.makedirs(model_inputs["config_inputs"]["output_dir"], exist_ok=True) - importation_path = Path( - tmpdir, f"{uniq_id}", "importation_timeseries.csv" - ) - - model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ - "imported_cases_timeseries" - ]["filename"] = str(importation_path) - model.simulate(model_inputs) - prevalence_data.append( - pl.read_csv( - Path( - tmpdir, - f"{uniq_id}", - model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ - "prevalence_report" - ]["filename"], - ) - ).with_columns(pl.lit(uniq_id).alias("id")) - ) - importation_curves.append( - pl.read_csv(importation_path).with_columns( - pl.lit(uniq_id).alias("id") - ) - ) - uniq_id += 1 -print(len(importation_curves)) -importations = pl.concat(importation_curves) -deaths = ( - pl.concat(prevalence_data) - .filter(pl.col("symptom_status") == "Dead") - .group_by("t", "id") - .agg(pl.sum("count")) -) - -sns.lineplot( - data=importations, - x="time", - y="imported_infections", - units="id", - estimator=None, - alpha=0.05, -) -plt.show() - -sns.lineplot( - data=deaths, - x="t", - y="count", -) -plt.show() -sns.histplot( - data=deaths.filter(pl.col("count") > 0).group_by("id").agg(pl.min("t")), - x="t", -) -plt.show() From 2d4802c78700cb8584c6235e27a1d7570a43a149 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Tue, 10 Mar 2026 14:27:01 +0000 Subject: [PATCH 16/28] added readme for recreating results --- experiments/phase1/README.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 experiments/phase1/README.md diff --git a/experiments/phase1/README.md b/experiments/phase1/README.md new file mode 100644 index 0000000..8ff36a7 --- /dev/null +++ b/experiments/phase1/README.md @@ -0,0 +1,21 
@@ +# Running the calibration routine +In order to generate the results analysis report from the `reports/` directory, first calibrate the model by using the phase 1 calibration script. Ensure that the uv environment is synced and the rust binaries have been assembled +``` +uv sync --all-packages +uv run cargo build -r +uv run python scripts/phase_1_calibration.py +``` + +Then, to render the analysis report, ensure that `tinytex` is installed with + +``` +quarto install tinyext +``` + +and then render the document using + +``` +uv run quarto render experiments/phase1/reports/calibration.qmd +``` + +The resulting file should be a PDF in the reports directory. From d335275a59caeedca90682aed91c87f301f6c516 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Tue, 10 Mar 2026 15:04:02 +0000 Subject: [PATCH 17/28] update importation timeseries plot --- experiments/phase1/reports/calibration.qmd | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index e4dfb32..8422c6e 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -190,17 +190,25 @@ plt.show() We can show the posterior variance in the imported infections time series by overlaying samples from the simulated particles. Each line in the plot below corresponds to the importation time series for a single particle. 
```{python} #| echo: false -sns.lineplot( - data=importations, +sns.scatterplot( + data=importations.filter(pl.col('imported_infections') > 0), x="time", y="imported_infections", - units="id", - estimator=None, alpha=0.05, ) +sns.pointplot( + data=importations, + x="time", + y="imported_infections", + linestyle="none", + errorbar=None, + marker="_", + native_scale=True, + color='black' +) plt.title(f"Imported infections over time ({particle_count} posterior samples)") plt.xlabel("Time (days since simulation start)") -plt.ylabel("Number of imported infections") +plt.ylabel("Number of imported infections (daily)") plt.tight_layout() plt.show() ``` From 788b45ccc6b0b4e7f86abcf72ec105a839f6c692 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Tue, 10 Mar 2026 15:37:14 +0000 Subject: [PATCH 18/28] clean up param for loop --- experiments/phase1/reports/calibration.qmd | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index 8422c6e..3ac4244 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -57,11 +57,11 @@ Histograms of posterior samples compared to the probability density of each prio #| echo: false posterior_samples = results.sample_posterior_particles(n=int(results.ess)) -for param in diagnostics["quantiles"].keys(): +for param in results.fitted_params: vals = [p[param] for p in posterior_samples] min_val = min(vals) max_val = max(vals) - print(param, min_val, max_val) + sns.histplot(x=vals, stat="density", kde=True, color='orange', edgecolor='black') eval_points = np.arange( min_val - np.var(vals), max_val + np.var(vals), 0.01 From 1a8d1b5e808db68422c5a1142030a645e8c2642d Mon Sep 17 00:00:00 2001 From: KOVALW Date: Tue, 10 Mar 2026 18:59:35 +0000 Subject: [PATCH 19/28] updated qmd with a preferred api --- experiments/phase1/reports/calibration.qmd | 26 ++++++++++++++++++++++ 1 file changed, 26 insertions(+) 
diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index 3ac4244..e569bee 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -53,6 +53,32 @@ print( Histograms of posterior samples compared to the probability density of each prior. To generate the histogram, we sample $n$ particles from the posterior population, where $n$ is the effective sample size of the population, in order to reflect the weight distribution of the posterior. For each plot, the prior is plotted as a blue line and the histogram of the posterior samples is plotted in orange with a kernel density estimator overlay. +```{python} +#| echo: false +#| eval: false +# Preferred API ---------- +for param in results.fitted_params: + # get values and use weights to alter histogram instead of sampling with ESS + values = results.posterior_particles.get_values_of(param) + sns.histplot(values, weights=results.posterior_particles.weights) + + # Method to obtain the marginal prior and call PDF on those values for certain evaluation list + marginal_prior = results.get_marginal_prior(param) + eval_points = np.arange(min(values) - np.var(values), max(values) + np.var(values), 0.01) + prior_density = marginal_prior.probability_density(eval_points) + sns.lineplot( + data = pl.DataFrame({ + param: list(eval_points), + 'density': prior_density + }) + ) + plt.title(f"Posterior versus prior distribution") + plt.xlabel(" ".join(param.split("."))) + plt.ylabel("Density") + plt.tight_layout() + plt.show() + +``` ```{python} #| echo: false posterior_samples = results.sample_posterior_particles(n=int(results.ess)) From a37673238e7081b5f8b579a9ca001a27ebfdfe93 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Tue, 10 Mar 2026 20:57:07 +0000 Subject: [PATCH 20/28] readme typo --- experiments/phase1/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/experiments/phase1/README.md 
b/experiments/phase1/README.md index 8ff36a7..8795f37 100644 --- a/experiments/phase1/README.md +++ b/experiments/phase1/README.md @@ -9,7 +9,7 @@ uv run python scripts/phase_1_calibration.py Then, to render the analysis report, ensure that `tinytex` is installed with ``` -quarto install tinyext +quarto install tinytex ``` and then render the document using From 4b594a9677b6035b3ccc740d1026da9dc1313a8e Mon Sep 17 00:00:00 2001 From: KOVALW Date: Wed, 11 Mar 2026 20:46:41 +0000 Subject: [PATCH 21/28] rebase calibrationtools and remove symptomatic_reporting_prob from ixa inputs --- experiments/phase1/input/default_params.json | 1 - scripts/phase_1_calibration.py | 14 ++++++++++---- src/ixa_epi_covid/covid_model.py | 2 +- src/population_loader.rs | 2 +- uv.lock | 2 +- 5 files changed, 13 insertions(+), 8 deletions(-) diff --git a/experiments/phase1/input/default_params.json b/experiments/phase1/input/default_params.json index d996cb7..6b7ffad 100644 --- a/experiments/phase1/input/default_params.json +++ b/experiments/phase1/input/default_params.json @@ -3,7 +3,6 @@ "seed": 1234, "max_time": 100.0, "synth_population_file": "input/people_test.csv", - "symptomatic_reporting_prob": 0.5, "initial_prevalence": 0.0, "imported_cases_timeseries": { "include": true, diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index a94cfaf..d9d132d 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -28,9 +28,16 @@ "config_inputs": { "exe_file": "./target/release/ixa-epi-covid", "output_dir": "./experiments/phase1/calibration/output", - "force_overwrite": True, + "force_overwrite": True + # "ixa_overrides": { + # "synth_population_file": Path(os.path.expanduser(os.getenv("SYNTH_POPULATION_DIR"))) / "in.csv" + # } + }, + "importation_inputs": { + "state": "Indiana", + "year": 2020, + "symptomatic_reporting_prob": 0.5, }, - "importation_inputs": {"state": "Indiana", "year": 2020}, } output_dir = 
Path(mrp_defaults["config_inputs"]["output_dir"]) @@ -64,9 +71,8 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): else: return 1000 - sampler = ABCSampler( - generation_particle_count=750, + generation_particle_count=500, tolerance_values=[30.0, 20.0, 10.0, 5.0], priors=P, perturbation_kernel=K, diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py index 738663c..dd96035 100644 --- a/src/ixa_epi_covid/covid_model.py +++ b/src/ixa_epi_covid/covid_model.py @@ -43,7 +43,7 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: proportion_asymptomatic = ixa_inputs["epimodel.GlobalParams"][ "probability_mild_given_infect" ] - symptomatic_reporting_prob = ixa_inputs["epimodel.GlobalParams"][ + symptomatic_reporting_prob = model_inputs["importation_inputs"][ "symptomatic_reporting_prob" ] diff --git a/src/population_loader.rs b/src/population_loader.rs index 91c072d..69b13a1 100644 --- a/src/population_loader.rs +++ b/src/population_loader.rs @@ -61,7 +61,7 @@ fn create_person_from_record( append_itinerary_entry( &mut itinerary, context, - SettingId::new(School, school_string.parse()?), + SettingId::new(School, school_string.replace("xprvx", "0").parse()?), None, )?; } diff --git a/uv.lock b/uv.lock index dfc6cec..10da9fa 100644 --- a/uv.lock +++ b/uv.lock @@ -163,7 +163,7 @@ css = [ [[package]] name = "calibrationtools" version = "0.1.1" -source = { git = "https://github.com/CDCgov/cfa-calibration-tools.git?branch=wtk-dev#95c00f9a19421af549e3fc9a7fdfb3e186f83713" } +source = { git = "https://github.com/CDCgov/cfa-calibration-tools.git?branch=wtk-dev#67583fb42527f89516cb9d711b4d25a18b14b1cb" } dependencies = [ { name = "cfa-mrp" }, { name = "jsonschema" }, From 5bfd950d6f13f3337eb862c7f5f23eb162cdb5f6 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Wed, 11 Mar 2026 20:47:09 +0000 Subject: [PATCH 22/28] precommit --- scripts/phase_1_calibration.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index d9d132d..8c0704d 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -28,13 +28,13 @@ "config_inputs": { "exe_file": "./target/release/ixa-epi-covid", "output_dir": "./experiments/phase1/calibration/output", - "force_overwrite": True + "force_overwrite": True, # "ixa_overrides": { # "synth_population_file": Path(os.path.expanduser(os.getenv("SYNTH_POPULATION_DIR"))) / "in.csv" # } }, "importation_inputs": { - "state": "Indiana", + "state": "Indiana", "year": 2020, "symptomatic_reporting_prob": 0.5, }, @@ -71,6 +71,7 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): else: return 1000 + sampler = ABCSampler( generation_particle_count=500, tolerance_values=[30.0, 20.0, 10.0, 5.0], From b37ad1f9671f4f2ed6c49092089b540d5928aa3d Mon Sep 17 00:00:00 2001 From: KOVALW Date: Thu, 12 Mar 2026 18:58:16 +0000 Subject: [PATCH 23/28] override population --- experiments/phase1/reports/calibration.qmd | 20 +++-- scripts/phase_1_calibration.py | 88 ++++++++++++++-------- 2 files changed, 68 insertions(+), 40 deletions(-) diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index e569bee..af73ba4 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -19,11 +19,12 @@ import matplotlib.pyplot as plt import numpy as np import polars as pl import json -from calibrationtools import default_particle_reader +from calibrationtools import ParticleReader import tempfile import polars as pl import os from ixa_epi_covid import CovidModel +from mrp.api import apply_dict_overrides os.chdir('../../../') @@ -129,6 +130,10 @@ with open(default_params_file, "rb") as fp: default_params = json.load(fp) default_params['epimodel.GlobalParams']['max_time'] = 200 +ixa_overrides = { + "synth_population_file": 
"/mnt/S_CFA_Predict/team-CMEI/synthetic_populations/cbsa_all_work_school_household_2020-04-24/cbsa_all_work_school_household/IN/Bloomington IN.csv" +} +default_params = apply_dict_overrides(default_params, {'epimodel.GlobalParams': ixa_overrides}) mrp_defaults = { 'ixa_inputs': default_params, @@ -137,7 +142,11 @@ mrp_defaults = { "output_dir": "./experiments/phase1/calibration/output", "force_overwrite": True, }, - "importation_inputs": {"state": "Indiana", "year": 2020}, + "importation_inputs": { + "state": "Indiana", + "year": 2020, + "symptomatic_reporting_prob": 0.5 + }, } uniq_id = 0 @@ -146,12 +155,9 @@ importation_curves = [] prevalence_data = [] with tempfile.TemporaryDirectory() as tmpdir: + reader = ParticleReader(results.priors.params, mrp_defaults) for p in particles: - model_inputs = default_particle_reader( - p, - default_params=mrp_defaults, - parameter_headers=["ixa_inputs", "epimodel.GlobalParams"], - ) + model_inputs = reader.read_particle(p) model_inputs["config_inputs"]["output_dir"] = str( Path(tmpdir, f"{uniq_id}") diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index 8c0704d..2bead88 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -12,26 +12,59 @@ MultivariateNormalKernel, SeedKernel, ) +from mrp.api import apply_dict_overrides from ixa_epi_covid import CovidModel -with open(Path("experiments", "phase1", "input", "priors.json"), "r") as f: - priors = json.load(f) +# Run-specific parameters declaration ------------------------------------------------------ +# Default model and parameters +exe_file = Path("target", "release", "ixa-epi-covid") +output_dir = Path("experiments", "phase1", "calibration", "output") +default_ixa_params_file = Path( + "experiments", "phase1", "input", "default_params.json" +) +ixa_overrides = { + "synth_population_file": 
"/mnt/S_CFA_Predict/team-CMEI/synthetic_populations/cbsa_all_work_school_household_2020-04-24/cbsa_all_work_school_household/IN/Bloomington IN.csv" +} +force_overwrite = True -with open( - Path("experiments", "phase1", "input", "default_params.json"), "r" -) as f: +# State importation model declaration parameters +state = "Indiana" +year = 2020 + +# Calibration inputs +priors_file = Path("experiments", "phase1", "input", "priors.json") +tolerance_values = [30.0, 20.0, 10.0, 5.0] # , 2.0, 0.01] +generation_particle_count = 500 +target_data = 75 + + +# Output processing function for calibration +def outputs_to_distance(model_output: pl.DataFrame, target_data: int): + first_death_observed = model_output.filter( + (pl.col("event") == "Dead") & (pl.col("count") > 0) + ).filter(pl.col("t_upper") == pl.min("t_upper")) + if first_death_observed.height > 0: + return abs(target_data - first_death_observed.item(0, "t_upper")) + else: + return 1000 + + +# Load environment files, defaults, and setup configurations --------------------- +with open(default_ixa_params_file, "r") as f: default_params = json.load(f) + +default_params = apply_dict_overrides( + default_params, {"epimodel.GlobalParams": ixa_overrides} +) + mrp_defaults = { "ixa_inputs": default_params, "config_inputs": { - "exe_file": "./target/release/ixa-epi-covid", - "output_dir": "./experiments/phase1/calibration/output", - "force_overwrite": True, - # "ixa_overrides": { - # "synth_population_file": Path(os.path.expanduser(os.getenv("SYNTH_POPULATION_DIR"))) / "in.csv" - # } + "exe_file": str(exe_file), + "output_dir": str(output_dir), + "force_overwrite": force_overwrite, }, "importation_inputs": { "state": "Indiana", @@ -49,45 +82,34 @@ output_dir.mkdir(parents=True, exist_ok=False) -P = priors +# Create the model and sampler objects ------------------------------------------------ +with open(priors_file, "r") as f: + priors = json.load(f) + +P: dict[dict, dict] = priors K = IndependentKernels( [ - 
MultivariateNormalKernel( - [p for p in P["priors"].keys()], - ), + MultivariateNormalKernel(list(P["priors"].keys())), SeedKernel("seed"), ] ) model = CovidModel() - -def outputs_to_distance(model_output: pl.DataFrame, target_data: int): - first_death_observed = model_output.filter( - (pl.col("event") == "Dead") & (pl.col("count") > 0) - ).filter(pl.col("t_upper") == pl.min("t_upper")) - if first_death_observed.height > 0: - return abs(target_data - first_death_observed.item(0, "t_upper")) - else: - return 1000 - - sampler = ABCSampler( - generation_particle_count=500, - tolerance_values=[30.0, 20.0, 10.0, 5.0], + generation_particle_count=generation_particle_count, + tolerance_values=tolerance_values, priors=P, perturbation_kernel=K, variance_adapter=AdaptMultivariateNormalVariance(), outputs_to_distance=outputs_to_distance, - target_data=75, + target_data=target_data, model_runner=model, seed=123, ) -results = sampler.run( - default_params=mrp_defaults, - parameter_headers=["ixa_inputs", "epimodel.GlobalParams"], -) +# Execute the sampler ---------------------------------------------------------------------- +results = sampler.run(default_params=mrp_defaults) print(results) From fe6ec72c24eef4daa1e0677eb9e749e395f43be6 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 13 Mar 2026 18:30:24 +0000 Subject: [PATCH 24/28] reduce run time workarounds --- experiments/phase1/input/default_params.json | 3 +- experiments/phase1/input/priors.json | 2 +- experiments/phase1/reports/calibration.qmd | 8 +++-- input/input.json | 3 +- scripts/phase_1_calibration.py | 31 ++++++++++++++------ src/abort_run.rs | 24 +++++++++++++++ src/lib.rs | 1 + src/model.rs | 4 +-- src/parameters.rs | 3 ++ 9 files changed, 62 insertions(+), 17 deletions(-) create mode 100644 src/abort_run.rs diff --git a/experiments/phase1/input/default_params.json b/experiments/phase1/input/default_params.json index 6b7ffad..64ec9f4 100644 --- a/experiments/phase1/input/default_params.json +++ 
b/experiments/phase1/input/default_params.json @@ -49,6 +49,7 @@ "transmission_report": { "write": false, "filename": "transmission_report.csv" - } + }, + "first_death_terminates_run": false } } diff --git a/experiments/phase1/input/priors.json b/experiments/phase1/input/priors.json index 8b41919..6334f50 100644 --- a/experiments/phase1/input/priors.json +++ b/experiments/phase1/input/priors.json @@ -25,7 +25,7 @@ "distribution": "uniform", "parameters": { "min": 0.1, - "max": 2.0 + "max": 1.2 } } } diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index af73ba4..f51260e 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -122,7 +122,7 @@ for param in results.fitted_params: #| echo: false ## Obtaining the importation time series and death incidence data frmaes for a random smaple form the posterior # Re-generating a random sample of parameter sets from posterior -particle_count = 100 +particle_count = int(min(100, results.ess)) particles = results.sample_posterior_particles(n=particle_count) default_params_file = wd / 'input' / 'default_params.json' @@ -271,14 +271,16 @@ sir_data = all_prevalence_data.group_by( pl.sum('count') ) -sns.lineplot( +sns.relplot( data = sir_data, x='t', y='count', + kind='line', hue='infection_status', units='id', estimator=None, - alpha=0.05 + alpha=0.1, + row='infection_status', ) plt.title(f"Individuals by infection status over time ({particle_count} posterior samples)") plt.xlabel("Time (days since simulation start)") diff --git a/input/input.json b/input/input.json index e80ec54..6d3213b 100644 --- a/input/input.json +++ b/input/input.json @@ -49,6 +49,7 @@ "transmission_report": { "write": true, "filename": "transmission_report.csv" - } + }, + "first_death_terminates_run": false } } diff --git a/scripts/phase_1_calibration.py b/scripts/phase_1_calibration.py index 2bead88..9f0c228 100644 --- a/scripts/phase_1_calibration.py 
+++ b/scripts/phase_1_calibration.py @@ -2,6 +2,7 @@ import os import pickle import shutil +import timeit from pathlib import Path import polars as pl @@ -24,9 +25,10 @@ "experiments", "phase1", "input", "default_params.json" ) ixa_overrides = { - "synth_population_file": "/mnt/S_CFA_Predict/team-CMEI/synthetic_populations/cbsa_all_work_school_household_2020-04-24/cbsa_all_work_school_household/IN/Bloomington IN.csv" + "synth_population_file": "/mnt/S_CFA_Predict/team-CMEI/synthetic_populations/cbsa_all_work_school_household_2020-04-24/cbsa_all_work_school_household/IN/Bloomington IN.csv", + "first_death_terminates_run": True } -force_overwrite = True +force_overwrite = False # State importation model declaration parameters state = "Indiana" @@ -34,7 +36,7 @@ # Calibration inputs priors_file = Path("experiments", "phase1", "input", "priors.json") -tolerance_values = [30.0, 20.0, 10.0, 5.0] # , 2.0, 0.01] +tolerance_values = [2.0, 0.1] # , 2.0, 0.01] generation_particle_count = 500 target_data = 75 @@ -54,6 +56,13 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): with open(default_ixa_params_file, "r") as f: default_params = json.load(f) +ixa_overrides.update( + { + "epimodel.GlobalParams": { + "max_time": target_data + tolerance_values[0] + 1 + } + } +) default_params = apply_dict_overrides( default_params, {"epimodel.GlobalParams": ixa_overrides} @@ -74,11 +83,13 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): } output_dir = Path(mrp_defaults["config_inputs"]["output_dir"]) -if ( - os.path.exists(output_dir) - and mrp_defaults["config_inputs"]["force_overwrite"] -): - shutil.rmtree(str(output_dir)) +if os.path.exists(output_dir): + if force_overwrite: + shutil.rmtree(str(output_dir)) + else: + raise FileExistsError( + f"Output directory {output_dir} already exists and force_overwrite is set to False." 
+ ) output_dir.mkdir(parents=True, exist_ok=False) @@ -109,8 +120,10 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): ) # Execute the sampler ---------------------------------------------------------------------- +start = timeit.default_timer() # Start the timer results = sampler.run(default_params=mrp_defaults) - +finish = timeit.default_timer() # Stop the timer +print(f"Calibration completed in {finish - start:.2f} seconds.") print(results) diagnostics = results.get_diagnostics() diff --git a/src/abort_run.rs b/src/abort_run.rs new file mode 100644 index 0000000..712738d --- /dev/null +++ b/src/abort_run.rs @@ -0,0 +1,24 @@ +use crate::{ + parameters::{ContextParametersExt, Params}, + population_loader::Person, + symptom_status_manager::SymptomStatus, +}; +use ixa::prelude::*; + +pub fn init(context: &mut Context) { + let &Params { + first_death_terminates_run, + .. + } = context.get_params(); + if first_death_terminates_run { + context.subscribe_to_event::>( + move |context, event| { + if event.current == SymptomStatus::Dead { + context.add_plan(context.get_current_time() + 1.0, move |context| { + context.shutdown(); + }); + } + }, + ); + } +} diff --git a/src/lib.rs b/src/lib.rs index c293f5a..cf2f0ca 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,3 +1,4 @@ +pub mod abort_run; pub mod infection_importation; pub mod infection_propagation_loop; pub mod infectiousness_manager; diff --git a/src/model.rs b/src/model.rs index 7bf8d08..1517494 100644 --- a/src/model.rs +++ b/src/model.rs @@ -2,7 +2,7 @@ use ixa::{ExecutionPhase, prelude::*}; use crate::{ infection_importation, infection_propagation_loop, population_loader, reports, settings, - symptom_status_manager, + symptom_status_manager, abort_run }; pub fn initialize_model(context: &mut Context, seed: u64, max_time: f64) -> Result<(), IxaError> { @@ -24,6 +24,6 @@ pub fn initialize_model(context: &mut Context, seed: u64, max_time: f64) -> Resu infection_propagation_loop::init(context)?; 
infection_importation::init(context)?; reports::init(context)?; - + abort_run::init(context); Ok(()) } diff --git a/src/parameters.rs b/src/parameters.rs index a408643..57f272b 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -82,6 +82,8 @@ pub struct Params { pub incidence_report: ReportParams, /// Transmission report with a name required pub transmission_report: ReportParams, + /// Terminate run on first observed death + pub first_death_terminates_run: bool, } #[allow(clippy::too_many_lines)] @@ -259,6 +261,7 @@ impl Default for Params { filename: None, period: None, }, + first_death_terminates_run: false, } } } From 4ee830789e9c615062d6082cab81e532e5029a2a Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 13 Mar 2026 19:32:08 +0000 Subject: [PATCH 25/28] update model output return --- experiments/phase1/reports/calibration.qmd | 59 +++++++--------------- scripts/phase_1_calibration.py | 25 +++++---- src/ixa_epi_covid/covid_model.py | 28 ++++++---- src/model.rs | 4 +- 4 files changed, 56 insertions(+), 60 deletions(-) diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index f51260e..413b280 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -52,12 +52,12 @@ print( ) ``` -Histograms of posterior samples compared to the probability density of each prior. To generate the histogram, we sample $n$ particles from the posterior population, where $n$ is the effective sample size of the population, in order to reflect the weight distribution of the posterior. For each plot, the prior is plotted as a blue line and the histogram of the posterior samples is plotted in orange with a kernel density estimator overlay. +Histograms of posterior samples compared to the probability density of each prior. For each plot, the prior is plotted as a blue line and the histogram of the posterior samples is plotted in orange with a kernel density estimator overlay. 
To generate the histogram, we sample $n$ particles from the posterior population, where $n$ is the effective sample size of the population. Re-sampling particles in this manner reflects the posterior weight distribution, the probability mass function over accepted particles determined by the ABC-SMC algorithm perturbation kernel and prior distributions. ```{python} #| echo: false #| eval: false -# Preferred API ---------- +# Preferred API for development in calibrationtools---------- for param in results.fitted_params: # get values and use weights to alter histogram instead of sampling with ESS values = results.posterior_particles.get_values_of(param) @@ -120,7 +120,7 @@ for param in results.fitted_params: ``` ```{python} #| echo: false -## Obtaining the importation time series and death incidence data frmaes for a random smaple form the posterior +## Obtaining the importation time series and death incidence data frames for a random smaple form the posterior # Re-generating a random sample of parameter sets from posterior particle_count = int(min(100, results.ess)) particles = results.sample_posterior_particles(n=particle_count) @@ -129,9 +129,12 @@ default_params_file = wd / 'input' / 'default_params.json' with open(default_params_file, "rb") as fp: default_params = json.load(fp) -default_params['epimodel.GlobalParams']['max_time'] = 200 ixa_overrides = { - "synth_population_file": "/mnt/S_CFA_Predict/team-CMEI/synthetic_populations/cbsa_all_work_school_household_2020-04-24/cbsa_all_work_school_household/IN/Bloomington IN.csv" + "synth_population_file": "/mnt/S_CFA_Predict/team-CMEI/synthetic_populations/cbsa_all_work_school_household_2020-04-24/cbsa_all_work_school_household/IN/Bloomington IN.csv", + "imported_cases_timeseries": { + "filename": "./experiments/phase1/projection/imported_cases_timeseries.csv" + }, + "max_time": 200 } default_params = apply_dict_overrides(default_params, {'epimodel.GlobalParams': ixa_overrides}) @@ -139,8 +142,9 @@ mrp_defaults = { 
'ixa_inputs': default_params, "config_inputs": { "exe_file": "./target/release/ixa-epi-covid", - "output_dir": "./experiments/phase1/calibration/output", + "output_dir": "./experiments/phase1/projection/output", "force_overwrite": True, + "outputs_to_read": ['prevalence_report', 'imported_cases_timeseries'] }, "importation_inputs": { "state": "Indiana", @@ -153,41 +157,16 @@ uniq_id = 0 model = CovidModel() importation_curves = [] prevalence_data = [] +os.makedirs(mrp_defaults["config_inputs"]["output_dir"], exist_ok=True) + +reader = ParticleReader(results.priors.params, mrp_defaults) +for p in particles: + model_inputs = reader.read_particle(p) + outputs = model.simulate(model_inputs) -with tempfile.TemporaryDirectory() as tmpdir: - reader = ParticleReader(results.priors.params, mrp_defaults) - for p in particles: - model_inputs = reader.read_particle(p) - - model_inputs["config_inputs"]["output_dir"] = str( - Path(tmpdir, f"{uniq_id}") - ) - os.makedirs(model_inputs["config_inputs"]["output_dir"], exist_ok=True) - importation_path = Path( - tmpdir, f"{uniq_id}", "importation_timeseries.csv" - ) - - model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ - "imported_cases_timeseries" - ]["filename"] = str(importation_path) - model.simulate(model_inputs) - prevalence_data.append( - pl.read_csv( - Path( - tmpdir, - f"{uniq_id}", - model_inputs["ixa_inputs"]["epimodel.GlobalParams"][ - "prevalence_report" - ]["filename"], - ) - ).with_columns(pl.lit(uniq_id).alias("id")) - ) - importation_curves.append( - pl.read_csv(importation_path).with_columns( - pl.lit(uniq_id).alias("id") - ) - ) - uniq_id += 1 + importation_curves.append(outputs["imported_cases_timeseries"].with_columns(pl.lit(uniq_id).alias("id"))) + prevalence_data.append(outputs["prevalence_report"].with_columns(pl.lit(uniq_id).alias("id"))) + uniq_id += 1 importations = pl.concat(importation_curves) all_prevalence_data = pl.concat(prevalence_data) diff --git a/scripts/phase_1_calibration.py 
b/scripts/phase_1_calibration.py index 9f0c228..d1adcd4 100644 --- a/scripts/phase_1_calibration.py +++ b/scripts/phase_1_calibration.py @@ -26,26 +26,32 @@ ) ixa_overrides = { "synth_population_file": "/mnt/S_CFA_Predict/team-CMEI/synthetic_populations/cbsa_all_work_school_household_2020-04-24/cbsa_all_work_school_household/IN/Bloomington IN.csv", - "first_death_terminates_run": True + "first_death_terminates_run": True, } force_overwrite = False +outputs_to_read = ["incidence_report"] # State importation model declaration parameters state = "Indiana" year = 2020 +symptomatic_reporting_prob_default = 0.5 # Calibration inputs priors_file = Path("experiments", "phase1", "input", "priors.json") -tolerance_values = [2.0, 0.1] # , 2.0, 0.01] +tolerance_values = [2.0, 0.1] generation_particle_count = 500 target_data = 75 # Output processing function for calibration -def outputs_to_distance(model_output: pl.DataFrame, target_data: int): - first_death_observed = model_output.filter( - (pl.col("event") == "Dead") & (pl.col("count") > 0) - ).filter(pl.col("t_upper") == pl.min("t_upper")) +def outputs_to_distance( + model_output: dict[str, pl.DataFrame], target_data: int +): + first_death_observed = ( + model_output["incidence_report"] + .filter((pl.col("event") == "Dead") & (pl.col("count") > 0)) + .filter(pl.col("t_upper") == pl.min("t_upper")) + ) if first_death_observed.height > 0: return abs(target_data - first_death_observed.item(0, "t_upper")) else: @@ -74,11 +80,12 @@ def outputs_to_distance(model_output: pl.DataFrame, target_data: int): "exe_file": str(exe_file), "output_dir": str(output_dir), "force_overwrite": force_overwrite, + "outputs_to_read": outputs_to_read, }, "importation_inputs": { - "state": "Indiana", - "year": 2020, - "symptomatic_reporting_prob": 0.5, + "state": state, + "year": year, + "symptomatic_reporting_prob": symptomatic_reporting_prob_default, }, } diff --git a/src/ixa_epi_covid/covid_model.py b/src/ixa_epi_covid/covid_model.py index 
dd96035..2e2241d 100644 --- a/src/ixa_epi_covid/covid_model.py +++ b/src/ixa_epi_covid/covid_model.py @@ -85,7 +85,7 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: str(input_file_path), "--output", config_inputs["output_dir"], - "-f", + "--force-overwrite", "--no-stats", ] @@ -99,11 +99,21 @@ def simulate(model_inputs: dict[str, Any]) -> pl.DataFrame: raise e # Read the model incidence report from the specified location and return as a DataFrame - incidence_report_filename = ixa_inputs["epimodel.GlobalParams"][ - "incidence_report" - ]["filename"] - incidence_report_path = Path( - config_inputs["output_dir"], incidence_report_filename - ) - - return pl.read_csv(incidence_report_path) + outputs = {} + for output in config_inputs["outputs_to_read"]: + fp = ixa_inputs["epimodel.GlobalParams"][output]["filename"] + if Path(config_inputs["output_dir"], fp).exists(): + outputs.update( + { + output: pl.read_csv( + Path(config_inputs["output_dir"], fp) + ) + } + ) + elif Path(fp).exists(): + outputs.update({output: pl.read_csv(Path(fp))}) + else: + raise FileNotFoundError( + f"Expected output file {fp} not found. 
Looked in {config_inputs['output_dir']}" + ) + return outputs diff --git a/src/model.rs b/src/model.rs index 1517494..3a7a7e1 100644 --- a/src/model.rs +++ b/src/model.rs @@ -1,8 +1,8 @@ use ixa::{ExecutionPhase, prelude::*}; use crate::{ - infection_importation, infection_propagation_loop, population_loader, reports, settings, - symptom_status_manager, abort_run + abort_run, infection_importation, infection_propagation_loop, population_loader, reports, + settings, symptom_status_manager, }; pub fn initialize_model(context: &mut Context, seed: u64, max_time: f64) -> Result<(), IxaError> { From 542da1ade40423c930fcc63b4fa76274a81147a9 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 13 Mar 2026 19:35:45 +0000 Subject: [PATCH 26/28] fix test string for report --- src/reports/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/reports/mod.rs b/src/reports/mod.rs index 7a8828c..e7d4105 100644 --- a/src/reports/mod.rs +++ b/src/reports/mod.rs @@ -159,6 +159,7 @@ mod test { "write": true, "filename": "transmission.csv" } + "first_death_terminates_run": false } } "#; From 158208c387d2dba2c023370e16c95f405b664f4c Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 13 Mar 2026 19:43:18 +0000 Subject: [PATCH 27/28] comma missing --- src/reports/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reports/mod.rs b/src/reports/mod.rs index e7d4105..0c6c952 100644 --- a/src/reports/mod.rs +++ b/src/reports/mod.rs @@ -158,7 +158,7 @@ mod test { "transmission_report": { "write": true, "filename": "transmission.csv" - } + }, "first_death_terminates_run": false } } From 96737c9a38295330549c03519681d6aae98655b2 Mon Sep 17 00:00:00 2001 From: KOVALW Date: Fri, 13 Mar 2026 20:11:20 +0000 Subject: [PATCH 28/28] figure updates --- experiments/phase1/reports/calibration.qmd | 63 +++++++++------------- 1 file changed, 25 insertions(+), 38 deletions(-) diff --git a/experiments/phase1/reports/calibration.qmd b/experiments/phase1/reports/calibration.qmd index 
413b280..9693a75 100644 --- a/experiments/phase1/reports/calibration.qmd +++ b/experiments/phase1/reports/calibration.qmd @@ -152,6 +152,7 @@ mrp_defaults = { "symptomatic_reporting_prob": 0.5 }, } +reader = ParticleReader(results.priors.params, mrp_defaults) uniq_id = 0 model = CovidModel() @@ -159,7 +160,6 @@ importation_curves = [] prevalence_data = [] os.makedirs(mrp_defaults["config_inputs"]["output_dir"], exist_ok=True) -reader = ParticleReader(results.priors.params, mrp_defaults) for p in particles: model_inputs = reader.read_particle(p) outputs = model.simulate(model_inputs) @@ -178,27 +178,7 @@ deaths = ( ) ``` -For a final SMC step with threshold toelrance above zero, we will observe some variance in the date of first reported death. Here we show a sampled histogram of the timing of the first death across the posterior simulations. The vertical dashed line indicates the observed timing of the first death in Indiana (March 16, 2020, or 75 days after the start of the simulation on January 1, 2020). We can see that while although the distribution is centered around the observed timing, there is a higher weight on earlier first reported deaths, indicating that imported infections occur faster in the simulated model than they occured in real life. This may be due to the proprotional sampling of Indiana importations from the domestic level imported infections, when in fact they may have been less probable due to mobility flow patterns. 
- -```{python} -#| echo: false - -first_deaths = deaths.filter(pl.col("count") > 0).group_by("id").agg(pl.min("t")) -sns.histplot( - data=first_deaths, - x="t", -) -plt.axvline(x=75, color="black", linestyle="--", label="Observed (March 16, 2020)") -plt.title(f"Distribution of first death times ({particle_count} posterior samples)") -plt.xlabel("Time of first death (days since simulation start)") -plt.ylabel("Number of simulations") -plt.legend() -plt.tight_layout() -plt.show() - -``` - -We can show the posterior variance in the imported infections time series by overlaying samples from the simulated particles. Each line in the plot below corresponds to the importation time series for a single particle. +We can show the posterior variance in the imported infections time series by overlaying samples from the simulated particles. Blue points on the plot represent the count of imported infections for a given day in a given simulation if the number of importations was above zero. The black line shows the median and 95% credible interval of the number of imported infections on each day from January 1st to March 12th. ```{python} #| echo: false sns.scatterplot( @@ -207,15 +187,11 @@ sns.scatterplot( y="imported_infections", alpha=0.05, ) -sns.pointplot( +sns.lineplot( data=importations, x="time", y="imported_infections", - linestyle="none", - errorbar=None, - marker="_", - native_scale=True, - color='black' + estimator='median' ) plt.title(f"Imported infections over time ({particle_count} posterior samples)") plt.xlabel("Time (days since simulation start)") @@ -223,14 +199,21 @@ plt.ylabel("Number of imported infections (daily)") plt.tight_layout() plt.show() ``` -We can project the cumulative number of deaths observed in the model up to the evaluation timepoint. Because the last SMC step has a distance tolerance threshold greater than zero, we see some simulations accrue deaths before the first reported death in the data. 
+We can project the cumulative number of deaths observed in the model projections. Because the last SMC step has a distance tolerance threshold below one, we see no simulations accrue deaths before the first reported death in the data. ```{python} #| echo: false sns.lineplot( - data=deaths.filter(pl.col('t') <= 75), + data=deaths, x="t", y="count", + units='id', estimator=None, + alpha=0.05 +) +sns.lineplot( + data=deaths, + x="t", + y="count" ) plt.title(f"Total observed deaths over time ({particle_count} posterior samples)") plt.xlabel("Time (days since simulation start)") @@ -240,7 +223,7 @@ plt.tight_layout() plt.show() ``` -Finally, we can plot the same trajectories for number of infections observed in the model over time, which includes both imported infections and locally acquired infections. This figure shows the issue with calibratin the model with a small population size (1,000 people). The susceptible pool is rapidly depleted by local transmission, and a high proportion of the population is already recovered by the timepoint of evaluation comparing the model results to the first reported death in the data. +Finally, we can plot the same trajectories for number of infections observed in the model over time, which includes both imported infections and locally acquired infections. We see that some epidemic trajectories are strongly delayed from first imported infections due to stochasticity. 
```{python} #| echo: false @@ -250,7 +233,7 @@ sir_data = all_prevalence_data.group_by( pl.sum('count') ) -sns.relplot( +g = sns.relplot( data = sir_data, x='t', y='count', @@ -261,11 +244,15 @@ sns.relplot( alpha=0.1, row='infection_status', ) -plt.title(f"Individuals by infection status over time ({particle_count} posterior samples)") -plt.xlabel("Time (days since simulation start)") -plt.axvline(x=65, color="black", linestyle="--", label="First case reported (March 6, 2020)") -plt.axvline(x=75, color="red", linestyle="--", label="First death reported (March 16, 2020)") -plt.ylabel("Number of people") -plt.tight_layout() +g.fig.suptitle( + f"Individuals by infection status over time ({particle_count} posterior samples)", + fontsize='x-large', + fontweight='bold' +) +g.fig.subplots_adjust(top=0.85) +g.set_axis_labels("Time (days since simulation start)", "Number of people") +for ax in g.axes.flat: + ax.axvline(x=65, color="black", linestyle="--", label="First case reported (March 6, 2020)") + ax.axvline(x=75, color="red", linestyle="--", label="First death reported (March 16, 2020)") plt.show() ```