From 6a8b391afabe7f02e1744739508c5815f66150af Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Tue, 11 Nov 2025 09:27:17 +0000 Subject: [PATCH 01/28] refactor and remove capacity term from npv objective --- src/simulation/investment/appraisal.rs | 75 ++++++++++++++++++- .../investment/appraisal/optimisation.rs | 47 ++---------- 2 files changed, 80 insertions(+), 42 deletions(-) diff --git a/src/simulation/investment/appraisal.rs b/src/simulation/investment/appraisal.rs index e768ab2ed..81e30428f 100644 --- a/src/simulation/investment/appraisal.rs +++ b/src/simulation/investment/appraisal.rs @@ -8,6 +8,7 @@ use crate::model::Model; use crate::time_slice::TimeSliceID; use crate::units::{Activity, Capacity}; use anyhow::Result; +use highs::RowProblem as Problem; use indexmap::IndexMap; use std::cmp::Ordering; @@ -17,7 +18,7 @@ mod costs; mod optimisation; use coefficients::ObjectiveCoefficients; use float_cmp::approx_eq; -use optimisation::perform_optimisation; +use optimisation::{VariableMap, perform_optimisation}; /// The output of investment appraisal required to compare potential investment decisions pub struct AppraisalOutput { @@ -60,6 +61,66 @@ impl AppraisalOutput { } } +/// Add variables to the problem for LCOX appraisal. 
+fn add_variables_to_lcox_problem( + problem: &mut Problem, + cost_coefficients: &ObjectiveCoefficients, +) -> VariableMap { + // Create capacity variable + let capacity_var = problem.add_column(cost_coefficients.capacity_coefficient.value(), 0.0..); + + // Create activity variables + let mut activity_vars = IndexMap::new(); + for (time_slice, cost) in &cost_coefficients.activity_coefficients { + let var = problem.add_column(cost.value(), 0.0..); + activity_vars.insert(time_slice.clone(), var); + } + + // Create unmet demand variables + // One per time slice, all of which use the same coefficient + let mut unmet_demand_vars = IndexMap::new(); + for time_slice in cost_coefficients.activity_coefficients.keys() { + let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); + unmet_demand_vars.insert(time_slice.clone(), var); + } + + VariableMap { + capacity_var, + activity_vars, + unmet_demand_vars, + } +} + +/// Add variables to the problem for npv appraisal. 
+fn add_variables_to_npv_problem( + problem: &mut Problem, + cost_coefficients: &ObjectiveCoefficients, +) -> VariableMap { + // Create capacity variable (zeroed coefficient - doesn't affect optimisation) + let capacity_var = problem.add_column(0.0, 0.0..); + + // Create activity variables + let mut activity_vars = IndexMap::new(); + for (time_slice, cost) in &cost_coefficients.activity_coefficients { + let var = problem.add_column(cost.value(), 0.0..); + activity_vars.insert(time_slice.clone(), var); + } + + // Create unmet demand variables + // One per time slice, all of which use the same coefficient + let mut unmet_demand_vars = IndexMap::new(); + for time_slice in cost_coefficients.activity_coefficients.keys() { + let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); + unmet_demand_vars.insert(time_slice.clone(), var); + } + + VariableMap { + capacity_var, + activity_vars, + unmet_demand_vars, + } +} + /// Calculate LCOX for a hypothetical investment in the given asset. 
/// /// This is more commonly referred to as Levelised Cost of *Electricity*, but as the model can @@ -72,15 +133,19 @@ fn calculate_lcox( coefficients: &ObjectiveCoefficients, demand: &DemandMap, ) -> Result { + let mut problem = Problem::default(); + let variables = add_variables_to_lcox_problem(&mut problem, coefficients); + // Perform optimisation to calculate capacity, activity and unmet demand let results = perform_optimisation( asset, max_capacity, commodity, - coefficients, demand, &model.time_slice_info, highs::Sense::Minimise, + problem, + &variables, )?; // Calculate LCOX for the hypothetical investment @@ -114,15 +179,19 @@ fn calculate_npv( coefficients: &ObjectiveCoefficients, demand: &DemandMap, ) -> Result { + let mut problem = Problem::default(); + let variables = add_variables_to_npv_problem(&mut problem, coefficients); + // Perform optimisation to calculate capacity, activity and unmet demand let results = perform_optimisation( asset, max_capacity, commodity, - coefficients, demand, &model.time_slice_info, highs::Sense::Maximise, + problem, + &variables, )?; // Calculate profitability index for the hypothetical investment diff --git a/src/simulation/investment/appraisal/optimisation.rs b/src/simulation/investment/appraisal/optimisation.rs index 6216a9808..9ceeb35e7 100644 --- a/src/simulation/investment/appraisal/optimisation.rs +++ b/src/simulation/investment/appraisal/optimisation.rs @@ -1,6 +1,5 @@ //! Optimisation problem for investment tools. 
use super::DemandMap; -use super::coefficients::ObjectiveCoefficients; use super::constraints::{ add_activity_constraints, add_capacity_constraint, add_demand_constraints, }; @@ -17,13 +16,13 @@ use indexmap::IndexMap; pub type Variable = highs::Col; /// Map storing variables for the optimisation problem -struct VariableMap { +pub struct VariableMap { /// Capacity variable - capacity_var: Variable, + pub capacity_var: Variable, /// Activity variables in each time slice - activity_vars: IndexMap, + pub activity_vars: IndexMap, // Unmet demand variables - unmet_demand_vars: IndexMap, + pub unmet_demand_vars: IndexMap, } /// Map containing optimisation results and coefficients @@ -36,33 +35,6 @@ pub struct ResultsMap { pub unmet_demand: DemandMap, } -/// Add variables to the problem based on cost coefficients -fn add_variables(problem: &mut Problem, cost_coefficients: &ObjectiveCoefficients) -> VariableMap { - // Create capacity variable - let capacity_var = problem.add_column(cost_coefficients.capacity_coefficient.value(), 0.0..); - - // Create activity variables - let mut activity_vars = IndexMap::new(); - for (time_slice, cost) in &cost_coefficients.activity_coefficients { - let var = problem.add_column(cost.value(), 0.0..); - activity_vars.insert(time_slice.clone(), var); - } - - // Create unmet demand variables - // One per time slice, all of which use the same coefficient - let mut unmet_demand_vars = IndexMap::new(); - for time_slice in cost_coefficients.activity_coefficients.keys() { - let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); - unmet_demand_vars.insert(time_slice.clone(), var); - } - - VariableMap { - capacity_var, - activity_vars, - unmet_demand_vars, - } -} - /// Adds constraints to the problem. fn add_constraints( problem: &mut Problem, @@ -94,28 +66,25 @@ fn add_constraints( /// Performs optimisation for an asset, given the coefficients and demand. 
/// /// Will either maximise or minimise the objective function, depending on the `sense` parameter. +#[allow(clippy::too_many_arguments)] pub fn perform_optimisation( asset: &AssetRef, max_capacity: Option, commodity: &Commodity, - coefficients: &ObjectiveCoefficients, demand: &DemandMap, time_slice_info: &TimeSliceInfo, sense: Sense, + mut problem: Problem, + variables: &VariableMap, ) -> Result { // Set up problem - let mut problem = Problem::default(); - - // Add variables - let variables = add_variables(&mut problem, coefficients); - // Add constraints add_constraints( &mut problem, asset, max_capacity, commodity, - &variables, + variables, demand, time_slice_info, ); From b470397ef07b31311991f2a8d6d3a45e8d99f44e Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Thu, 13 Nov 2025 15:48:43 +0000 Subject: [PATCH 02/28] move annual fixed cost calculation --- src/simulation/investment/appraisal.rs | 3 ++- src/simulation/investment/appraisal/coefficients.rs | 5 +---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/src/simulation/investment/appraisal.rs b/src/simulation/investment/appraisal.rs index 81e30428f..65bbf594a 100644 --- a/src/simulation/investment/appraisal.rs +++ b/src/simulation/investment/appraisal.rs @@ -8,6 +8,7 @@ use crate::model::Model; use crate::time_slice::TimeSliceID; use crate::units::{Activity, Capacity}; use anyhow::Result; +use costs::annual_fixed_cost; use highs::RowProblem as Problem; use indexmap::IndexMap; use std::cmp::Ordering; @@ -195,7 +196,7 @@ fn calculate_npv( )?; // Calculate profitability index for the hypothetical investment - let annual_fixed_cost = -coefficients.capacity_coefficient; + let annual_fixed_cost = annual_fixed_cost(asset); let activity_surpluses = &coefficients.activity_coefficients; let profitability_index = profitability_index( results.capacity, diff --git a/src/simulation/investment/appraisal/coefficients.rs b/src/simulation/investment/appraisal/coefficients.rs index 47b40edf6..46b27ad78 100644 
--- a/src/simulation/investment/appraisal/coefficients.rs +++ b/src/simulation/investment/appraisal/coefficients.rs @@ -86,9 +86,6 @@ pub fn calculate_coefficients_for_npv( prices: &CommodityPrices, year: u32, ) -> ObjectiveCoefficients { - // Capacity coefficient - let capacity_coefficient = -annual_fixed_cost(asset); - // Activity coefficients let mut activity_coefficients = IndexMap::new(); for time_slice in time_slice_info.iter_ids() { @@ -100,7 +97,7 @@ pub fn calculate_coefficients_for_npv( let unmet_demand_coefficient = MoneyPerFlow(0.0); ObjectiveCoefficients { - capacity_coefficient, + capacity_coefficient: MoneyPerCapacity(0.0), activity_coefficients, unmet_demand_coefficient, } From 6560d41644c7335a08176dc27d5fb476cdb9fa51 Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Mon, 10 Nov 2025 13:48:00 +0000 Subject: [PATCH 03/28] Check flows in milestone years only --- src/input/process.rs | 2 +- src/input/process/flow.rs | 69 +++++++++++++++++++++++++++------------ 2 files changed, 50 insertions(+), 21 deletions(-) diff --git a/src/input/process.rs b/src/input/process.rs index 3352e7d8d..c95ba05a1 100644 --- a/src/input/process.rs +++ b/src/input/process.rs @@ -61,7 +61,7 @@ pub fn read_processes( let mut processes = read_processes_file(model_dir, milestone_years, region_ids, commodities)?; let mut activity_limits = read_process_availabilities(model_dir, &processes, time_slice_info, base_year)?; - let mut flows = read_process_flows(model_dir, &mut processes, commodities)?; + let mut flows = read_process_flows(model_dir, &mut processes, commodities, milestone_years)?; let mut parameters = read_process_parameters(model_dir, &processes, base_year)?; // Add data to Process objects diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index a80f2e6cb..56b62ae09 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -11,7 +11,7 @@ use anyhow::{Context, Result, ensure}; use indexmap::IndexMap; use itertools::iproduct; use 
serde::Deserialize; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; @@ -62,10 +62,11 @@ pub fn read_process_flows( model_dir: &Path, processes: &mut ProcessMap, commodities: &CommodityMap, + milestone_years: &[u32], ) -> Result> { let file_path = model_dir.join(PROCESS_FLOWS_FILE_NAME); let process_flow_csv = read_csv(&file_path)?; - read_process_flows_from_iter(process_flow_csv, processes, commodities) + read_process_flows_from_iter(process_flow_csv, processes, commodities, milestone_years) .with_context(|| input_err_msg(&file_path)) } @@ -74,6 +75,7 @@ fn read_process_flows_from_iter( iter: I, processes: &mut ProcessMap, commodities: &CommodityMap, + milestone_years: &[u32], ) -> Result> where I: Iterator, @@ -134,8 +136,8 @@ where } } - validate_flows_and_update_primary_output(processes, &flows_map)?; - validate_secondary_flows(processes, &flows_map)?; + validate_flows_and_update_primary_output(processes, &flows_map, milestone_years)?; + validate_secondary_flows(processes, &flows_map, milestone_years)?; Ok(flows_map) } @@ -143,29 +145,33 @@ where fn validate_flows_and_update_primary_output( processes: &mut ProcessMap, flows_map: &HashMap, + milestone_years: &[u32], ) -> Result<()> { for (process_id, process) in processes.iter_mut() { let map = flows_map .get(process_id) .with_context(|| format!("Missing flows map for process {process_id}"))?; + let region_year: Vec<(&RegionID, &u32)> = + iproduct!(process.regions.iter(), milestone_years.iter()).collect(); + ensure!( - map.len() == process.years.len() * process.regions.len(), - "Flows map for process {process_id} does not cover all regions and years" + region_year + .iter() + .all(|(region_id, year)| map.contains_key(&((*region_id).clone(), **year))), + "Flows map for process {process_id} does not cover all regions and milestone years" ); - let mut iter = iproduct!(process.years.iter(), process.regions.iter()); - let primary_output = if let 
Some(primary_output) = &process.primary_output { Some(primary_output.clone()) } else { - let (year, region_id) = iter.next().unwrap(); + let (region_id, year) = region_year[0]; infer_primary_output(&map[&(region_id.clone(), *year)]).with_context(|| { format!("Could not infer primary_output for process {process_id}") })? }; - for (&year, region_id) in iter { + for (region_id, &year) in region_year { let flows = &map[&(region_id.clone(), year)]; // Check that the process has flows for this region/year @@ -236,11 +242,12 @@ fn check_flows_primary_output( Ok(()) } -/// Checks that non-primary io are defined for all years (within a region) and that -/// they are only inputs or only outputs in all years. +/// Checks that non-primary io are defined for all milestone years, at least, (within a region) and +/// that they are only inputs or only outputs in all years. fn validate_secondary_flows( processes: &mut ProcessMap, flows_map: &HashMap, + milestone_years: &[u32], ) -> Result<()> { for (process_id, process) in processes.iter() { // Get the flows for this process - there should be no error, as was checked already @@ -251,7 +258,11 @@ fn validate_secondary_flows( // Get the non-primary io flows for all years, if any, arranged by (commodity, region) let iter = iproduct!(process.years.iter(), process.regions.iter()); let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); + let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); for (&year, region_id) in iter { + if !map.contains_key(&(region_id.clone(), year)) { + continue; + } let flow = map[&(region_id.clone(), year)] .iter() .filter_map(|(commodity_id, flow)| { @@ -260,17 +271,21 @@ fn validate_secondary_flows( }); for (key, value) in flow { - flows.entry(key).or_default().push(value); + flows.entry(key.clone()).or_default().push(value); + if milestone_years.contains(&year) { + years.entry(key).or_default().insert(year); + } } } // Finally we check that the flows for a 
given commodity and region are defined for all - // years and that they are all inputs or all outputs + // milestone years and that they are all inputs or all outputs. This later check is done + // for all years, milestone or not. for ((commodity_id, region_id), value) in &flows { ensure!( - value.len() == process.years.len(), + years[&(commodity_id.clone(), region_id.clone())].len() == milestone_years.len(), "Flow of commodity {commodity_id} in region {region_id} for process {process_id} \ - does not cover all years" + does not cover all milestone years" ); let input_or_zero = value .iter() @@ -331,12 +346,16 @@ mod tests { #[rstest] fn single_output_infer_primary(#[from(svd_commodity)] commodity: Commodity, process: Process) { + let milestone_years = vec![2010, 2020]; let commodity = Rc::new(commodity); let (mut processes, flows_map) = build_maps( process, std::iter::once((commodity.id.clone(), flow(commodity.clone(), 1.0))), ); - assert!(validate_flows_and_update_primary_output(&mut processes, &flows_map).is_ok()); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity.id.clone()) @@ -349,6 +368,7 @@ mod tests { #[from(sed_commodity)] commodity2: Commodity, process: Process, ) { + let milestone_years = vec![2010, 2020]; let commodity1 = Rc::new(commodity1); let commodity2 = Rc::new(commodity2); let (mut processes, flows_map) = build_maps( @@ -359,7 +379,8 @@ mod tests { ] .into_iter(), ); - let res = validate_flows_and_update_primary_output(&mut processes, &flows_map); + let res = + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years); assert_error!(res, "Could not infer primary_output for process process1"); } @@ -369,6 +390,7 @@ mod tests { #[from(sed_commodity)] commodity2: Commodity, process: Process, ) { + let milestone_years = vec![2010, 2020]; let commodity1 = 
Rc::new(commodity1); let commodity2 = Rc::new(commodity2); let mut process = process; @@ -381,7 +403,10 @@ mod tests { ] .into_iter(), ); - assert!(validate_flows_and_update_primary_output(&mut processes, &flows_map).is_ok()); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity2.id.clone()) @@ -394,6 +419,7 @@ mod tests { #[from(sed_commodity)] commodity2: Commodity, process: Process, ) { + let milestone_years = vec![2010, 2020]; let commodity1 = Rc::new(commodity1); let commodity2 = Rc::new(commodity2); let (mut processes, flows_map) = build_maps( @@ -404,7 +430,10 @@ mod tests { ] .into_iter(), ); - assert!(validate_flows_and_update_primary_output(&mut processes, &flows_map).is_ok()); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, None From 9e7281f456cbaf74a90b128e8da52b700b8d9721 Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Mon, 10 Nov 2025 13:55:25 +0000 Subject: [PATCH 04/28] Remove unnecessary check --- src/input/process/flow.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index 56b62ae09..9700b2bde 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -260,9 +260,6 @@ fn validate_secondary_flows( let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); for (&year, region_id) in iter { - if !map.contains_key(&(region_id.clone(), year)) { - continue; - } let flow = map[&(region_id.clone(), year)] .iter() .filter_map(|(commodity_id, flow)| { From 6e9c0fab9a51c1dfdd469e00fa6a316d78a426c0 Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Tue, 11 Nov 2025 06:18:12 +0000 Subject: 
[PATCH 05/28] Check only milestone y within process range of activity --- src/input/process/flow.rs | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index 9700b2bde..5c977cffd 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -11,7 +11,7 @@ use anyhow::{Context, Result, ensure}; use indexmap::IndexMap; use itertools::iproduct; use serde::Deserialize; -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::path::Path; use std::rc::Rc; @@ -152,14 +152,18 @@ fn validate_flows_and_update_primary_output( .get(process_id) .with_context(|| format!("Missing flows map for process {process_id}"))?; + // Flows are required for all milestone years within the process years of activity + let required_years = milestone_years + .iter() + .filter(|&y| process.years.contains(y)); let region_year: Vec<(&RegionID, &u32)> = - iproduct!(process.regions.iter(), milestone_years.iter()).collect(); + iproduct!(process.regions.iter(), required_years).collect(); ensure!( region_year .iter() .all(|(region_id, year)| map.contains_key(&((*region_id).clone(), **year))), - "Flows map for process {process_id} does not cover all regions and milestone years" + "Flows map for process {process_id} does not cover all regions and required years" ); let primary_output = if let Some(primary_output) = &process.primary_output { @@ -255,10 +259,16 @@ fn validate_secondary_flows( .get(process_id) .with_context(|| format!("Missing flows map for process {process_id}"))?; + // Flows are required for all milestone years within the process years of activity + let required_years: Vec<&u32> = milestone_years + .iter() + .filter(|&y| process.years.contains(y)) + .collect(); + // Get the non-primary io flows for all years, if any, arranged by (commodity, region) let iter = iproduct!(process.years.iter(), process.regions.iter()); let mut flows: HashMap<(CommodityID, 
RegionID), Vec<&ProcessFlow>> = HashMap::new(); - let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); + let mut number_of_years: HashMap<(CommodityID, RegionID), u32> = HashMap::new(); for (&year, region_id) in iter { let flow = map[&(region_id.clone(), year)] .iter() @@ -269,20 +279,21 @@ fn validate_secondary_flows( for (key, value) in flow { flows.entry(key.clone()).or_default().push(value); - if milestone_years.contains(&year) { - years.entry(key).or_default().insert(year); + if required_years.contains(&&year) { + *number_of_years.entry(key).or_default() += 1; } } } // Finally we check that the flows for a given commodity and region are defined for all // milestone years and that they are all inputs or all outputs. This later check is done - // for all years, milestone or not. + // for all years in the process range, required or not. for ((commodity_id, region_id), value) in &flows { ensure!( - years[&(commodity_id.clone(), region_id.clone())].len() == milestone_years.len(), + number_of_years[&(commodity_id.clone(), region_id.clone())] + == required_years.len().try_into().unwrap(), "Flow of commodity {commodity_id} in region {region_id} for process {process_id} \ - does not cover all milestone years" + does not cover all milestone years within the process range of activity." 
); let input_or_zero = value .iter() From 3c74d56820025fdedcb8f1d921033917d2f0c24b Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Tue, 11 Nov 2025 06:28:09 +0000 Subject: [PATCH 06/28] white_check_mark: Update tests to cover new functionality --- src/asset.rs | 4 +-- src/fixture.rs | 12 ++++++-- src/input/process/flow.rs | 59 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 69 insertions(+), 6 deletions(-) diff --git a/src/asset.rs b/src/asset.rs index 6d40b9e8d..68f606a80 100644 --- a/src/asset.rs +++ b/src/asset.rs @@ -1023,8 +1023,8 @@ mod tests { let agent_id = AgentID("agent1".into()); let region_id = RegionID("GBR".into()); assert_error!( - Asset::new_future(agent_id, process.into(), region_id, Capacity(1.0), 2009), - "Process process1 does not operate in the year 2009" + Asset::new_future(agent_id, process.into(), region_id, Capacity(1.0), 2007), + "Process process1 does not operate in the year 2007" ); } diff --git a/src/fixture.rs b/src/fixture.rs index 051957eaa..b571cc22e 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -148,13 +148,19 @@ pub fn process( region_ids: IndexSet, process_parameter_map: ProcessParameterMap, ) -> Process { - let years = vec![2010, 2015, 2020]; + let milestone_years = vec![2010, 2015, 2020]; + // The process start year is before the base year + let years = vec![2008, 2009] + .iter() + .chain(&milestone_years) + .cloned() + .collect(); // Create maps with (empty) entries for every region/year combo - let activity_limits = iproduct!(region_ids.iter(), years.iter()) + let activity_limits = iproduct!(region_ids.iter(), milestone_years.iter()) .map(|(region_id, year)| ((region_id.clone(), *year), Rc::new(HashMap::new()))) .collect(); - let flows = iproduct!(region_ids.iter(), years.iter()) + let flows = iproduct!(region_ids.iter(), milestone_years.iter()) .map(|(region_id, year)| ((region_id.clone(), *year), Rc::new(IndexMap::new()))) .collect(); Process { diff --git a/src/input/process/flow.rs 
b/src/input/process/flow.rs index 5c977cffd..9d58d6985 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -338,12 +338,14 @@ mod tests { fn build_maps( process: Process, flows: I, + years: Option<&Vec>, ) -> (ProcessMap, HashMap) where I: Clone + Iterator, { + let years = years.unwrap_or(&process.years); let map: Rc> = Rc::new(flows.clone().collect()); - let flows_inner = iproduct!(&process.regions, &process.years) + let flows_inner = iproduct!(&process.regions, years) .map(|(region_id, year)| ((region_id.clone(), *year), map.clone())) .collect(); let flows = hash_map! {process.id.clone() => flows_inner}; @@ -359,6 +361,7 @@ mod tests { let (mut processes, flows_map) = build_maps( process, std::iter::once((commodity.id.clone(), flow(commodity.clone(), 1.0))), + None, ); assert!( validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) @@ -386,6 +389,7 @@ mod tests { (commodity2.id.clone(), flow(commodity2.clone(), 2.0)), ] .into_iter(), + None, ); let res = validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years); @@ -410,6 +414,7 @@ mod tests { (commodity2.id.clone(), flow(commodity2.clone(), 2.0)), ] .into_iter(), + None, ); assert!( validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) @@ -437,6 +442,7 @@ mod tests { (commodity2.id.clone(), flow(commodity2.clone(), -2.0)), ] .into_iter(), + None, ); assert!( validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) @@ -447,4 +453,55 @@ mod tests { None ); } + + #[rstest] + fn flows_not_in_all_milestone_years( + #[from(svd_commodity)] commodity1: Commodity, + #[from(sed_commodity)] commodity2: Commodity, + process: Process, + ) { + let milestone_years = vec![2010, 2015, 2020]; + let flow_years = vec![2010, 2020]; + let commodity1 = Rc::new(commodity1); + let commodity2 = Rc::new(commodity2); + let (mut processes, flows_map) = build_maps( + process, + [ + 
(commodity1.id.clone(), flow(commodity1.clone(), 1.0)), + (commodity2.id.clone(), flow(commodity2.clone(), 2.0)), + ] + .into_iter(), + Some(&flow_years), + ); + let res = + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years); + assert_error!( + res, + "Flows map for process process1 does not cover all regions and required years" + ); + } + + #[rstest] + fn flows_only_milestone_years( + #[from(svd_commodity)] commodity1: Commodity, + #[from(sed_commodity)] commodity2: Commodity, + process: Process, + ) { + let milestone_years = vec![2010, 2015, 2020]; + let commodity1 = Rc::new(commodity1); + let commodity2 = Rc::new(commodity2); + let (mut processes, flows_map) = build_maps( + process, + [ + (commodity1.id.clone(), flow(commodity1.clone(), 1.0)), + (commodity2.id.clone(), flow(commodity2.clone(), -2.0)), + ] + .into_iter(), + Some(&milestone_years), + ); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); + } } From d3f4c755c7eacfb4a6ed90e98c6e45d32c79f7f3 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 3 Nov 2025 19:35:05 +0000 Subject: [PATCH 07/28] Add output file for unmet demand --- src/output.rs | 89 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) diff --git a/src/output.rs b/src/output.rs index 2c1b9e4af..5faa9ae33 100644 --- a/src/output.rs +++ b/src/output.rs @@ -41,6 +41,9 @@ const ACTIVITY_ASSET_DISPATCH: &str = "debug_dispatch_assets.csv"; /// The output file name for commodity balance duals const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv"; +/// The output file name for unmet demand values +const UNMET_DEMAND_FILE_NAME: &str = "debug_unmet_demand.csv"; + /// The output file name for extra solver output values const SOLVER_VALUES_FILE_NAME: &str = "debug_solver.csv"; @@ -192,6 +195,17 @@ struct CommodityBalanceDualsRow { value: MoneyPerFlow, } +/// Represents the unmet demand data 
in a row of the unmet demand CSV file +#[derive(Serialize, Deserialize, Debug, PartialEq)] +struct UnmetDemandRow { + milestone_year: u32, + run_description: String, + commodity_id: CommodityID, + region_id: RegionID, + time_slice: TimeSliceID, + value: Flow, +} + /// Represents solver output values #[derive(Serialize, Deserialize, Debug, PartialEq)] struct SolverValuesRow { @@ -232,6 +246,7 @@ struct AppraisalResultsTimeSliceRow { struct DebugDataWriter { context: Option, commodity_balance_duals_writer: csv::Writer, + unmet_demand_writer: csv::Writer, solver_values_writer: csv::Writer, appraisal_results_writer: csv::Writer, appraisal_results_time_slice_writer: csv::Writer, @@ -253,6 +268,7 @@ impl DebugDataWriter { Ok(Self { context: None, commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?, + unmet_demand_writer: new_writer(UNMET_DEMAND_FILE_NAME)?, solver_values_writer: new_writer(SOLVER_VALUES_FILE_NAME)?, appraisal_results_writer: new_writer(APPRAISAL_RESULTS_FILE_NAME)?, appraisal_results_time_slice_writer: new_writer( @@ -290,6 +306,11 @@ impl DebugDataWriter { run_description, solution.iter_commodity_balance_duals(), )?; + self.write_unmet_demand( + milestone_year, + run_description, + solution.iter_unmet_demand(), + )?; self.write_solver_values(milestone_year, run_description, solution.objective_value)?; Ok(()) } @@ -377,6 +398,31 @@ impl DebugDataWriter { Ok(()) } + /// Write unmet demand values to file + fn write_unmet_demand<'a, I>( + &mut self, + milestone_year: u32, + run_description: &str, + iter: I, + ) -> Result<()> + where + I: Iterator, + { + for (commodity_id, region_id, time_slice, value) in iter { + let row = UnmetDemandRow { + milestone_year, + run_description: self.with_context(run_description), + commodity_id: commodity_id.clone(), + region_id: region_id.clone(), + time_slice: time_slice.clone(), + value, + }; + self.unmet_demand_writer.serialize(row)?; + } + + Ok(()) + } + /// Write additional solver output 
values to file fn write_solver_values( &mut self, @@ -453,6 +499,7 @@ impl DebugDataWriter { /// Flush the underlying streams fn flush(&mut self) -> Result<()> { self.commodity_balance_duals_writer.flush()?; + self.unmet_demand_writer.flush()?; self.solver_values_writer.flush()?; self.appraisal_results_writer.flush()?; self.appraisal_results_time_slice_writer.flush()?; @@ -760,6 +807,48 @@ mod tests { assert_equal(records, iter::once(expected)); } + #[rstest] + fn test_write_unmet_demand( + commodity_id: CommodityID, + region_id: RegionID, + time_slice: TimeSliceID, + ) { + let milestone_year = 2020; + let run_description = "test_run".to_string(); + let value = Flow(0.5); + let dir = tempdir().unwrap(); + + // Write unmet demand + { + let mut writer = DebugDataWriter::create(dir.path()).unwrap(); + writer + .write_unmet_demand( + milestone_year, + &run_description, + iter::once((&commodity_id, ®ion_id, &time_slice, value)), + ) + .unwrap(); + writer.flush().unwrap(); + } + + // Read back and compare + let expected = UnmetDemandRow { + milestone_year, + run_description, + commodity_id: commodity_id, + region_id: region_id, + time_slice, + value, + }; + let records: Vec = + csv::Reader::from_path(dir.path().join(UNMET_DEMAND_FILE_NAME)) + .unwrap() + .into_deserialize() + .try_collect() + .unwrap(); + assert_equal(records, iter::once(expected)); + } + #[rstest] fn test_write_activity(assets: AssetPool, time_slice: TimeSliceID) { let milestone_year = 2020; From 4116dbdba6062a903229bc0fff50741cce8c8852 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 3 Nov 2025 19:44:28 +0000 Subject: [PATCH 08/28] Add (empty) unmet demand file --- tests/data/simple/debug_unmet_demand.csv | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/data/simple/debug_unmet_demand.csv diff --git a/tests/data/simple/debug_unmet_demand.csv b/tests/data/simple/debug_unmet_demand.csv new file mode 100644 index 000000000..e69de29bb From 
4a354bb3504fe85b216d96013d1f6e0f4213bd2d Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 5 Nov 2025 16:24:08 +0000 Subject: [PATCH 09/28] Give timeslice level output data for appraisal --- src/fixture.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/fixture.rs b/src/fixture.rs index b571cc22e..78c11dc27 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -6,6 +6,7 @@ use crate::agent::{ }; use crate::asset::{Asset, AssetPool, AssetRef}; use crate::commodity::{Commodity, CommodityID, CommodityLevyMap, CommodityType, DemandMap}; +use crate::process::ProcessID; use crate::process::{Process, ProcessMap, ProcessParameter, ProcessParameterMap}; use crate::region::RegionID; use crate::simulation::investment::appraisal::{ From 878b1b501e4dbfc9c58e583739fcf030678055cb Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 5 Nov 2025 16:38:54 +0000 Subject: [PATCH 10/28] Delete unused import --- src/fixture.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/fixture.rs b/src/fixture.rs index 78c11dc27..b571cc22e 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -6,7 +6,6 @@ use crate::agent::{ }; use crate::asset::{Asset, AssetPool, AssetRef}; use crate::commodity::{Commodity, CommodityID, CommodityLevyMap, CommodityType, DemandMap}; -use crate::process::ProcessID; use crate::process::{Process, ProcessMap, ProcessParameter, ProcessParameterMap}; use crate::region::RegionID; use crate::simulation::investment::appraisal::{ From 415972ac8217fb74298e4ea1ae5b3191122e674b Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Mon, 10 Nov 2025 13:48:00 +0000 Subject: [PATCH 11/28] Check flows in milestone years only --- src/input/process/flow.rs | 40 +++++++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 14 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index 9d58d6985..9d169ede7 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -11,7 +11,7 @@ use anyhow::{Context, Result, ensure}; use 
indexmap::IndexMap; use itertools::iproduct; use serde::Deserialize; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; @@ -63,9 +63,11 @@ pub fn read_process_flows( processes: &mut ProcessMap, commodities: &CommodityMap, milestone_years: &[u32], + milestone_years: &[u32], ) -> Result> { let file_path = model_dir.join(PROCESS_FLOWS_FILE_NAME); let process_flow_csv = read_csv(&file_path)?; + read_process_flows_from_iter(process_flow_csv, processes, commodities, milestone_years) read_process_flows_from_iter(process_flow_csv, processes, commodities, milestone_years) .with_context(|| input_err_msg(&file_path)) } @@ -152,18 +154,14 @@ fn validate_flows_and_update_primary_output( .get(process_id) .with_context(|| format!("Missing flows map for process {process_id}"))?; - // Flows are required for all milestone years within the process years of activity - let required_years = milestone_years - .iter() - .filter(|&y| process.years.contains(y)); let region_year: Vec<(&RegionID, &u32)> = - iproduct!(process.regions.iter(), required_years).collect(); + iproduct!(process.regions.iter(), milestone_years.iter()).collect(); ensure!( region_year .iter() .all(|(region_id, year)| map.contains_key(&((*region_id).clone(), **year))), - "Flows map for process {process_id} does not cover all regions and required years" + "Flows map for process {process_id} does not cover all regions and milestone years" ); let primary_output = if let Some(primary_output) = &process.primary_output { @@ -268,8 +266,11 @@ fn validate_secondary_flows( // Get the non-primary io flows for all years, if any, arranged by (commodity, region) let iter = iproduct!(process.years.iter(), process.regions.iter()); let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); - let mut number_of_years: HashMap<(CommodityID, RegionID), u32> = HashMap::new(); + let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); for 
(&year, region_id) in iter { + if !map.contains_key(&(region_id.clone(), year)) { + continue; + } let flow = map[&(region_id.clone(), year)] .iter() .filter_map(|(commodity_id, flow)| { @@ -279,21 +280,20 @@ fn validate_secondary_flows( for (key, value) in flow { flows.entry(key.clone()).or_default().push(value); - if required_years.contains(&&year) { - *number_of_years.entry(key).or_default() += 1; + if milestone_years.contains(&year) { + years.entry(key).or_default().insert(year); } } } // Finally we check that the flows for a given commodity and region are defined for all // milestone years and that they are all inputs or all outputs. This later check is done - // for all years in the process range, required or not. + // for all years, milestone or not. for ((commodity_id, region_id), value) in &flows { ensure!( - number_of_years[&(commodity_id.clone(), region_id.clone())] - == required_years.len().try_into().unwrap(), + years[&(commodity_id.clone(), region_id.clone())].len() == milestone_years.len(), "Flow of commodity {commodity_id} in region {region_id} for process {process_id} \ - does not cover all milestone years within the process range of activity." 
+ does not cover all milestone years" ); let input_or_zero = value .iter() @@ -367,6 +367,10 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity.id.clone()) @@ -420,6 +424,10 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity2.id.clone()) @@ -448,6 +456,10 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, None From 51fb6fb13d386b744e0860f46bc0505ca66b9e6b Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Mon, 10 Nov 2025 13:55:25 +0000 Subject: [PATCH 12/28] Remove unnecessary check --- src/input/process/flow.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index 9d169ede7..b65581674 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -268,9 +268,6 @@ fn validate_secondary_flows( let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); for (&year, region_id) in iter { - if !map.contains_key(&(region_id.clone(), year)) { - continue; - } let flow = map[&(region_id.clone(), year)] .iter() .filter_map(|(commodity_id, flow)| { From f114085aa3c900a79c2e6e342fbb2d3410efed35 Mon Sep 17 00:00:00 2001 From: Diego Alonso 
Alvarez Date: Tue, 11 Nov 2025 06:18:12 +0000 Subject: [PATCH 13/28] Check only milestone y within process range of activity --- src/input/process/flow.rs | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index b65581674..fd6606fb1 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -11,7 +11,7 @@ use anyhow::{Context, Result, ensure}; use indexmap::IndexMap; use itertools::iproduct; use serde::Deserialize; -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::path::Path; use std::rc::Rc; @@ -154,14 +154,18 @@ fn validate_flows_and_update_primary_output( .get(process_id) .with_context(|| format!("Missing flows map for process {process_id}"))?; + // Flows are required for all milestone years within the process years of activity + let required_years = milestone_years + .iter() + .filter(|&y| process.years.contains(y)); let region_year: Vec<(&RegionID, &u32)> = - iproduct!(process.regions.iter(), milestone_years.iter()).collect(); + iproduct!(process.regions.iter(), required_years).collect(); ensure!( region_year .iter() .all(|(region_id, year)| map.contains_key(&((*region_id).clone(), **year))), - "Flows map for process {process_id} does not cover all regions and milestone years" + "Flows map for process {process_id} does not cover all regions and required years" ); let primary_output = if let Some(primary_output) = &process.primary_output { @@ -266,7 +270,7 @@ fn validate_secondary_flows( // Get the non-primary io flows for all years, if any, arranged by (commodity, region) let iter = iproduct!(process.years.iter(), process.regions.iter()); let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); - let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); + let mut number_of_years: HashMap<(CommodityID, RegionID), u32> = HashMap::new(); for (&year, region_id) in iter { let flow 
= map[&(region_id.clone(), year)] .iter() @@ -277,20 +281,21 @@ fn validate_secondary_flows( for (key, value) in flow { flows.entry(key.clone()).or_default().push(value); - if milestone_years.contains(&year) { - years.entry(key).or_default().insert(year); + if required_years.contains(&&year) { + *number_of_years.entry(key).or_default() += 1; } } } // Finally we check that the flows for a given commodity and region are defined for all // milestone years and that they are all inputs or all outputs. This later check is done - // for all years, milestone or not. + // for all years in the process range, required or not. for ((commodity_id, region_id), value) in &flows { ensure!( - years[&(commodity_id.clone(), region_id.clone())].len() == milestone_years.len(), + number_of_years[&(commodity_id.clone(), region_id.clone())] + == required_years.len().try_into().unwrap(), "Flow of commodity {commodity_id} in region {region_id} for process {process_id} \ - does not cover all milestone years" + does not cover all milestone years within the process range of activity." 
); let input_or_zero = value .iter() From 9ca4348889c5e7df0e658cc350f9be0333ea0bc7 Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Tue, 11 Nov 2025 06:28:09 +0000 Subject: [PATCH 14/28] white_check_mark: Update tests to cover new functionality --- src/input/process/flow.rs | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index fd6606fb1..ca80a7d50 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -341,12 +341,15 @@ mod tests { process: Process, flows: I, years: Option<&Vec>, + years: Option<&Vec>, ) -> (ProcessMap, HashMap) where I: Clone + Iterator, { + let years = years.unwrap_or(&process.years); let years = years.unwrap_or(&process.years); let map: Rc> = Rc::new(flows.clone().collect()); + let flows_inner = iproduct!(&process.regions, years) let flows_inner = iproduct!(&process.regions, years) .map(|(region_id, year)| ((region_id.clone(), *year), map.clone())) .collect(); @@ -369,10 +372,6 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); - assert!( - validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) - .is_ok() - ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity.id.clone()) @@ -426,10 +425,6 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); - assert!( - validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) - .is_ok() - ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity2.id.clone()) @@ -458,10 +453,6 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); - assert!( - validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) - .is_ok() - ); assert_eq!( 
processes.values().exactly_one().unwrap().primary_output, None From 9482fcb7d4b3a6c8c6c62f366725e1858a025e05 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 5 Nov 2025 16:24:08 +0000 Subject: [PATCH 15/28] Give timeslice level output data for appraisal --- src/fixture.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/fixture.rs b/src/fixture.rs index b571cc22e..78c11dc27 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -6,6 +6,7 @@ use crate::agent::{ }; use crate::asset::{Asset, AssetPool, AssetRef}; use crate::commodity::{Commodity, CommodityID, CommodityLevyMap, CommodityType, DemandMap}; +use crate::process::ProcessID; use crate::process::{Process, ProcessMap, ProcessParameter, ProcessParameterMap}; use crate::region::RegionID; use crate::simulation::investment::appraisal::{ From 727f4bd11ed02468faac14ab66db51424f3eff5c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 5 Nov 2025 16:38:54 +0000 Subject: [PATCH 16/28] Delete unused import --- src/fixture.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/fixture.rs b/src/fixture.rs index 78c11dc27..b571cc22e 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -6,7 +6,6 @@ use crate::agent::{ }; use crate::asset::{Asset, AssetPool, AssetRef}; use crate::commodity::{Commodity, CommodityID, CommodityLevyMap, CommodityType, DemandMap}; -use crate::process::ProcessID; use crate::process::{Process, ProcessMap, ProcessParameter, ProcessParameterMap}; use crate::region::RegionID; use crate::simulation::investment::appraisal::{ From 8f28ded4ac0d1c43949a67f4d391c8cf48c2689d Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Mon, 10 Nov 2025 13:48:00 +0000 Subject: [PATCH 17/28] Check flows in milestone years only --- src/input/process/flow.rs | 40 +++++++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index ca80a7d50..d3b5eb140 100644 --- a/src/input/process/flow.rs +++ 
b/src/input/process/flow.rs @@ -11,7 +11,7 @@ use anyhow::{Context, Result, ensure}; use indexmap::IndexMap; use itertools::iproduct; use serde::Deserialize; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; @@ -63,11 +63,9 @@ pub fn read_process_flows( processes: &mut ProcessMap, commodities: &CommodityMap, milestone_years: &[u32], - milestone_years: &[u32], ) -> Result> { let file_path = model_dir.join(PROCESS_FLOWS_FILE_NAME); let process_flow_csv = read_csv(&file_path)?; - read_process_flows_from_iter(process_flow_csv, processes, commodities, milestone_years) read_process_flows_from_iter(process_flow_csv, processes, commodities, milestone_years) .with_context(|| input_err_msg(&file_path)) } @@ -154,18 +152,14 @@ fn validate_flows_and_update_primary_output( .get(process_id) .with_context(|| format!("Missing flows map for process {process_id}"))?; - // Flows are required for all milestone years within the process years of activity - let required_years = milestone_years - .iter() - .filter(|&y| process.years.contains(y)); let region_year: Vec<(&RegionID, &u32)> = - iproduct!(process.regions.iter(), required_years).collect(); + iproduct!(process.regions.iter(), milestone_years.iter()).collect(); ensure!( region_year .iter() .all(|(region_id, year)| map.contains_key(&((*region_id).clone(), **year))), - "Flows map for process {process_id} does not cover all regions and required years" + "Flows map for process {process_id} does not cover all regions and milestone years" ); let primary_output = if let Some(primary_output) = &process.primary_output { @@ -270,8 +264,11 @@ fn validate_secondary_flows( // Get the non-primary io flows for all years, if any, arranged by (commodity, region) let iter = iproduct!(process.years.iter(), process.regions.iter()); let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); - let mut number_of_years: HashMap<(CommodityID, RegionID), u32> = 
HashMap::new(); + let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); for (&year, region_id) in iter { + if !map.contains_key(&(region_id.clone(), year)) { + continue; + } let flow = map[&(region_id.clone(), year)] .iter() .filter_map(|(commodity_id, flow)| { @@ -281,21 +278,20 @@ fn validate_secondary_flows( for (key, value) in flow { flows.entry(key.clone()).or_default().push(value); - if required_years.contains(&&year) { - *number_of_years.entry(key).or_default() += 1; + if milestone_years.contains(&year) { + years.entry(key).or_default().insert(year); } } } // Finally we check that the flows for a given commodity and region are defined for all // milestone years and that they are all inputs or all outputs. This later check is done - // for all years in the process range, required or not. + // for all years, milestone or not. for ((commodity_id, region_id), value) in &flows { ensure!( - number_of_years[&(commodity_id.clone(), region_id.clone())] - == required_years.len().try_into().unwrap(), + years[&(commodity_id.clone(), region_id.clone())].len() == milestone_years.len(), "Flow of commodity {commodity_id} in region {region_id} for process {process_id} \ - does not cover all milestone years within the process range of activity." 
+ does not cover all milestone years" ); let input_or_zero = value .iter() @@ -372,6 +368,10 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity.id.clone()) @@ -425,6 +425,10 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity2.id.clone()) @@ -453,6 +457,10 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); + assert!( + validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) + .is_ok() + ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, None From b5e6aff4f1cc1406d16313d9976fd5533eb09c63 Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Mon, 10 Nov 2025 13:55:25 +0000 Subject: [PATCH 18/28] Remove unnecessary check --- src/input/process/flow.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index d3b5eb140..a3cc2cd67 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -266,9 +266,6 @@ fn validate_secondary_flows( let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); for (&year, region_id) in iter { - if !map.contains_key(&(region_id.clone(), year)) { - continue; - } let flow = map[&(region_id.clone(), year)] .iter() .filter_map(|(commodity_id, flow)| { From bc37c16458a14655834459fbe86e3ecba145ba69 Mon Sep 17 00:00:00 2001 From: Diego Alonso 
Alvarez Date: Tue, 11 Nov 2025 06:18:12 +0000 Subject: [PATCH 19/28] Check only milestone y within process range of activity --- src/input/process/flow.rs | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index a3cc2cd67..ec5f7ba9b 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -11,7 +11,7 @@ use anyhow::{Context, Result, ensure}; use indexmap::IndexMap; use itertools::iproduct; use serde::Deserialize; -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::path::Path; use std::rc::Rc; @@ -152,14 +152,18 @@ fn validate_flows_and_update_primary_output( .get(process_id) .with_context(|| format!("Missing flows map for process {process_id}"))?; + // Flows are required for all milestone years within the process years of activity + let required_years = milestone_years + .iter() + .filter(|&y| process.years.contains(y)); let region_year: Vec<(&RegionID, &u32)> = - iproduct!(process.regions.iter(), milestone_years.iter()).collect(); + iproduct!(process.regions.iter(), required_years).collect(); ensure!( region_year .iter() .all(|(region_id, year)| map.contains_key(&((*region_id).clone(), **year))), - "Flows map for process {process_id} does not cover all regions and milestone years" + "Flows map for process {process_id} does not cover all regions and required years" ); let primary_output = if let Some(primary_output) = &process.primary_output { @@ -264,7 +268,7 @@ fn validate_secondary_flows( // Get the non-primary io flows for all years, if any, arranged by (commodity, region) let iter = iproduct!(process.years.iter(), process.regions.iter()); let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new(); - let mut years: HashMap<(CommodityID, RegionID), HashSet> = HashMap::new(); + let mut number_of_years: HashMap<(CommodityID, RegionID), u32> = HashMap::new(); for (&year, region_id) in iter { let flow 
= map[&(region_id.clone(), year)] .iter() @@ -275,20 +279,21 @@ fn validate_secondary_flows( for (key, value) in flow { flows.entry(key.clone()).or_default().push(value); - if milestone_years.contains(&year) { - years.entry(key).or_default().insert(year); + if required_years.contains(&&year) { + *number_of_years.entry(key).or_default() += 1; } } } // Finally we check that the flows for a given commodity and region are defined for all // milestone years and that they are all inputs or all outputs. This later check is done - // for all years, milestone or not. + // for all years in the process range, required or not. for ((commodity_id, region_id), value) in &flows { ensure!( - years[&(commodity_id.clone(), region_id.clone())].len() == milestone_years.len(), + number_of_years[&(commodity_id.clone(), region_id.clone())] + == required_years.len().try_into().unwrap(), "Flow of commodity {commodity_id} in region {region_id} for process {process_id} \ - does not cover all milestone years" + does not cover all milestone years within the process range of activity." 
); let input_or_zero = value .iter() From 0fa5b9234f5bc1c2b4a28040bca66ca5c4c5997d Mon Sep 17 00:00:00 2001 From: Diego Alonso Alvarez Date: Tue, 11 Nov 2025 06:28:09 +0000 Subject: [PATCH 20/28] white_check_mark: Update tests to cover new functionality --- src/input/process/flow.rs | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index ec5f7ba9b..4c1dbf760 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -339,15 +339,12 @@ mod tests { process: Process, flows: I, years: Option<&Vec>, - years: Option<&Vec>, ) -> (ProcessMap, HashMap) where I: Clone + Iterator, { - let years = years.unwrap_or(&process.years); let years = years.unwrap_or(&process.years); let map: Rc> = Rc::new(flows.clone().collect()); - let flows_inner = iproduct!(&process.regions, years) let flows_inner = iproduct!(&process.regions, years) .map(|(region_id, year)| ((region_id.clone(), *year), map.clone())) .collect(); @@ -370,10 +367,6 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); - assert!( - validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) - .is_ok() - ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity.id.clone()) @@ -397,6 +390,7 @@ mod tests { ] .into_iter(), None, + None, ); let res = validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years); @@ -427,10 +421,6 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); - assert!( - validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) - .is_ok() - ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, Some(commodity2.id.clone()) @@ -459,10 +449,6 @@ mod tests { validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) .is_ok() ); - 
assert!( - validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years) - .is_ok() - ); assert_eq!( processes.values().exactly_one().unwrap().primary_output, None From aa5e1be936fe5ea5f50ec837d7c6aac59212e294 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 5 Nov 2025 16:24:08 +0000 Subject: [PATCH 21/28] Give timeslice level output data for appraisal --- src/fixture.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/fixture.rs b/src/fixture.rs index b571cc22e..78c11dc27 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -6,6 +6,7 @@ use crate::agent::{ }; use crate::asset::{Asset, AssetPool, AssetRef}; use crate::commodity::{Commodity, CommodityID, CommodityLevyMap, CommodityType, DemandMap}; +use crate::process::ProcessID; use crate::process::{Process, ProcessMap, ProcessParameter, ProcessParameterMap}; use crate::region::RegionID; use crate::simulation::investment::appraisal::{ From c49bd589d8eaabeab27468cbc3157ad74cbdf86c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 5 Nov 2025 16:38:54 +0000 Subject: [PATCH 22/28] Delete unused import --- src/fixture.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/fixture.rs b/src/fixture.rs index 78c11dc27..b571cc22e 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -6,7 +6,6 @@ use crate::agent::{ }; use crate::asset::{Asset, AssetPool, AssetRef}; use crate::commodity::{Commodity, CommodityID, CommodityLevyMap, CommodityType, DemandMap}; -use crate::process::ProcessID; use crate::process::{Process, ProcessMap, ProcessParameter, ProcessParameterMap}; use crate::region::RegionID; use crate::simulation::investment::appraisal::{ From 819ce80cfbbcedc2efb06c18bbb3d6d021602bdf Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Thu, 13 Nov 2025 17:31:38 +0000 Subject: [PATCH 23/28] simplify --- src/simulation/investment/appraisal.rs | 75 +------------------ .../investment/appraisal/optimisation.rs | 40 +++++++++- 2 files changed, 39 insertions(+), 76 deletions(-) diff --git 
a/src/simulation/investment/appraisal.rs b/src/simulation/investment/appraisal.rs index 65bbf594a..6a250f1ac 100644 --- a/src/simulation/investment/appraisal.rs +++ b/src/simulation/investment/appraisal.rs @@ -9,7 +9,6 @@ use crate::time_slice::TimeSliceID; use crate::units::{Activity, Capacity}; use anyhow::Result; use costs::annual_fixed_cost; -use highs::RowProblem as Problem; use indexmap::IndexMap; use std::cmp::Ordering; @@ -19,7 +18,7 @@ mod costs; mod optimisation; use coefficients::ObjectiveCoefficients; use float_cmp::approx_eq; -use optimisation::{VariableMap, perform_optimisation}; +use optimisation::perform_optimisation; /// The output of investment appraisal required to compare potential investment decisions pub struct AppraisalOutput { @@ -62,66 +61,6 @@ impl AppraisalOutput { } } -/// Add variables to the problem for LCOX appraisal. -fn add_variables_to_lcox_problem( - problem: &mut Problem, - cost_coefficients: &ObjectiveCoefficients, -) -> VariableMap { - // Create capacity variable - let capacity_var = problem.add_column(cost_coefficients.capacity_coefficient.value(), 0.0..); - - // Create activity variables - let mut activity_vars = IndexMap::new(); - for (time_slice, cost) in &cost_coefficients.activity_coefficients { - let var = problem.add_column(cost.value(), 0.0..); - activity_vars.insert(time_slice.clone(), var); - } - - // Create unmet demand variables - // One per time slice, all of which use the same coefficient - let mut unmet_demand_vars = IndexMap::new(); - for time_slice in cost_coefficients.activity_coefficients.keys() { - let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); - unmet_demand_vars.insert(time_slice.clone(), var); - } - - VariableMap { - capacity_var, - activity_vars, - unmet_demand_vars, - } -} - -/// Add variables to the problem for npv appraisal. 
-fn add_variables_to_npv_problem( - problem: &mut Problem, - cost_coefficients: &ObjectiveCoefficients, -) -> VariableMap { - // Create capacity variable (zeroed coefficient - doesn't affect optimisation) - let capacity_var = problem.add_column(0.0, 0.0..); - - // Create activity variables - let mut activity_vars = IndexMap::new(); - for (time_slice, cost) in &cost_coefficients.activity_coefficients { - let var = problem.add_column(cost.value(), 0.0..); - activity_vars.insert(time_slice.clone(), var); - } - - // Create unmet demand variables - // One per time slice, all of which use the same coefficient - let mut unmet_demand_vars = IndexMap::new(); - for time_slice in cost_coefficients.activity_coefficients.keys() { - let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); - unmet_demand_vars.insert(time_slice.clone(), var); - } - - VariableMap { - capacity_var, - activity_vars, - unmet_demand_vars, - } -} - /// Calculate LCOX for a hypothetical investment in the given asset. 
/// /// This is more commonly referred to as Levelised Cost of *Electricity*, but as the model can @@ -134,19 +73,15 @@ fn calculate_lcox( coefficients: &ObjectiveCoefficients, demand: &DemandMap, ) -> Result { - let mut problem = Problem::default(); - let variables = add_variables_to_lcox_problem(&mut problem, coefficients); - // Perform optimisation to calculate capacity, activity and unmet demand let results = perform_optimisation( asset, max_capacity, commodity, + coefficients, demand, &model.time_slice_info, highs::Sense::Minimise, - problem, - &variables, )?; // Calculate LCOX for the hypothetical investment @@ -180,19 +115,15 @@ fn calculate_npv( coefficients: &ObjectiveCoefficients, demand: &DemandMap, ) -> Result { - let mut problem = Problem::default(); - let variables = add_variables_to_npv_problem(&mut problem, coefficients); - // Perform optimisation to calculate capacity, activity and unmet demand let results = perform_optimisation( asset, max_capacity, commodity, + coefficients, demand, &model.time_slice_info, highs::Sense::Maximise, - problem, - &variables, )?; // Calculate profitability index for the hypothetical investment diff --git a/src/simulation/investment/appraisal/optimisation.rs b/src/simulation/investment/appraisal/optimisation.rs index 9ceeb35e7..e86982c68 100644 --- a/src/simulation/investment/appraisal/optimisation.rs +++ b/src/simulation/investment/appraisal/optimisation.rs @@ -1,5 +1,6 @@ //! Optimisation problem for investment tools. 
use super::DemandMap; +use super::ObjectiveCoefficients; use super::constraints::{ add_activity_constraints, add_capacity_constraint, add_demand_constraints, }; @@ -16,7 +17,7 @@ use indexmap::IndexMap; pub type Variable = highs::Col; /// Map storing variables for the optimisation problem -pub struct VariableMap { +struct VariableMap { /// Capacity variable pub capacity_var: Variable, /// Activity variables in each time slice @@ -35,6 +36,35 @@ pub struct ResultsMap { pub unmet_demand: DemandMap, } +fn add_variables_to_problem( + problem: &mut Problem, + cost_coefficients: &ObjectiveCoefficients, +) -> VariableMap { + // Create capacity variable + let capacity_var = problem.add_column(cost_coefficients.capacity_coefficient.value(), 0.0..); + + // Create activity variables + let mut activity_vars = IndexMap::new(); + for (time_slice, cost) in &cost_coefficients.activity_coefficients { + let var = problem.add_column(cost.value(), 0.0..); + activity_vars.insert(time_slice.clone(), var); + } + + // Create unmet demand variables + // One per time slice, all of which use the same coefficient + let mut unmet_demand_vars = IndexMap::new(); + for time_slice in cost_coefficients.activity_coefficients.keys() { + let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); + unmet_demand_vars.insert(time_slice.clone(), var); + } + + VariableMap { + capacity_var, + activity_vars, + unmet_demand_vars, + } +} + /// Adds constraints to the problem. 
fn add_constraints( problem: &mut Problem, @@ -71,20 +101,22 @@ pub fn perform_optimisation( asset: &AssetRef, max_capacity: Option, commodity: &Commodity, + coefficients: &ObjectiveCoefficients, demand: &DemandMap, time_slice_info: &TimeSliceInfo, sense: Sense, - mut problem: Problem, - variables: &VariableMap, ) -> Result { // Set up problem + let mut problem = Problem::default(); + let variables = add_variables_to_problem(&mut problem, coefficients); + // Add constraints add_constraints( &mut problem, asset, max_capacity, commodity, - variables, + &variables, demand, time_slice_info, ); From ed2966734d898ae3add4a0a2e9823dbc8c2d822e Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Thu, 13 Nov 2025 17:32:45 +0000 Subject: [PATCH 24/28] remove too many arguments suppression --- src/simulation/investment/appraisal/optimisation.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/simulation/investment/appraisal/optimisation.rs b/src/simulation/investment/appraisal/optimisation.rs index e86982c68..fa07a38aa 100644 --- a/src/simulation/investment/appraisal/optimisation.rs +++ b/src/simulation/investment/appraisal/optimisation.rs @@ -96,7 +96,6 @@ fn add_constraints( /// Performs optimisation for an asset, given the coefficients and demand. /// /// Will either maximise or minimise the objective function, depending on the `sense` parameter.
-#[allow(clippy::too_many_arguments)] pub fn perform_optimisation( asset: &AssetRef, max_capacity: Option, From aab4fd9d5fb64a870bda6301ad90ec3484798f29 Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Thu, 13 Nov 2025 17:34:21 +0000 Subject: [PATCH 25/28] make variable map members private --- src/simulation/investment/appraisal/optimisation.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/simulation/investment/appraisal/optimisation.rs b/src/simulation/investment/appraisal/optimisation.rs index fa07a38aa..267b0424a 100644 --- a/src/simulation/investment/appraisal/optimisation.rs +++ b/src/simulation/investment/appraisal/optimisation.rs @@ -19,11 +19,11 @@ pub type Variable = highs::Col; /// Map storing variables for the optimisation problem struct VariableMap { /// Capacity variable - pub capacity_var: Variable, + capacity_var: Variable, /// Activity variables in each time slice - pub activity_vars: IndexMap, - // Unmet demand variables - pub unmet_demand_vars: IndexMap, + activity_vars: IndexMap, + /// Unmet demand variables + unmet_demand_vars: IndexMap, } /// Map containing optimisation results and coefficients From bad47296615fe8e1bbd57896f3cbeb46ded9f626 Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Mon, 17 Nov 2025 10:51:19 +0000 Subject: [PATCH 26/28] update docs; change function to constructor method --- docs/model/investment.md | 4 +- .../investment/appraisal/optimisation.rs | 69 ++++++++++--------- 2 files changed, 40 insertions(+), 33 deletions(-) diff --git a/docs/model/investment.md b/docs/model/investment.md index 1b9c0c793..77a796c44 100644 --- a/docs/model/investment.md +++ b/docs/model/investment.md @@ -142,11 +142,11 @@ operational constraints (e.g., minimum load levels) and the balance level of the demand tranche it is being asked to serve.
\\[ - maximise \Big\\{ -AFC \* cap - \sum_t act_t \* AC_t + maximise \Big\\{ - \sum_t act_t \* AC_t \Big\\} \\] - Where \\( cap \\) and \\( act_t \\) are decision variables, and subject to: + Where \\( act_t \\) is a decision variable, and subject to: - The asset operational constraints (e.g., \\( avail_{LB}, avail_{EQ} \\), etc.), activity less than capacity, applied to its activity profile \\( act_t \\). diff --git a/src/simulation/investment/appraisal/optimisation.rs b/src/simulation/investment/appraisal/optimisation.rs index 267b0424a..eb4677e4c 100644 --- a/src/simulation/investment/appraisal/optimisation.rs +++ b/src/simulation/investment/appraisal/optimisation.rs @@ -26,6 +26,42 @@ struct VariableMap { unmet_demand_vars: IndexMap, } +impl VariableMap { + /// Creates a new variable map by adding variables to the optimisation problem. + /// + /// # Arguments + /// * `problem` - The optimisation problem to add variables to + /// * `cost_coefficients` - Objective function coefficients for each variable + /// + /// # Returns + /// A new `VariableMap` containing all created decision variables + fn add_to_problem(problem: &mut Problem, cost_coefficients: &ObjectiveCoefficients) -> Self { + // Create capacity variable with its associated cost + let capacity_var = + problem.add_column(cost_coefficients.capacity_coefficient.value(), 0.0..); + + // Create activity variables for each time slice + let mut activity_vars = IndexMap::new(); + for (time_slice, cost) in &cost_coefficients.activity_coefficients { + let var = problem.add_column(cost.value(), 0.0..); + activity_vars.insert(time_slice.clone(), var); + } + + // Create unmet demand variables for each time slice + let mut unmet_demand_vars = IndexMap::new(); + for time_slice in cost_coefficients.activity_coefficients.keys() { + let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); + unmet_demand_vars.insert(time_slice.clone(), var); + } + + Self { + capacity_var, + activity_vars, + 
unmet_demand_vars, + } + } +} + /// Map containing optimisation results and coefficients pub struct ResultsMap { /// Capacity variable @@ -36,35 +72,6 @@ pub struct ResultsMap { pub unmet_demand: DemandMap, } -fn add_variables_to_problem( - problem: &mut Problem, - cost_coefficients: &ObjectiveCoefficients, -) -> VariableMap { - // Create capacity variable - let capacity_var = problem.add_column(cost_coefficients.capacity_coefficient.value(), 0.0..); - - // Create activity variables - let mut activity_vars = IndexMap::new(); - for (time_slice, cost) in &cost_coefficients.activity_coefficients { - let var = problem.add_column(cost.value(), 0.0..); - activity_vars.insert(time_slice.clone(), var); - } - - // Create unmet demand variables - // One per time slice, all of which use the same coefficient - let mut unmet_demand_vars = IndexMap::new(); - for time_slice in cost_coefficients.activity_coefficients.keys() { - let var = problem.add_column(cost_coefficients.unmet_demand_coefficient.value(), 0.0..); - unmet_demand_vars.insert(time_slice.clone(), var); - } - - VariableMap { - capacity_var, - activity_vars, - unmet_demand_vars, - } -} - /// Adds constraints to the problem. 
fn add_constraints( problem: &mut Problem, @@ -105,9 +112,9 @@ pub fn perform_optimisation( time_slice_info: &TimeSliceInfo, sense: Sense, ) -> Result { - // Set up problem + // Create problem and add variables let mut problem = Problem::default(); - let variables = add_variables_to_problem(&mut problem, coefficients); + let variables = VariableMap::add_to_problem(&mut problem, coefficients); // Add constraints add_constraints( From 01fbfde03ba7381f434b29f861d28f05025656c9 Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Mon, 17 Nov 2025 11:27:22 +0000 Subject: [PATCH 27/28] update docs npv epsilon --- docs/model/investment.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/model/investment.md b/docs/model/investment.md index 77a796c44..134d90710 100644 --- a/docs/model/investment.md +++ b/docs/model/investment.md @@ -139,10 +139,11 @@ operational constraints (e.g., minimum load levels) and the balance level of the - **Optimise capacity and dispatch to maximise annualised profit:** Solve a small optimisation sub-problem to maximise the asset’s surplus, subject to its operational rules and the specific - demand tranche it is being asked to serve. + demand tranche it is being asked to serve. \\(\varepsilon \approx 1×10^{-14}\\) is added to each + \\(AC_t \\) to allow assets which are breakeven (or very close to breakeven) to be dispatched. 
\\[ - maximise \Big\\{ - \sum_t act_t \* AC_t + maximise \Big\\{ - \sum_t act_t \* (AC_t + \varepsilon) \Big\\} \\] From f22be2a03c984a7dc9d9200f6d919633afc11562 Mon Sep 17 00:00:00 2001 From: Aurash Karimi Date: Mon, 17 Nov 2025 11:31:06 +0000 Subject: [PATCH 28/28] remove unused variable --- src/simulation/investment/appraisal/coefficients.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/simulation/investment/appraisal/coefficients.rs b/src/simulation/investment/appraisal/coefficients.rs index 9406dfa0b..3cf01c50b 100644 --- a/src/simulation/investment/appraisal/coefficients.rs +++ b/src/simulation/investment/appraisal/coefficients.rs @@ -90,9 +90,6 @@ pub fn calculate_coefficients_for_npv( // assets are still dispatched const EPSILON_ACTIVITY_COEFFICIENT: MoneyPerActivity = MoneyPerActivity(f64::EPSILON * 100.0); - // Capacity coefficient - let capacity_coefficient = -annual_fixed_cost(asset); - // Activity coefficients let mut activity_coefficients = IndexMap::new(); for time_slice in time_slice_info.iter_ids() {