From 5fbcff8128a420fceab1e9ee241e730c51d48084 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 14 Nov 2024 15:33:52 +0000 Subject: [PATCH 01/20] Fix: Don't reuse mutable demand object between tests --- src/demand.rs | 134 +++++++++++++++++++++++++--------------------- src/time_slice.rs | 2 +- 2 files changed, 74 insertions(+), 62 deletions(-) diff --git a/src/demand.rs b/src/demand.rs index a13ef27fe..cc0144c12 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -12,7 +12,7 @@ const DEMAND_FILE_NAME: &str = "demand.csv"; const DEMAND_SLICES_FILE_NAME: &str = "demand_slicing.csv"; /// Represents a single demand entry in the dataset. -#[derive(Debug, Deserialize, PartialEq)] +#[derive(Debug, Clone, Deserialize, PartialEq)] pub struct Demand { /// The commodity this demand entry refers to pub commodity_id: String, @@ -38,7 +38,7 @@ struct DemandSliceRaw { } /// How demand varies by time slice -#[derive(Debug, PartialEq)] +#[derive(Debug, Clone, PartialEq)] pub struct DemandSlice { /// Which time slice(s) this applies to pub time_slice: TimeSliceSelection, @@ -448,7 +448,7 @@ COM1,West,2023,13" }; // Demand grouped by region - let mut demand = [( + let demand: HashMap<_, _> = [( "COM1".into(), [( "GBR".into(), @@ -467,69 +467,81 @@ COM1,West,2023,13" .collect(); // Valid - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - read_demand_slices_from_iter( - [demand_slice.clone()].into_iter(), - &time_slice_info, - &mut demand, - ) - .unwrap(); - let time_slice = time_slice_info.get_selection("winter.day").unwrap(); - assert_eq!( - try_get_demand("COM1", "GBR", &mut demand) - .unwrap() - .demand_slices, - vec![DemandSlice { - time_slice, - fraction: 1.0 - }] - ); + { + let mut demand = demand.clone(); + let demand_slice = DemandSliceRaw { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + read_demand_slices_from_iter( + [demand_slice.clone()].into_iter(), + &time_slice_info, + &mut demand, + ) + .unwrap(); + let time_slice = time_slice_info.get_selection("winter.day").unwrap(); + assert_eq!( + try_get_demand("COM1", "GBR", &mut demand) + .unwrap() + .demand_slices, + vec![DemandSlice { + time_slice, + fraction: 1.0 + }] + ); + } // Bad commodity - let demand_slice = DemandSliceRaw { - commodity_id: "COM2".into(), - region_id: "GBR".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand - ) - .is_err()); + { + let mut demand = demand.clone(); + let demand_slice = DemandSliceRaw { + commodity_id: "COM2".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + [demand_slice].into_iter(), + &time_slice_info, + &mut demand + ) + .is_err()); + } // Bad region - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "USA".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand - ) - .is_err()); + { + let mut demand = demand.clone(); + let demand_slice = DemandSliceRaw { + commodity_id: "COM1".into(), + region_id: "USA".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + [demand_slice].into_iter(), + &time_slice_info, + &mut demand + ) + .is_err()); + } // Bad 
time slice - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "summer.night".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand, - ) - .is_err()); + { + let mut demand = demand.clone(); + let demand_slice = DemandSliceRaw { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "summer.night".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + [demand_slice].into_iter(), + &time_slice_info, + &mut demand, + ) + .is_err()); + } } } diff --git a/src/time_slice.rs b/src/time_slice.rs index 40e506a8d..2b2d393f0 100644 --- a/src/time_slice.rs +++ b/src/time_slice.rs @@ -33,7 +33,7 @@ impl Display for TimeSliceID { } /// Represents a time slice read from an input file, which can be all -#[derive(PartialEq, Debug)] +#[derive(PartialEq, Clone, Debug)] pub enum TimeSliceSelection { /// All year and all day Annual, From 754af6451efe51bc8c7e6ec4a44f7a4c845feb47 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 25 Nov 2024 08:22:02 +0000 Subject: [PATCH 02/20] Rename: `DemandHashMap` => `CommodityDemandMap` Also make private. --- src/demand.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/demand.rs b/src/demand.rs index cc0144c12..76fb7ade0 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -46,8 +46,8 @@ pub struct DemandSlice { pub fraction: f64, } -/// A [HashMap] of [Demand] grouped first by commodity, then region -pub type DemandHashMap = HashMap<Rc<str>, HashMap<Rc<str>, Demand>>; +/// A [`HashMap`] of [`Demand`] grouped first by commodity, then region +type CommodityDemandMap = HashMap<Rc<str>, HashMap<Rc<str>, Demand>>; /// Read the demand data from an iterator /// @@ -66,7 +66,7 @@ fn read_demand_from_iter( commodity_ids: &HashSet<Rc<str>>, region_ids: &HashSet<Rc<str>>, year_range: &RangeInclusive<u32>, -) -> Result<DemandHashMap> +) -> Result<CommodityDemandMap> where I: Iterator<Item = Demand>, { @@ -114,7 +114,7 @@ fn read_demand_file( commodity_ids: &HashSet<Rc<str>>, region_ids: &HashSet<Rc<str>>, year_range: &RangeInclusive<u32>, -) -> Result<DemandHashMap> { +) -> Result<CommodityDemandMap> { let file_path = model_dir.join(DEMAND_FILE_NAME); let demand_csv = read_csv(&file_path)?; read_demand_from_iter(demand_csv, commodity_ids, region_ids, year_range) @@ -125,7 +125,7 @@ fn read_demand_file( fn try_get_demand<'a>( commodity_id: &str, region_id: &str, - demand: &'a mut DemandHashMap, + demand: &'a mut CommodityDemandMap, ) -> Option<&'a mut Demand> { demand.get_mut(commodity_id)?.get_mut(region_id) } @@ -134,7 +134,7 @@ fn read_demand_slices_from_iter( iter: I, time_slice_info: &TimeSliceInfo, - demand: &mut DemandHashMap, + demand: &mut CommodityDemandMap, ) -> Result<()> where I: Iterator<Item = DemandSliceRaw>, @@ -168,7 +168,7 @@ where fn read_demand_slices( model_dir: &Path, time_slice_info: &TimeSliceInfo, - demand: &mut DemandHashMap, + demand: &mut CommodityDemandMap, ) -> Result<()> { let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); let demand_slices_csv = read_csv(&file_path)?; @@ -195,7 +195,7 @@ pub fn read_demand( region_ids: &HashSet<Rc<str>>, time_slice_info: &TimeSliceInfo, year_range: &RangeInclusive<u32>, -) -> Result<DemandHashMap> { +) -> Result<CommodityDemandMap> { let mut demand = read_demand_file(model_dir, commodity_ids, region_ids, year_range)?; // Read in demand slices
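An aside, not part of the patch series: PATCH 03 below wraps the inner region-keyed HashMap in a bespoke `DemandMap` type with a `get` method. A minimal, self-contained sketch of that pattern — the simplified `Demand` struct and the example values are illustrative stand-ins, not the crate's API:

use std::collections::HashMap;
use std::rc::Rc;

struct Demand { year: u32, demand: f64 } // simplified stand-in for the crate's type

struct DemandMap(HashMap<Rc<str>, Demand>);

impl DemandMap {
    // Look up the demand entry for a region, as the new `get` method does
    fn get(&self, region_id: &str) -> Option<&Demand> {
        self.0.get(region_id)
    }
}

fn main() {
    let map = DemandMap(
        [(Rc::from("GBR"), Demand { year: 2020, demand: 927.38 })]
            .into_iter()
            .collect(),
    );
    assert_eq!(map.get("GBR").map(|d| d.demand), Some(927.38));
    assert!(map.get("FRA").is_none());
}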
From c0f5935d54f646243955690cb2dd6a5e14bdf60a Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 25 Nov 2024 08:38:30 +0000 Subject: [PATCH 03/20] Make bespoke `DemandMap` type with `get` method --- src/commodity.rs | 6 ++-- src/demand.rs | 73 ++++++++++++++++++++++++++++++++++++-------------- src/process.rs | 3 +- 3 files changed, 57 insertions(+), 25 deletions(-) diff --git a/src/commodity.rs b/src/commodity.rs index e64c1d5d5..c88fdd8de 100644 --- a/src/commodity.rs +++ b/src/commodity.rs @@ -1,5 +1,5 @@ #![allow(missing_docs)] -use crate::demand::{read_demand, Demand}; +use crate::demand::{read_demand, DemandMap}; use crate::input::*; use crate::time_slice::{TimeSliceID, TimeSliceInfo, TimeSliceLevel}; use anyhow::{ensure, Context, Result}; @@ -28,7 +28,7 @@ pub struct Commodity { #[serde(skip)] pub costs: CommodityCostMap, #[serde(skip)] - pub demand_by_region: HashMap<Rc<str>, Demand>, + pub demand: DemandMap, } define_id_getter! {Commodity} @@ -268,7 +268,7 @@ pub fn read_commodities( commodity.costs = costs; } if let Some(demand) = demand.remove(&id) { - commodity.demand_by_region = demand; + commodity.demand = demand; } (id, commodity.into()) diff --git a/src/demand.rs b/src/demand.rs index 76fb7ade0..4b834674e 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -46,8 +46,24 @@ pub struct DemandSlice { pub fraction: f64, } +/// A map of [`Demand`], keyed by region +#[derive(PartialEq, Debug, Clone, Default)] +pub struct DemandMap(HashMap<Rc<str>, Demand>); + +impl DemandMap { + /// Create a new, empty [`DemandMap`] + pub fn new() -> DemandMap { + DemandMap::default() + } + + /// Retrieve a [`Demand`] entry from the map + pub fn get(&self, region_id: &str) -> Option<&Demand> { + self.0.get(region_id) + } +} + /// A [`HashMap`] of [`Demand`] grouped first by commodity, then region -type CommodityDemandMap = HashMap<Rc<str>, HashMap<Rc<str>, Demand>>; +type CommodityDemandMap = HashMap<Rc<str>, DemandMap>; /// Read the demand data from an iterator /// @@ -70,7 +86,7 @@ where I: Iterator<Item = Demand>, { - let mut map_by_commodity = HashMap::new(); + let mut map = HashMap::new(); for demand in iter { let commodity_id = commodity_ids.get_id(&demand.commodity_id)?; @@ -83,17 +99,17 @@ where ); // Get entry for this commodity - let map_by_region = map_by_commodity + let map = map .entry(commodity_id) - .or_insert_with(|| HashMap::with_capacity(1)); + .or_insert_with(|| DemandMap(HashMap::with_capacity(1))); ensure!( - map_by_region.insert(region_id, demand).is_none(), + map.0.insert(region_id, demand).is_none(), "Multiple entries for same commodity and region found" ); } - Ok(map_by_commodity) + Ok(map) } /// Read the demand.csv file. @@ -127,7 +143,7 @@ fn try_get_demand<'a>( region_id: &str, demand: &'a mut CommodityDemandMap, ) -> Option<&'a mut Demand> { - demand.get_mut(commodity_id)?.get_mut(region_id) + demand.get_mut(commodity_id)?.0.get_mut(region_id) } /// Read demand slices from an iterator and store them in `demand`.
@@ -214,6 +230,19 @@ mod tests { use std::path::Path; use tempfile::tempdir; + #[test] + fn test_demand_map_get() { + let value = Demand { + year: 2023, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + demand_slices: Vec::new(), + }; + let map = DemandMap(HashMap::from_iter([("North".into(), value.clone())])); + assert_eq!(map.get("North").unwrap(), &value) + } + /// Create an example demand file in dir_path fn create_demand_file(dir_path: &Path) { let file_path = dir_path.join(DEMAND_FILE_NAME); @@ -383,7 +412,7 @@ COM1,West,2023,13" HashMap::from_iter( [( "COM1".into(), - HashMap::from_iter([ + DemandMap(HashMap::from_iter([ ( "North".into(), Demand { @@ -424,7 +453,7 @@ COM1,West,2023,13" demand_slices: Vec::new() } ) - ]) + ])) )] .into_iter() ) @@ -450,18 +479,20 @@ COM1,West,2023,13" // Demand grouped by region let demand: HashMap<_, _> = [( "COM1".into(), - [( - "GBR".into(), - Demand { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - year: 2020, - demand: 1.0, - demand_slices: Vec::new(), - }, - )] - .into_iter() - .collect(), + DemandMap( + [( + "GBR".into(), + Demand { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + year: 2020, + demand: 1.0, + demand_slices: Vec::new(), + }, + )] + .into_iter() + .collect(), + ), )] .into_iter() .collect(); diff --git a/src/process.rs b/src/process.rs index b8c458a1d..1ea80beec 100644 --- a/src/process.rs +++ b/src/process.rs @@ -432,6 +432,7 @@ pub fn read_processes( #[cfg(test)] mod tests { use crate::commodity::{CommodityCostMap, CommodityType}; + use crate::demand::DemandMap; use crate::time_slice::TimeSliceLevel; use super::*; @@ -727,7 +728,7 @@ mod tests { kind: CommodityType::InputCommodity, time_slice_level: TimeSliceLevel::Annual, costs: CommodityCostMap::new(), - demand_by_region: HashMap::new(), + demand: DemandMap::new(), }; (Rc::clone(&commodity.id), commodity.into()) From f90f1a7edcf493c170e11d9ab94f75bb8539e9f9 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 25 Nov 2024 10:13:17 +0000 Subject: [PATCH 04/20] Require demand entry year to be milestone year --- src/commodity.rs | 3 +- src/demand.rs | 79 ++++++++++++++++++++++++------------------------ 2 files changed, 40 insertions(+), 42 deletions(-) diff --git a/src/commodity.rs b/src/commodity.rs index c88fdd8de..391ea5afc 100644 --- a/src/commodity.rs +++ b/src/commodity.rs @@ -251,13 +251,12 @@ pub fn read_commodities( milestone_years, )?; - let year_range = *milestone_years.first().unwrap()..=*milestone_years.last().unwrap(); let mut demand = read_demand( model_dir, &commodity_ids, region_ids, time_slice_info, - &year_range, + milestone_years, )?; // Populate Vecs for each Commodity diff --git a/src/demand.rs b/src/demand.rs index 4b834674e..bb4244353 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -4,7 +4,6 @@ use crate::time_slice::{TimeSliceInfo, TimeSliceSelection}; use anyhow::{ensure, Context, Result}; use serde::Deserialize; use std::collections::{HashMap, HashSet}; -use std::ops::RangeInclusive; use std::path::Path; use std::rc::Rc; @@ -72,7 +71,7 @@ type CommodityDemandMap = HashMap, DemandMap>; /// * `iter` - An iterator of `Demand`s /// * `commodity_ids` - All possible IDs of commodities /// * `region_ids` - All possible IDs for regions -/// * `year_range` - The year range for the simulation +/// * `milestone_years` - All milestone years /// /// # Returns /// @@ -81,7 +80,7 @@ fn read_demand_from_iter( iter: I, commodity_ids: &HashSet>, region_ids: &HashSet>, - year_range: &RangeInclusive, + 
milestone_years: &[u32], ) -> Result<CommodityDemandMap> where I: Iterator<Item = Demand>, { @@ -93,8 +92,9 @@ where let region_id = region_ids.get_id(&demand.region_id)?; ensure!( - year_range.contains(&demand.year), - "Year {} is out of range", + milestone_years.binary_search(&demand.year).is_ok(), + "Year {} is not a milestone year. \ + Input of non-milestone years is currently not supported.", demand.year ); @@ -119,7 +119,7 @@ where /// * `model_dir` - Folder containing model configuration files /// * `commodity_ids` - All possible IDs of commodities /// * `region_ids` - All possible IDs for regions -/// * `year_range` - The year range for the simulation +/// * `milestone_years` - All milestone years /// /// # Returns /// @@ -129,12 +129,11 @@ fn read_demand_file( model_dir: &Path, commodity_ids: &HashSet<Rc<str>>, region_ids: &HashSet<Rc<str>>, - year_range: &RangeInclusive<u32>, + milestone_years: &[u32], ) -> Result<CommodityDemandMap> { let file_path = model_dir.join(DEMAND_FILE_NAME); - let demand_csv = read_csv(&file_path)?; - read_demand_from_iter(demand_csv, commodity_ids, region_ids, year_range) - .with_context(|| input_err_msg(&file_path)) + let iter = read_csv(&file_path)?; + read_demand_from_iter(iter, commodity_ids, region_ids, milestone_years) } /// Try to get demand for the given commodity and region. Returns `None` if not found. @@ -200,7 +199,7 @@ fn read_demand_slices( /// * `commodity_ids` - All possible IDs of commodities /// * `region_ids` - All possible IDs for regions /// * `time_slice_info` - Information about seasons and times of day -/// * `year_range` - The year range for the simulation +/// * `milestone_years` - All milestone years /// /// # Returns /// @@ -210,9 +209,9 @@ pub fn read_demand( commodity_ids: &HashSet<Rc<str>>, region_ids: &HashSet<Rc<str>>, time_slice_info: &TimeSliceInfo, - year_range: &RangeInclusive<u32>, + milestone_years: &[u32], ) -> Result<CommodityDemandMap> { - let mut demand = read_demand_file(model_dir, commodity_ids, region_ids, year_range)?; + let mut demand = read_demand_file(model_dir, commodity_ids, region_ids, milestone_years)?; // Read in demand slices read_demand_slices(model_dir, time_slice_info, &mut demand)?; @@ -233,7 +232,7 @@ mod tests { #[test] fn test_demand_map_get() { let value = Demand { - year: 2023, + year: 2020, region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, @@ -250,10 +249,10 @@ mod tests { writeln!( file, "commodity_id,region_id,year,demand -COM1,North,2023,10 -COM1,South,2023,11 -COM1,East,2023,12 -COM1,West,2023,13" +COM1,North,2020,10 +COM1,South,2020,11 +COM1,East,2020,12 +COM1,West,2020,13" ) .unwrap(); } @@ -262,19 +261,19 @@ COM1,West,2023,13" fn test_read_demand_from_iter() { let commodity_ids = ["COM1".into()].into_iter().collect(); let region_ids = ["North".into(), "South".into()].into_iter().collect(); - let year_range = 2020..=2030; + let milestone_years = [2020, 2030]; // Valid let demand = [ Demand { - year: 2023, + year: 2020, region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, demand_slices: Vec::new(), }, Demand { - year: 2023, + year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, @@ -285,21 +284,21 @@ COM1,West,2023,13" demand.into_iter(), &commodity_ids, &region_ids, - &year_range + &milestone_years ) .is_ok()); // Bad commodity ID let demand = [ Demand { - year: 2023, + year: 2020, region_id: "North".to_string(), commodity_id: "COM2".to_string(), demand: 10.0, demand_slices: Vec::new(), }, Demand { - year: 2023, + year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, @@ -310,21 +309,21 @@ COM1,West,2023,13" demand.into_iter(), &commodity_ids, &region_ids, - &year_range + &milestone_years ) .is_err()); // Bad region ID let demand = [ Demand { - year: 2023, + year: 2020, region_id: "East".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, demand_slices: Vec::new(), }, Demand { - year: 2023, + year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, @@ -335,7 +334,7 @@ COM1,West,2023,13" demand.into_iter(), &commodity_ids, &region_ids, - &year_range + &milestone_years ) .is_err()); @@ -349,7 +348,7 @@ COM1,West,2023,13" demand_slices: Vec::new(), }, Demand { - year: 2023, + year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, @@ -360,28 +359,28 @@ COM1,West,2023,13" demand.into_iter(), &commodity_ids, &region_ids, - &year_range + &milestone_years ) .is_err()); // Multiple entries for same commodity and region let demand = [ Demand { - year: 2023, + year: 2020, region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, demand_slices: Vec::new(), }, Demand { - year: 2023, + year: 2020, region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, demand_slices: Vec::new(), }, Demand { - year: 2023, + year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, @@ -392,7 +391,7 @@ COM1,West,2023,13" demand.into_iter(), &commodity_ids, &region_ids, - &year_range + &milestone_years ) .is_err()); } @@ -405,8 +404,8 @@ COM1,West,2023,13" let region_ids = ["North".into(), "South".into(), "East".into(), "West".into()] .into_iter() .collect(); - let year_range = 2020..=2030; - let demand = read_demand_file(dir.path(), &commodity_ids, &region_ids, &year_range); + let milestone_years = [2020, 2030]; + let demand = read_demand_file(dir.path(), &commodity_ids, &region_ids, &milestone_years); assert_eq!( demand.unwrap(), HashMap::from_iter( [( "COM1".into(), DemandMap(HashMap::from_iter([ ( "North".into(), Demand { - year: 2023, + year: 2020, region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, @@ -416,7 +415,7 @@ COM1,West,2023,13" ( "South".into(), Demand { - year: 2023, + year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, @@ -426,7 +425,7 @@ COM1,West,2023,13" ( "East".into(), Demand { - year: 2023, + year: 2020, region_id: "East".to_string(), commodity_id: "COM1".to_string(), demand: 12.0, @@ -436,7 +435,7 @@ COM1,West,2023,13" ( "West".into(), Demand { - year: 2023, + year: 2020, region_id: "West".to_string(), commodity_id: "COM1".to_string(), demand: 13.0, From 181d3d11ae75de74769b2d0ef7197ce78ec2b515 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 14 Nov 2024 17:05:29 +0000 Subject: [PATCH 05/20] Check demand quantity is valid Closes #203. --- src/demand.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/demand.rs b/src/demand.rs index bb4244353..d5f744b0c 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -98,6 +98,11 @@ where demand.year ); + ensure!( + demand.demand.is_normal() && demand.demand > 0.0, + "Demand must be a valid number greater than zero" + ); + // Get entry for this commodity let map = map .entry(commodity_id) @@ -363,6 +368,31 @@ COM1,West,2020,13" ) .is_err()); + // Bad demand quantity + macro_rules!
test_quantity { + ($quantity: expr) => { + let demand = [Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: $quantity, + demand_slices: Vec::new(), + }]; + assert!(read_demand_from_iter( + demand.into_iter(), + &commodity_ids, + ®ion_ids, + &milestone_years, + ) + .is_err()); + }; + } + test_quantity!(-1.0); + test_quantity!(0.0); + test_quantity!(f64::NAN); + test_quantity!(f64::NEG_INFINITY); + test_quantity!(f64::INFINITY); + // Multiple entries for same commodity and region let demand = [ Demand { From e260ac226b97a9e6e30b5403fa763247a1d94708 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Tue, 12 Nov 2024 09:53:09 +0000 Subject: [PATCH 06/20] Move check that fractions sum to one to input.rs --- src/input.rs | 35 +++++++++++++++++++++++++++++++++++ src/time_slice.rs | 37 ++----------------------------------- 2 files changed, 37 insertions(+), 35 deletions(-) diff --git a/src/input.rs b/src/input.rs index c9ca85804..ac50d1316 100644 --- a/src/input.rs +++ b/src/input.rs @@ -1,6 +1,7 @@ //! Common routines for handling input data. #![allow(missing_docs)] use anyhow::{ensure, Context, Result}; +use float_cmp::approx_eq; use itertools::Itertools; use serde::de::{Deserialize, DeserializeOwned, Deserializer}; use serde_string_enum::{DeserializeLabeledStringEnum, SerializeLabeledStringEnum}; @@ -188,6 +189,21 @@ where .with_context(|| input_err_msg(file_path)) } +/// Check that fractions sum to (approximately) one +pub fn check_fractions_sum_to_one(fractions: I) -> Result<()> +where + I: Iterator, +{ + let sum = fractions.sum(); + ensure!( + approx_eq!(f64, sum, 1.0, epsilon = 1e-5), + "Sum of fractions does not equal one (actual: {})", + sum + ); + + Ok(()) +} + #[cfg(test)] mod tests { use super::*; @@ -345,4 +361,23 @@ mod tests { let process_ids = create_ids(); read_csv_grouped_by_id::(&file_path, &process_ids).unwrap(); } + + #[test] + fn test_check_fractions_sum_to_one() { + // Single input, valid + assert!(check_fractions_sum_to_one([1.0].into_iter()).is_ok()); + + // Multiple inputs, valid + assert!(check_fractions_sum_to_one([0.4, 0.6].into_iter()).is_ok()); + + // Single input, invalid + assert!(check_fractions_sum_to_one([0.5].into_iter()).is_err()); + + // Multiple inputs, invalid + assert!(check_fractions_sum_to_one([0.4, 0.3].into_iter()).is_err()); + + // Edge cases + assert!(check_fractions_sum_to_one([f64::INFINITY].into_iter()).is_err()); + assert!(check_fractions_sum_to_one([f64::NAN].into_iter()).is_err()); + } } diff --git a/src/time_slice.rs b/src/time_slice.rs index 2b2d393f0..61b393c5c 100644 --- a/src/time_slice.rs +++ b/src/time_slice.rs @@ -5,7 +5,6 @@ #![allow(missing_docs)] use crate::input::*; use anyhow::{ensure, Context, Result}; -use float_cmp::approx_eq; use itertools::Itertools; use serde::Deserialize; use serde_string_enum::DeserializeLabeledStringEnum; @@ -183,7 +182,8 @@ where } // Validate data - check_time_slice_fractions_sum_to_one(fractions.values().cloned())?; + check_fractions_sum_to_one(fractions.values().cloned()) + .context("Invalid time slice fractions")?; Ok(TimeSliceInfo { seasons, @@ -223,20 +223,6 @@ pub fn read_time_slice_info(model_dir: &Path) -> Result { read_time_slice_info_from_iter(time_slices_csv).with_context(|| input_err_msg(file_path)) } -/// Check that time slice fractions sum to (approximately) one -fn check_time_slice_fractions_sum_to_one(fractions: I) -> Result<()> -where - I: Iterator, -{ - let sum = fractions.sum(); - ensure!( - approx_eq!(f64, sum, 1.0, epsilon = 1e-5), - 
"Sum of time slice fractions does not equal one (actual: {sum})" - ); - - Ok(()) -} - #[cfg(test)] mod tests { use super::*; @@ -361,23 +347,4 @@ autumn,evening,0.25" iter::once(&slices[1]), ); } - - #[test] - fn test_check_time_slice_fractions_sum_to_one() { - // Single input, valid - assert!(check_time_slice_fractions_sum_to_one([1.0].into_iter()).is_ok()); - - // Multiple inputs, valid - assert!(check_time_slice_fractions_sum_to_one([0.4, 0.6].into_iter()).is_ok()); - - // Single input, invalid - assert!(check_time_slice_fractions_sum_to_one([0.5].into_iter()).is_err()); - - // Multiple inputs, invalid - assert!(check_time_slice_fractions_sum_to_one([0.4, 0.3].into_iter()).is_err()); - - // Edge cases - assert!(check_time_slice_fractions_sum_to_one([f64::INFINITY].into_iter()).is_err()); - assert!(check_time_slice_fractions_sum_to_one([f64::NAN].into_iter()).is_err()); - } } From 422ec1d4e8214826ef7562d7e01631083f5b588e Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 12 Dec 2024 17:01:28 +0000 Subject: [PATCH 07/20] Compute demand for each time slice when loading data Closes #231. Closes #202. --- src/demand.rs | 856 ++++++++++++++++++++++++++++++++++---------------- 1 file changed, 589 insertions(+), 267 deletions(-) diff --git a/src/demand.rs b/src/demand.rs index d5f744b0c..0101a152e 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -1,7 +1,9 @@ -//! Code for working with demand for a given commodity. Demand can vary by region and year. +//! Code for working with demand for a given commodity. Demand can vary by region, year and time +//! slice. use crate::input::*; -use crate::time_slice::{TimeSliceInfo, TimeSliceSelection}; +use crate::time_slice::{TimeSliceID, TimeSliceInfo}; use anyhow::{ensure, Context, Result}; +use itertools::Itertools; use serde::Deserialize; use std::collections::{HashMap, HashSet}; use std::path::Path; @@ -10,25 +12,54 @@ use std::rc::Rc; const DEMAND_FILE_NAME: &str = "demand.csv"; const DEMAND_SLICES_FILE_NAME: &str = "demand_slicing.csv"; +/// A map relating region, year and time slice to demand (in real units, not a fraction). +/// +/// This data type is exported as this is the way in we want to look up demand outside of this +/// module. +#[derive(PartialEq, Debug, Clone, Default)] +pub struct DemandMap(HashMap); + +/// The key for a [`DemandMap`] +#[derive(PartialEq, Eq, Hash, Debug, Clone)] +struct DemandMapKey { + region_id: Rc, + year: u32, + time_slice: TimeSliceID, +} + +impl DemandMap { + /// Create a new, empty [`DemandMap`] + pub fn new() -> DemandMap { + DemandMap::default() + } + + /// Retrieve the demand for the specified region, year and time slice + pub fn get(&self, region_id: Rc, year: u32, time_slice: TimeSliceID) -> Option { + self.0 + .get(&DemandMapKey { + region_id, + year, + time_slice, + }) + .copied() + } +} + /// Represents a single demand entry in the dataset. 
#[derive(Debug, Clone, Deserialize, PartialEq)] -pub struct Demand { +struct Demand { /// The commodity this demand entry refers to - pub commodity_id: String, + commodity_id: String, /// The region of the demand entry - pub region_id: String, + region_id: String, /// The year of the demand entry - pub year: u32, + year: u32, /// Annual demand quantity - pub demand: f64, - - /// How demand varies by time slice - #[serde(skip)] - pub demand_slices: Vec, + demand: f64, } #[derive(Clone, Deserialize)] -struct DemandSliceRaw { +struct DemandSlice { commodity_id: String, region_id: String, time_slice: String, @@ -36,56 +67,97 @@ struct DemandSliceRaw { fraction: f64, } -/// How demand varies by time slice -#[derive(Debug, Clone, PartialEq)] -pub struct DemandSlice { - /// Which time slice(s) this applies to - pub time_slice: TimeSliceSelection, - /// The fraction of total demand (between 0 and 1 inclusive) - pub fraction: f64, +/// A map relating commodity, region and time slice to the fraction of annual demand +type DemandSliceMap = HashMap; + +#[derive(PartialEq, Eq, Hash, Debug)] +struct DemandSliceMapKey { + commodity_id: Rc, + region_id: Rc, + time_slice: TimeSliceID, } -/// A map of [`Demand`], keyed by region -#[derive(PartialEq, Debug, Clone, Default)] -pub struct DemandMap(HashMap, Demand>); +/// A map relating commodity, region and year to annual demand +type AnnualDemandMap = HashMap; -impl DemandMap { - /// Create a new, empty [`DemandMap`] - pub fn new() -> DemandMap { - DemandMap::default() - } +#[derive(PartialEq, Eq, Hash, Debug)] +struct AnnualDemandMapKey { + commodity_id: Rc, + region_id: Rc, + year: u32, +} - /// Retrieve a [`Demand`] entry from the map - pub fn get(&self, region_id: &str) -> Option<&Demand> { - self.0.get(region_id) - } +/// Reads demand data from CSV files. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible IDs of commodities +/// * `region_ids` - All possible IDs for regions +/// * `time_slice_info` - Information about seasons and times of day +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// This function returns [`DemandMap`]s grouped by commodity ID. +pub fn read_demand( + model_dir: &Path, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + time_slice_info: &TimeSliceInfo, + milestone_years: &[u32], +) -> Result, DemandMap>> { + let demand = read_demand_file(model_dir, commodity_ids, region_ids, milestone_years)?; + let slices = read_demand_slices(model_dir, commodity_ids, region_ids, time_slice_info)?; + + Ok(compute_demand_map(&demand, &slices, time_slice_info)) } -/// A [`HashMap`] of [`Demand`] grouped first by commodity, then region -type CommodityDemandMap = HashMap, DemandMap>; +/// Read the demand.csv file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible IDs of commodities +/// * `region_ids` - All possible IDs for regions +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// Annual demand data, grouped by commodity, region and milestone year. +fn read_demand_file( + model_dir: &Path, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + milestone_years: &[u32], +) -> Result { + let file_path = model_dir.join(DEMAND_FILE_NAME); + let iter = read_csv(&file_path)?; + read_demand_from_iter(iter, commodity_ids, region_ids, milestone_years) +} -/// Read the demand data from an iterator +/// Read the demand data from an iterator. 
/// /// # Arguments /// -/// * `iter` - An iterator of `Demand`s +/// * `iter` - An iterator of [`Demand`]s /// * `commodity_ids` - All possible IDs of commodities /// * `region_ids` - All possible IDs for regions /// * `milestone_years` - All milestone years /// /// # Returns /// -/// The demand data (except for the demand slice information), grouped by commodity and region. +/// The demand for each combination of commodity, region and year. fn read_demand_from_iter( iter: I, commodity_ids: &HashSet>, region_ids: &HashSet>, milestone_years: &[u32], -) -> Result +) -> Result where I: Iterator, { - let mut map = HashMap::new(); + let mut map = AnnualDemandMap::new(); for demand in iter { let commodity_id = commodity_ids.get_id(&demand.commodity_id)?; @@ -103,148 +175,210 @@ where "Demand must be a valid number greater than zero" ); - // Get entry for this commodity - let map = map - .entry(commodity_id) - .or_insert_with(|| DemandMap(HashMap::with_capacity(1))); - + let key = AnnualDemandMapKey { + commodity_id: Rc::clone(&commodity_id), + region_id: Rc::clone(®ion_id), + year: demand.year, + }; ensure!( - map.0.insert(region_id, demand).is_none(), - "Multiple entries for same commodity and region found" + map.insert(key, demand.demand).is_none(), + "Duplicate demand entries (commodity: {}, region: {}, year: {})", + commodity_id, + region_id, + demand.year ); } Ok(map) } -/// Read the demand.csv file. +/// Read demand slices from specified model directory. /// /// # Arguments /// /// * `model_dir` - Folder containing model configuration files /// * `commodity_ids` - All possible IDs of commodities /// * `region_ids` - All possible IDs for regions -/// * `milestone_years` - All milestone years -/// -/// # Returns -/// -/// The demand data except for the demand slice information, which resides in a separate CSV file. -/// The data is grouped by commodity and region. -fn read_demand_file( +/// * `time_slice_info` - Information about seasons and times of day +fn read_demand_slices( model_dir: &Path, commodity_ids: &HashSet>, region_ids: &HashSet>, - milestone_years: &[u32], -) -> Result { - let file_path = model_dir.join(DEMAND_FILE_NAME); - let iter = read_csv(&file_path)?; - read_demand_from_iter(iter, commodity_ids, region_ids, milestone_years) -} - -/// Try to get demand for the given commodity and region. Returns `None` if not found. -fn try_get_demand<'a>( - commodity_id: &str, - region_id: &str, - demand: &'a mut CommodityDemandMap, -) -> Option<&'a mut Demand> { - demand.get_mut(commodity_id)?.0.get_mut(region_id) + time_slice_info: &TimeSliceInfo, +) -> Result { + let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); + let demand_slices_csv = read_csv(&file_path)?; + read_demand_slices_from_iter( + demand_slices_csv, + commodity_ids, + region_ids, + time_slice_info, + ) + .with_context(|| input_err_msg(file_path)) } -/// Read demand slices from an iterator and store them in `demand`. 
+/// Read demand slices from an iterator fn read_demand_slices_from_iter( iter: I, + commodity_ids: &HashSet>, + region_ids: &HashSet>, time_slice_info: &TimeSliceInfo, - demand: &mut CommodityDemandMap, -) -> Result<()> +) -> Result where - I: Iterator, + I: Iterator, { + let mut demand_slices = DemandSliceMap::new(); + + // Keep track of commodity + region pairs for validation + let mut commodity_regions = HashSet::new(); + + let mut time_slices = Vec::new(); for slice in iter { - let demand = - try_get_demand(&slice.commodity_id, &slice.region_id, demand).with_context(|| { - format!( - "No demand specified for commodity {} in region {}", - &slice.commodity_id, &slice.region_id - ) - })?; + let commodity_id = commodity_ids.get_id(&slice.commodity_id)?; + let region_id = region_ids.get_id(&slice.region_id)?; + + // We need to know how many time slices are covered by the current demand slice entry and + // how long they are relative to one another so that we can divide up the demand for this + // entry appropriately + let ts_selection = time_slice_info.get_selection(&slice.time_slice)?; + let ts_iter = time_slice_info.iter_selection(&ts_selection); + time_slices + .extend(ts_iter.map(|ts| (ts.clone(), time_slice_info.fractions.get(ts).unwrap()))); + let time_total: f64 = time_slices.iter().map(|(_, fraction)| *fraction).sum(); + for (time_slice, time_fraction) in time_slices.drain(0..) { + let key = DemandSliceMapKey { + commodity_id: Rc::clone(&commodity_id), + region_id: Rc::clone(®ion_id), + time_slice: time_slice.clone(), + }; - let time_slice = time_slice_info.get_selection(&slice.time_slice)?; - demand.demand_slices.push(DemandSlice { - time_slice, - fraction: slice.fraction, - }); + // Share demand between the time slices in proportion to duration + let demand_fraction = slice.fraction * time_fraction / time_total; + ensure!(demand_slices.insert(key, demand_fraction).is_none(), + "Duplicate demand slicing entry (or same time slice covered by more than one entry) \ + (commodity: {commodity_id}, region: {region_id}, time slice: {time_slice})" + ); + } + + commodity_regions.insert((commodity_id, region_id)); } - Ok(()) + validate_demand_slices(commodity_regions, &demand_slices, time_slice_info)?; + + Ok(demand_slices) } -/// Read demand slices from specified model directory. +/// Check that the [`DemandSliceMap`] is well formed. 
/// -/// # Arguments +/// Specifically, check: /// -/// * `model_dir` - Folder containing model configuration files -/// * `time_slice_info` - Information about seasons and times of day -/// * `demand` - Demand data grouped by commodity and region -fn read_demand_slices( - model_dir: &Path, +/// * It is non-empty +/// * If an entry is provided for any commodity + region pair, there must be entries covering every +/// time slice +/// * The demand fractions for all entries related to a commodity + region pair sum to one +fn validate_demand_slices( + commodity_regions: HashSet<(Rc, Rc)>, + demand_slices: &DemandSliceMap, time_slice_info: &TimeSliceInfo, - demand: &mut CommodityDemandMap, ) -> Result<()> { - let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); - let demand_slices_csv = read_csv(&file_path)?; - read_demand_slices_from_iter(demand_slices_csv, time_slice_info, demand) - .with_context(|| input_err_msg(file_path)) + ensure!(!demand_slices.is_empty(), "Empty demand slices file"); + + for (commodity_id, region_id) in commodity_regions { + time_slice_info + .iter() + .map(|time_slice| { + let key = DemandSliceMapKey { + commodity_id: Rc::clone(&commodity_id), + region_id: Rc::clone(®ion_id), + time_slice: time_slice.clone(), + }; + + demand_slices.get(&key).with_context(|| { + format!( + "Demand slice missing for time slice {} (commodity: {}, region {})", + time_slice, commodity_id, region_id + ) + }) + }) + .process_results(|iter| { + check_fractions_sum_to_one(iter.copied()).context("Invalid demand fractions") + })??; + } + + Ok(()) } -/// Reads demand data from a CSV file. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `commodity_ids` - All possible IDs of commodities -/// * `region_ids` - All possible IDs for regions -/// * `time_slice_info` - Information about seasons and times of day -/// * `milestone_years` - All milestone years -/// -/// # Returns -/// -/// This function returns demand data grouped by commodity and then region. 
-pub fn read_demand( - model_dir: &Path, - commodity_ids: &HashSet>, - region_ids: &HashSet>, +/// Calculate the demand for each combination of commodity, region, year and time slice +fn compute_demand_map( + demand: &AnnualDemandMap, + slices: &DemandSliceMap, time_slice_info: &TimeSliceInfo, - milestone_years: &[u32], -) -> Result { - let mut demand = read_demand_file(model_dir, commodity_ids, region_ids, milestone_years)?; +) -> HashMap, DemandMap> { + let mut map = HashMap::new(); + for (demand_key, annual_demand) in demand.iter() { + let commodity_id = &demand_key.commodity_id; + let region_id = &demand_key.region_id; + for time_slice in time_slice_info.iter() { + let slice_key = DemandSliceMapKey { + commodity_id: Rc::clone(commodity_id), + region_id: Rc::clone(region_id), + time_slice: time_slice.clone(), + }; - // Read in demand slices - read_demand_slices(model_dir, time_slice_info, &mut demand)?; + // NB: This has already been checked, so shouldn't fail + let demand_fraction = slices.get(&slice_key).unwrap_or_else(|| { + panic!( + "Missing demand slice entry (commodity: {}, region: {}, time slice: {})", + commodity_id, region_id, time_slice + ) + }); + + // Get or create entry + let map = map + .entry(Rc::clone(commodity_id)) + .or_insert_with(DemandMap::new); + + // Add a new demand entry + map.0.insert( + DemandMapKey { + region_id: Rc::clone(region_id), + year: demand_key.year, + time_slice: time_slice.clone(), + }, + annual_demand * demand_fraction, + ); + } + } - Ok(demand) + map } #[cfg(test)] mod tests { + use super::*; use crate::time_slice::TimeSliceID; - use super::*; use std::fs::File; use std::io::Write; + use std::iter; use std::path::Path; use tempfile::tempdir; #[test] fn test_demand_map_get() { - let value = Demand { + let time_slice = TimeSliceID { + season: "all-year".into(), + time_of_day: "all-day".into(), + }; + let key = DemandMapKey { + region_id: "North".into(), year: 2020, - region_id: "North".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new(), + time_slice: time_slice.clone(), }; - let map = DemandMap(HashMap::from_iter([("North".into(), value.clone())])); - assert_eq!(map.get("North").unwrap(), &value) + let value = 0.2; + + let map = DemandMap(HashMap::from_iter(iter::once((key, value)))); + assert_eq!(map.get("North".into(), 2020, time_slice).unwrap(), value) } /// Create an example demand file in dir_path @@ -275,14 +409,12 @@ COM1,West,2020,13" region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, - demand_slices: Vec::new(), }, Demand { year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, - demand_slices: Vec::new(), }, ]; assert!(read_demand_from_iter( @@ -300,14 +432,12 @@ COM1,West,2020,13" region_id: "North".to_string(), commodity_id: "COM2".to_string(), demand: 10.0, - demand_slices: Vec::new(), }, Demand { year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, - demand_slices: Vec::new(), }, ]; assert!(read_demand_from_iter( @@ -325,14 +455,12 @@ COM1,West,2020,13" region_id: "East".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, - demand_slices: Vec::new(), }, Demand { year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, - demand_slices: Vec::new(), }, ]; assert!(read_demand_from_iter( @@ -350,14 +478,12 @@ COM1,West,2020,13" region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, - demand_slices: Vec::new(), }, Demand { 
year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, - demand_slices: Vec::new(), }, ]; assert!(read_demand_from_iter( @@ -376,7 +502,6 @@ COM1,West,2020,13" region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: $quantity, - demand_slices: Vec::new(), }]; assert!(read_demand_from_iter( demand.into_iter(), @@ -400,21 +525,18 @@ COM1,West,2020,13" region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, - demand_slices: Vec::new(), }, Demand { year: 2020, region_id: "North".to_string(), commodity_id: "COM1".to_string(), demand: 10.0, - demand_slices: Vec::new(), }, Demand { year: 2020, region_id: "South".to_string(), commodity_id: "COM1".to_string(), demand: 11.0, - demand_slices: Vec::new(), }, ]; assert!(read_demand_from_iter( @@ -435,65 +557,51 @@ COM1,West,2020,13" .into_iter() .collect(); let milestone_years = [2020, 2030]; - let demand = read_demand_file(dir.path(), &commodity_ids, ®ion_ids, &milestone_years); + let expected = AnnualDemandMap::from_iter([ + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "North".into(), + year: 2020, + }, + 10.0, + ), + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "South".into(), + year: 2020, + }, + 11.0, + ), + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "East".into(), + year: 2020, + }, + 12.0, + ), + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "West".into(), + year: 2020, + }, + 13.0, + ), + ]); assert_eq!( - demand.unwrap(), - HashMap::from_iter( - [( - "COM1".into(), - DemandMap(HashMap::from_iter([ - ( - "North".into(), - Demand { - year: 2020, - region_id: "North".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new() - } - ), - ( - "South".into(), - Demand { - year: 2020, - region_id: "South".to_string(), - commodity_id: "COM1".to_string(), - demand: 11.0, - demand_slices: Vec::new() - } - ), - ( - "East".into(), - Demand { - year: 2020, - region_id: "East".to_string(), - commodity_id: "COM1".to_string(), - demand: 12.0, - demand_slices: Vec::new() - } - ), - ( - "West".into(), - Demand { - year: 2020, - region_id: "West".to_string(), - commodity_id: "COM1".to_string(), - demand: 13.0, - demand_slices: Vec::new() - } - ) - ])) - )] - .into_iter() - ) + read_demand_file(dir.path(), &commodity_ids, ®ion_ids, &milestone_years).unwrap(), + expected ); } #[test] fn test_read_demand_slices_from_iter() { let time_slice_info = TimeSliceInfo { - seasons: ["winter".into()].into_iter().collect(), - times_of_day: ["day".into()].into_iter().collect(), + seasons: iter::once("winter".into()).collect(), + times_of_day: iter::once("day".into()).collect(), fractions: [( TimeSliceID { season: "winter".into(), @@ -504,104 +612,318 @@ COM1,West,2020,13" .into_iter() .collect(), }; - - // Demand grouped by region - let demand: HashMap<_, _> = [( - "COM1".into(), - DemandMap( - [( - "GBR".into(), - Demand { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - year: 2020, - demand: 1.0, - demand_slices: Vec::new(), - }, - )] - .into_iter() - .collect(), - ), - )] - .into_iter() - .collect(); + let commodity_ids = iter::once("COM1".into()).collect(); + let region_ids = iter::once("GBR".into()).collect(); // Valid - { - let mut demand = demand.clone(); - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; + let demand_slice = DemandSlice { + 
commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + let time_slice = time_slice_info + .get_time_slice_id_from_str("winter.day") + .unwrap(); + let key = DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice, + }; + let expected = DemandSliceMap::from_iter(iter::once((key, 1.0))); + assert_eq!( read_demand_slices_from_iter( - [demand_slice.clone()].into_iter(), + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, &time_slice_info, - &mut demand, ) - .unwrap(); - let time_slice = time_slice_info.get_selection("winter.day").unwrap(); + .unwrap(), + expected + ); + + // Valid, multiple time slices + { + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: ["day".into(), "night".into()].into_iter().collect(), + fractions: [ + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + 3.0 / 16.0, + ), + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + 5.0 / 16.0, + ), + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 3.0 / 16.0, + ), + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + 5.0 / 16.0, + ), + ] + .into_iter() + .collect(), + }; + let demand_slices = [ + DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }, + DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "summer".into(), + fraction: 0.5, + }, + ]; + let expected = DemandSliceMap::from_iter([ + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + }, + 3.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + }, + 5.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + }, + 3.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + }, + 5.0 / 16.0, + ), + ]); assert_eq!( - try_get_demand("COM1", "GBR", &mut demand) - .unwrap() - .demand_slices, - vec![DemandSlice { - time_slice, - fraction: 1.0 - }] + read_demand_slices_from_iter( + demand_slices.into_iter(), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .unwrap(), + expected ); } + // Empty CSV file + assert!(read_demand_slices_from_iter( + iter::empty(), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .is_err()); + // Bad commodity - { - let mut demand = demand.clone(); - let demand_slice = DemandSliceRaw { - commodity_id: "COM2".into(), - region_id: "GBR".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand - ) - .is_err()); - } + let demand_slice = DemandSlice { + commodity_id: "COM2".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .is_err()); // Bad region 
- { - let mut demand = demand.clone(); - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "USA".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand - ) - .is_err()); - } + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "FRA".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .is_err()); + + // Bad time slice selection + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "summer".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .is_err()); - // Bad time slice { - let mut demand = demand.clone(); - let demand_slice = DemandSliceRaw { + // Some time slices uncovered + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: iter::once("day".into()).collect(), + fractions: [ + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 0.5, + ), + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + 0.5, + ), + ] + .into_iter() + .collect(), + }; + let demand_slice = DemandSlice { commodity_id: "COM1".into(), region_id: "GBR".into(), - time_slice: "summer.night".into(), + time_slice: "winter".into(), fraction: 1.0, }; assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, &time_slice_info, - &mut demand, ) .is_err()); } + + // Same time slice twice + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + iter::repeat_n(demand_slice.clone(), 2), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .is_err()); + + // Whole season and single time slice conflicting + let demand_slice_season = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + [demand_slice, demand_slice_season].into_iter(), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .is_err()); + + // Fractions don't sum to one + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice), + &commodity_ids, + ®ion_ids, + &time_slice_info, + ) + .is_err()); + } + + #[test] + #[should_panic] + fn test_compute_demand_map_missing_entry() { + let time_slice_info = TimeSliceInfo { + seasons: iter::once("winter".into()).collect(), + times_of_day: iter::once("day".into()).collect(), + fractions: [( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 1.0, + )] + .into_iter() + .collect(), + }; + let key = AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + year: 2024, + }; + let demand = iter::once((key, 1.0)).collect(); + + // NB: No entry for GBR + let key = DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "FRA".into(), + time_slice: time_slice_info + .get_time_slice_id_from_str("winter.day") + 
.unwrap(), + }; + let slices = iter::once((key, 1.0)).collect(); + + compute_demand_map(&demand, &slices, &time_slice_info); } } From b15b63a57876b1064b1ade6b21e5f53825e62e35 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Tue, 17 Dec 2024 10:43:30 +0000 Subject: [PATCH 08/20] Remove unnecessarily verbose error-checking --- src/demand.rs | 43 +------------------------------------------ 1 file changed, 1 insertion(+), 42 deletions(-) diff --git a/src/demand.rs b/src/demand.rs index 0101a152e..57d05357c 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -326,12 +326,7 @@ fn compute_demand_map( }; // NB: This has already been checked, so shouldn't fail - let demand_fraction = slices.get(&slice_key).unwrap_or_else(|| { - panic!( - "Missing demand slice entry (commodity: {}, region: {}, time slice: {})", - commodity_id, region_id, time_slice - ) - }); + let demand_fraction = slices.get(&slice_key).unwrap(); // Get or create entry let map = map @@ -890,40 +885,4 @@ COM1,West,2020,13" ) .is_err()); } - - #[test] - #[should_panic] - fn test_compute_demand_map_missing_entry() { - let time_slice_info = TimeSliceInfo { - seasons: iter::once("winter".into()).collect(), - times_of_day: iter::once("day".into()).collect(), - fractions: [( - TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - 1.0, - )] - .into_iter() - .collect(), - }; - let key = AnnualDemandMapKey { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - year: 2024, - }; - let demand = iter::once((key, 1.0)).collect(); - - // NB: No entry for GBR - let key = DemandSliceMapKey { - commodity_id: "COM1".into(), - region_id: "FRA".into(), - time_slice: time_slice_info - .get_time_slice_id_from_str("winter.day") - .unwrap(), - }; - let slices = iter::once((key, 1.0)).collect(); - - compute_demand_map(&demand, &slices, &time_slice_info); - } } From 49c180a1eaf1b117dc1cc045e6a04f5fda32fac0 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 12 Dec 2024 17:23:36 +0000 Subject: [PATCH 09/20] Require demand entries for all milestone years Closes #230. 
--- examples/simple/demand.csv | 1 + src/demand.rs | 41 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/examples/simple/demand.csv b/examples/simple/demand.csv index 1884bd0c9..7fbef6ae7 100644 --- a/examples/simple/demand.csv +++ b/examples/simple/demand.csv @@ -1,2 +1,3 @@ commodity_id,region_id,year,demand RSHEAT,GBR,2020,927.38 +RSHEAT,GBR,2100,927.38 diff --git a/src/demand.rs b/src/demand.rs index 57d05357c..0cdb9e2d6 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -159,6 +159,10 @@ where { let mut map = AnnualDemandMap::new(); + // Keep track of all commodity + region pairs so we can check that every milestone year is + // covered + let mut commodity_regions = HashSet::new(); + for demand in iter { let commodity_id = commodity_ids.get_id(&demand.commodity_id)?; let region_id = region_ids.get_id(&demand.region_id)?; @@ -187,6 +191,24 @@ where region_id, demand.year ); + + commodity_regions.insert((commodity_id, region_id)); + } + + // If a commodity + region combination is represented, it must include entries for every + // milestone year + for (commodity_id, region_id) in commodity_regions { + for year in milestone_years.iter().copied() { + let key = AnnualDemandMapKey { + commodity_id: Rc::clone(&commodity_id), + region_id: Rc::clone(&region_id), + year, + }; + ensure!( + map.contains_key(&key), + "Missing milestone year {year} for commodity {commodity_id} in region {region_id}" + ); + } } Ok(map) @@ -417,7 +417,7 @@ fn test_read_demand_from_iter() { let commodity_ids = ["COM1".into()].into_iter().collect(); let region_ids = ["North".into(), "South".into()].into_iter().collect(); - let milestone_years = [2020, 2030]; + let milestone_years = [2020]; // Valid let demand = [ Demand { @@ -541,6 +563,21 @@ COM1,West,2020,13" ) .is_err()); + + // Missing entry for a milestone year + let demand = Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + }; + assert!(read_demand_from_iter( + iter::once(demand), + &commodity_ids, + &region_ids, + &[2020, 2030] + ) + .is_err()); } #[test] @@ -551,7 +588,7 @@ COM1,West,2020,13" let region_ids = ["North".into(), "South".into(), "East".into(), "West".into()] .into_iter() .collect(); - let milestone_years = [2020, 2030]; + let milestone_years = [2020]; let expected = AnnualDemandMap::from_iter([ ( AnnualDemandMapKey { commodity_id: "COM1".into(), region_id: "North".into(), year: 2020, }, 10.0, ), From f9cc83c83e95ed8b54aeff9c818f5a968c83fc9b Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Tue, 17 Dec 2024 11:50:59 +0000 Subject: [PATCH 10/20] Check for demand slicing provided without corresponding entry in demand.csv --- src/demand.rs | 98 +++++++++++++++++++++++++++++++++++-------------- 1 file changed, 71 insertions(+), 27 deletions(-) diff --git a/src/demand.rs b/src/demand.rs index 0cdb9e2d6..873a962a9 100644 --- a/src/demand.rs +++ b/src/demand.rs @@ -87,6 +87,8 @@ struct AnnualDemandMapKey { year: u32, } +type CommodityRegionPairs = HashSet<(Rc<str>, Rc<str>)>; + /// Reads demand data from CSV files. /// /// # Arguments /// @@ -107,8 +109,15 @@ pub fn read_demand( time_slice_info: &TimeSliceInfo, milestone_years: &[u32], ) -> Result<HashMap<Rc<str>, DemandMap>> { - let demand = read_demand_file(model_dir, commodity_ids, region_ids, milestone_years)?; - let slices = read_demand_slices(model_dir, commodity_ids, region_ids, time_slice_info)?; + let (demand, commodity_regions) = + read_demand_file(model_dir, commodity_ids, region_ids, milestone_years)?; + let slices = read_demand_slices( + model_dir, + commodity_ids, + region_ids, + &commodity_regions, + time_slice_info, + )?; Ok(compute_demand_map(&demand, &slices, time_slice_info)) } @@ -130,7 +139,7 @@ fn read_demand_file( commodity_ids: &HashSet<Rc<str>>, region_ids: &HashSet<Rc<str>>, milestone_years: &[u32], -) -> Result<AnnualDemandMap> { +) -> Result<(AnnualDemandMap, CommodityRegionPairs)> { let file_path = model_dir.join(DEMAND_FILE_NAME); let iter = read_csv(&file_path)?; read_demand_from_iter(iter, commodity_ids, region_ids, milestone_years) @@ -147,13 +156,14 @@ fn read_demand_file( /// /// # Returns /// -/// The demand for each combination of commodity, region and year. +/// The demand for each combination of commodity, region and year along with a [`HashSet`] of all +/// commodity + region pairs included in the file. fn read_demand_from_iter( iter: I, commodity_ids: &HashSet<Rc<str>>, region_ids: &HashSet<Rc<str>>, milestone_years: &[u32], -) -> Result<AnnualDemandMap> +) -> Result<(AnnualDemandMap, CommodityRegionPairs)> where I: Iterator<Item = Demand>, { @@ -197,11 +207,11 @@ where // If a commodity + region combination is represented, it must include entries for every // milestone year - for (commodity_id, region_id) in commodity_regions { + for (commodity_id, region_id) in commodity_regions.iter() { for year in milestone_years.iter().copied() { let key = AnnualDemandMapKey { - commodity_id: Rc::clone(&commodity_id), - region_id: Rc::clone(&region_id), + commodity_id: Rc::clone(commodity_id), + region_id: Rc::clone(region_id), year, }; ensure!( @@ -211,7 +221,7 @@ where } } - Ok(map) + Ok((map, commodity_regions)) } /// Read demand slices from specified model directory.
@@ -221,11 +231,13 @@ where /// * `model_dir` - Folder containing model configuration files /// * `commodity_ids` - All possible IDs of commodities /// * `region_ids` - All possible IDs for regions +/// * `commodity_regions` - Pairs of commodities + regions listed in demand CSV file /// * `time_slice_info` - Information about seasons and times of day fn read_demand_slices( model_dir: &Path, commodity_ids: &HashSet>, region_ids: &HashSet>, + commodity_regions: &CommodityRegionPairs, time_slice_info: &TimeSliceInfo, ) -> Result { let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); @@ -234,6 +246,7 @@ fn read_demand_slices( demand_slices_csv, commodity_ids, region_ids, + commodity_regions, time_slice_info, ) .with_context(|| input_err_msg(file_path)) @@ -244,6 +257,7 @@ fn read_demand_slices_from_iter( iter: I, commodity_ids: &HashSet>, region_ids: &HashSet>, + commodity_regions: &CommodityRegionPairs, time_slice_info: &TimeSliceInfo, ) -> Result where @@ -251,13 +265,15 @@ where { let mut demand_slices = DemandSliceMap::new(); - // Keep track of commodity + region pairs for validation - let mut commodity_regions = HashSet::new(); - let mut time_slices = Vec::new(); for slice in iter { let commodity_id = commodity_ids.get_id(&slice.commodity_id)?; let region_id = region_ids.get_id(&slice.region_id)?; + ensure!( + commodity_regions.contains(&(Rc::clone(&commodity_id), Rc::clone(®ion_id))), + "Demand slicing provided for commodity {commodity_id} in region {region_id} \ + without a corresponding entry in {DEMAND_FILE_NAME}" + ); // We need to know how many time slices are covered by the current demand slice entry and // how long they are relative to one another so that we can divide up the demand for this @@ -281,8 +297,6 @@ where (commodity: {commodity_id}, region: {region_id}, time slice: {time_slice})" ); } - - commodity_regions.insert((commodity_id, region_id)); } validate_demand_slices(commodity_regions, &demand_slices, time_slice_info)?; @@ -299,7 +313,7 @@ where /// time slice /// * The demand fractions for all entries related to a commodity + region pair sum to one fn validate_demand_slices( - commodity_regions: HashSet<(Rc, Rc)>, + commodity_regions: &CommodityRegionPairs, demand_slices: &DemandSliceMap, time_slice_info: &TimeSliceInfo, ) -> Result<()> { @@ -310,8 +324,8 @@ fn validate_demand_slices( .iter() .map(|time_slice| { let key = DemandSliceMapKey { - commodity_id: Rc::clone(&commodity_id), - region_id: Rc::clone(®ion_id), + commodity_id: Rc::clone(commodity_id), + region_id: Rc::clone(region_id), time_slice: time_slice.clone(), }; @@ -375,6 +389,7 @@ mod tests { use super::*; use crate::time_slice::TimeSliceID; + use itertools::iproduct; use std::fs::File; use std::io::Write; use std::iter; @@ -584,10 +599,9 @@ COM1,West,2020,13" fn test_read_demand_file() { let dir = tempdir().unwrap(); create_demand_file(dir.path()); - let commodity_ids = ["COM1".into()].into_iter().collect(); - let region_ids = ["North".into(), "South".into(), "East".into(), "West".into()] - .into_iter() - .collect(); + let commodity_ids = HashSet::from_iter(iter::once("COM1".into())); + let region_ids = + HashSet::from_iter(["North".into(), "South".into(), "East".into(), "West".into()]); let milestone_years = [2020]; let expected = AnnualDemandMap::from_iter([ ( @@ -623,10 +637,12 @@ COM1,West,2020,13" 13.0, ), ]); - assert_eq!( - read_demand_file(dir.path(), &commodity_ids, ®ion_ids, &milestone_years).unwrap(), - expected - ); + let (demand, commodity_regions) = + read_demand_file(dir.path(), 
&commodity_ids, ®ion_ids, &milestone_years).unwrap(); + let commodity_regions_expected = + iproduct!(commodity_ids.iter().cloned(), region_ids.iter().cloned()).collect(); + assert_eq!(demand, expected); + assert_eq!(commodity_regions, commodity_regions_expected); } #[test] @@ -644,8 +660,10 @@ COM1,West,2020,13" .into_iter() .collect(), }; - let commodity_ids = iter::once("COM1".into()).collect(); - let region_ids = iter::once("GBR".into()).collect(); + let commodity_ids = HashSet::from_iter(iter::once("COM1".into())); + let region_ids = HashSet::from_iter(iter::once("GBR".into())); + let commodity_regions = + iproduct!(commodity_ids.iter().cloned(), region_ids.iter().cloned()).collect(); // Valid let demand_slice = DemandSlice { @@ -668,6 +686,7 @@ COM1,West,2020,13" iter::once(demand_slice.clone()), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .unwrap(), @@ -777,6 +796,7 @@ COM1,West,2020,13" demand_slices.into_iter(), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .unwrap(), @@ -789,6 +809,7 @@ COM1,West,2020,13" iter::empty(), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .is_err()); @@ -804,6 +825,7 @@ COM1,West,2020,13" iter::once(demand_slice.clone()), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .is_err()); @@ -819,6 +841,7 @@ COM1,West,2020,13" iter::once(demand_slice.clone()), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .is_err()); @@ -834,6 +857,7 @@ COM1,West,2020,13" iter::once(demand_slice.clone()), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .is_err()); @@ -872,6 +896,7 @@ COM1,West,2020,13" iter::once(demand_slice.clone()), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .is_err()); @@ -888,6 +913,7 @@ COM1,West,2020,13" iter::repeat_n(demand_slice.clone(), 2), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .is_err()); @@ -903,6 +929,7 @@ COM1,West,2020,13" [demand_slice, demand_slice_season].into_iter(), &commodity_ids, ®ion_ids, + &commodity_regions, &time_slice_info, ) .is_err()); @@ -918,6 +945,23 @@ COM1,West,2020,13" iter::once(demand_slice), &commodity_ids, ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // No corresponding entry for commodity + region in demand CSV file + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice), + &commodity_ids, + ®ion_ids, + &HashSet::new(), &time_slice_info, ) .is_err()); From b91fc83e2016ead0db793abec327fcbfade827a3 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 13 Jan 2025 15:34:36 +0000 Subject: [PATCH 11/20] Remove outdated comment --- src/input.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/input.rs b/src/input.rs index 08584d469..49ab9d2f8 100644 --- a/src/input.rs +++ b/src/input.rs @@ -109,10 +109,7 @@ impl IDCollection for HashSet> { } } -/// Read a CSV file of items with IDs. -/// -/// This is like `read_csv_grouped_by_id`, with the difference that it is to be used on the "main" -/// CSV file for a record type, so it assumes that all IDs encountered are valid. 
+/// Read a CSV file of items with IDs
 pub fn read_csv_id_file<T>(file_path: &Path) -> Result<HashMap<Rc<str>, T>>
 where
     T: HasID + DeserializeOwned,

From 04522d5fa25b0f330ca997cde42c161e4ba9b200 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 13 Jan 2025 15:45:59 +0000
Subject: [PATCH 12/20] Move demand CSV code into submodule of `input`

Closes #285.
---
 src/commodity.rs                    | 59 +++++++++++++++++++++++++++-
 src/input/commodity.rs              |  3 +-
 src/{ => input/commodity}/demand.rs | 61 +++--------------------------
 src/input/process/flow.rs           |  3 +-
 src/input/process/pac.rs            |  3 +-
 src/lib.rs                          |  1 -
 6 files changed, 67 insertions(+), 63 deletions(-)
 rename src/{ => input/commodity}/demand.rs (94%)

diff --git a/src/commodity.rs b/src/commodity.rs
index c2be22a38..fcde579f2 100644
--- a/src/commodity.rs
+++ b/src/commodity.rs
@@ -1,5 +1,4 @@
 #![allow(missing_docs)]
-use crate::demand::DemandMap;
 use crate::input::*;
 use crate::time_slice::{TimeSliceID, TimeSliceLevel};
 use serde::Deserialize;
@@ -110,10 +109,68 @@ pub enum CommodityType {
     OutputCommodity,
 }

+/// A map relating region, year and time slice to demand (in real units, not a fraction).
+///
+/// This data type is exported as this is the way in which we want to look up demand outside of
+/// this module.
+#[derive(PartialEq, Debug, Clone, Default)]
+pub struct DemandMap(HashMap<DemandMapKey, f64>);
+
+/// The key for a [`DemandMap`]
+#[derive(PartialEq, Eq, Hash, Debug, Clone)]
+struct DemandMapKey {
+    region_id: Rc<str>,
+    year: u32,
+    time_slice: TimeSliceID,
+}
+
+impl DemandMap {
+    /// Create a new, empty [`DemandMap`]
+    pub fn new() -> DemandMap {
+        DemandMap::default()
+    }
+
+    /// Retrieve the demand for the specified region, year and time slice
+    pub fn get(&self, region_id: Rc<str>, year: u32, time_slice: TimeSliceID) -> Option<f64> {
+        self.0
+            .get(&DemandMapKey {
+                region_id,
+                year,
+                time_slice,
+            })
+            .copied()
+    }
+
+    /// Insert a new demand entry for the specified region, year and time slice
+    pub fn insert(&mut self, region_id: Rc<str>, year: u32, time_slice: TimeSliceID, demand: f64) {
+        self.0.insert(
+            DemandMapKey {
+                region_id,
+                year,
+                time_slice,
+            },
+            demand,
+        );
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

+    #[test]
+    fn test_demand_map() {
+        let time_slice = TimeSliceID {
+            season: "all-year".into(),
+            time_of_day: "all-day".into(),
+        };
+        let value = 0.25;
+        let mut map = DemandMap::new();
+        map.insert("North".into(), 2020, time_slice.clone(), value);
+
+        assert_eq!(map.get("North".into(), 2020, time_slice).unwrap(), value)
+    }
+
     #[test]
     fn test_commodity_cost_map() {
         let ts = TimeSliceID {
diff --git a/src/input/commodity.rs b/src/input/commodity.rs
index 7fc393eb2..4bece3350 100644
--- a/src/input/commodity.rs
+++ b/src/input/commodity.rs
@@ -1,6 +1,5 @@
 //! Code for reading in commodity-related data from CSV files.
 use crate::commodity::Commodity;
-use crate::demand::read_demand;
 use crate::input::*;
 use crate::time_slice::TimeSliceInfo;
 use anyhow::Result;
@@ -10,6 +9,8 @@ use std::rc::Rc;

 pub mod cost;
 use cost::read_commodity_costs;
+pub mod demand;
+use demand::read_demand;

 const COMMODITY_FILE_NAME: &str = "commodities.csv";

diff --git a/src/demand.rs b/src/input/commodity/demand.rs
similarity index 94%
rename from src/demand.rs
rename to src/input/commodity/demand.rs
index 873a962a9..ab8f81caa 100644
--- a/src/demand.rs
+++ b/src/input/commodity/demand.rs
@@ -1,5 +1,6 @@
 //! Code for working with demand for a given commodity. Demand can vary by region, year and time
 //! slice.
+use crate::commodity::DemandMap; use crate::input::*; use crate::time_slice::{TimeSliceID, TimeSliceInfo}; use anyhow::{ensure, Context, Result}; @@ -12,39 +13,6 @@ use std::rc::Rc; const DEMAND_FILE_NAME: &str = "demand.csv"; const DEMAND_SLICES_FILE_NAME: &str = "demand_slicing.csv"; -/// A map relating region, year and time slice to demand (in real units, not a fraction). -/// -/// This data type is exported as this is the way in we want to look up demand outside of this -/// module. -#[derive(PartialEq, Debug, Clone, Default)] -pub struct DemandMap(HashMap); - -/// The key for a [`DemandMap`] -#[derive(PartialEq, Eq, Hash, Debug, Clone)] -struct DemandMapKey { - region_id: Rc, - year: u32, - time_slice: TimeSliceID, -} - -impl DemandMap { - /// Create a new, empty [`DemandMap`] - pub fn new() -> DemandMap { - DemandMap::default() - } - - /// Retrieve the demand for the specified region, year and time slice - pub fn get(&self, region_id: Rc, year: u32, time_slice: TimeSliceID) -> Option { - self.0 - .get(&DemandMapKey { - region_id, - year, - time_slice, - }) - .copied() - } -} - /// Represents a single demand entry in the dataset. #[derive(Debug, Clone, Deserialize, PartialEq)] struct Demand { @@ -370,12 +338,10 @@ fn compute_demand_map( .or_insert_with(DemandMap::new); // Add a new demand entry - map.0.insert( - DemandMapKey { - region_id: Rc::clone(region_id), - year: demand_key.year, - time_slice: time_slice.clone(), - }, + map.insert( + Rc::clone(region_id), + demand_key.year, + time_slice.clone(), annual_demand * demand_fraction, ); } @@ -396,23 +362,6 @@ mod tests { use std::path::Path; use tempfile::tempdir; - #[test] - fn test_demand_map_get() { - let time_slice = TimeSliceID { - season: "all-year".into(), - time_of_day: "all-day".into(), - }; - let key = DemandMapKey { - region_id: "North".into(), - year: 2020, - time_slice: time_slice.clone(), - }; - let value = 0.2; - - let map = DemandMap(HashMap::from_iter(iter::once((key, value)))); - assert_eq!(map.get("North".into(), 2020, time_slice).unwrap(), value) - } - /// Create an example demand file in dir_path fn create_demand_file(dir_path: &Path) { let file_path = dir_path.join(DEMAND_FILE_NAME); diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index e50161350..276fb0dfb 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -65,8 +65,7 @@ where #[cfg(test)] mod test { use super::*; - use crate::commodity::{CommodityCostMap, CommodityType}; - use crate::demand::DemandMap; + use crate::commodity::{CommodityCostMap, CommodityType, DemandMap}; use crate::time_slice::TimeSliceLevel; #[test] diff --git a/src/input/process/pac.rs b/src/input/process/pac.rs index 93763771b..38d6a1b57 100644 --- a/src/input/process/pac.rs +++ b/src/input/process/pac.rs @@ -131,8 +131,7 @@ fn validate_pac_flows( #[cfg(test)] mod test { use super::*; - use crate::commodity::{CommodityCostMap, CommodityType}; - use crate::demand::DemandMap; + use crate::commodity::{CommodityCostMap, CommodityType, DemandMap}; use crate::process::FlowType; use crate::time_slice::TimeSliceLevel; diff --git a/src/lib.rs b/src/lib.rs index a4c655855..d878b6dea 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -3,7 +3,6 @@ pub mod agent; pub mod commands; pub mod commodity; -pub mod demand; pub mod input; pub mod log; pub mod model; From 48ba07c7a7e5e8b9e4a203b5d6a81d95b9506fc2 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 13 Jan 2025 16:06:09 +0000 Subject: [PATCH 13/20] Rename: mod test to mod tests --- src/input/process/flow.rs | 2 
+- src/input/process/pac.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index 276fb0dfb..f0294af17 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -63,7 +63,7 @@ where } #[cfg(test)] -mod test { +mod tests { use super::*; use crate::commodity::{CommodityCostMap, CommodityType, DemandMap}; use crate::time_slice::TimeSliceLevel; diff --git a/src/input/process/pac.rs b/src/input/process/pac.rs index 38d6a1b57..44eea9c1f 100644 --- a/src/input/process/pac.rs +++ b/src/input/process/pac.rs @@ -129,7 +129,7 @@ fn validate_pac_flows( } #[cfg(test)] -mod test { +mod tests { use super::*; use crate::commodity::{CommodityCostMap, CommodityType, DemandMap}; use crate::process::FlowType; From 10aa75c2d8932de4db9d7eb9a1d1cf39804d0861 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 13 Jan 2025 16:15:36 +0000 Subject: [PATCH 14/20] Move demand slicing code into own submodule --- src/input/commodity.rs | 1 + src/input/commodity/demand.rs | 481 +------------------------ src/input/commodity/demand_slicing.rs | 485 ++++++++++++++++++++++++++ 3 files changed, 497 insertions(+), 470 deletions(-) create mode 100644 src/input/commodity/demand_slicing.rs diff --git a/src/input/commodity.rs b/src/input/commodity.rs index 4bece3350..9a91896f1 100644 --- a/src/input/commodity.rs +++ b/src/input/commodity.rs @@ -11,6 +11,7 @@ pub mod cost; use cost::read_commodity_costs; pub mod demand; use demand::read_demand; +pub mod demand_slicing; const COMMODITY_FILE_NAME: &str = "commodities.csv"; diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs index ab8f81caa..741effc6f 100644 --- a/src/input/commodity/demand.rs +++ b/src/input/commodity/demand.rs @@ -1,17 +1,16 @@ //! Code for working with demand for a given commodity. Demand can vary by region, year and time //! slice. +use super::demand_slicing::{read_demand_slices, DemandSliceMap, DemandSliceMapKey}; use crate::commodity::DemandMap; use crate::input::*; -use crate::time_slice::{TimeSliceID, TimeSliceInfo}; -use anyhow::{ensure, Context, Result}; -use itertools::Itertools; +use crate::time_slice::TimeSliceInfo; +use anyhow::{ensure, Result}; use serde::Deserialize; use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; const DEMAND_FILE_NAME: &str = "demand.csv"; -const DEMAND_SLICES_FILE_NAME: &str = "demand_slicing.csv"; /// Represents a single demand entry in the dataset. 
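///
/// Each value corresponds to one row of demand.csv, e.g. `RSHEAT,GBR,2020,927.38`
/// (commodity, region, milestone year, annual demand).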
#[derive(Debug, Clone, Deserialize, PartialEq)] @@ -26,36 +25,22 @@ struct Demand { demand: f64, } -#[derive(Clone, Deserialize)] -struct DemandSlice { - commodity_id: String, - region_id: String, - time_slice: String, - #[serde(deserialize_with = "deserialise_proportion_nonzero")] - fraction: f64, -} - -/// A map relating commodity, region and time slice to the fraction of annual demand -type DemandSliceMap = HashMap; - -#[derive(PartialEq, Eq, Hash, Debug)] -struct DemandSliceMapKey { - commodity_id: Rc, - region_id: Rc, - time_slice: TimeSliceID, -} - /// A map relating commodity, region and year to annual demand -type AnnualDemandMap = HashMap; +pub type AnnualDemandMap = HashMap; +/// A key for an [`AnnualDemandMap`] #[derive(PartialEq, Eq, Hash, Debug)] -struct AnnualDemandMapKey { +pub struct AnnualDemandMapKey { + /// The commodity to which this demand applies commodity_id: Rc, + /// The region to which this demand applies region_id: Rc, + /// The simulation year to which this demand applies year: u32, } -type CommodityRegionPairs = HashSet<(Rc, Rc)>; +/// A set of commodity + region pairs +pub type CommodityRegionPairs = HashSet<(Rc, Rc)>; /// Reads demand data from CSV files. /// @@ -192,126 +177,6 @@ where Ok((map, commodity_regions)) } -/// Read demand slices from specified model directory. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `commodity_ids` - All possible IDs of commodities -/// * `region_ids` - All possible IDs for regions -/// * `commodity_regions` - Pairs of commodities + regions listed in demand CSV file -/// * `time_slice_info` - Information about seasons and times of day -fn read_demand_slices( - model_dir: &Path, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - commodity_regions: &CommodityRegionPairs, - time_slice_info: &TimeSliceInfo, -) -> Result { - let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); - let demand_slices_csv = read_csv(&file_path)?; - read_demand_slices_from_iter( - demand_slices_csv, - commodity_ids, - region_ids, - commodity_regions, - time_slice_info, - ) - .with_context(|| input_err_msg(file_path)) -} - -/// Read demand slices from an iterator -fn read_demand_slices_from_iter( - iter: I, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - commodity_regions: &CommodityRegionPairs, - time_slice_info: &TimeSliceInfo, -) -> Result -where - I: Iterator, -{ - let mut demand_slices = DemandSliceMap::new(); - - let mut time_slices = Vec::new(); - for slice in iter { - let commodity_id = commodity_ids.get_id(&slice.commodity_id)?; - let region_id = region_ids.get_id(&slice.region_id)?; - ensure!( - commodity_regions.contains(&(Rc::clone(&commodity_id), Rc::clone(®ion_id))), - "Demand slicing provided for commodity {commodity_id} in region {region_id} \ - without a corresponding entry in {DEMAND_FILE_NAME}" - ); - - // We need to know how many time slices are covered by the current demand slice entry and - // how long they are relative to one another so that we can divide up the demand for this - // entry appropriately - let ts_selection = time_slice_info.get_selection(&slice.time_slice)?; - let ts_iter = time_slice_info.iter_selection(&ts_selection); - time_slices - .extend(ts_iter.map(|ts| (ts.clone(), time_slice_info.fractions.get(ts).unwrap()))); - let time_total: f64 = time_slices.iter().map(|(_, fraction)| *fraction).sum(); - for (time_slice, time_fraction) in time_slices.drain(0..) 
{ - let key = DemandSliceMapKey { - commodity_id: Rc::clone(&commodity_id), - region_id: Rc::clone(®ion_id), - time_slice: time_slice.clone(), - }; - - // Share demand between the time slices in proportion to duration - let demand_fraction = slice.fraction * time_fraction / time_total; - ensure!(demand_slices.insert(key, demand_fraction).is_none(), - "Duplicate demand slicing entry (or same time slice covered by more than one entry) \ - (commodity: {commodity_id}, region: {region_id}, time slice: {time_slice})" - ); - } - } - - validate_demand_slices(commodity_regions, &demand_slices, time_slice_info)?; - - Ok(demand_slices) -} - -/// Check that the [`DemandSliceMap`] is well formed. -/// -/// Specifically, check: -/// -/// * It is non-empty -/// * If an entry is provided for any commodity + region pair, there must be entries covering every -/// time slice -/// * The demand fractions for all entries related to a commodity + region pair sum to one -fn validate_demand_slices( - commodity_regions: &CommodityRegionPairs, - demand_slices: &DemandSliceMap, - time_slice_info: &TimeSliceInfo, -) -> Result<()> { - ensure!(!demand_slices.is_empty(), "Empty demand slices file"); - - for (commodity_id, region_id) in commodity_regions { - time_slice_info - .iter() - .map(|time_slice| { - let key = DemandSliceMapKey { - commodity_id: Rc::clone(commodity_id), - region_id: Rc::clone(region_id), - time_slice: time_slice.clone(), - }; - - demand_slices.get(&key).with_context(|| { - format!( - "Demand slice missing for time slice {} (commodity: {}, region {})", - time_slice, commodity_id, region_id - ) - }) - }) - .process_results(|iter| { - check_fractions_sum_to_one(iter.copied()).context("Invalid demand fractions") - })??; - } - - Ok(()) -} - /// Calculate the demand for each combination of commodity, region, year and time slice fn compute_demand_map( demand: &AnnualDemandMap, @@ -353,8 +218,6 @@ fn compute_demand_map( #[cfg(test)] mod tests { use super::*; - use crate::time_slice::TimeSliceID; - use itertools::iproduct; use std::fs::File; use std::io::Write; @@ -593,326 +456,4 @@ COM1,West,2020,13" assert_eq!(demand, expected); assert_eq!(commodity_regions, commodity_regions_expected); } - - #[test] - fn test_read_demand_slices_from_iter() { - let time_slice_info = TimeSliceInfo { - seasons: iter::once("winter".into()).collect(), - times_of_day: iter::once("day".into()).collect(), - fractions: [( - TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - 1.0, - )] - .into_iter() - .collect(), - }; - let commodity_ids = HashSet::from_iter(iter::once("COM1".into())); - let region_ids = HashSet::from_iter(iter::once("GBR".into())); - let commodity_regions = - iproduct!(commodity_ids.iter().cloned(), region_ids.iter().cloned()).collect(); - - // Valid - let demand_slice = DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter".into(), - fraction: 1.0, - }; - let time_slice = time_slice_info - .get_time_slice_id_from_str("winter.day") - .unwrap(); - let key = DemandSliceMapKey { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice, - }; - let expected = DemandSliceMap::from_iter(iter::once((key, 1.0))); - assert_eq!( - read_demand_slices_from_iter( - iter::once(demand_slice.clone()), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .unwrap(), - expected - ); - - // Valid, multiple time slices - { - let time_slice_info = TimeSliceInfo { - seasons: ["winter".into(), "summer".into()].into_iter().collect(), - 
times_of_day: ["day".into(), "night".into()].into_iter().collect(), - fractions: [ - ( - TimeSliceID { - season: "summer".into(), - time_of_day: "day".into(), - }, - 3.0 / 16.0, - ), - ( - TimeSliceID { - season: "summer".into(), - time_of_day: "night".into(), - }, - 5.0 / 16.0, - ), - ( - TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - 3.0 / 16.0, - ), - ( - TimeSliceID { - season: "winter".into(), - time_of_day: "night".into(), - }, - 5.0 / 16.0, - ), - ] - .into_iter() - .collect(), - }; - let demand_slices = [ - DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter".into(), - fraction: 0.5, - }, - DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "summer".into(), - fraction: 0.5, - }, - ]; - let expected = DemandSliceMap::from_iter([ - ( - DemandSliceMapKey { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: TimeSliceID { - season: "summer".into(), - time_of_day: "day".into(), - }, - }, - 3.0 / 16.0, - ), - ( - DemandSliceMapKey { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: TimeSliceID { - season: "summer".into(), - time_of_day: "night".into(), - }, - }, - 5.0 / 16.0, - ), - ( - DemandSliceMapKey { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - }, - 3.0 / 16.0, - ), - ( - DemandSliceMapKey { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: TimeSliceID { - season: "winter".into(), - time_of_day: "night".into(), - }, - }, - 5.0 / 16.0, - ), - ]); - assert_eq!( - read_demand_slices_from_iter( - demand_slices.into_iter(), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .unwrap(), - expected - ); - } - - // Empty CSV file - assert!(read_demand_slices_from_iter( - iter::empty(), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - - // Bad commodity - let demand_slice = DemandSlice { - commodity_id: "COM2".into(), - region_id: "GBR".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - iter::once(demand_slice.clone()), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - - // Bad region - let demand_slice = DemandSlice { - commodity_id: "COM1".into(), - region_id: "FRA".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - iter::once(demand_slice.clone()), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - - // Bad time slice selection - let demand_slice = DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "summer".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - iter::once(demand_slice.clone()), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - - { - // Some time slices uncovered - let time_slice_info = TimeSliceInfo { - seasons: ["winter".into(), "summer".into()].into_iter().collect(), - times_of_day: iter::once("day".into()).collect(), - fractions: [ - ( - TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - 0.5, - ), - ( - TimeSliceID { - season: "summer".into(), - time_of_day: "day".into(), - }, - 0.5, - ), - ] - .into_iter() - .collect(), - }; - let demand_slice = DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - 
time_slice: "winter".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - iter::once(demand_slice.clone()), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - } - - // Same time slice twice - let demand_slice = DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter.day".into(), - fraction: 0.5, - }; - assert!(read_demand_slices_from_iter( - iter::repeat_n(demand_slice.clone(), 2), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - - // Whole season and single time slice conflicting - let demand_slice_season = DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter".into(), - fraction: 0.5, - }; - assert!(read_demand_slices_from_iter( - [demand_slice, demand_slice_season].into_iter(), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - - // Fractions don't sum to one - let demand_slice = DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter".into(), - fraction: 0.5, - }; - assert!(read_demand_slices_from_iter( - iter::once(demand_slice), - &commodity_ids, - ®ion_ids, - &commodity_regions, - &time_slice_info, - ) - .is_err()); - - // No corresponding entry for commodity + region in demand CSV file - let demand_slice = DemandSlice { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - iter::once(demand_slice), - &commodity_ids, - ®ion_ids, - &HashSet::new(), - &time_slice_info, - ) - .is_err()); - } } diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs new file mode 100644 index 000000000..8413e2671 --- /dev/null +++ b/src/input/commodity/demand_slicing.rs @@ -0,0 +1,485 @@ +//! Demand slicing determines how annual demand is distributed across the year. +use super::demand::*; +use crate::input::*; +use crate::time_slice::{TimeSliceID, TimeSliceInfo}; +use anyhow::{ensure, Context, Result}; +use itertools::Itertools; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const DEMAND_SLICES_FILE_NAME: &str = "demand_slicing.csv"; + +#[derive(Clone, Deserialize)] +struct DemandSlice { + commodity_id: String, + region_id: String, + time_slice: String, + #[serde(deserialize_with = "deserialise_proportion_nonzero")] + fraction: f64, +} + +/// A map relating commodity, region and time slice to the fraction of annual demand +pub type DemandSliceMap = HashMap; + +/// A key for a [`DemandSliceMap`] +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct DemandSliceMapKey { + /// The commodity to which this demand applies + pub commodity_id: Rc, + /// The region to which this demand applies + pub region_id: Rc, + /// The time slice to which this demand applies + pub time_slice: TimeSliceID, +} + +/// Read demand slices from specified model directory. 
+/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible IDs of commodities +/// * `region_ids` - All possible IDs for regions +/// * `commodity_regions` - Pairs of commodities + regions listed in demand CSV file +/// * `time_slice_info` - Information about seasons and times of day +pub fn read_demand_slices( + model_dir: &Path, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + commodity_regions: &CommodityRegionPairs, + time_slice_info: &TimeSliceInfo, +) -> Result { + let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); + let demand_slices_csv = read_csv(&file_path)?; + read_demand_slices_from_iter( + demand_slices_csv, + commodity_ids, + region_ids, + commodity_regions, + time_slice_info, + ) + .with_context(|| input_err_msg(file_path)) +} + +/// Read demand slices from an iterator +fn read_demand_slices_from_iter( + iter: I, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + commodity_regions: &CommodityRegionPairs, + time_slice_info: &TimeSliceInfo, +) -> Result +where + I: Iterator, +{ + let mut demand_slices = DemandSliceMap::new(); + + let mut time_slices = Vec::new(); + for slice in iter { + let commodity_id = commodity_ids.get_id(&slice.commodity_id)?; + let region_id = region_ids.get_id(&slice.region_id)?; + ensure!( + commodity_regions.contains(&(Rc::clone(&commodity_id), Rc::clone(®ion_id))), + "Demand slicing provided for commodity {commodity_id} in region {region_id} \ + without a corresponding entry in demand CSV file" + ); + + // We need to know how many time slices are covered by the current demand slice entry and + // how long they are relative to one another so that we can divide up the demand for this + // entry appropriately + let ts_selection = time_slice_info.get_selection(&slice.time_slice)?; + let ts_iter = time_slice_info.iter_selection(&ts_selection); + time_slices + .extend(ts_iter.map(|ts| (ts.clone(), time_slice_info.fractions.get(ts).unwrap()))); + let time_total: f64 = time_slices.iter().map(|(_, fraction)| *fraction).sum(); + for (time_slice, time_fraction) in time_slices.drain(0..) { + let key = DemandSliceMapKey { + commodity_id: Rc::clone(&commodity_id), + region_id: Rc::clone(®ion_id), + time_slice: time_slice.clone(), + }; + + // Share demand between the time slices in proportion to duration + let demand_fraction = slice.fraction * time_fraction / time_total; + ensure!(demand_slices.insert(key, demand_fraction).is_none(), + "Duplicate demand slicing entry (or same time slice covered by more than one entry) \ + (commodity: {commodity_id}, region: {region_id}, time slice: {time_slice})" + ); + } + } + + validate_demand_slices(commodity_regions, &demand_slices, time_slice_info)?; + + Ok(demand_slices) +} + +/// Check that the [`DemandSliceMap`] is well formed. 
+/// +/// Specifically, check: +/// +/// * It is non-empty +/// * If an entry is provided for any commodity + region pair, there must be entries covering every +/// time slice +/// * The demand fractions for all entries related to a commodity + region pair sum to one +fn validate_demand_slices( + commodity_regions: &CommodityRegionPairs, + demand_slices: &DemandSliceMap, + time_slice_info: &TimeSliceInfo, +) -> Result<()> { + ensure!(!demand_slices.is_empty(), "Empty demand slices file"); + + for (commodity_id, region_id) in commodity_regions { + time_slice_info + .iter() + .map(|time_slice| { + let key = DemandSliceMapKey { + commodity_id: Rc::clone(commodity_id), + region_id: Rc::clone(region_id), + time_slice: time_slice.clone(), + }; + + demand_slices.get(&key).with_context(|| { + format!( + "Demand slice missing for time slice {} (commodity: {}, region {})", + time_slice, commodity_id, region_id + ) + }) + }) + .process_results(|iter| { + check_fractions_sum_to_one(iter.copied()).context("Invalid demand fractions") + })??; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::time_slice::TimeSliceID; + use itertools::iproduct; + use std::iter; + + #[test] + fn test_read_demand_slices_from_iter() { + let time_slice_info = TimeSliceInfo { + seasons: iter::once("winter".into()).collect(), + times_of_day: iter::once("day".into()).collect(), + fractions: [( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 1.0, + )] + .into_iter() + .collect(), + }; + let commodity_ids = HashSet::from_iter(iter::once("COM1".into())); + let region_ids = HashSet::from_iter(iter::once("GBR".into())); + let commodity_regions = + iproduct!(commodity_ids.iter().cloned(), region_ids.iter().cloned()).collect(); + + // Valid + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + let time_slice = time_slice_info + .get_time_slice_id_from_str("winter.day") + .unwrap(); + let key = DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice, + }; + let expected = DemandSliceMap::from_iter(iter::once((key, 1.0))); + assert_eq!( + read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .unwrap(), + expected + ); + + // Valid, multiple time slices + { + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: ["day".into(), "night".into()].into_iter().collect(), + fractions: [ + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + 3.0 / 16.0, + ), + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + 5.0 / 16.0, + ), + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 3.0 / 16.0, + ), + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + 5.0 / 16.0, + ), + ] + .into_iter() + .collect(), + }; + let demand_slices = [ + DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }, + DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "summer".into(), + fraction: 0.5, + }, + ]; + let expected = DemandSliceMap::from_iter([ + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + 
}, + }, + 3.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + }, + 5.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + }, + 3.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + }, + 5.0 / 16.0, + ), + ]); + assert_eq!( + read_demand_slices_from_iter( + demand_slices.into_iter(), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .unwrap(), + expected + ); + } + + // Empty CSV file + assert!(read_demand_slices_from_iter( + iter::empty(), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Bad commodity + let demand_slice = DemandSlice { + commodity_id: "COM2".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Bad region + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "FRA".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Bad time slice selection + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "summer".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + { + // Some time slices uncovered + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: iter::once("day".into()).collect(), + fractions: [ + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 0.5, + ), + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + 0.5, + ), + ] + .into_iter() + .collect(), + }; + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + } + + // Same time slice twice + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + iter::repeat_n(demand_slice.clone(), 2), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Whole season and single time slice conflicting + let demand_slice_season = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + [demand_slice, demand_slice_season].into_iter(), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Fractions don't sum to one + let demand_slice = 
DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // No corresponding entry for commodity + region in demand CSV file + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice), + &commodity_ids, + ®ion_ids, + &HashSet::new(), + &time_slice_info, + ) + .is_err()); + } +} From bcf643f7a5c97fc7f97391cf3f6b04d143a386b4 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 13 Jan 2025 16:16:21 +0000 Subject: [PATCH 15/20] Rename: DEMAND_SLICES_FILE_NAME => DEMAND_SLICING_FILE_NAME --- src/input/commodity/demand_slicing.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs index 8413e2671..d449c7acf 100644 --- a/src/input/commodity/demand_slicing.rs +++ b/src/input/commodity/demand_slicing.rs @@ -9,7 +9,7 @@ use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; -const DEMAND_SLICES_FILE_NAME: &str = "demand_slicing.csv"; +const DEMAND_SLICING_FILE_NAME: &str = "demand_slicing.csv"; #[derive(Clone, Deserialize)] struct DemandSlice { @@ -50,7 +50,7 @@ pub fn read_demand_slices( commodity_regions: &CommodityRegionPairs, time_slice_info: &TimeSliceInfo, ) -> Result { - let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); + let file_path = model_dir.join(DEMAND_SLICING_FILE_NAME); let demand_slices_csv = read_csv(&file_path)?; read_demand_slices_from_iter( demand_slices_csv, From 1fa3f1e5ec6db2ebe7ec8f62fb85c4536f79c089 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 13 Jan 2025 16:17:12 +0000 Subject: [PATCH 16/20] Remove redundant check --- src/input/commodity/demand_slicing.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs index d449c7acf..6f30e5ce2 100644 --- a/src/input/commodity/demand_slicing.rs +++ b/src/input/commodity/demand_slicing.rs @@ -127,8 +127,6 @@ fn validate_demand_slices( demand_slices: &DemandSliceMap, time_slice_info: &TimeSliceInfo, ) -> Result<()> { - ensure!(!demand_slices.is_empty(), "Empty demand slices file"); - for (commodity_id, region_id) in commodity_regions { time_slice_info .iter() From 941292cd3139070e02bc9139199d6f33a13079f1 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 13 Jan 2025 16:26:10 +0000 Subject: [PATCH 17/20] Add a longer doc comment for `compute_demand_map` --- src/input/commodity/demand.rs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs index 741effc6f..12ac516ff 100644 --- a/src/input/commodity/demand.rs +++ b/src/input/commodity/demand.rs @@ -177,7 +177,18 @@ where Ok((map, commodity_regions)) } -/// Calculate the demand for each combination of commodity, region, year and time slice +/// Calculate the demand for each combination of commodity, region, year and time slice. 
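+///
+/// The demand for each time slice is the annual demand multiplied by that time
+/// slice's demand fraction; e.g. an annual demand of 100 with a winter.day
+/// fraction of 0.25 gives a demand of 25 in winter.day for that year.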
+/// +/// # Arguments +/// +/// * `demand` - Total annual demand for combinations of commodity, region and year +/// * `slices` - How annual demand is shared between time slices +/// * `time_slice_info` - Information about time slices +/// +/// # Returns +/// +/// [`DemandMap`]s for combinations of region, year and time slice, grouped by the commodity to +/// which the demand applies. fn compute_demand_map( demand: &AnnualDemandMap, slices: &DemandSliceMap, From de1b4a6f22231ecf5a5ec7be2f1e0b45c2ff86fd Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 13 Jan 2025 16:27:21 +0000 Subject: [PATCH 18/20] Rename: `compute_demand_map` => `compute_demand_maps` --- src/input/commodity/demand.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs index 12ac516ff..15bfe482d 100644 --- a/src/input/commodity/demand.rs +++ b/src/input/commodity/demand.rs @@ -72,7 +72,7 @@ pub fn read_demand( time_slice_info, )?; - Ok(compute_demand_map(&demand, &slices, time_slice_info)) + Ok(compute_demand_maps(&demand, &slices, time_slice_info)) } /// Read the demand.csv file. @@ -189,7 +189,7 @@ where /// /// [`DemandMap`]s for combinations of region, year and time slice, grouped by the commodity to /// which the demand applies. -fn compute_demand_map( +fn compute_demand_maps( demand: &AnnualDemandMap, slices: &DemandSliceMap, time_slice_info: &TimeSliceInfo, From 1cbee625571306517ba693f966cb71dfa803feb3 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Tue, 14 Jan 2025 14:14:32 +0000 Subject: [PATCH 19/20] TimeSliceInfo: iter{_selection}: Return fraction along with TimeSliceID --- src/input/commodity/cost.rs | 2 +- src/input/commodity/demand.rs | 2 +- src/input/commodity/demand_slicing.rs | 9 +++---- src/time_slice.rs | 34 ++++++++++++++++++++------- 4 files changed, 33 insertions(+), 14 deletions(-) diff --git a/src/input/commodity/cost.rs b/src/input/commodity/cost.rs index 4a5e5903c..f41c1f2c9 100644 --- a/src/input/commodity/cost.rs +++ b/src/input/commodity/cost.rs @@ -93,7 +93,7 @@ where .entry(commodity_id.clone()) .or_insert_with(CommodityCostMap::new); - for time_slice in time_slice_info.iter_selection(&ts_selection) { + for (time_slice, _) in time_slice_info.iter_selection(&ts_selection) { let value = CommodityCost { balance_type: cost.balance_type.clone(), value: cost.value, diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs index 15bfe482d..7955e347b 100644 --- a/src/input/commodity/demand.rs +++ b/src/input/commodity/demand.rs @@ -198,7 +198,7 @@ fn compute_demand_maps( for (demand_key, annual_demand) in demand.iter() { let commodity_id = &demand_key.commodity_id; let region_id = &demand_key.region_id; - for time_slice in time_slice_info.iter() { + for time_slice in time_slice_info.iter_ids() { let slice_key = DemandSliceMapKey { commodity_id: Rc::clone(commodity_id), region_id: Rc::clone(region_id), diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs index 6f30e5ce2..900e33aef 100644 --- a/src/input/commodity/demand_slicing.rs +++ b/src/input/commodity/demand_slicing.rs @@ -89,9 +89,10 @@ where // how long they are relative to one another so that we can divide up the demand for this // entry appropriately let ts_selection = time_slice_info.get_selection(&slice.time_slice)?; - let ts_iter = time_slice_info.iter_selection(&ts_selection); - time_slices - .extend(ts_iter.map(|ts| (ts.clone(), time_slice_info.fractions.get(ts).unwrap()))); + let ts_iter = 
time_slice_info + .iter_selection(&ts_selection) + .map(|(ts, fraction)| (ts.clone(), fraction)); + time_slices.extend(ts_iter); let time_total: f64 = time_slices.iter().map(|(_, fraction)| *fraction).sum(); for (time_slice, time_fraction) in time_slices.drain(0..) { let key = DemandSliceMapKey { @@ -129,7 +130,7 @@ fn validate_demand_slices( ) -> Result<()> { for (commodity_id, region_id) in commodity_regions { time_slice_info - .iter() + .iter_ids() .map(|time_slice| { let key = DemandSliceMapKey { commodity_id: Rc::clone(commodity_id), diff --git a/src/time_slice.rs b/src/time_slice.rs index 61b393c5c..d97fee8e6 100644 --- a/src/time_slice.rs +++ b/src/time_slice.rs @@ -115,24 +115,34 @@ impl TimeSliceInfo { /// /// The order will be consistent each time this is called, but not every time the program is /// run. - pub fn iter(&self) -> impl Iterator { + pub fn iter_ids(&self) -> impl Iterator { self.fractions.keys() } - /// Iterate over the subset of [`TimeSliceID`] indicated by `selection`. + /// Iterate over all time slices. + /// + /// The order will be consistent each time this is called, but not every time the program is + /// run. + pub fn iter(&self) -> impl Iterator { + self.fractions.iter().map(|(ts, fraction)| (ts, *fraction)) + } + + /// Iterate over the subset of time slices indicated by `selection`. /// /// The order will be consistent each time this is called, but not every time the program is /// run. pub fn iter_selection<'a>( &'a self, selection: &'a TimeSliceSelection, - ) -> Box + 'a> { + ) -> Box + 'a> { match selection { TimeSliceSelection::Annual => Box::new(self.iter()), TimeSliceSelection::Season(season) => { - Box::new(self.iter().filter(move |ts| ts.season == *season)) + Box::new(self.iter().filter(move |(ts, _)| ts.season == *season)) + } + TimeSliceSelection::Single(ts) => { + Box::new(iter::once((ts, *self.fractions.get(ts).unwrap()))) } - TimeSliceSelection::Single(ts) => Box::new(iter::once(ts)), } } } @@ -334,16 +344,24 @@ autumn,evening,0.25" }; assert_eq!( - HashSet::<&TimeSliceID>::from_iter(ts_info.iter_selection(&TimeSliceSelection::Annual)), + HashSet::<&TimeSliceID>::from_iter( + ts_info + .iter_selection(&TimeSliceSelection::Annual) + .map(|(ts, _)| ts) + ), HashSet::from_iter(slices.iter()) ); itertools::assert_equal( - ts_info.iter_selection(&TimeSliceSelection::Season("winter".into())), + ts_info + .iter_selection(&TimeSliceSelection::Season("winter".into())) + .map(|(ts, _)| ts), iter::once(&slices[0]), ); let ts = ts_info.get_time_slice_id_from_str("summer.night").unwrap(); itertools::assert_equal( - ts_info.iter_selection(&TimeSliceSelection::Single(ts)), + ts_info + .iter_selection(&TimeSliceSelection::Single(ts)) + .map(|(ts, _)| ts), iter::once(&slices[1]), ); } From 69aa5d62b9d50dcff7cca2a6273b13f442695b22 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Tue, 14 Jan 2025 15:16:59 +0000 Subject: [PATCH 20/20] Put calculation of time slice share into separate function --- src/input/commodity/demand_slicing.rs | 14 +--- src/time_slice.rs | 112 ++++++++++++++++++++++++++ 2 files changed, 116 insertions(+), 10 deletions(-) diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs index 900e33aef..901b42a5c 100644 --- a/src/input/commodity/demand_slicing.rs +++ b/src/input/commodity/demand_slicing.rs @@ -75,7 +75,6 @@ where { let mut demand_slices = DemandSliceMap::new(); - let mut time_slices = Vec::new(); for slice in iter { let commodity_id = commodity_ids.get_id(&slice.commodity_id)?; let region_id = 
region_ids.get_id(&slice.region_id)?; @@ -89,23 +88,18 @@ where // how long they are relative to one another so that we can divide up the demand for this // entry appropriately let ts_selection = time_slice_info.get_selection(&slice.time_slice)?; - let ts_iter = time_slice_info - .iter_selection(&ts_selection) - .map(|(ts, fraction)| (ts.clone(), fraction)); - time_slices.extend(ts_iter); - let time_total: f64 = time_slices.iter().map(|(_, fraction)| *fraction).sum(); - for (time_slice, time_fraction) in time_slices.drain(0..) { + for (ts, demand_fraction) in time_slice_info.calculate_share(&ts_selection, slice.fraction) + { let key = DemandSliceMapKey { commodity_id: Rc::clone(&commodity_id), region_id: Rc::clone(®ion_id), - time_slice: time_slice.clone(), + time_slice: ts.clone(), }; // Share demand between the time slices in proportion to duration - let demand_fraction = slice.fraction * time_fraction / time_total; ensure!(demand_slices.insert(key, demand_fraction).is_none(), "Duplicate demand slicing entry (or same time slice covered by more than one entry) \ - (commodity: {commodity_id}, region: {region_id}, time slice: {time_slice})" + (commodity: {commodity_id}, region: {region_id}, time slice: {ts})" ); } } diff --git a/src/time_slice.rs b/src/time_slice.rs index d97fee8e6..e0155a854 100644 --- a/src/time_slice.rs +++ b/src/time_slice.rs @@ -145,6 +145,56 @@ impl TimeSliceInfo { } } } + + /// Iterate over a subset of time slices calculating the relative duration of each. + /// + /// The relative duration is specified as a fraction of the total time (proportion of year) + /// covered by `selection`. + /// + /// # Arguments + /// + /// * `selection` - A subset of time slices + /// + /// # Returns + /// + /// An iterator of time slices along with the fraction of the total selection. + pub fn iterate_selection_share<'a>( + &'a self, + selection: &'a TimeSliceSelection, + ) -> impl Iterator { + // Store time slices as we have to iterate over selection twice + let time_slices = self.iter_selection(selection).collect_vec(); + + // Total fraction of year covered by selection + let time_total: f64 = time_slices.iter().map(|(_, fraction)| *fraction).sum(); + + // Calculate share + time_slices + .into_iter() + .map(move |(ts, time_fraction)| (ts, time_fraction / time_total)) + } + + /// Share a value between a subset of time slices in proportion to their lengths. + /// + /// For instance, you could use this function to compute how demand is distributed between the + /// different time slices of winter. + /// + /// # Arguments + /// + /// * `selection` - A subset of time slices + /// * `value` - The value to be shared between the time slices + /// + /// # Returns + /// + /// An iterator of time slices along with a fraction of `value`. 
+ pub fn calculate_share<'a>( + &'a self, + selection: &'a TimeSliceSelection, + value: f64, + ) -> impl Iterator { + self.iterate_selection_share(selection) + .map(move |(ts, share)| (ts, value * share)) + } } /// A time slice record retrieved from a CSV file @@ -236,6 +286,7 @@ pub fn read_time_slice_info(model_dir: &Path) -> Result { #[cfg(test)] mod tests { use super::*; + use float_cmp::assert_approx_eq; use std::fs::File; use std::io::Write; use std::path::Path; @@ -365,4 +416,65 @@ autumn,evening,0.25" iter::once(&slices[1]), ); } + + #[test] + fn test_calculate_share() { + let slices = [ + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + ]; + let ts_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: ["day".into(), "night".into()].into_iter().collect(), + fractions: slices.iter().map(|ts| (ts.clone(), 0.25)).collect(), + }; + + macro_rules! check_share { + ($selection:expr, $expected:expr) => { + let expected = $expected; + let actual: HashMap<_, _> = HashMap::from_iter( + ts_info + .calculate_share(&$selection, 8.0) + .map(|(ts, share)| (ts.clone(), share)), + ); + assert!(actual.len() == expected.len()); + for (k, v) in actual { + assert_approx_eq!(f64, v, *expected.get(&k).unwrap()); + } + }; + } + + // Whole year + let expected: HashMap<_, _> = HashMap::from_iter(slices.iter().map(|ts| (ts.clone(), 2.0))); + check_share!(TimeSliceSelection::Annual, expected); + + // One season + let selection = TimeSliceSelection::Season("winter".into()); + let expected: HashMap<_, _> = HashMap::from_iter( + ts_info + .iter_selection(&selection) + .map(|(ts, _)| (ts.clone(), 4.0)), + ); + check_share!(selection, expected); + + // Single time slice + let time_slice = ts_info.get_time_slice_id_from_str("winter.day").unwrap(); + let selection = TimeSliceSelection::Single(time_slice.clone()); + let expected: HashMap<_, _> = HashMap::from_iter(iter::once((time_slice, 8.0))); + check_share!(selection, expected); + } }
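As a usage sketch (variable names are illustrative, with `ts_info` a populated `TimeSliceInfo` like the one built in the tests above), annual demand for a season can be split across its time slices with:

    // ts_info: a populated TimeSliceInfo (illustrative)
    let selection = TimeSliceSelection::Season("winter".into());
    for (time_slice, demand) in ts_info.calculate_share(&selection, 100.0) {
        println!("{time_slice}: {demand}");
    }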