diff --git a/examples/simple/demand.csv b/examples/simple/demand.csv index 1884bd0c9..7fbef6ae7 100644 --- a/examples/simple/demand.csv +++ b/examples/simple/demand.csv @@ -1,2 +1,3 @@ commodity_id,region_id,year,demand RSHEAT,GBR,2020,927.38 +RSHEAT,GBR,2100,927.38 diff --git a/src/commodity.rs b/src/commodity.rs index 582ad1af3..fcde579f2 100644 --- a/src/commodity.rs +++ b/src/commodity.rs @@ -1,5 +1,4 @@ #![allow(missing_docs)] -use crate::demand::Demand; use crate::input::*; use crate::time_slice::{TimeSliceID, TimeSliceLevel}; use serde::Deserialize; @@ -23,7 +22,7 @@ pub struct Commodity { #[serde(skip)] pub costs: CommodityCostMap, #[serde(skip)] - pub demand_by_region: HashMap, Demand>, + pub demand: DemandMap, } define_id_getter! {Commodity} @@ -110,10 +109,68 @@ pub enum CommodityType { OutputCommodity, } +/// A map relating region, year and time slice to demand (in real units, not a fraction). +/// +/// This data type is exported as this is the way in which we want to look up demand outside of this +/// module. 
+#[derive(PartialEq, Debug, Clone, Default)] +pub struct DemandMap(HashMap); + +/// The key for a [`DemandMap`] +#[derive(PartialEq, Eq, Hash, Debug, Clone)] +struct DemandMapKey { + region_id: Rc, + year: u32, + time_slice: TimeSliceID, +} + +impl DemandMap { + /// Create a new, empty [`DemandMap`] + pub fn new() -> DemandMap { + DemandMap::default() + } + + /// Retrieve the demand for the specified region, year and time slice + pub fn get(&self, region_id: Rc, year: u32, time_slice: TimeSliceID) -> Option { + self.0 + .get(&DemandMapKey { + region_id, + year, + time_slice, + }) + .copied() + } + + /// Insert a new demand entry for the specified region, year and time slice + pub fn insert(&mut self, region_id: Rc, year: u32, time_slice: TimeSliceID, demand: f64) { + self.0.insert( + DemandMapKey { + region_id, + year, + time_slice, + }, + demand, + ); + } +} + #[cfg(test)] mod tests { use super::*; + #[test] + fn test_demand_map() { + let time_slice = TimeSliceID { + season: "all-year".into(), + time_of_day: "all-day".into(), + }; + let value = 0.25; + let mut map = DemandMap::new(); + map.insert("North".into(), 2020, time_slice.clone(), value); + + assert_eq!(map.get("North".into(), 2020, time_slice).unwrap(), value) + } + #[test] fn test_commodity_cost_map() { let ts = TimeSliceID { diff --git a/src/demand.rs b/src/demand.rs deleted file mode 100644 index a13ef27fe..000000000 --- a/src/demand.rs +++ /dev/null @@ -1,535 +0,0 @@ -//! Code for working with demand for a given commodity. Demand can vary by region and year. -use crate::input::*; -use crate::time_slice::{TimeSliceInfo, TimeSliceSelection}; -use anyhow::{ensure, Context, Result}; -use serde::Deserialize; -use std::collections::{HashMap, HashSet}; -use std::ops::RangeInclusive; -use std::path::Path; -use std::rc::Rc; - -const DEMAND_FILE_NAME: &str = "demand.csv"; -const DEMAND_SLICES_FILE_NAME: &str = "demand_slicing.csv"; - -/// Represents a single demand entry in the dataset. 
-#[derive(Debug, Deserialize, PartialEq)] -pub struct Demand { - /// The commodity this demand entry refers to - pub commodity_id: String, - /// The region of the demand entry - pub region_id: String, - /// The year of the demand entry - pub year: u32, - /// Annual demand quantity - pub demand: f64, - - /// How demand varies by time slice - #[serde(skip)] - pub demand_slices: Vec, -} - -#[derive(Clone, Deserialize)] -struct DemandSliceRaw { - commodity_id: String, - region_id: String, - time_slice: String, - #[serde(deserialize_with = "deserialise_proportion_nonzero")] - fraction: f64, -} - -/// How demand varies by time slice -#[derive(Debug, PartialEq)] -pub struct DemandSlice { - /// Which time slice(s) this applies to - pub time_slice: TimeSliceSelection, - /// The fraction of total demand (between 0 and 1 inclusive) - pub fraction: f64, -} - -/// A [HashMap] of [Demand] grouped first by commodity, then region -pub type DemandHashMap = HashMap, HashMap, Demand>>; - -/// Read the demand data from an iterator -/// -/// # Arguments -/// -/// * `iter` - An iterator of `Demand`s -/// * `commodity_ids` - All possible IDs of commodities -/// * `region_ids` - All possible IDs for regions -/// * `year_range` - The year range for the simulation -/// -/// # Returns -/// -/// The demand data (except for the demand slice information), grouped by commodity and region. 
-fn read_demand_from_iter( - iter: I, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - year_range: &RangeInclusive, -) -> Result -where - I: Iterator, -{ - let mut map_by_commodity = HashMap::new(); - - for demand in iter { - let commodity_id = commodity_ids.get_id(&demand.commodity_id)?; - let region_id = region_ids.get_id(&demand.region_id)?; - - ensure!( - year_range.contains(&demand.year), - "Year {} is out of range", - demand.year - ); - - // Get entry for this commodity - let map_by_region = map_by_commodity - .entry(commodity_id) - .or_insert_with(|| HashMap::with_capacity(1)); - - ensure!( - map_by_region.insert(region_id, demand).is_none(), - "Multiple entries for same commodity and region found" - ); - } - - Ok(map_by_commodity) -} - -/// Read the demand.csv file. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `commodity_ids` - All possible IDs of commodities -/// * `region_ids` - All possible IDs for regions -/// * `year_range` - The year range for the simulation -/// -/// # Returns -/// -/// The demand data except for the demand slice information, which resides in a separate CSV file. -/// The data is grouped by commodity and region. -fn read_demand_file( - model_dir: &Path, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - year_range: &RangeInclusive, -) -> Result { - let file_path = model_dir.join(DEMAND_FILE_NAME); - let demand_csv = read_csv(&file_path)?; - read_demand_from_iter(demand_csv, commodity_ids, region_ids, year_range) - .with_context(|| input_err_msg(&file_path)) -} - -/// Try to get demand for the given commodity and region. Returns `None` if not found. -fn try_get_demand<'a>( - commodity_id: &str, - region_id: &str, - demand: &'a mut DemandHashMap, -) -> Option<&'a mut Demand> { - demand.get_mut(commodity_id)?.get_mut(region_id) -} - -/// Read demand slices from an iterator and store them in `demand`. 
-fn read_demand_slices_from_iter( - iter: I, - time_slice_info: &TimeSliceInfo, - demand: &mut DemandHashMap, -) -> Result<()> -where - I: Iterator, -{ - for slice in iter { - let demand = - try_get_demand(&slice.commodity_id, &slice.region_id, demand).with_context(|| { - format!( - "No demand specified for commodity {} in region {}", - &slice.commodity_id, &slice.region_id - ) - })?; - - let time_slice = time_slice_info.get_selection(&slice.time_slice)?; - demand.demand_slices.push(DemandSlice { - time_slice, - fraction: slice.fraction, - }); - } - - Ok(()) -} - -/// Read demand slices from specified model directory. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `time_slice_info` - Information about seasons and times of day -/// * `demand` - Demand data grouped by commodity and region -fn read_demand_slices( - model_dir: &Path, - time_slice_info: &TimeSliceInfo, - demand: &mut DemandHashMap, -) -> Result<()> { - let file_path = model_dir.join(DEMAND_SLICES_FILE_NAME); - let demand_slices_csv = read_csv(&file_path)?; - read_demand_slices_from_iter(demand_slices_csv, time_slice_info, demand) - .with_context(|| input_err_msg(file_path)) -} - -/// Reads demand data from a CSV file. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `commodity_ids` - All possible IDs of commodities -/// * `region_ids` - All possible IDs for regions -/// * `time_slice_info` - Information about seasons and times of day -/// * `year_range` - The year range for the simulation -/// -/// # Returns -/// -/// This function returns demand data grouped by commodity and then region. 
-pub fn read_demand( - model_dir: &Path, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - time_slice_info: &TimeSliceInfo, - year_range: &RangeInclusive, -) -> Result { - let mut demand = read_demand_file(model_dir, commodity_ids, region_ids, year_range)?; - - // Read in demand slices - read_demand_slices(model_dir, time_slice_info, &mut demand)?; - - Ok(demand) -} - -#[cfg(test)] -mod tests { - use crate::time_slice::TimeSliceID; - - use super::*; - use std::fs::File; - use std::io::Write; - use std::path::Path; - use tempfile::tempdir; - - /// Create an example demand file in dir_path - fn create_demand_file(dir_path: &Path) { - let file_path = dir_path.join(DEMAND_FILE_NAME); - let mut file = File::create(file_path).unwrap(); - writeln!( - file, - "commodity_id,region_id,year,demand -COM1,North,2023,10 -COM1,South,2023,11 -COM1,East,2023,12 -COM1,West,2023,13" - ) - .unwrap(); - } - - #[test] - fn test_read_demand_from_iter() { - let commodity_ids = ["COM1".into()].into_iter().collect(); - let region_ids = ["North".into(), "South".into()].into_iter().collect(); - let year_range = 2020..=2030; - - // Valid - let demand = [ - Demand { - year: 2023, - region_id: "North".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new(), - }, - Demand { - year: 2023, - region_id: "South".to_string(), - commodity_id: "COM1".to_string(), - demand: 11.0, - demand_slices: Vec::new(), - }, - ]; - assert!(read_demand_from_iter( - demand.into_iter(), - &commodity_ids, - ®ion_ids, - &year_range - ) - .is_ok()); - - // Bad commodity ID - let demand = [ - Demand { - year: 2023, - region_id: "North".to_string(), - commodity_id: "COM2".to_string(), - demand: 10.0, - demand_slices: Vec::new(), - }, - Demand { - year: 2023, - region_id: "South".to_string(), - commodity_id: "COM1".to_string(), - demand: 11.0, - demand_slices: Vec::new(), - }, - ]; - assert!(read_demand_from_iter( - demand.into_iter(), - &commodity_ids, - ®ion_ids, - &year_range - ) 
- .is_err()); - - // Bad region ID - let demand = [ - Demand { - year: 2023, - region_id: "East".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new(), - }, - Demand { - year: 2023, - region_id: "South".to_string(), - commodity_id: "COM1".to_string(), - demand: 11.0, - demand_slices: Vec::new(), - }, - ]; - assert!(read_demand_from_iter( - demand.into_iter(), - &commodity_ids, - ®ion_ids, - &year_range - ) - .is_err()); - - // Bad year - let demand = [ - Demand { - year: 2010, - region_id: "North".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new(), - }, - Demand { - year: 2023, - region_id: "South".to_string(), - commodity_id: "COM1".to_string(), - demand: 11.0, - demand_slices: Vec::new(), - }, - ]; - assert!(read_demand_from_iter( - demand.into_iter(), - &commodity_ids, - ®ion_ids, - &year_range - ) - .is_err()); - - // Multiple entries for same commodity and region - let demand = [ - Demand { - year: 2023, - region_id: "North".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new(), - }, - Demand { - year: 2023, - region_id: "North".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new(), - }, - Demand { - year: 2023, - region_id: "South".to_string(), - commodity_id: "COM1".to_string(), - demand: 11.0, - demand_slices: Vec::new(), - }, - ]; - assert!(read_demand_from_iter( - demand.into_iter(), - &commodity_ids, - ®ion_ids, - &year_range - ) - .is_err()); - } - - #[test] - fn test_read_demand_file() { - let dir = tempdir().unwrap(); - create_demand_file(dir.path()); - let commodity_ids = ["COM1".into()].into_iter().collect(); - let region_ids = ["North".into(), "South".into(), "East".into(), "West".into()] - .into_iter() - .collect(); - let year_range = 2020..=2030; - let demand = read_demand_file(dir.path(), &commodity_ids, ®ion_ids, &year_range); - assert_eq!( - demand.unwrap(), - HashMap::from_iter( - [( - 
"COM1".into(), - HashMap::from_iter([ - ( - "North".into(), - Demand { - year: 2023, - region_id: "North".to_string(), - commodity_id: "COM1".to_string(), - demand: 10.0, - demand_slices: Vec::new() - } - ), - ( - "South".into(), - Demand { - year: 2023, - region_id: "South".to_string(), - commodity_id: "COM1".to_string(), - demand: 11.0, - demand_slices: Vec::new() - } - ), - ( - "East".into(), - Demand { - year: 2023, - region_id: "East".to_string(), - commodity_id: "COM1".to_string(), - demand: 12.0, - demand_slices: Vec::new() - } - ), - ( - "West".into(), - Demand { - year: 2023, - region_id: "West".to_string(), - commodity_id: "COM1".to_string(), - demand: 13.0, - demand_slices: Vec::new() - } - ) - ]) - )] - .into_iter() - ) - ); - } - - #[test] - fn test_read_demand_slices_from_iter() { - let time_slice_info = TimeSliceInfo { - seasons: ["winter".into()].into_iter().collect(), - times_of_day: ["day".into()].into_iter().collect(), - fractions: [( - TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - 1.0, - )] - .into_iter() - .collect(), - }; - - // Demand grouped by region - let mut demand = [( - "COM1".into(), - [( - "GBR".into(), - Demand { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - year: 2020, - demand: 1.0, - demand_slices: Vec::new(), - }, - )] - .into_iter() - .collect(), - )] - .into_iter() - .collect(); - - // Valid - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - read_demand_slices_from_iter( - [demand_slice.clone()].into_iter(), - &time_slice_info, - &mut demand, - ) - .unwrap(); - let time_slice = time_slice_info.get_selection("winter.day").unwrap(); - assert_eq!( - try_get_demand("COM1", "GBR", &mut demand) - .unwrap() - .demand_slices, - vec![DemandSlice { - time_slice, - fraction: 1.0 - }] - ); - - // Bad commodity - let demand_slice = DemandSliceRaw { - commodity_id: "COM2".into(), - region_id: 
"GBR".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand - ) - .is_err()); - - // Bad region - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "USA".into(), - time_slice: "winter.day".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand - ) - .is_err()); - - // Bad time slice - let demand_slice = DemandSliceRaw { - commodity_id: "COM1".into(), - region_id: "GBR".into(), - time_slice: "summer.night".into(), - fraction: 1.0, - }; - assert!(read_demand_slices_from_iter( - [demand_slice].into_iter(), - &time_slice_info, - &mut demand, - ) - .is_err()); - } -} diff --git a/src/input.rs b/src/input.rs index 42b549399..49ab9d2f8 100644 --- a/src/input.rs +++ b/src/input.rs @@ -1,5 +1,6 @@ //! Common routines for handling input data. use anyhow::{ensure, Context, Result}; +use float_cmp::approx_eq; use itertools::Itertools; use serde::de::{Deserialize, DeserializeOwned, Deserializer}; use std::collections::{HashMap, HashSet}; @@ -108,10 +109,7 @@ impl IDCollection for HashSet> { } } -/// Read a CSV file of items with IDs. -/// -/// This is like `read_csv_grouped_by_id`, with the difference that it is to be used on the "main" -/// CSV file for a record type, so it assumes that all IDs encountered are valid. 
+/// Read a CSV file of items with IDs pub fn read_csv_id_file(file_path: &Path) -> Result, T>> where T: HasID + DeserializeOwned, @@ -166,6 +164,21 @@ where } } +/// Check that fractions sum to (approximately) one +pub fn check_fractions_sum_to_one(fractions: I) -> Result<()> +where + I: Iterator, +{ + let sum = fractions.sum(); + ensure!( + approx_eq!(f64, sum, 1.0, epsilon = 1e-5), + "Sum of fractions does not equal one (actual: {})", + sum + ); + + Ok(()) +} + #[cfg(test)] mod tests { use super::*; @@ -263,4 +276,23 @@ mod tests { assert!(deserialise_f64(f64::NAN).is_err()); assert!(deserialise_f64(f64::INFINITY).is_err()); } + + #[test] + fn test_check_fractions_sum_to_one() { + // Single input, valid + assert!(check_fractions_sum_to_one([1.0].into_iter()).is_ok()); + + // Multiple inputs, valid + assert!(check_fractions_sum_to_one([0.4, 0.6].into_iter()).is_ok()); + + // Single input, invalid + assert!(check_fractions_sum_to_one([0.5].into_iter()).is_err()); + + // Multiple inputs, invalid + assert!(check_fractions_sum_to_one([0.4, 0.3].into_iter()).is_err()); + + // Edge cases + assert!(check_fractions_sum_to_one([f64::INFINITY].into_iter()).is_err()); + assert!(check_fractions_sum_to_one([f64::NAN].into_iter()).is_err()); + } } diff --git a/src/input/commodity.rs b/src/input/commodity.rs index e4a5b1f54..9a91896f1 100644 --- a/src/input/commodity.rs +++ b/src/input/commodity.rs @@ -1,6 +1,5 @@ //! Code for reading in commodity-related data from CSV files. 
use crate::commodity::Commodity; -use crate::demand::read_demand; use crate::input::*; use crate::time_slice::TimeSliceInfo; use anyhow::Result; @@ -10,6 +9,9 @@ use std::rc::Rc; pub mod cost; use cost::read_commodity_costs; +pub mod demand; +use demand::read_demand; +pub mod demand_slicing; const COMMODITY_FILE_NAME: &str = "commodities.csv"; @@ -41,13 +43,12 @@ pub fn read_commodities( milestone_years, )?; - let year_range = *milestone_years.first().unwrap()..=*milestone_years.last().unwrap(); let mut demand = read_demand( model_dir, &commodity_ids, region_ids, time_slice_info, - &year_range, + milestone_years, )?; // Populate Vecs for each Commodity @@ -58,7 +59,7 @@ pub fn read_commodities( commodity.costs = costs; } if let Some(demand) = demand.remove(&id) { - commodity.demand_by_region = demand; + commodity.demand = demand; } (id, commodity.into()) diff --git a/src/input/commodity/cost.rs b/src/input/commodity/cost.rs index 4a5e5903c..f41c1f2c9 100644 --- a/src/input/commodity/cost.rs +++ b/src/input/commodity/cost.rs @@ -93,7 +93,7 @@ where .entry(commodity_id.clone()) .or_insert_with(CommodityCostMap::new); - for time_slice in time_slice_info.iter_selection(&ts_selection) { + for (time_slice, _) in time_slice_info.iter_selection(&ts_selection) { let value = CommodityCost { balance_type: cost.balance_type.clone(), value: cost.value, diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs new file mode 100644 index 000000000..7955e347b --- /dev/null +++ b/src/input/commodity/demand.rs @@ -0,0 +1,470 @@ +//! Code for working with demand for a given commodity. Demand can vary by region, year and time +//! slice. 
+use super::demand_slicing::{read_demand_slices, DemandSliceMap, DemandSliceMapKey}; +use crate::commodity::DemandMap; +use crate::input::*; +use crate::time_slice::TimeSliceInfo; +use anyhow::{ensure, Result}; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const DEMAND_FILE_NAME: &str = "demand.csv"; + +/// Represents a single demand entry in the dataset. +#[derive(Debug, Clone, Deserialize, PartialEq)] +struct Demand { + /// The commodity this demand entry refers to + commodity_id: String, + /// The region of the demand entry + region_id: String, + /// The year of the demand entry + year: u32, + /// Annual demand quantity + demand: f64, +} + +/// A map relating commodity, region and year to annual demand +pub type AnnualDemandMap = HashMap; + +/// A key for an [`AnnualDemandMap`] +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct AnnualDemandMapKey { + /// The commodity to which this demand applies + commodity_id: Rc, + /// The region to which this demand applies + region_id: Rc, + /// The simulation year to which this demand applies + year: u32, +} + +/// A set of commodity + region pairs +pub type CommodityRegionPairs = HashSet<(Rc, Rc)>; + +/// Reads demand data from CSV files. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible IDs of commodities +/// * `region_ids` - All possible IDs for regions +/// * `time_slice_info` - Information about seasons and times of day +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// This function returns [`DemandMap`]s grouped by commodity ID. 
+pub fn read_demand( + model_dir: &Path, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + time_slice_info: &TimeSliceInfo, + milestone_years: &[u32], +) -> Result, DemandMap>> { + let (demand, commodity_regions) = + read_demand_file(model_dir, commodity_ids, region_ids, milestone_years)?; + let slices = read_demand_slices( + model_dir, + commodity_ids, + region_ids, + &commodity_regions, + time_slice_info, + )?; + + Ok(compute_demand_maps(&demand, &slices, time_slice_info)) +} + +/// Read the demand.csv file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible IDs of commodities +/// * `region_ids` - All possible IDs for regions +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// Annual demand data, grouped by commodity, region and milestone year. +fn read_demand_file( + model_dir: &Path, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + milestone_years: &[u32], +) -> Result<(AnnualDemandMap, CommodityRegionPairs)> { + let file_path = model_dir.join(DEMAND_FILE_NAME); + let iter = read_csv(&file_path)?; + read_demand_from_iter(iter, commodity_ids, region_ids, milestone_years) +} + +/// Read the demand data from an iterator. +/// +/// # Arguments +/// +/// * `iter` - An iterator of [`Demand`]s +/// * `commodity_ids` - All possible IDs of commodities +/// * `region_ids` - All possible IDs for regions +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// The demand for each combination of commodity, region and year along with a [`HashSet`] of all +/// commodity + region pairs included in the file. 
+fn read_demand_from_iter( + iter: I, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + milestone_years: &[u32], +) -> Result<(AnnualDemandMap, CommodityRegionPairs)> +where + I: Iterator, +{ + let mut map = AnnualDemandMap::new(); + + // Keep track of all commodity + region pairs so we can check that every milestone year is + // covered + let mut commodity_regions = HashSet::new(); + + for demand in iter { + let commodity_id = commodity_ids.get_id(&demand.commodity_id)?; + let region_id = region_ids.get_id(&demand.region_id)?; + + ensure!( + milestone_years.binary_search(&demand.year).is_ok(), + "Year {} is not a milestone year. \ + Input of non-milestone years is currently not supported.", + demand.year + ); + + ensure!( + demand.demand.is_normal() && demand.demand > 0.0, + "Demand must be a valid number greater than zero" + ); + + let key = AnnualDemandMapKey { + commodity_id: Rc::clone(&commodity_id), + region_id: Rc::clone(®ion_id), + year: demand.year, + }; + ensure!( + map.insert(key, demand.demand).is_none(), + "Duplicate demand entries (commodity: {}, region: {}, year: {})", + commodity_id, + region_id, + demand.year + ); + + commodity_regions.insert((commodity_id, region_id)); + } + + // If a commodity + region combination is represented, it must include entries for every + // milestone year + for (commodity_id, region_id) in commodity_regions.iter() { + for year in milestone_years.iter().copied() { + let key = AnnualDemandMapKey { + commodity_id: Rc::clone(commodity_id), + region_id: Rc::clone(region_id), + year, + }; + ensure!( + map.contains_key(&key), + "Missing milestone year {year} for commodity {commodity_id} in region {region_id}" + ); + } + } + + Ok((map, commodity_regions)) +} + +/// Calculate the demand for each combination of commodity, region, year and time slice. 
+/// +/// # Arguments +/// +/// * `demand` - Total annual demand for combinations of commodity, region and year +/// * `slices` - How annual demand is shared between time slices +/// * `time_slice_info` - Information about time slices +/// +/// # Returns +/// +/// [`DemandMap`]s for combinations of region, year and time slice, grouped by the commodity to +/// which the demand applies. +fn compute_demand_maps( + demand: &AnnualDemandMap, + slices: &DemandSliceMap, + time_slice_info: &TimeSliceInfo, +) -> HashMap, DemandMap> { + let mut map = HashMap::new(); + for (demand_key, annual_demand) in demand.iter() { + let commodity_id = &demand_key.commodity_id; + let region_id = &demand_key.region_id; + for time_slice in time_slice_info.iter_ids() { + let slice_key = DemandSliceMapKey { + commodity_id: Rc::clone(commodity_id), + region_id: Rc::clone(region_id), + time_slice: time_slice.clone(), + }; + + // NB: This has already been checked, so shouldn't fail + let demand_fraction = slices.get(&slice_key).unwrap(); + + // Get or create entry + let map = map + .entry(Rc::clone(commodity_id)) + .or_insert_with(DemandMap::new); + + // Add a new demand entry + map.insert( + Rc::clone(region_id), + demand_key.year, + time_slice.clone(), + annual_demand * demand_fraction, + ); + } + } + + map +} + +#[cfg(test)] +mod tests { + use super::*; + use itertools::iproduct; + use std::fs::File; + use std::io::Write; + use std::iter; + use std::path::Path; + use tempfile::tempdir; + + /// Create an example demand file in dir_path + fn create_demand_file(dir_path: &Path) { + let file_path = dir_path.join(DEMAND_FILE_NAME); + let mut file = File::create(file_path).unwrap(); + writeln!( + file, + "commodity_id,region_id,year,demand +COM1,North,2020,10 +COM1,South,2020,11 +COM1,East,2020,12 +COM1,West,2020,13" + ) + .unwrap(); + } + + #[test] + fn test_read_demand_from_iter() { + let commodity_ids = ["COM1".into()].into_iter().collect(); + let region_ids = ["North".into(), 
"South".into()].into_iter().collect(); + let milestone_years = [2020]; + + // Valid + let demand = [ + Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + }, + Demand { + year: 2020, + region_id: "South".to_string(), + commodity_id: "COM1".to_string(), + demand: 11.0, + }, + ]; + assert!(read_demand_from_iter( + demand.into_iter(), + &commodity_ids, + ®ion_ids, + &milestone_years + ) + .is_ok()); + + // Bad commodity ID + let demand = [ + Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM2".to_string(), + demand: 10.0, + }, + Demand { + year: 2020, + region_id: "South".to_string(), + commodity_id: "COM1".to_string(), + demand: 11.0, + }, + ]; + assert!(read_demand_from_iter( + demand.into_iter(), + &commodity_ids, + ®ion_ids, + &milestone_years + ) + .is_err()); + + // Bad region ID + let demand = [ + Demand { + year: 2020, + region_id: "East".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + }, + Demand { + year: 2020, + region_id: "South".to_string(), + commodity_id: "COM1".to_string(), + demand: 11.0, + }, + ]; + assert!(read_demand_from_iter( + demand.into_iter(), + &commodity_ids, + ®ion_ids, + &milestone_years + ) + .is_err()); + + // Bad year + let demand = [ + Demand { + year: 2010, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + }, + Demand { + year: 2020, + region_id: "South".to_string(), + commodity_id: "COM1".to_string(), + demand: 11.0, + }, + ]; + assert!(read_demand_from_iter( + demand.into_iter(), + &commodity_ids, + ®ion_ids, + &milestone_years + ) + .is_err()); + + // Bad demand quantity + macro_rules! 
test_quantity { + ($quantity: expr) => { + let demand = [Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: $quantity, + }]; + assert!(read_demand_from_iter( + demand.into_iter(), + &commodity_ids, + ®ion_ids, + &milestone_years, + ) + .is_err()); + }; + } + test_quantity!(-1.0); + test_quantity!(0.0); + test_quantity!(f64::NAN); + test_quantity!(f64::NEG_INFINITY); + test_quantity!(f64::INFINITY); + + // Multiple entries for same commodity and region + let demand = [ + Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + }, + Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + }, + Demand { + year: 2020, + region_id: "South".to_string(), + commodity_id: "COM1".to_string(), + demand: 11.0, + }, + ]; + assert!(read_demand_from_iter( + demand.into_iter(), + &commodity_ids, + ®ion_ids, + &milestone_years + ) + .is_err()); + + // Missing entry for a milestone year + let demand = Demand { + year: 2020, + region_id: "North".to_string(), + commodity_id: "COM1".to_string(), + demand: 10.0, + }; + assert!(read_demand_from_iter( + iter::once(demand), + &commodity_ids, + ®ion_ids, + &[2020, 2030] + ) + .is_err()); + } + + #[test] + fn test_read_demand_file() { + let dir = tempdir().unwrap(); + create_demand_file(dir.path()); + let commodity_ids = HashSet::from_iter(iter::once("COM1".into())); + let region_ids = + HashSet::from_iter(["North".into(), "South".into(), "East".into(), "West".into()]); + let milestone_years = [2020]; + let expected = AnnualDemandMap::from_iter([ + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "North".into(), + year: 2020, + }, + 10.0, + ), + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "South".into(), + year: 2020, + }, + 11.0, + ), + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "East".into(), + year: 2020, + }, + 
12.0, + ), + ( + AnnualDemandMapKey { + commodity_id: "COM1".into(), + region_id: "West".into(), + year: 2020, + }, + 13.0, + ), + ]); + let (demand, commodity_regions) = + read_demand_file(dir.path(), &commodity_ids, ®ion_ids, &milestone_years).unwrap(); + let commodity_regions_expected = + iproduct!(commodity_ids.iter().cloned(), region_ids.iter().cloned()).collect(); + assert_eq!(demand, expected); + assert_eq!(commodity_regions, commodity_regions_expected); + } +} diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs new file mode 100644 index 000000000..901b42a5c --- /dev/null +++ b/src/input/commodity/demand_slicing.rs @@ -0,0 +1,478 @@ +//! Demand slicing determines how annual demand is distributed across the year. +use super::demand::*; +use crate::input::*; +use crate::time_slice::{TimeSliceID, TimeSliceInfo}; +use anyhow::{ensure, Context, Result}; +use itertools::Itertools; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const DEMAND_SLICING_FILE_NAME: &str = "demand_slicing.csv"; + +#[derive(Clone, Deserialize)] +struct DemandSlice { + commodity_id: String, + region_id: String, + time_slice: String, + #[serde(deserialize_with = "deserialise_proportion_nonzero")] + fraction: f64, +} + +/// A map relating commodity, region and time slice to the fraction of annual demand +pub type DemandSliceMap = HashMap; + +/// A key for a [`DemandSliceMap`] +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct DemandSliceMapKey { + /// The commodity to which this demand applies + pub commodity_id: Rc, + /// The region to which this demand applies + pub region_id: Rc, + /// The time slice to which this demand applies + pub time_slice: TimeSliceID, +} + +/// Read demand slices from specified model directory. 
+/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible IDs of commodities +/// * `region_ids` - All possible IDs for regions +/// * `commodity_regions` - Pairs of commodities + regions listed in demand CSV file +/// * `time_slice_info` - Information about seasons and times of day +pub fn read_demand_slices( + model_dir: &Path, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + commodity_regions: &CommodityRegionPairs, + time_slice_info: &TimeSliceInfo, +) -> Result { + let file_path = model_dir.join(DEMAND_SLICING_FILE_NAME); + let demand_slices_csv = read_csv(&file_path)?; + read_demand_slices_from_iter( + demand_slices_csv, + commodity_ids, + region_ids, + commodity_regions, + time_slice_info, + ) + .with_context(|| input_err_msg(file_path)) +} + +/// Read demand slices from an iterator +fn read_demand_slices_from_iter( + iter: I, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + commodity_regions: &CommodityRegionPairs, + time_slice_info: &TimeSliceInfo, +) -> Result +where + I: Iterator, +{ + let mut demand_slices = DemandSliceMap::new(); + + for slice in iter { + let commodity_id = commodity_ids.get_id(&slice.commodity_id)?; + let region_id = region_ids.get_id(&slice.region_id)?; + ensure!( + commodity_regions.contains(&(Rc::clone(&commodity_id), Rc::clone(®ion_id))), + "Demand slicing provided for commodity {commodity_id} in region {region_id} \ + without a corresponding entry in demand CSV file" + ); + + // We need to know how many time slices are covered by the current demand slice entry and + // how long they are relative to one another so that we can divide up the demand for this + // entry appropriately + let ts_selection = time_slice_info.get_selection(&slice.time_slice)?; + for (ts, demand_fraction) in time_slice_info.calculate_share(&ts_selection, slice.fraction) + { + let key = DemandSliceMapKey { + commodity_id: Rc::clone(&commodity_id), + region_id: 
Rc::clone(®ion_id), + time_slice: ts.clone(), + }; + + // Share demand between the time slices in proportion to duration + ensure!(demand_slices.insert(key, demand_fraction).is_none(), + "Duplicate demand slicing entry (or same time slice covered by more than one entry) \ + (commodity: {commodity_id}, region: {region_id}, time slice: {ts})" + ); + } + } + + validate_demand_slices(commodity_regions, &demand_slices, time_slice_info)?; + + Ok(demand_slices) +} + +/// Check that the [`DemandSliceMap`] is well formed. +/// +/// Specifically, check: +/// +/// * It is non-empty +/// * If an entry is provided for any commodity + region pair, there must be entries covering every +/// time slice +/// * The demand fractions for all entries related to a commodity + region pair sum to one +fn validate_demand_slices( + commodity_regions: &CommodityRegionPairs, + demand_slices: &DemandSliceMap, + time_slice_info: &TimeSliceInfo, +) -> Result<()> { + for (commodity_id, region_id) in commodity_regions { + time_slice_info + .iter_ids() + .map(|time_slice| { + let key = DemandSliceMapKey { + commodity_id: Rc::clone(commodity_id), + region_id: Rc::clone(region_id), + time_slice: time_slice.clone(), + }; + + demand_slices.get(&key).with_context(|| { + format!( + "Demand slice missing for time slice {} (commodity: {}, region {})", + time_slice, commodity_id, region_id + ) + }) + }) + .process_results(|iter| { + check_fractions_sum_to_one(iter.copied()).context("Invalid demand fractions") + })??; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::time_slice::TimeSliceID; + use itertools::iproduct; + use std::iter; + + #[test] + fn test_read_demand_slices_from_iter() { + let time_slice_info = TimeSliceInfo { + seasons: iter::once("winter".into()).collect(), + times_of_day: iter::once("day".into()).collect(), + fractions: [( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 1.0, + )] + .into_iter() + .collect(), + }; + let commodity_ids 
= HashSet::from_iter(iter::once("COM1".into())); + let region_ids = HashSet::from_iter(iter::once("GBR".into())); + let commodity_regions = + iproduct!(commodity_ids.iter().cloned(), region_ids.iter().cloned()).collect(); + + // Valid + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + let time_slice = time_slice_info + .get_time_slice_id_from_str("winter.day") + .unwrap(); + let key = DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice, + }; + let expected = DemandSliceMap::from_iter(iter::once((key, 1.0))); + assert_eq!( + read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .unwrap(), + expected + ); + + // Valid, multiple time slices + { + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: ["day".into(), "night".into()].into_iter().collect(), + fractions: [ + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + 3.0 / 16.0, + ), + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + 5.0 / 16.0, + ), + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 3.0 / 16.0, + ), + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + 5.0 / 16.0, + ), + ] + .into_iter() + .collect(), + }; + let demand_slices = [ + DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }, + DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "summer".into(), + fraction: 0.5, + }, + ]; + let expected = DemandSliceMap::from_iter([ + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "summer".into(), + time_of_day: 
"day".into(), + }, + }, + 3.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + }, + 5.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + }, + 3.0 / 16.0, + ), + ( + DemandSliceMapKey { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + }, + 5.0 / 16.0, + ), + ]); + assert_eq!( + read_demand_slices_from_iter( + demand_slices.into_iter(), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .unwrap(), + expected + ); + } + + // Empty CSV file + assert!(read_demand_slices_from_iter( + iter::empty(), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Bad commodity + let demand_slice = DemandSlice { + commodity_id: "COM2".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Bad region + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "FRA".into(), + time_slice: "winter.day".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Bad time slice selection + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "summer".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + 
.is_err()); + + { + // Some time slices uncovered + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: iter::once("day".into()).collect(), + fractions: [ + ( + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + 0.5, + ), + ( + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + 0.5, + ), + ] + .into_iter() + .collect(), + }; + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice.clone()), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + } + + // Same time slice twice + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter.day".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + iter::repeat_n(demand_slice.clone(), 2), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Whole season and single time slice conflicting + let demand_slice_season = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + [demand_slice, demand_slice_season].into_iter(), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // Fractions don't sum to one + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: "GBR".into(), + time_slice: "winter".into(), + fraction: 0.5, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice), + &commodity_ids, + ®ion_ids, + &commodity_regions, + &time_slice_info, + ) + .is_err()); + + // No corresponding entry for commodity + region in demand CSV file + let demand_slice = DemandSlice { + commodity_id: "COM1".into(), + region_id: 
"GBR".into(), + time_slice: "winter".into(), + fraction: 1.0, + }; + assert!(read_demand_slices_from_iter( + iter::once(demand_slice), + &commodity_ids, + ®ion_ids, + &HashSet::new(), + &time_slice_info, + ) + .is_err()); + } +} diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index cdc93374e..f0294af17 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -63,9 +63,9 @@ where } #[cfg(test)] -mod test { +mod tests { use super::*; - use crate::commodity::{CommodityCostMap, CommodityType}; + use crate::commodity::{CommodityCostMap, CommodityType, DemandMap}; use crate::time_slice::TimeSliceLevel; #[test] @@ -80,7 +80,7 @@ mod test { kind: CommodityType::InputCommodity, time_slice_level: TimeSliceLevel::Annual, costs: CommodityCostMap::new(), - demand_by_region: HashMap::new(), + demand: DemandMap::new(), }; (Rc::clone(&commodity.id), commodity.into()) @@ -161,7 +161,7 @@ mod test { kind: CommodityType::InputCommodity, time_slice_level: TimeSliceLevel::Annual, costs: CommodityCostMap::new(), - demand_by_region: HashMap::new(), + demand: DemandMap::new(), }; (Rc::clone(&commodity.id), commodity.into()) diff --git a/src/input/process/pac.rs b/src/input/process/pac.rs index 8a93df779..44eea9c1f 100644 --- a/src/input/process/pac.rs +++ b/src/input/process/pac.rs @@ -129,9 +129,9 @@ fn validate_pac_flows( } #[cfg(test)] -mod test { +mod tests { use super::*; - use crate::commodity::{CommodityCostMap, CommodityType}; + use crate::commodity::{CommodityCostMap, CommodityType, DemandMap}; use crate::process::FlowType; use crate::time_slice::TimeSliceLevel; @@ -148,7 +148,7 @@ mod test { kind: CommodityType::InputCommodity, time_slice_level: TimeSliceLevel::Annual, costs: CommodityCostMap::new(), - demand_by_region: HashMap::new(), + demand: DemandMap::new(), }; (Rc::clone(&commodity.id), commodity.into()) }) diff --git a/src/lib.rs b/src/lib.rs index a4c655855..d878b6dea 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -3,7 +3,6 @@ pub mod 
agent; pub mod commands; pub mod commodity; -pub mod demand; pub mod input; pub mod log; pub mod model; diff --git a/src/time_slice.rs b/src/time_slice.rs index 40e506a8d..e0155a854 100644 --- a/src/time_slice.rs +++ b/src/time_slice.rs @@ -5,7 +5,6 @@ #![allow(missing_docs)] use crate::input::*; use anyhow::{ensure, Context, Result}; -use float_cmp::approx_eq; use itertools::Itertools; use serde::Deserialize; use serde_string_enum::DeserializeLabeledStringEnum; @@ -33,7 +32,7 @@ impl Display for TimeSliceID { } /// Represents a time slice read from an input file, which can be all -#[derive(PartialEq, Debug)] +#[derive(PartialEq, Clone, Debug)] pub enum TimeSliceSelection { /// All year and all day Annual, @@ -116,26 +115,86 @@ impl TimeSliceInfo { /// /// The order will be consistent each time this is called, but not every time the program is /// run. - pub fn iter(&self) -> impl Iterator { + pub fn iter_ids(&self) -> impl Iterator { self.fractions.keys() } - /// Iterate over the subset of [`TimeSliceID`] indicated by `selection`. + /// Iterate over all time slices. + /// + /// The order will be consistent each time this is called, but not every time the program is + /// run. + pub fn iter(&self) -> impl Iterator { + self.fractions.iter().map(|(ts, fraction)| (ts, *fraction)) + } + + /// Iterate over the subset of time slices indicated by `selection`. /// /// The order will be consistent each time this is called, but not every time the program is /// run. 
pub fn iter_selection<'a>( &'a self, selection: &'a TimeSliceSelection, - ) -> Box + 'a> { + ) -> Box + 'a> { match selection { TimeSliceSelection::Annual => Box::new(self.iter()), TimeSliceSelection::Season(season) => { - Box::new(self.iter().filter(move |ts| ts.season == *season)) + Box::new(self.iter().filter(move |(ts, _)| ts.season == *season)) + } + TimeSliceSelection::Single(ts) => { + Box::new(iter::once((ts, *self.fractions.get(ts).unwrap()))) } - TimeSliceSelection::Single(ts) => Box::new(iter::once(ts)), } } + + /// Iterate over a subset of time slices calculating the relative duration of each. + /// + /// The relative duration is specified as a fraction of the total time (proportion of year) + /// covered by `selection`. + /// + /// # Arguments + /// + /// * `selection` - A subset of time slices + /// + /// # Returns + /// + /// An iterator of time slices along with the fraction of the total selection. + pub fn iterate_selection_share<'a>( + &'a self, + selection: &'a TimeSliceSelection, + ) -> impl Iterator { + // Store time slices as we have to iterate over selection twice + let time_slices = self.iter_selection(selection).collect_vec(); + + // Total fraction of year covered by selection + let time_total: f64 = time_slices.iter().map(|(_, fraction)| *fraction).sum(); + + // Calculate share + time_slices + .into_iter() + .map(move |(ts, time_fraction)| (ts, time_fraction / time_total)) + } + + /// Share a value between a subset of time slices in proportion to their lengths. + /// + /// For instance, you could use this function to compute how demand is distributed between the + /// different time slices of winter. + /// + /// # Arguments + /// + /// * `selection` - A subset of time slices + /// * `value` - The value to be shared between the time slices + /// + /// # Returns + /// + /// An iterator of time slices along with a fraction of `value`. 
+ pub fn calculate_share<'a>( + &'a self, + selection: &'a TimeSliceSelection, + value: f64, + ) -> impl Iterator { + self.iterate_selection_share(selection) + .map(move |(ts, share)| (ts, value * share)) + } } /// A time slice record retrieved from a CSV file @@ -183,7 +242,8 @@ where } // Validate data - check_time_slice_fractions_sum_to_one(fractions.values().cloned())?; + check_fractions_sum_to_one(fractions.values().cloned()) + .context("Invalid time slice fractions")?; Ok(TimeSliceInfo { seasons, @@ -223,23 +283,10 @@ pub fn read_time_slice_info(model_dir: &Path) -> Result { read_time_slice_info_from_iter(time_slices_csv).with_context(|| input_err_msg(file_path)) } -/// Check that time slice fractions sum to (approximately) one -fn check_time_slice_fractions_sum_to_one(fractions: I) -> Result<()> -where - I: Iterator, -{ - let sum = fractions.sum(); - ensure!( - approx_eq!(f64, sum, 1.0, epsilon = 1e-5), - "Sum of time slice fractions does not equal one (actual: {sum})" - ); - - Ok(()) -} - #[cfg(test)] mod tests { use super::*; + use float_cmp::assert_approx_eq; use std::fs::File; use std::io::Write; use std::path::Path; @@ -348,36 +395,86 @@ autumn,evening,0.25" }; assert_eq!( - HashSet::<&TimeSliceID>::from_iter(ts_info.iter_selection(&TimeSliceSelection::Annual)), + HashSet::<&TimeSliceID>::from_iter( + ts_info + .iter_selection(&TimeSliceSelection::Annual) + .map(|(ts, _)| ts) + ), HashSet::from_iter(slices.iter()) ); itertools::assert_equal( - ts_info.iter_selection(&TimeSliceSelection::Season("winter".into())), + ts_info + .iter_selection(&TimeSliceSelection::Season("winter".into())) + .map(|(ts, _)| ts), iter::once(&slices[0]), ); let ts = ts_info.get_time_slice_id_from_str("summer.night").unwrap(); itertools::assert_equal( - ts_info.iter_selection(&TimeSliceSelection::Single(ts)), + ts_info + .iter_selection(&TimeSliceSelection::Single(ts)) + .map(|(ts, _)| ts), iter::once(&slices[1]), ); } #[test] - fn test_check_time_slice_fractions_sum_to_one() { 
- // Single input, valid - assert!(check_time_slice_fractions_sum_to_one([1.0].into_iter()).is_ok()); + fn test_calculate_share() { + let slices = [ + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + TimeSliceID { + season: "winter".into(), + time_of_day: "night".into(), + }, + TimeSliceID { + season: "summer".into(), + time_of_day: "day".into(), + }, + TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + ]; + let ts_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: ["day".into(), "night".into()].into_iter().collect(), + fractions: slices.iter().map(|ts| (ts.clone(), 0.25)).collect(), + }; - // Multiple inputs, valid - assert!(check_time_slice_fractions_sum_to_one([0.4, 0.6].into_iter()).is_ok()); + macro_rules! check_share { + ($selection:expr, $expected:expr) => { + let expected = $expected; + let actual: HashMap<_, _> = HashMap::from_iter( + ts_info + .calculate_share(&$selection, 8.0) + .map(|(ts, share)| (ts.clone(), share)), + ); + assert!(actual.len() == expected.len()); + for (k, v) in actual { + assert_approx_eq!(f64, v, *expected.get(&k).unwrap()); + } + }; + } - // Single input, invalid - assert!(check_time_slice_fractions_sum_to_one([0.5].into_iter()).is_err()); + // Whole year + let expected: HashMap<_, _> = HashMap::from_iter(slices.iter().map(|ts| (ts.clone(), 2.0))); + check_share!(TimeSliceSelection::Annual, expected); - // Multiple inputs, invalid - assert!(check_time_slice_fractions_sum_to_one([0.4, 0.3].into_iter()).is_err()); + // One season + let selection = TimeSliceSelection::Season("winter".into()); + let expected: HashMap<_, _> = HashMap::from_iter( + ts_info + .iter_selection(&selection) + .map(|(ts, _)| (ts.clone(), 4.0)), + ); + check_share!(selection, expected); - // Edge cases - assert!(check_time_slice_fractions_sum_to_one([f64::INFINITY].into_iter()).is_err()); - 
assert!(check_time_slice_fractions_sum_to_one([f64::NAN].into_iter()).is_err()); + // Single time slice + let time_slice = ts_info.get_time_slice_id_from_str("winter.day").unwrap(); + let selection = TimeSliceSelection::Single(time_slice.clone()); + let expected: HashMap<_, _> = HashMap::from_iter(iter::once((time_slice, 8.0))); + check_share!(selection, expected); } }