From 65683e2f73c60322f3b0702c3bfcd9adc6437c82 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 Dec 2024 10:25:29 +0000 Subject: [PATCH 01/10] Move region CSV code to `input/region.rs` --- src/agent.rs | 3 +- src/asset.rs | 11 +- src/input.rs | 3 + src/input/region.rs | 324 ++++++++++++++++++++++++++++++++++++++++++++ src/model.rs | 2 +- src/process.rs | 3 +- src/region.rs | 321 +------------------------------------------ 7 files changed, 338 insertions(+), 329 deletions(-) create mode 100644 src/input/region.rs diff --git a/src/agent.rs b/src/agent.rs index 8c91e8202..b07ee0b1e 100644 --- a/src/agent.rs +++ b/src/agent.rs @@ -1,8 +1,9 @@ #![allow(missing_docs)] use crate::asset::{read_assets, Asset}; +use crate::input::region::{define_region_id_getter, read_regions_for_entity}; use crate::input::*; use crate::process::Process; -use crate::region::*; +use crate::region::RegionSelection; use anyhow::{bail, ensure, Context, Result}; use serde::Deserialize; use serde_string_enum::DeserializeLabeledStringEnum; diff --git a/src/asset.rs b/src/asset.rs index 5ba36a3f8..70350e55b 100644 --- a/src/asset.rs +++ b/src/asset.rs @@ -107,11 +107,10 @@ pub fn read_assets( #[cfg(test)] mod tests { - use std::vec; - - use crate::process::ProcessParameter; - use super::*; + use crate::process::ProcessParameter; + use crate::region::RegionSelection; + use std::vec; #[test] fn test_read_assets_from_iter() { @@ -132,7 +131,7 @@ mod tests { flows: vec![], pacs: vec![], parameter: process_param.clone(), - regions: crate::region::RegionSelection::All, + regions: RegionSelection::All, }); let processes = [(Rc::clone(&process.id), Rc::clone(&process))] .into_iter() @@ -208,7 +207,7 @@ mod tests { flows: vec![], pacs: vec![], parameter: process_param, - regions: crate::region::RegionSelection::Some(["GBR".into()].into_iter().collect()), + regions: RegionSelection::Some(["GBR".into()].into_iter().collect()), }); let asset_in = AssetRaw { agent_id: "agent1".into(), diff --git 
a/src/input.rs b/src/input.rs index 9f030be04..855f45ae3 100644 --- a/src/input.rs +++ b/src/input.rs @@ -7,6 +7,9 @@ use std::fs; use std::path::Path; use std::rc::Rc; +pub mod region; +pub use region::read_regions; + /// Read a series of type `T`s from a CSV file. /// /// # Arguments diff --git a/src/input/region.rs b/src/input/region.rs new file mode 100644 index 000000000..69e5c19ed --- /dev/null +++ b/src/input/region.rs @@ -0,0 +1,324 @@ +//! Code for reading region-related information from CSV files. +use super::*; +use crate::region::{Region, RegionSelection}; +use anyhow::{ensure, Context, Result}; +use serde::de::DeserializeOwned; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const REGIONS_FILE_NAME: &str = "regions.csv"; + +define_id_getter! {Region} + +/// An object which is associated with a single region +pub trait HasRegionID { + /// Get the associated region ID + fn get_region_id(&self) -> &str; +} + +macro_rules! define_region_id_getter { + ($t:ty) => { + impl crate::input::region::HasRegionID for $t { + fn get_region_id(&self) -> &str { + &self.region_id + } + } + }; +} +pub(crate) use define_region_id_getter; + +/// Reads regions from a CSV file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// +/// # Returns +/// +/// A `HashMap, Region>` with the parsed regions data or an error. The keys are region IDs. +pub fn read_regions(model_dir: &Path) -> Result, Region>> { + read_csv_id_file(&model_dir.join(REGIONS_FILE_NAME)) +} + +/// Read region IDs associated with a particular entity. 
+/// +/// # Arguments +/// +/// `file_path` - Path to CSV file +/// `entity_ids` - All possible valid IDs for the entity type +/// `region_ids` - All possible valid region IDs +pub fn read_regions_for_entity( + file_path: &Path, + entity_ids: &HashSet>, + region_ids: &HashSet>, +) -> Result, RegionSelection>> +where + T: HasID + HasRegionID + DeserializeOwned, +{ + read_regions_for_entity_from_iter(read_csv::(file_path)?, entity_ids, region_ids) + .with_context(|| input_err_msg(file_path)) +} + +fn read_regions_for_entity_from_iter( + entity_iter: I, + entity_ids: &HashSet>, + region_ids: &HashSet>, +) -> Result, RegionSelection>> +where + I: Iterator, + T: HasID + HasRegionID, +{ + let mut entity_regions = HashMap::new(); + for entity in entity_iter { + let entity_id = entity_ids.get_id(entity.get_id())?; + let region_id = entity.get_region_id(); + + let succeeded = try_insert_region(entity_id, region_id, region_ids, &mut entity_regions); + + ensure!( + succeeded, + "Invalid regions specified for entity. Must specify either unique region IDs or \"all\"." 
+ ); + } + + ensure!( + entity_regions.len() >= entity_ids.len(), + "At least one region must be specified per entity" + ); + + Ok(entity_regions) +} + +/// Try to insert a region ID into the specified map +#[must_use] +fn try_insert_region( + entity_id: Rc, + region_id: &str, + region_ids: &HashSet>, + entity_regions: &mut HashMap, RegionSelection>, +) -> bool { + if region_id.eq_ignore_ascii_case("all") { + // Valid for all regions + return entity_regions + .insert(entity_id, RegionSelection::All) + .is_none(); + } + + // Validate region_id + let region_id = match region_ids.get_id(region_id) { + Ok(id) => id, + Err(_) => return false, + }; + + // Add or create entry in entity_regions + let selection = entity_regions + .entry(entity_id) + .or_insert_with(|| RegionSelection::Some(HashSet::with_capacity(1))); + + match selection { + RegionSelection::All => false, + RegionSelection::Some(ref mut set) => set.insert(region_id), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde::Deserialize; + use std::fs::File; + use std::io::Write; + use std::path::Path; + use tempfile::tempdir; + + /// Create an example regions file in dir_path + fn create_regions_file(dir_path: &Path) { + let file_path = dir_path.join(REGIONS_FILE_NAME); + let mut file = File::create(file_path).unwrap(); + writeln!( + file, + "id,description +NA,North America +EU,Europe +AP,Asia Pacific" + ) + .unwrap(); + } + + #[test] + fn test_read_regions() { + let dir = tempdir().unwrap(); + create_regions_file(dir.path()); + let regions = read_regions(dir.path()).unwrap(); + assert_eq!( + regions, + HashMap::from([ + ( + "NA".into(), + Region { + id: "NA".into(), + description: "North America".to_string(), + } + ), + ( + "EU".into(), + Region { + id: "EU".into(), + description: "Europe".to_string(), + } + ), + ( + "AP".into(), + Region { + id: "AP".into(), + description: "Asia Pacific".to_string(), + } + ), + ]) + ) + } + + #[test] + fn test_try_insert_region() { + let region_ids = 
["GBR".into(), "FRA".into()].into_iter().collect(); + + // Insert new + let mut entity_regions = HashMap::new(); + assert!(try_insert_region( + "key".into(), + "GBR", + ®ion_ids, + &mut entity_regions + )); + let selected: HashSet<_> = ["GBR".into()].into_iter().collect(); + assert_eq!( + *entity_regions.get("key").unwrap(), + RegionSelection::Some(selected) + ); + + // Insert "all" + let mut entity_regions = HashMap::new(); + assert!(try_insert_region( + "key".into(), + "all", + ®ion_ids, + &mut entity_regions + )); + assert_eq!(*entity_regions.get("key").unwrap(), RegionSelection::All); + + // Append to existing + let selected: HashSet<_> = ["FRA".into()].into_iter().collect(); + let mut entity_regions = [("key".into(), RegionSelection::Some(selected.clone()))] + .into_iter() + .collect(); + assert!(try_insert_region( + "key".into(), + "GBR", + ®ion_ids, + &mut entity_regions + )); + let selected: HashSet<_> = ["FRA".into(), "GBR".into()].into_iter().collect(); + assert_eq!( + *entity_regions.get("key").unwrap(), + RegionSelection::Some(selected) + ); + + // "All" already specified + let mut entity_regions = [("key".into(), RegionSelection::All)].into_iter().collect(); + assert!(!try_insert_region( + "key".into(), + "GBR", + ®ion_ids, + &mut entity_regions + )); + + // "GBR" specified twice + let selected: HashSet<_> = ["GBR".into()].into_iter().collect(); + let mut entity_regions = [("key".into(), RegionSelection::Some(selected))] + .into_iter() + .collect(); + assert!(!try_insert_region( + "key".into(), + "GBR", + ®ion_ids, + &mut entity_regions + )); + + // Try appending "all" to existing + let selected: HashSet<_> = ["FRA".into()].into_iter().collect(); + let mut entity_regions = [("key".into(), RegionSelection::Some(selected.clone()))] + .into_iter() + .collect(); + assert!(!try_insert_region( + "key".into(), + "all", + ®ion_ids, + &mut entity_regions + )); + } + + #[derive(Deserialize, PartialEq)] + struct Record { + id: String, + region_id: String, + } + 
define_id_getter! {Record} + define_region_id_getter! {Record} + + #[test] + fn test_read_regions_for_entity_from_iter() { + let entity_ids = ["A".into(), "B".into()].into_iter().collect(); + let region_ids = ["GBR".into(), "FRA".into()].into_iter().collect(); + + // Valid case + let iter = [ + Record { + id: "A".into(), + region_id: "GBR".into(), + }, + Record { + id: "B".into(), + region_id: "FRA".into(), + }, + ] + .into_iter(); + let expected = HashMap::from_iter([ + ( + "A".into(), + RegionSelection::Some(HashSet::from_iter(["GBR".into()])), + ), + ( + "B".into(), + RegionSelection::Some(HashSet::from_iter(["FRA".into()])), + ), + ]); + let actual = read_regions_for_entity_from_iter(iter, &entity_ids, ®ion_ids).unwrap(); + assert_eq!(expected, actual); + + // No region(s) specified for "B" + let iter = [Record { + id: "A".into(), + region_id: "GBR".into(), + }] + .into_iter(); + assert!(read_regions_for_entity_from_iter(iter, &entity_ids, ®ion_ids).is_err()); + + // Make try_insert_region fail + let iter = [ + Record { + id: "A".into(), + region_id: "GBR".into(), + }, + Record { + id: "B".into(), + region_id: "FRA".into(), + }, + Record { + id: "A".into(), + region_id: "all".into(), + }, + ] + .into_iter(); + assert!(read_regions_for_entity_from_iter(iter, &entity_ids, ®ion_ids).is_err()); + } +} diff --git a/src/model.rs b/src/model.rs index 43b994ba7..56cc6b05e 100644 --- a/src/model.rs +++ b/src/model.rs @@ -4,7 +4,7 @@ use crate::agent::{read_agents, Agent}; use crate::commodity::{read_commodities, Commodity}; use crate::input::*; use crate::process::{read_processes, Process}; -use crate::region::{read_regions, Region}; +use crate::region::Region; use crate::time_slice::{read_time_slice_info, TimeSliceInfo}; use anyhow::{ensure, Context, Result}; use serde::Deserialize; diff --git a/src/process.rs b/src/process.rs index f1f13c0db..2badc6447 100644 --- a/src/process.rs +++ b/src/process.rs @@ -1,7 +1,8 @@ #![allow(missing_docs)] use 
crate::commodity::Commodity; +use crate::input::region::{define_region_id_getter, read_regions_for_entity}; use crate::input::*; -use crate::region::*; +use crate::region::RegionSelection; use crate::time_slice::{TimeSliceInfo, TimeSliceSelection}; use ::log::warn; use anyhow::{ensure, Context, Result}; diff --git a/src/region.rs b/src/region.rs index 28eb39832..9f57d1f42 100644 --- a/src/region.rs +++ b/src/region.rs @@ -1,14 +1,8 @@ //! Regions represent different geographical areas in which agents, processes, etc. are active. -use crate::input::*; -use anyhow::{ensure, Context, Result}; -use serde::de::DeserializeOwned; use serde::Deserialize; -use std::collections::{HashMap, HashSet}; -use std::path::Path; +use std::collections::HashSet; use std::rc::Rc; -const REGIONS_FILE_NAME: &str = "regions.csv"; - /// Represents a region with an ID and a longer description. #[derive(Debug, Deserialize, PartialEq)] pub struct Region { @@ -17,7 +11,6 @@ pub struct Region { /// A text description of the region (e.g. "United Kingdom"). pub description: String, } -define_id_getter! {Region} /// Represents multiple regions #[derive(PartialEq, Debug, Clone, Default)] @@ -38,315 +31,3 @@ impl RegionSelection { } } } - -/// Reads regions from a CSV file. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// -/// # Returns -/// -/// A `HashMap, Region>` with the parsed regions data or an error. The keys are region IDs. -pub fn read_regions(model_dir: &Path) -> Result, Region>> { - read_csv_id_file(&model_dir.join(REGIONS_FILE_NAME)) -} - -/// An object which is associated with a single region -pub trait HasRegionID { - /// Get the associated region ID - fn get_region_id(&self) -> &str; -} - -macro_rules! 
define_region_id_getter { - ($t:ty) => { - impl HasRegionID for $t { - fn get_region_id(&self) -> &str { - &self.region_id - } - } - }; -} - -pub(crate) use define_region_id_getter; - -/// Try to insert a region ID into the specified map -#[must_use] -fn try_insert_region( - entity_id: Rc, - region_id: &str, - region_ids: &HashSet>, - entity_regions: &mut HashMap, RegionSelection>, -) -> bool { - if region_id.eq_ignore_ascii_case("all") { - // Valid for all regions - return entity_regions - .insert(entity_id, RegionSelection::All) - .is_none(); - } - - // Validate region_id - let region_id = match region_ids.get_id(region_id) { - Ok(id) => id, - Err(_) => return false, - }; - - // Add or create entry in entity_regions - let selection = entity_regions - .entry(entity_id) - .or_insert_with(|| RegionSelection::Some(HashSet::with_capacity(1))); - - match selection { - RegionSelection::All => false, - RegionSelection::Some(ref mut set) => set.insert(region_id), - } -} - -fn read_regions_for_entity_from_iter( - entity_iter: I, - entity_ids: &HashSet>, - region_ids: &HashSet>, -) -> Result, RegionSelection>> -where - I: Iterator, - T: HasID + HasRegionID, -{ - let mut entity_regions = HashMap::new(); - for entity in entity_iter { - let entity_id = entity_ids.get_id(entity.get_id())?; - let region_id = entity.get_region_id(); - - let succeeded = try_insert_region(entity_id, region_id, region_ids, &mut entity_regions); - - ensure!( - succeeded, - "Invalid regions specified for entity. Must specify either unique region IDs or \"all\"." - ); - } - - ensure!( - entity_regions.len() >= entity_ids.len(), - "At least one region must be specified per entity" - ); - - Ok(entity_regions) -} - -/// Read region IDs associated with a particular entity. 
-/// -/// # Arguments -/// -/// `file_path` - Path to CSV file -/// `entity_ids` - All possible valid IDs for the entity type -/// `region_ids` - All possible valid region IDs -pub fn read_regions_for_entity( - file_path: &Path, - entity_ids: &HashSet>, - region_ids: &HashSet>, -) -> Result, RegionSelection>> -where - T: HasID + HasRegionID + DeserializeOwned, -{ - read_regions_for_entity_from_iter(read_csv::(file_path)?, entity_ids, region_ids) - .with_context(|| input_err_msg(file_path)) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs::File; - use std::io::Write; - use std::path::Path; - use tempfile::tempdir; - - /// Create an example regions file in dir_path - fn create_regions_file(dir_path: &Path) { - let file_path = dir_path.join(REGIONS_FILE_NAME); - let mut file = File::create(file_path).unwrap(); - writeln!( - file, - "id,description -NA,North America -EU,Europe -AP,Asia Pacific" - ) - .unwrap(); - } - - #[test] - fn test_read_regions() { - let dir = tempdir().unwrap(); - create_regions_file(dir.path()); - let regions = read_regions(dir.path()).unwrap(); - assert_eq!( - regions, - HashMap::from([ - ( - "NA".into(), - Region { - id: "NA".into(), - description: "North America".to_string(), - } - ), - ( - "EU".into(), - Region { - id: "EU".into(), - description: "Europe".to_string(), - } - ), - ( - "AP".into(), - Region { - id: "AP".into(), - description: "Asia Pacific".to_string(), - } - ), - ]) - ) - } - - #[test] - fn test_try_insert_region() { - let region_ids = ["GBR".into(), "FRA".into()].into_iter().collect(); - - // Insert new - let mut entity_regions = HashMap::new(); - assert!(try_insert_region( - "key".into(), - "GBR", - ®ion_ids, - &mut entity_regions - )); - let selected: HashSet<_> = ["GBR".into()].into_iter().collect(); - assert_eq!( - *entity_regions.get("key").unwrap(), - RegionSelection::Some(selected) - ); - - // Insert "all" - let mut entity_regions = HashMap::new(); - assert!(try_insert_region( - "key".into(), - "all", - 
®ion_ids, - &mut entity_regions - )); - assert_eq!(*entity_regions.get("key").unwrap(), RegionSelection::All); - - // Append to existing - let selected: HashSet<_> = ["FRA".into()].into_iter().collect(); - let mut entity_regions = [("key".into(), RegionSelection::Some(selected.clone()))] - .into_iter() - .collect(); - assert!(try_insert_region( - "key".into(), - "GBR", - ®ion_ids, - &mut entity_regions - )); - let selected: HashSet<_> = ["FRA".into(), "GBR".into()].into_iter().collect(); - assert_eq!( - *entity_regions.get("key").unwrap(), - RegionSelection::Some(selected) - ); - - // "All" already specified - let mut entity_regions = [("key".into(), RegionSelection::All)].into_iter().collect(); - assert!(!try_insert_region( - "key".into(), - "GBR", - ®ion_ids, - &mut entity_regions - )); - - // "GBR" specified twice - let selected: HashSet<_> = ["GBR".into()].into_iter().collect(); - let mut entity_regions = [("key".into(), RegionSelection::Some(selected))] - .into_iter() - .collect(); - assert!(!try_insert_region( - "key".into(), - "GBR", - ®ion_ids, - &mut entity_regions - )); - - // Try appending "all" to existing - let selected: HashSet<_> = ["FRA".into()].into_iter().collect(); - let mut entity_regions = [("key".into(), RegionSelection::Some(selected.clone()))] - .into_iter() - .collect(); - assert!(!try_insert_region( - "key".into(), - "all", - ®ion_ids, - &mut entity_regions - )); - } - - #[derive(Deserialize, PartialEq)] - struct Record { - id: String, - region_id: String, - } - define_id_getter! {Record} - define_region_id_getter! 
{Record} - - #[test] - fn test_read_regions_for_entity_from_iter() { - let entity_ids = ["A".into(), "B".into()].into_iter().collect(); - let region_ids = ["GBR".into(), "FRA".into()].into_iter().collect(); - - // Valid case - let iter = [ - Record { - id: "A".into(), - region_id: "GBR".into(), - }, - Record { - id: "B".into(), - region_id: "FRA".into(), - }, - ] - .into_iter(); - let expected = HashMap::from_iter([ - ( - "A".into(), - RegionSelection::Some(HashSet::from_iter(["GBR".into()])), - ), - ( - "B".into(), - RegionSelection::Some(HashSet::from_iter(["FRA".into()])), - ), - ]); - let actual = read_regions_for_entity_from_iter(iter, &entity_ids, ®ion_ids).unwrap(); - assert_eq!(expected, actual); - - // No region(s) specified for "B" - let iter = [Record { - id: "A".into(), - region_id: "GBR".into(), - }] - .into_iter(); - assert!(read_regions_for_entity_from_iter(iter, &entity_ids, ®ion_ids).is_err()); - - // Make try_insert_region fail - let iter = [ - Record { - id: "A".into(), - region_id: "GBR".into(), - }, - Record { - id: "B".into(), - region_id: "FRA".into(), - }, - Record { - id: "A".into(), - region_id: "all".into(), - }, - ] - .into_iter(); - assert!(read_regions_for_entity_from_iter(iter, &entity_ids, ®ion_ids).is_err()); - } -} From 663d2840b56ead386b4e853bf9007bde4bb86b5f Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 Dec 2024 10:39:09 +0000 Subject: [PATCH 02/10] Move agent CSV code to `input/agent.rs` --- src/agent.rs | 449 ++++----------------------------------------- src/input.rs | 2 + src/input/agent.rs | 383 ++++++++++++++++++++++++++++++++++++++ src/model.rs | 2 +- 4 files changed, 425 insertions(+), 411 deletions(-) create mode 100644 src/input/agent.rs diff --git a/src/agent.rs b/src/agent.rs index b07ee0b1e..6b7d3be87 100644 --- a/src/agent.rs +++ b/src/agent.rs @@ -1,54 +1,13 @@ #![allow(missing_docs)] -use crate::asset::{read_assets, Asset}; -use crate::input::region::{define_region_id_getter, read_regions_for_entity}; 
-use crate::input::*; -use crate::process::Process; +use crate::asset::Asset; +use crate::input::deserialise_proportion_nonzero; use crate::region::RegionSelection; -use anyhow::{bail, ensure, Context, Result}; +use anyhow::Result; use serde::Deserialize; use serde_string_enum::DeserializeLabeledStringEnum; -use std::collections::{HashMap, HashSet}; -use std::path::Path; +use std::collections::HashSet; use std::rc::Rc; -const AGENT_FILE_NAME: &str = "agents.csv"; -const AGENT_REGIONS_FILE_NAME: &str = "agent_regions.csv"; -const AGENT_OBJECTIVES_FILE_NAME: &str = "agent_objectives.csv"; - -/// Which processes apply to this agent -#[derive(Debug, Clone, PartialEq)] -pub enum SearchSpace { - AllProcesses, - Some(HashSet), -} - -impl<'de> Deserialize<'de> for SearchSpace { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let value = Option::<&str>::deserialize(deserializer)?; - match value { - None => Ok(SearchSpace::AllProcesses), - Some(processes_str) => { - let processes = HashSet::from_iter(processes_str.split(';').map(String::from)); - Ok(SearchSpace::Some(processes)) - } - } - } -} - -/// The decision rule for a particular objective -#[derive(Debug, Clone, PartialEq, DeserializeLabeledStringEnum)] -pub enum DecisionRule { - #[string = "single"] - Single, - #[string = "weighted"] - Weighted, - #[string = "lexico"] - Lexicographical, -} - /// An agent in the simulation #[derive(Debug, Deserialize, PartialEq, Clone)] pub struct Agent { @@ -77,36 +36,39 @@ pub struct Agent { #[serde(skip)] pub assets: Vec, } -define_id_getter! {Agent} -macro_rules! define_agent_id_getter { - ($t:ty) => { - impl HasID for $t { - fn get_id(&self) -> &str { - &self.agent_id - } - } - }; +/// Which processes apply to this agent +#[derive(Debug, Clone, PartialEq)] +pub enum SearchSpace { + AllProcesses, + Some(HashSet), } -#[derive(Debug, Deserialize, PartialEq)] -struct AgentRegion { - agent_id: String, - /// The region to which an agent belongs. 
- region_id: String, +impl<'de> Deserialize<'de> for SearchSpace { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = Option::<&str>::deserialize(deserializer)?; + match value { + None => Ok(SearchSpace::AllProcesses), + Some(processes_str) => { + let processes = HashSet::from_iter(processes_str.split(';').map(String::from)); + Ok(SearchSpace::Some(processes)) + } + } + } } -define_agent_id_getter! {AgentRegion} -define_region_id_getter! {AgentRegion} -/// The type of objective for the agent -/// -/// **TODO** Add more objective types +/// The decision rule for a particular objective #[derive(Debug, Clone, PartialEq, DeserializeLabeledStringEnum)] -pub enum ObjectiveType { - #[string = "lcox"] - LevelisedCostOfX, - #[string = "eac"] - EquivalentAnnualCost, +pub enum DecisionRule { + #[string = "single"] + Single, + #[string = "weighted"] + Weighted, + #[string = "lexico"] + Lexicographical, } /// An objective for an agent with associated parameters @@ -121,347 +83,14 @@ pub struct AgentObjective { /// The tolerance around the main objective to consider secondary objectives. This is an absolute value of maximum deviation in the units of the main objective. pub decision_lexico_tolerance: Option, } -define_agent_id_getter! {AgentObjective} - -/// Check that required parameters are present and others are absent -fn check_objective_parameter( - objective: &AgentObjective, - decision_rule: &DecisionRule, -) -> Result<()> { - // Check that the user hasn't supplied a value for a field we're not using - macro_rules! check_field_none { - ($field:ident) => { - ensure!( - objective.$field.is_none(), - "Field {} should be empty for this decision rule", - stringify!($field) - ) - }; - } - - // Check that required fields are present - macro_rules! 
check_field_some { - ($field:ident) => { - ensure!( - objective.$field.is_some(), - "Required field {} is empty", - stringify!($field) - ) - }; - } - - match decision_rule { - DecisionRule::Single => { - check_field_none!(decision_weight); - check_field_none!(decision_lexico_tolerance); - } - DecisionRule::Weighted => { - check_field_none!(decision_lexico_tolerance); - check_field_some!(decision_weight); - } - DecisionRule::Lexicographical => { - check_field_none!(decision_weight); - check_field_some!(decision_lexico_tolerance); - } - }; - - Ok(()) -} - -fn read_agent_objectives_from_iter( - iter: I, - agents: &HashMap, Agent>, -) -> Result, Vec>> -where - I: Iterator, -{ - let mut objectives = HashMap::new(); - for objective in iter { - let (id, agent) = agents - .get_key_value(objective.agent_id.as_str()) - .context("Invalid agent ID")?; - - // Check that required parameters are present and others are absent - check_objective_parameter(&objective, &agent.decision_rule)?; - - // Append to Vec with the corresponding key or create - objectives - .entry(Rc::clone(id)) - .or_insert_with(|| Vec::with_capacity(1)) - .push(objective); - } - - ensure!( - objectives.len() >= agents.len(), - "All agents must have at least one objective" - ); - - Ok(objectives) -} - -/// Read agent objective info from the agent_objectives.csv file. 
-/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// -/// # Returns -/// -/// A map of Agents, with the agent ID as the key -fn read_agent_objectives( - model_dir: &Path, - agents: &HashMap, Agent>, -) -> Result, Vec>> { - let file_path = model_dir.join(AGENT_OBJECTIVES_FILE_NAME); - let agent_objectives_csv = read_csv(&file_path)?; - read_agent_objectives_from_iter(agent_objectives_csv, agents) - .with_context(|| input_err_msg(&file_path)) -} - -pub fn read_agents_file_from_iter( - iter: I, - process_ids: &HashSet>, -) -> Result, Agent>> -where - I: Iterator, -{ - let mut agents = HashMap::new(); - for agent in iter { - if let SearchSpace::Some(ref search_space) = agent.search_space { - // Check process IDs are all valid - if !search_space - .iter() - .all(|id| process_ids.contains(id.as_str())) - { - bail!("Invalid process ID"); - } - } - - ensure!( - agents.insert(Rc::clone(&agent.id), agent).is_none(), - "Duplicate agent ID" - ); - } - - Ok(agents) -} - -/// Read agents info from the agents.csv file. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `process_ids` - The possible valid process IDs -/// -/// # Returns -/// -/// A map of Agents, with the agent ID as the key -pub fn read_agents_file( - model_dir: &Path, - process_ids: &HashSet>, -) -> Result, Agent>> { - let file_path = model_dir.join(AGENT_FILE_NAME); - let agents_csv = read_csv(&file_path)?; - read_agents_file_from_iter(agents_csv, process_ids).with_context(|| input_err_msg(&file_path)) -} -/// Read agents info from various CSV files. 
-/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `process_ids` - The possible valid process IDs -/// * `region_ids` - The possible valid region IDs -/// -/// # Returns +/// The type of objective for the agent /// -/// A map of Agents, with the agent ID as the key -pub fn read_agents( - model_dir: &Path, - processes: &HashMap, Rc>, - region_ids: &HashSet>, -) -> Result, Agent>> { - let process_ids = processes.keys().cloned().collect(); - let mut agents = read_agents_file(model_dir, &process_ids)?; - let agent_ids = agents.keys().cloned().collect(); - - let file_path = model_dir.join(AGENT_REGIONS_FILE_NAME); - let mut agent_regions = - read_regions_for_entity::(&file_path, &agent_ids, region_ids)?; - let mut objectives = read_agent_objectives(model_dir, &agents)?; - let mut assets = read_assets(model_dir, &agent_ids, processes, region_ids)?; - - // Populate each Agent's Vecs - for (id, agent) in agents.iter_mut() { - agent.regions = agent_regions.remove(id).unwrap(); - agent.objectives = objectives.remove(id).unwrap(); - agent.assets = assets.remove(id).unwrap_or_default(); - } - - Ok(agents) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_read_agents_file_from_iter() { - let process_ids = ["A".into(), "B".into()].into_iter().collect(); - - // Valid case - let search_space = ["A".into()].into_iter().collect(); - let agents = [Agent { - id: "agent".into(), - description: "".into(), - commodity_id: "".into(), - commodity_portion: 1.0, - search_space: SearchSpace::Some(search_space), - decision_rule: DecisionRule::Single, - capex_limit: None, - annual_cost_limit: None, - regions: RegionSelection::All, - objectives: Vec::new(), - assets: Vec::new(), - }]; - let expected = HashMap::from_iter([("agent".into(), agents[0].clone())]); - let actual = read_agents_file_from_iter(agents.into_iter(), &process_ids).unwrap(); - assert_eq!(actual, expected); - - // Invalid process ID - let search_space = 
["C".into()].into_iter().collect(); - let agents = [Agent { - id: "agent".into(), - description: "".into(), - commodity_id: "".into(), - commodity_portion: 1.0, - search_space: SearchSpace::Some(search_space), - decision_rule: DecisionRule::Single, - capex_limit: None, - annual_cost_limit: None, - regions: RegionSelection::All, - objectives: Vec::new(), - assets: Vec::new(), - }]; - assert!(read_agents_file_from_iter(agents.into_iter(), &process_ids).is_err()); - - // Duplicate agent ID - let agents = [ - Agent { - id: "agent".into(), - description: "".into(), - commodity_id: "".into(), - commodity_portion: 1.0, - search_space: SearchSpace::AllProcesses, - decision_rule: DecisionRule::Single, - capex_limit: None, - annual_cost_limit: None, - regions: RegionSelection::All, - objectives: Vec::new(), - assets: Vec::new(), - }, - Agent { - id: "agent".into(), - description: "".into(), - commodity_id: "".into(), - commodity_portion: 1.0, - search_space: SearchSpace::AllProcesses, - decision_rule: DecisionRule::Single, - capex_limit: None, - annual_cost_limit: None, - regions: RegionSelection::All, - objectives: Vec::new(), - assets: Vec::new(), - }, - ]; - assert!(read_agents_file_from_iter(agents.into_iter(), &process_ids).is_err()); - } - - #[test] - fn test_check_objective_parameter() { - macro_rules! 
objective { - ($decision_weight:expr, $decision_lexico_tolerance:expr) => { - AgentObjective { - agent_id: "agent".into(), - objective_type: ObjectiveType::EquivalentAnnualCost, - decision_weight: $decision_weight, - decision_lexico_tolerance: $decision_lexico_tolerance, - } - }; - } - - // DecisionRule::Single - let decision_rule = DecisionRule::Single; - let objective = objective!(None, None); - assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); - let objective = objective!(Some(1.0), None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - let objective = objective!(None, Some(1.0)); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - - // DecisionRule::Weighted - let decision_rule = DecisionRule::Weighted; - let objective = objective!(Some(1.0), None); - assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); - let objective = objective!(None, None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - let objective = objective!(None, Some(1.0)); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - - // DecisionRule::Lexicographical - let decision_rule = DecisionRule::Lexicographical; - let objective = objective!(None, Some(1.0)); - assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); - let objective = objective!(None, None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - let objective = objective!(Some(1.0), None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - } - - #[test] - fn test_read_agent_objectives_from_iter() { - let agents: HashMap<_, _> = [( - "agent".into(), - Agent { - id: "agent".into(), - description: "".into(), - commodity_id: "".into(), - commodity_portion: 1.0, - search_space: SearchSpace::AllProcesses, - decision_rule: DecisionRule::Single, - capex_limit: None, - annual_cost_limit: None, - regions: RegionSelection::All, - 
objectives: Vec::new(), - assets: Vec::new(), - }, - )] - .into_iter() - .collect(); - - // Valid - let objective = AgentObjective { - agent_id: "agent".into(), - objective_type: ObjectiveType::EquivalentAnnualCost, - decision_weight: None, - decision_lexico_tolerance: None, - }; - let expected = [("agent".into(), vec![objective.clone()])] - .into_iter() - .collect(); - let actual = read_agent_objectives_from_iter([objective].into_iter(), &agents).unwrap(); - assert_eq!(actual, expected); - - // Missing objective for agent - assert!(read_agent_objectives_from_iter([].into_iter(), &agents).is_err()); - - // Bad parameter - let objective = AgentObjective { - agent_id: "agent".into(), - objective_type: ObjectiveType::EquivalentAnnualCost, - decision_weight: Some(1.0), - decision_lexico_tolerance: None, - }; - assert!(read_agent_objectives_from_iter([objective].into_iter(), &agents).is_err()); - } +/// **TODO** Add more objective types +#[derive(Debug, Clone, PartialEq, DeserializeLabeledStringEnum)] +pub enum ObjectiveType { + #[string = "lcox"] + LevelisedCostOfX, + #[string = "eac"] + EquivalentAnnualCost, } diff --git a/src/input.rs b/src/input.rs index 855f45ae3..dfc9e9504 100644 --- a/src/input.rs +++ b/src/input.rs @@ -7,6 +7,8 @@ use std::fs; use std::path::Path; use std::rc::Rc; +pub mod agent; +pub use agent::read_agents; pub mod region; pub use region::read_regions; diff --git a/src/input/agent.rs b/src/input/agent.rs new file mode 100644 index 000000000..f7fbb3d19 --- /dev/null +++ b/src/input/agent.rs @@ -0,0 +1,383 @@ +//! Code for reading in agent-related data from CSV files. 
+use super::*; +use crate::agent::{Agent, AgentObjective, DecisionRule, SearchSpace}; +use crate::asset::read_assets; +use crate::process::Process; +use anyhow::{bail, ensure, Context, Result}; +use region::{define_region_id_getter, read_regions_for_entity}; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const AGENT_FILE_NAME: &str = "agents.csv"; +const AGENT_REGIONS_FILE_NAME: &str = "agent_regions.csv"; +const AGENT_OBJECTIVES_FILE_NAME: &str = "agent_objectives.csv"; + +#[derive(Debug, Deserialize, PartialEq)] +struct AgentRegion { + agent_id: String, + /// The region to which an agent belongs. + region_id: String, +} + +macro_rules! define_agent_id_getter { + ($t:ty) => { + impl HasID for $t { + fn get_id(&self) -> &str { + &self.agent_id + } + } + }; +} + +/// Read agents info from various CSV files. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `process_ids` - The possible valid process IDs +/// * `region_ids` - The possible valid region IDs +/// +/// # Returns +/// +/// A map of Agents, with the agent ID as the key +pub fn read_agents( + model_dir: &Path, + processes: &HashMap, Rc>, + region_ids: &HashSet>, +) -> Result, Agent>> { + define_agent_id_getter!(AgentRegion); + define_region_id_getter!(AgentRegion); + + let process_ids = processes.keys().cloned().collect(); + let mut agents = read_agents_file(model_dir, &process_ids)?; + let agent_ids = agents.keys().cloned().collect(); + + let file_path = model_dir.join(AGENT_REGIONS_FILE_NAME); + let mut agent_regions = + read_regions_for_entity::(&file_path, &agent_ids, region_ids)?; + let mut objectives = read_agent_objectives(model_dir, &agents)?; + let mut assets = read_assets(model_dir, &agent_ids, processes, region_ids)?; + + // Populate each Agent's Vecs + for (id, agent) in agents.iter_mut() { + agent.regions = agent_regions.remove(id).unwrap(); + agent.objectives = 
objectives.remove(id).unwrap(); + agent.assets = assets.remove(id).unwrap_or_default(); + } + + Ok(agents) +} + +/// Read agents info from the agents.csv file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `process_ids` - The possible valid process IDs +/// +/// # Returns +/// +/// A map of Agents, with the agent ID as the key +pub fn read_agents_file( + model_dir: &Path, + process_ids: &HashSet>, +) -> Result, Agent>> { + let file_path = model_dir.join(AGENT_FILE_NAME); + let agents_csv = read_csv(&file_path)?; + read_agents_file_from_iter(agents_csv, process_ids).with_context(|| input_err_msg(&file_path)) +} + +/// Read agents info from an iterator. +fn read_agents_file_from_iter( + iter: I, + process_ids: &HashSet>, +) -> Result, Agent>> +where + I: Iterator, +{ + let mut agents = HashMap::new(); + for agent in iter { + if let SearchSpace::Some(ref search_space) = agent.search_space { + // Check process IDs are all valid + if !search_space + .iter() + .all(|id| process_ids.contains(id.as_str())) + { + bail!("Invalid process ID"); + } + } + + ensure!( + agents.insert(Rc::clone(&agent.id), agent).is_none(), + "Duplicate agent ID" + ); + } + + Ok(agents) +} + +/// Read agent objective info from the agent_objectives.csv file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// +/// # Returns +/// +/// A map of Agents, with the agent ID as the key +fn read_agent_objectives( + model_dir: &Path, + agents: &HashMap, Agent>, +) -> Result, Vec>> { + define_id_getter! 
{Agent} + + let file_path = model_dir.join(AGENT_OBJECTIVES_FILE_NAME); + let agent_objectives_csv = read_csv(&file_path)?; + read_agent_objectives_from_iter(agent_objectives_csv, agents) + .with_context(|| input_err_msg(&file_path)) +} + +fn read_agent_objectives_from_iter( + iter: I, + agents: &HashMap, Agent>, +) -> Result, Vec>> +where + I: Iterator, +{ + let mut objectives = HashMap::new(); + for objective in iter { + let (id, agent) = agents + .get_key_value(objective.agent_id.as_str()) + .context("Invalid agent ID")?; + + // Check that required parameters are present and others are absent + check_objective_parameter(&objective, &agent.decision_rule)?; + + // Append to Vec with the corresponding key or create + objectives + .entry(Rc::clone(id)) + .or_insert_with(|| Vec::with_capacity(1)) + .push(objective); + } + + ensure!( + objectives.len() >= agents.len(), + "All agents must have at least one objective" + ); + + Ok(objectives) +} + +/// Check that required parameters are present and others are absent +fn check_objective_parameter( + objective: &AgentObjective, + decision_rule: &DecisionRule, +) -> Result<()> { + // Check that the user hasn't supplied a value for a field we're not using + macro_rules! check_field_none { + ($field:ident) => { + ensure!( + objective.$field.is_none(), + "Field {} should be empty for this decision rule", + stringify!($field) + ) + }; + } + + // Check that required fields are present + macro_rules! 
check_field_some { + ($field:ident) => { + ensure!( + objective.$field.is_some(), + "Required field {} is empty", + stringify!($field) + ) + }; + } + + match decision_rule { + DecisionRule::Single => { + check_field_none!(decision_weight); + check_field_none!(decision_lexico_tolerance); + } + DecisionRule::Weighted => { + check_field_none!(decision_lexico_tolerance); + check_field_some!(decision_weight); + } + DecisionRule::Lexicographical => { + check_field_none!(decision_weight); + check_field_some!(decision_lexico_tolerance); + } + }; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::agent::ObjectiveType; + use crate::region::RegionSelection; + + #[test] + fn test_read_agents_file_from_iter() { + let process_ids = ["A".into(), "B".into()].into_iter().collect(); + + // Valid case + let search_space = ["A".into()].into_iter().collect(); + let agents = [Agent { + id: "agent".into(), + description: "".into(), + commodity_id: "".into(), + commodity_portion: 1.0, + search_space: SearchSpace::Some(search_space), + decision_rule: DecisionRule::Single, + capex_limit: None, + annual_cost_limit: None, + regions: RegionSelection::All, + objectives: Vec::new(), + assets: Vec::new(), + }]; + let expected = HashMap::from_iter([("agent".into(), agents[0].clone())]); + let actual = read_agents_file_from_iter(agents.into_iter(), &process_ids).unwrap(); + assert_eq!(actual, expected); + + // Invalid process ID + let search_space = ["C".into()].into_iter().collect(); + let agents = [Agent { + id: "agent".into(), + description: "".into(), + commodity_id: "".into(), + commodity_portion: 1.0, + search_space: SearchSpace::Some(search_space), + decision_rule: DecisionRule::Single, + capex_limit: None, + annual_cost_limit: None, + regions: RegionSelection::All, + objectives: Vec::new(), + assets: Vec::new(), + }]; + assert!(read_agents_file_from_iter(agents.into_iter(), &process_ids).is_err()); + + // Duplicate agent ID + let agents = [ + Agent { + id: 
"agent".into(), + description: "".into(), + commodity_id: "".into(), + commodity_portion: 1.0, + search_space: SearchSpace::AllProcesses, + decision_rule: DecisionRule::Single, + capex_limit: None, + annual_cost_limit: None, + regions: RegionSelection::All, + objectives: Vec::new(), + assets: Vec::new(), + }, + Agent { + id: "agent".into(), + description: "".into(), + commodity_id: "".into(), + commodity_portion: 1.0, + search_space: SearchSpace::AllProcesses, + decision_rule: DecisionRule::Single, + capex_limit: None, + annual_cost_limit: None, + regions: RegionSelection::All, + objectives: Vec::new(), + assets: Vec::new(), + }, + ]; + assert!(read_agents_file_from_iter(agents.into_iter(), &process_ids).is_err()); + } + + #[test] + fn test_check_objective_parameter() { + macro_rules! objective { + ($decision_weight:expr, $decision_lexico_tolerance:expr) => { + AgentObjective { + agent_id: "agent".into(), + objective_type: ObjectiveType::EquivalentAnnualCost, + decision_weight: $decision_weight, + decision_lexico_tolerance: $decision_lexico_tolerance, + } + }; + } + + // DecisionRule::Single + let decision_rule = DecisionRule::Single; + let objective = objective!(None, None); + assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); + let objective = objective!(Some(1.0), None); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + let objective = objective!(None, Some(1.0)); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + + // DecisionRule::Weighted + let decision_rule = DecisionRule::Weighted; + let objective = objective!(Some(1.0), None); + assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); + let objective = objective!(None, None); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + let objective = objective!(None, Some(1.0)); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + + // DecisionRule::Lexicographical + let 
decision_rule = DecisionRule::Lexicographical; + let objective = objective!(None, Some(1.0)); + assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); + let objective = objective!(None, None); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + let objective = objective!(Some(1.0), None); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + } + + #[test] + fn test_read_agent_objectives_from_iter() { + let agents: HashMap<_, _> = [( + "agent".into(), + Agent { + id: "agent".into(), + description: "".into(), + commodity_id: "".into(), + commodity_portion: 1.0, + search_space: SearchSpace::AllProcesses, + decision_rule: DecisionRule::Single, + capex_limit: None, + annual_cost_limit: None, + regions: RegionSelection::All, + objectives: Vec::new(), + assets: Vec::new(), + }, + )] + .into_iter() + .collect(); + + // Valid + let objective = AgentObjective { + agent_id: "agent".into(), + objective_type: ObjectiveType::EquivalentAnnualCost, + decision_weight: None, + decision_lexico_tolerance: None, + }; + let expected = [("agent".into(), vec![objective.clone()])] + .into_iter() + .collect(); + let actual = read_agent_objectives_from_iter([objective].into_iter(), &agents).unwrap(); + assert_eq!(actual, expected); + + // Missing objective for agent + assert!(read_agent_objectives_from_iter([].into_iter(), &agents).is_err()); + + // Bad parameter + let objective = AgentObjective { + agent_id: "agent".into(), + objective_type: ObjectiveType::EquivalentAnnualCost, + decision_weight: Some(1.0), + decision_lexico_tolerance: None, + }; + assert!(read_agent_objectives_from_iter([objective].into_iter(), &agents).is_err()); + } +} diff --git a/src/model.rs b/src/model.rs index 56cc6b05e..58264778a 100644 --- a/src/model.rs +++ b/src/model.rs @@ -1,6 +1,6 @@ //! Code for simulation models. 
#![allow(missing_docs)] -use crate::agent::{read_agents, Agent}; +use crate::agent::Agent; use crate::commodity::{read_commodities, Commodity}; use crate::input::*; use crate::process::{read_processes, Process}; From 3c1c7391d1a57cff944f7510c48233c5bbfa60d0 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 Dec 2024 11:23:36 +0000 Subject: [PATCH 03/10] input/agent/agent.rs: Expand macro with one user --- src/input/agent.rs | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/src/input/agent.rs b/src/input/agent.rs index f7fbb3d19..91f283b08 100644 --- a/src/input/agent.rs +++ b/src/input/agent.rs @@ -20,15 +20,12 @@ struct AgentRegion { /// The region to which an agent belongs. region_id: String, } +define_region_id_getter!(AgentRegion); -macro_rules! define_agent_id_getter { - ($t:ty) => { - impl HasID for $t { - fn get_id(&self) -> &str { - &self.agent_id - } - } - }; +impl HasID for AgentRegion { + fn get_id(&self) -> &str { + &self.agent_id + } } /// Read agents info from various CSV files. 
@@ -47,9 +44,6 @@ pub fn read_agents( processes: &HashMap, Rc>, region_ids: &HashSet>, ) -> Result, Agent>> { - define_agent_id_getter!(AgentRegion); - define_region_id_getter!(AgentRegion); - let process_ids = processes.keys().cloned().collect(); let mut agents = read_agents_file(model_dir, &process_ids)?; let agent_ids = agents.keys().cloned().collect(); From 04ba7f8f218248667e9658fd14685f13dcba1410 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 Dec 2024 11:01:50 +0000 Subject: [PATCH 04/10] input/agent.rs: Split out objective-related code into submodule --- src/input/agent.rs | 193 +-------------------------------- src/input/agent/objective.rs | 201 +++++++++++++++++++++++++++++++++++ 2 files changed, 206 insertions(+), 188 deletions(-) create mode 100644 src/input/agent/objective.rs diff --git a/src/input/agent.rs b/src/input/agent.rs index 91f283b08..347d16857 100644 --- a/src/input/agent.rs +++ b/src/input/agent.rs @@ -1,6 +1,6 @@ //! Code for reading in agent-related data from CSV files. use super::*; -use crate::agent::{Agent, AgentObjective, DecisionRule, SearchSpace}; +use crate::agent::{Agent, SearchSpace}; use crate::asset::read_assets; use crate::process::Process; use anyhow::{bail, ensure, Context, Result}; @@ -10,9 +10,11 @@ use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; +pub mod objective; +use objective::read_agent_objectives; + const AGENT_FILE_NAME: &str = "agents.csv"; const AGENT_REGIONS_FILE_NAME: &str = "agent_regions.csv"; -const AGENT_OBJECTIVES_FILE_NAME: &str = "agent_objectives.csv"; #[derive(Debug, Deserialize, PartialEq)] struct AgentRegion { @@ -112,107 +114,10 @@ where Ok(agents) } -/// Read agent objective info from the agent_objectives.csv file. 
-/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// -/// # Returns -/// -/// A map of Agents, with the agent ID as the key -fn read_agent_objectives( - model_dir: &Path, - agents: &HashMap, Agent>, -) -> Result, Vec>> { - define_id_getter! {Agent} - - let file_path = model_dir.join(AGENT_OBJECTIVES_FILE_NAME); - let agent_objectives_csv = read_csv(&file_path)?; - read_agent_objectives_from_iter(agent_objectives_csv, agents) - .with_context(|| input_err_msg(&file_path)) -} - -fn read_agent_objectives_from_iter( - iter: I, - agents: &HashMap, Agent>, -) -> Result, Vec>> -where - I: Iterator, -{ - let mut objectives = HashMap::new(); - for objective in iter { - let (id, agent) = agents - .get_key_value(objective.agent_id.as_str()) - .context("Invalid agent ID")?; - - // Check that required parameters are present and others are absent - check_objective_parameter(&objective, &agent.decision_rule)?; - - // Append to Vec with the corresponding key or create - objectives - .entry(Rc::clone(id)) - .or_insert_with(|| Vec::with_capacity(1)) - .push(objective); - } - - ensure!( - objectives.len() >= agents.len(), - "All agents must have at least one objective" - ); - - Ok(objectives) -} - -/// Check that required parameters are present and others are absent -fn check_objective_parameter( - objective: &AgentObjective, - decision_rule: &DecisionRule, -) -> Result<()> { - // Check that the user hasn't supplied a value for a field we're not using - macro_rules! check_field_none { - ($field:ident) => { - ensure!( - objective.$field.is_none(), - "Field {} should be empty for this decision rule", - stringify!($field) - ) - }; - } - - // Check that required fields are present - macro_rules! 
check_field_some { - ($field:ident) => { - ensure!( - objective.$field.is_some(), - "Required field {} is empty", - stringify!($field) - ) - }; - } - - match decision_rule { - DecisionRule::Single => { - check_field_none!(decision_weight); - check_field_none!(decision_lexico_tolerance); - } - DecisionRule::Weighted => { - check_field_none!(decision_lexico_tolerance); - check_field_some!(decision_weight); - } - DecisionRule::Lexicographical => { - check_field_none!(decision_weight); - check_field_some!(decision_lexico_tolerance); - } - }; - - Ok(()) -} - #[cfg(test)] mod tests { use super::*; - use crate::agent::ObjectiveType; + use crate::agent::DecisionRule; use crate::region::RegionSelection; #[test] @@ -286,92 +191,4 @@ mod tests { ]; assert!(read_agents_file_from_iter(agents.into_iter(), &process_ids).is_err()); } - - #[test] - fn test_check_objective_parameter() { - macro_rules! objective { - ($decision_weight:expr, $decision_lexico_tolerance:expr) => { - AgentObjective { - agent_id: "agent".into(), - objective_type: ObjectiveType::EquivalentAnnualCost, - decision_weight: $decision_weight, - decision_lexico_tolerance: $decision_lexico_tolerance, - } - }; - } - - // DecisionRule::Single - let decision_rule = DecisionRule::Single; - let objective = objective!(None, None); - assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); - let objective = objective!(Some(1.0), None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - let objective = objective!(None, Some(1.0)); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - - // DecisionRule::Weighted - let decision_rule = DecisionRule::Weighted; - let objective = objective!(Some(1.0), None); - assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); - let objective = objective!(None, None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - let objective = objective!(None, Some(1.0)); - 
assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - - // DecisionRule::Lexicographical - let decision_rule = DecisionRule::Lexicographical; - let objective = objective!(None, Some(1.0)); - assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); - let objective = objective!(None, None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - let objective = objective!(Some(1.0), None); - assert!(check_objective_parameter(&objective, &decision_rule).is_err()); - } - - #[test] - fn test_read_agent_objectives_from_iter() { - let agents: HashMap<_, _> = [( - "agent".into(), - Agent { - id: "agent".into(), - description: "".into(), - commodity_id: "".into(), - commodity_portion: 1.0, - search_space: SearchSpace::AllProcesses, - decision_rule: DecisionRule::Single, - capex_limit: None, - annual_cost_limit: None, - regions: RegionSelection::All, - objectives: Vec::new(), - assets: Vec::new(), - }, - )] - .into_iter() - .collect(); - - // Valid - let objective = AgentObjective { - agent_id: "agent".into(), - objective_type: ObjectiveType::EquivalentAnnualCost, - decision_weight: None, - decision_lexico_tolerance: None, - }; - let expected = [("agent".into(), vec![objective.clone()])] - .into_iter() - .collect(); - let actual = read_agent_objectives_from_iter([objective].into_iter(), &agents).unwrap(); - assert_eq!(actual, expected); - - // Missing objective for agent - assert!(read_agent_objectives_from_iter([].into_iter(), &agents).is_err()); - - // Bad parameter - let objective = AgentObjective { - agent_id: "agent".into(), - objective_type: ObjectiveType::EquivalentAnnualCost, - decision_weight: Some(1.0), - decision_lexico_tolerance: None, - }; - assert!(read_agent_objectives_from_iter([objective].into_iter(), &agents).is_err()); - } } diff --git a/src/input/agent/objective.rs b/src/input/agent/objective.rs new file mode 100644 index 000000000..089fd6a60 --- /dev/null +++ b/src/input/agent/objective.rs @@ -0,0 
+1,201 @@ +//! Code for reading the agent objectives CSV file. +use super::super::*; +use crate::agent::{Agent, AgentObjective, DecisionRule}; +use anyhow::{ensure, Context, Result}; +use std::collections::HashMap; +use std::path::Path; +use std::rc::Rc; + +const AGENT_OBJECTIVES_FILE_NAME: &str = "agent_objectives.csv"; + +define_id_getter! {Agent} + +/// Read agent objective info from the agent_objectives.csv file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// +/// # Returns +/// +/// A map of Agents, with the agent ID as the key +pub fn read_agent_objectives( + model_dir: &Path, + agents: &HashMap, Agent>, +) -> Result, Vec>> { + let file_path = model_dir.join(AGENT_OBJECTIVES_FILE_NAME); + let agent_objectives_csv = read_csv(&file_path)?; + read_agent_objectives_from_iter(agent_objectives_csv, agents) + .with_context(|| input_err_msg(&file_path)) +} + +fn read_agent_objectives_from_iter( + iter: I, + agents: &HashMap, Agent>, +) -> Result, Vec>> +where + I: Iterator, +{ + let mut objectives = HashMap::new(); + for objective in iter { + let (id, agent) = agents + .get_key_value(objective.agent_id.as_str()) + .context("Invalid agent ID")?; + + // Check that required parameters are present and others are absent + check_objective_parameter(&objective, &agent.decision_rule)?; + + // Append to Vec with the corresponding key or create + objectives + .entry(Rc::clone(id)) + .or_insert_with(|| Vec::with_capacity(1)) + .push(objective); + } + + ensure!( + objectives.len() >= agents.len(), + "All agents must have at least one objective" + ); + + Ok(objectives) +} + +/// Check that required parameters are present and others are absent +fn check_objective_parameter( + objective: &AgentObjective, + decision_rule: &DecisionRule, +) -> Result<()> { + // Check that the user hasn't supplied a value for a field we're not using + macro_rules! 
check_field_none { + ($field:ident) => { + ensure!( + objective.$field.is_none(), + "Field {} should be empty for this decision rule", + stringify!($field) + ) + }; + } + + // Check that required fields are present + macro_rules! check_field_some { + ($field:ident) => { + ensure!( + objective.$field.is_some(), + "Required field {} is empty", + stringify!($field) + ) + }; + } + + match decision_rule { + DecisionRule::Single => { + check_field_none!(decision_weight); + check_field_none!(decision_lexico_tolerance); + } + DecisionRule::Weighted => { + check_field_none!(decision_lexico_tolerance); + check_field_some!(decision_weight); + } + DecisionRule::Lexicographical => { + check_field_none!(decision_weight); + check_field_some!(decision_lexico_tolerance); + } + }; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::agent::{ObjectiveType, SearchSpace}; + use crate::region::RegionSelection; + + #[test] + fn test_check_objective_parameter() { + macro_rules! objective { + ($decision_weight:expr, $decision_lexico_tolerance:expr) => { + AgentObjective { + agent_id: "agent".into(), + objective_type: ObjectiveType::EquivalentAnnualCost, + decision_weight: $decision_weight, + decision_lexico_tolerance: $decision_lexico_tolerance, + } + }; + } + + // DecisionRule::Single + let decision_rule = DecisionRule::Single; + let objective = objective!(None, None); + assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); + let objective = objective!(Some(1.0), None); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + let objective = objective!(None, Some(1.0)); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + + // DecisionRule::Weighted + let decision_rule = DecisionRule::Weighted; + let objective = objective!(Some(1.0), None); + assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); + let objective = objective!(None, None); + assert!(check_objective_parameter(&objective, 
&decision_rule).is_err()); + let objective = objective!(None, Some(1.0)); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + + // DecisionRule::Lexicographical + let decision_rule = DecisionRule::Lexicographical; + let objective = objective!(None, Some(1.0)); + assert!(check_objective_parameter(&objective, &decision_rule).is_ok()); + let objective = objective!(None, None); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + let objective = objective!(Some(1.0), None); + assert!(check_objective_parameter(&objective, &decision_rule).is_err()); + } + + #[test] + fn test_read_agent_objectives_from_iter() { + let agents: HashMap<_, _> = [( + "agent".into(), + Agent { + id: "agent".into(), + description: "".into(), + commodity_id: "".into(), + commodity_portion: 1.0, + search_space: SearchSpace::AllProcesses, + decision_rule: DecisionRule::Single, + capex_limit: None, + annual_cost_limit: None, + regions: RegionSelection::All, + objectives: Vec::new(), + assets: Vec::new(), + }, + )] + .into_iter() + .collect(); + + // Valid + let objective = AgentObjective { + agent_id: "agent".into(), + objective_type: ObjectiveType::EquivalentAnnualCost, + decision_weight: None, + decision_lexico_tolerance: None, + }; + let expected = [("agent".into(), vec![objective.clone()])] + .into_iter() + .collect(); + let actual = read_agent_objectives_from_iter([objective].into_iter(), &agents).unwrap(); + assert_eq!(actual, expected); + + // Missing objective for agent + assert!(read_agent_objectives_from_iter([].into_iter(), &agents).is_err()); + + // Bad parameter + let objective = AgentObjective { + agent_id: "agent".into(), + objective_type: ObjectiveType::EquivalentAnnualCost, + decision_weight: Some(1.0), + decision_lexico_tolerance: None, + }; + assert!(read_agent_objectives_from_iter([objective].into_iter(), &agents).is_err()); + } +} From 368a80adb28211025952d957eb1bb71efcbb7189 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 
Dec 2024 11:05:45 +0000 Subject: [PATCH 05/10] Move assets CSV code to input/agent/asset.rs --- src/asset.rs | 209 -------------------------------------- src/input/agent.rs | 3 +- src/input/agent/asset.rs | 213 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 215 insertions(+), 210 deletions(-) create mode 100644 src/input/agent/asset.rs diff --git a/src/asset.rs b/src/asset.rs index 70350e55b..a8f4a82a1 100644 --- a/src/asset.rs +++ b/src/asset.rs @@ -1,26 +1,9 @@ //! Code for working with [Asset]s. //! //! For a description of what assets are, please see the glossary. -use crate::input::*; use crate::process::Process; -use anyhow::{ensure, Context, Result}; -use itertools::Itertools; -use serde::Deserialize; -use std::collections::{HashMap, HashSet}; -use std::path::Path; use std::rc::Rc; -const ASSETS_FILE_NAME: &str = "assets.csv"; - -#[derive(Deserialize, PartialEq)] -struct AssetRaw { - process_id: String, - region_id: String, - agent_id: String, - capacity: f64, - commission_year: u32, -} - /// An asset controlled by an agent. #[derive(Clone, Debug, PartialEq)] pub struct Asset { @@ -33,195 +16,3 @@ pub struct Asset { /// The year the asset comes online pub commission_year: u32, } - -/// Process assets from an iterator. -/// -/// # Arguments -/// -/// * `iter` - Iterator of `AssetRaw`s -/// * `agent_ids` - All possible process IDs -/// * `processes` - The model's processes -/// * `region_ids` - All possible region IDs -/// -/// # Returns -/// -/// A `HashMap` containing assets grouped by agent ID or an error. 
-fn read_assets_from_iter( - iter: I, - agent_ids: &HashSet>, - processes: &HashMap, Rc>, - region_ids: &HashSet>, -) -> Result, Vec>> -where - I: Iterator, -{ - iter.map(|asset| -> Result<_> { - let agent_id = agent_ids.get_id(&asset.agent_id)?; - let process = processes - .get(asset.process_id.as_str()) - .with_context(|| format!("Invalid process ID: {}", &asset.process_id))?; - let region_id = region_ids.get_id(&asset.region_id)?; - ensure!( - process.regions.contains(®ion_id), - "Region {} is not one of the regions in which process {} operates", - region_id, - process.id - ); - - Ok(( - agent_id, - Asset { - process: Rc::clone(process), - region_id, - capacity: asset.capacity, - commission_year: asset.commission_year, - }, - )) - }) - .process_results(|iter| iter.into_group_map()) -} - -/// Read assets CSV file from model directory. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `agent_ids` - All possible process IDs -/// * `processes` - The model's processes -/// * `region_ids` - All possible region IDs -/// -/// # Returns -/// -/// A `HashMap` containing assets grouped by agent ID. 
-pub fn read_assets( - model_dir: &Path, - agent_ids: &HashSet>, - processes: &HashMap, Rc>, - region_ids: &HashSet>, -) -> Result, Vec>> { - let file_path = model_dir.join(ASSETS_FILE_NAME); - let assets_csv = read_csv(&file_path)?; - read_assets_from_iter(assets_csv, agent_ids, processes, region_ids) - .with_context(|| input_err_msg(&file_path)) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::process::ProcessParameter; - use crate::region::RegionSelection; - use std::vec; - - #[test] - fn test_read_assets_from_iter() { - let process_param = ProcessParameter { - process_id: "process1".into(), - years: 2010..=2020, - capital_cost: 5.0, - fixed_operating_cost: 2.0, - variable_operating_cost: 1.0, - lifetime: 5, - discount_rate: 0.9, - cap2act: 1.0, - }; - let process = Rc::new(Process { - id: "process1".into(), - description: "Description".into(), - availabilities: vec![], - flows: vec![], - pacs: vec![], - parameter: process_param.clone(), - regions: RegionSelection::All, - }); - let processes = [(Rc::clone(&process.id), Rc::clone(&process))] - .into_iter() - .collect(); - let agent_ids = ["agent1".into()].into_iter().collect(); - let region_ids = ["GBR".into(), "USA".into()].into_iter().collect(); - - // Valid - let asset_in = AssetRaw { - agent_id: "agent1".into(), - process_id: "process1".into(), - region_id: "GBR".into(), - capacity: 1.0, - commission_year: 2010, - }; - let asset_out = Asset { - process: Rc::clone(&process), - region_id: "GBR".into(), - capacity: 1.0, - commission_year: 2010, - }; - let expected = [("agent1".into(), vec![asset_out])].into_iter().collect(); - assert!( - read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) - .unwrap() - == expected - ); - - // Bad process ID - let asset_in = AssetRaw { - agent_id: "agent1".into(), - process_id: "process2".into(), - region_id: "GBR".into(), - capacity: 1.0, - commission_year: 2010, - }; - assert!( - read_assets_from_iter([asset_in].into_iter(), &agent_ids, 
&processes, ®ion_ids) - .is_err() - ); - - // Bad agent ID - let asset_in = AssetRaw { - agent_id: "agent2".into(), - process_id: "process1".into(), - region_id: "GBR".into(), - capacity: 1.0, - commission_year: 2010, - }; - assert!( - read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) - .is_err() - ); - - // Bad region ID: not in region_ids - let asset_in = AssetRaw { - agent_id: "agent1".into(), - process_id: "process1".into(), - region_id: "FRA".into(), - capacity: 1.0, - commission_year: 2010, - }; - assert!( - read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) - .is_err() - ); - - // Bad region ID: process not active there - let process = Rc::new(Process { - id: "process1".into(), - description: "Description".into(), - availabilities: vec![], - flows: vec![], - pacs: vec![], - parameter: process_param, - regions: RegionSelection::Some(["GBR".into()].into_iter().collect()), - }); - let asset_in = AssetRaw { - agent_id: "agent1".into(), - process_id: "process1".into(), - region_id: "USA".into(), // NB: In region_ids, but not in process.regions - capacity: 1.0, - commission_year: 2010, - }; - let processes = [(Rc::clone(&process.id), Rc::clone(&process))] - .into_iter() - .collect(); - assert!( - read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) - .is_err() - ); - } -} diff --git a/src/input/agent.rs b/src/input/agent.rs index 347d16857..33671347c 100644 --- a/src/input/agent.rs +++ b/src/input/agent.rs @@ -1,7 +1,6 @@ //! Code for reading in agent-related data from CSV files. 
use super::*; use crate::agent::{Agent, SearchSpace}; -use crate::asset::read_assets; use crate::process::Process; use anyhow::{bail, ensure, Context, Result}; use region::{define_region_id_getter, read_regions_for_entity}; @@ -12,6 +11,8 @@ use std::rc::Rc; pub mod objective; use objective::read_agent_objectives; +pub mod asset; +use asset::read_assets; const AGENT_FILE_NAME: &str = "agents.csv"; const AGENT_REGIONS_FILE_NAME: &str = "agent_regions.csv"; diff --git a/src/input/agent/asset.rs b/src/input/agent/asset.rs new file mode 100644 index 000000000..38a5924f1 --- /dev/null +++ b/src/input/agent/asset.rs @@ -0,0 +1,213 @@ +//! Code for reading [Asset]s from a CSV file. +use crate::asset::Asset; +use crate::input::*; +use crate::process::Process; +use anyhow::{ensure, Context, Result}; +use itertools::Itertools; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const ASSETS_FILE_NAME: &str = "assets.csv"; + +#[derive(Deserialize, PartialEq)] +struct AssetRaw { + process_id: String, + region_id: String, + agent_id: String, + capacity: f64, + commission_year: u32, +} + +/// Read assets CSV file from model directory. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `agent_ids` - All possible process IDs +/// * `processes` - The model's processes +/// * `region_ids` - All possible region IDs +/// +/// # Returns +/// +/// A `HashMap` containing assets grouped by agent ID. +pub fn read_assets( + model_dir: &Path, + agent_ids: &HashSet>, + processes: &HashMap, Rc>, + region_ids: &HashSet>, +) -> Result, Vec>> { + let file_path = model_dir.join(ASSETS_FILE_NAME); + let assets_csv = read_csv(&file_path)?; + read_assets_from_iter(assets_csv, agent_ids, processes, region_ids) + .with_context(|| input_err_msg(&file_path)) +} + +/// Process assets from an iterator. 
+/// +/// # Arguments +/// +/// * `iter` - Iterator of `AssetRaw`s +/// * `agent_ids` - All possible process IDs +/// * `processes` - The model's processes +/// * `region_ids` - All possible region IDs +/// +/// # Returns +/// +/// A `HashMap` containing assets grouped by agent ID or an error. +fn read_assets_from_iter( + iter: I, + agent_ids: &HashSet>, + processes: &HashMap, Rc>, + region_ids: &HashSet>, +) -> Result, Vec>> +where + I: Iterator, +{ + iter.map(|asset| -> Result<_> { + let agent_id = agent_ids.get_id(&asset.agent_id)?; + let process = processes + .get(asset.process_id.as_str()) + .with_context(|| format!("Invalid process ID: {}", &asset.process_id))?; + let region_id = region_ids.get_id(&asset.region_id)?; + ensure!( + process.regions.contains(®ion_id), + "Region {} is not one of the regions in which process {} operates", + region_id, + process.id + ); + + Ok(( + agent_id, + Asset { + process: Rc::clone(process), + region_id, + capacity: asset.capacity, + commission_year: asset.commission_year, + }, + )) + }) + .process_results(|iter| iter.into_group_map()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::process::ProcessParameter; + use crate::region::RegionSelection; + use std::vec; + + #[test] + fn test_read_assets_from_iter() { + let process_param = ProcessParameter { + process_id: "process1".into(), + years: 2010..=2020, + capital_cost: 5.0, + fixed_operating_cost: 2.0, + variable_operating_cost: 1.0, + lifetime: 5, + discount_rate: 0.9, + cap2act: 1.0, + }; + let process = Rc::new(Process { + id: "process1".into(), + description: "Description".into(), + availabilities: vec![], + flows: vec![], + pacs: vec![], + parameter: process_param.clone(), + regions: RegionSelection::All, + }); + let processes = [(Rc::clone(&process.id), Rc::clone(&process))] + .into_iter() + .collect(); + let agent_ids = ["agent1".into()].into_iter().collect(); + let region_ids = ["GBR".into(), "USA".into()].into_iter().collect(); + + // Valid + let asset_in 
= AssetRaw { + agent_id: "agent1".into(), + process_id: "process1".into(), + region_id: "GBR".into(), + capacity: 1.0, + commission_year: 2010, + }; + let asset_out = Asset { + process: Rc::clone(&process), + region_id: "GBR".into(), + capacity: 1.0, + commission_year: 2010, + }; + let expected = [("agent1".into(), vec![asset_out])].into_iter().collect(); + assert!( + read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) + .unwrap() + == expected + ); + + // Bad process ID + let asset_in = AssetRaw { + agent_id: "agent1".into(), + process_id: "process2".into(), + region_id: "GBR".into(), + capacity: 1.0, + commission_year: 2010, + }; + assert!( + read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) + .is_err() + ); + + // Bad agent ID + let asset_in = AssetRaw { + agent_id: "agent2".into(), + process_id: "process1".into(), + region_id: "GBR".into(), + capacity: 1.0, + commission_year: 2010, + }; + assert!( + read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) + .is_err() + ); + + // Bad region ID: not in region_ids + let asset_in = AssetRaw { + agent_id: "agent1".into(), + process_id: "process1".into(), + region_id: "FRA".into(), + capacity: 1.0, + commission_year: 2010, + }; + assert!( + read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) + .is_err() + ); + + // Bad region ID: process not active there + let process = Rc::new(Process { + id: "process1".into(), + description: "Description".into(), + availabilities: vec![], + flows: vec![], + pacs: vec![], + parameter: process_param, + regions: RegionSelection::Some(["GBR".into()].into_iter().collect()), + }); + let asset_in = AssetRaw { + agent_id: "agent1".into(), + process_id: "process1".into(), + region_id: "USA".into(), // NB: In region_ids, but not in process.regions + capacity: 1.0, + commission_year: 2010, + }; + let processes = [(Rc::clone(&process.id), Rc::clone(&process))] + .into_iter() + 
.collect(); + assert!( + read_assets_from_iter([asset_in].into_iter(), &agent_ids, &processes, ®ion_ids) + .is_err() + ); + } +} From 9d640d8ba4de379586e842aa26a6e71b6306bd2a Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 Dec 2024 11:09:03 +0000 Subject: [PATCH 06/10] Move contents of asset.rs into agent.rs --- src/agent.rs | 15 ++++++++++++++- src/asset.rs | 18 ------------------ src/input/agent/asset.rs | 2 +- src/lib.rs | 1 - 4 files changed, 15 insertions(+), 21 deletions(-) delete mode 100644 src/asset.rs diff --git a/src/agent.rs b/src/agent.rs index 6b7d3be87..0553c8bea 100644 --- a/src/agent.rs +++ b/src/agent.rs @@ -1,6 +1,6 @@ #![allow(missing_docs)] -use crate::asset::Asset; use crate::input::deserialise_proportion_nonzero; +use crate::process::Process; use crate::region::RegionSelection; use anyhow::Result; use serde::Deserialize; @@ -94,3 +94,16 @@ pub enum ObjectiveType { #[string = "eac"] EquivalentAnnualCost, } + +/// An asset controlled by an agent. +#[derive(Clone, Debug, PartialEq)] +pub struct Asset { + /// The [Process] that this asset corresponds to + pub process: Rc, + /// The region in which the asset is located + pub region_id: Rc, + /// Capacity of asset + pub capacity: f64, + /// The year the asset comes online + pub commission_year: u32, +} diff --git a/src/asset.rs b/src/asset.rs deleted file mode 100644 index a8f4a82a1..000000000 --- a/src/asset.rs +++ /dev/null @@ -1,18 +0,0 @@ -//! Code for working with [Asset]s. -//! -//! For a description of what assets are, please see the glossary. -use crate::process::Process; -use std::rc::Rc; - -/// An asset controlled by an agent. 
-#[derive(Clone, Debug, PartialEq)] -pub struct Asset { - /// The [Process] that this asset corresponds to - pub process: Rc, - /// The region in which the asset is located - pub region_id: Rc, - /// Capacity of asset - pub capacity: f64, - /// The year the asset comes online - pub commission_year: u32, -} diff --git a/src/input/agent/asset.rs b/src/input/agent/asset.rs index 38a5924f1..b650761c9 100644 --- a/src/input/agent/asset.rs +++ b/src/input/agent/asset.rs @@ -1,5 +1,5 @@ //! Code for reading [Asset]s from a CSV file. -use crate::asset::Asset; +use crate::agent::Asset; use crate::input::*; use crate::process::Process; use anyhow::{ensure, Context, Result}; diff --git a/src/lib.rs b/src/lib.rs index 66223c476..e86f31ba8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,7 +1,6 @@ //! High level functionality for launching the simulation. #![warn(missing_docs)] pub mod agent; -pub mod asset; pub mod commodity; pub mod demand; pub mod input; From 50a04ea32e094f70913c5afc9862aca0a4e36a56 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 Dec 2024 14:08:04 +0000 Subject: [PATCH 07/10] Move commodity CSV code to `input/commodity.rs` --- src/commodity.rs | 362 +--------------------------------------- src/input.rs | 2 + src/input/commodity.rs | 370 +++++++++++++++++++++++++++++++++++++++++ src/model.rs | 2 +- 4 files changed, 376 insertions(+), 360 deletions(-) create mode 100644 src/input/commodity.rs diff --git a/src/commodity.rs b/src/commodity.rs index 9e585e8e1..582ad1af3 100644 --- a/src/commodity.rs +++ b/src/commodity.rs @@ -1,17 +1,12 @@ #![allow(missing_docs)] -use crate::demand::{read_demand, Demand}; +use crate::demand::Demand; use crate::input::*; -use crate::time_slice::{TimeSliceID, TimeSliceInfo, TimeSliceLevel}; -use anyhow::{ensure, Context, Result}; +use crate::time_slice::{TimeSliceID, TimeSliceLevel}; use serde::Deserialize; use serde_string_enum::DeserializeLabeledStringEnum; -use std::collections::{HashMap, HashSet}; -use std::path::Path; 
+use std::collections::HashMap; use std::rc::Rc; -const COMMODITY_FILE_NAME: &str = "commodities.csv"; -const COMMODITY_COSTS_FILE_NAME: &str = "commodity_costs.csv"; - /// A commodity within the simulation #[derive(PartialEq, Debug, Deserialize)] pub struct Commodity { @@ -43,23 +38,6 @@ pub enum BalanceType { Production, } -/// Cost parameters for each commodity -#[derive(PartialEq, Debug, Deserialize, Clone)] -struct CommodityCostRaw { - /// Unique identifier for the commodity (e.g. "ELC") - pub commodity_id: String, - /// The region to which the commodity cost applies. - pub region_id: String, - /// Type of balance for application of cost. - pub balance_type: BalanceType, - /// The year to which the cost applies. - pub year: u32, - /// The time slice to which the cost applies. - pub time_slice: String, - /// Cost per unit commodity. For example, if a CO2 price is specified in input data, it can be applied to net CO2 via this value. - pub value: f64, -} - /// Cost parameters for each commodity #[derive(PartialEq, Clone, Debug)] pub struct CommodityCost { @@ -132,166 +110,9 @@ pub enum CommodityType { OutputCommodity, } -fn read_commodity_costs_iter( - iter: I, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - time_slice_info: &TimeSliceInfo, - milestone_years: &[u32], -) -> Result, CommodityCostMap>> -where - I: Iterator, -{ - let mut map = HashMap::new(); - - // Keep track of milestone years used for each commodity + region combo. If a user provides an - // entry with a given commodity + region combo for one milestone year, they must also provide - // entries for all the other milestone years. - let mut used_milestone_years = HashMap::new(); - - for cost in iter { - let commodity_id = commodity_ids.get_id(&cost.commodity_id)?; - let region_id = region_ids.get_id(&cost.region_id)?; - let ts_selection = time_slice_info.get_selection(&cost.time_slice)?; - - ensure!( - milestone_years.binary_search(&cost.year).is_ok(), - "Year {} is not a milestone year. 
\ - Input of non-milestone years is currently not supported.", - cost.year - ); - - // Get or create CommodityCostMap for this commodity - let map = map - .entry(commodity_id.clone()) - .or_insert_with(CommodityCostMap::new); - - for time_slice in time_slice_info.iter_selection(&ts_selection) { - let value = CommodityCost { - balance_type: cost.balance_type.clone(), - value: cost.value, - }; - - ensure!( - map.insert(Rc::clone(®ion_id), cost.year, time_slice.clone(), value) - .is_none(), - "Commodity cost entry covered by more than one time slice \ - (region: {}, year: {}, time slice: {})", - region_id, - cost.year, - time_slice - ); - } - - // Keep track of milestone years used for each commodity + region combo - used_milestone_years - .entry((commodity_id, region_id)) - .or_insert_with(|| HashSet::with_capacity(1)) - .insert(cost.year); - } - - let milestone_years = HashSet::from_iter(milestone_years.iter().cloned()); - for ((commodity_id, region_id), years) in used_milestone_years.iter() { - ensure!( - years == &milestone_years, - "Commodity costs missing for some milestone years (commodity: {}, region: {})", - commodity_id, - region_id - ); - } - - Ok(map) -} - -/// Read costs associated with each commodity from commodity costs CSV file. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `commodity_ids` - All possible commodity IDs -/// * `region_ids` - All possible region IDs -/// * `time_slice_info` - Information about time slices -/// * `milestone_years` - All milestone years -/// -/// # Returns -/// -/// A map containing commodity costs, grouped by commodity ID. 
-fn read_commodity_costs( - model_dir: &Path, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - time_slice_info: &TimeSliceInfo, - milestone_years: &[u32], -) -> Result, CommodityCostMap>> { - let file_path = model_dir.join(COMMODITY_COSTS_FILE_NAME); - let commodity_costs_csv = read_csv::(&file_path)?; - read_commodity_costs_iter( - commodity_costs_csv, - commodity_ids, - region_ids, - time_slice_info, - milestone_years, - ) - .with_context(|| input_err_msg(&file_path)) -} - -/// Read commodity data from the specified model directory. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `region_ids` - All possible region IDs -/// * `time_slice_info` - Information about time slices -/// * `milestone_years` - All milestone years -/// -/// # Returns -/// -/// A map containing commodities, grouped by commodity ID or an error. -pub fn read_commodities( - model_dir: &Path, - region_ids: &HashSet>, - time_slice_info: &TimeSliceInfo, - milestone_years: &[u32], -) -> Result, Rc>> { - let commodities = read_csv_id_file::(&model_dir.join(COMMODITY_FILE_NAME))?; - let commodity_ids = commodities.keys().cloned().collect(); - let mut costs = read_commodity_costs( - model_dir, - &commodity_ids, - region_ids, - time_slice_info, - milestone_years, - )?; - - let year_range = *milestone_years.first().unwrap()..=*milestone_years.last().unwrap(); - let mut demand = read_demand( - model_dir, - &commodity_ids, - region_ids, - time_slice_info, - &year_range, - )?; - - // Populate Vecs for each Commodity - Ok(commodities - .into_iter() - .map(|(id, mut commodity)| { - if let Some(costs) = costs.remove(&id) { - commodity.costs = costs; - } - if let Some(demand) = demand.remove(&id) { - commodity.demand_by_region = demand; - } - - (id, commodity.into()) - }) - .collect()) -} - #[cfg(test)] mod tests { use super::*; - use std::iter; #[test] fn test_commodity_cost_map() { @@ -309,181 +130,4 @@ mod tests { .is_none()); 
assert_eq!(map.get("GBR".into(), 2010, ts).unwrap(), &value); } - - #[test] - fn test_read_commodity_costs_iter() { - let commodity_ids = ["commodity".into()].into_iter().collect(); - let region_ids = ["GBR".into(), "FRA".into()].into_iter().collect(); - let slices = [ - TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - TimeSliceID { - season: "summer".into(), - time_of_day: "night".into(), - }, - ]; - let time_slice_info = TimeSliceInfo { - seasons: ["winter".into(), "summer".into()].into_iter().collect(), - times_of_day: ["day".into(), "night".into()].into_iter().collect(), - fractions: [(slices[0].clone(), 0.5), (slices[1].clone(), 0.5)] - .into_iter() - .collect(), - }; - let time_slice = time_slice_info - .get_time_slice_id_from_str("winter.day") - .unwrap(); - let milestone_years = [2010]; - - // Valid - let cost1 = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Consumption, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - let cost2 = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "FRA".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - let value1 = CommodityCost { - balance_type: cost1.balance_type.clone(), - value: cost1.value, - }; - let value2 = CommodityCost { - balance_type: cost2.balance_type.clone(), - value: cost2.value, - }; - let mut map = CommodityCostMap::new(); - map.insert("GBR".into(), cost1.year, time_slice.clone(), value1); - map.insert("FRA".into(), cost2.year, time_slice.clone(), value2); - let expected = HashMap::from_iter([("commodity".into(), map)]); - assert_eq!( - read_commodity_costs_iter( - [cost1.clone(), cost2].into_iter(), - &commodity_ids, - ®ion_ids, - &time_slice_info, - &milestone_years, - ) - .unwrap(), - expected - ); - - // Invalid: Overlapping time slices - let cost2 = CommodityCostRaw { - commodity_id: "commodity".into(), - 
region_id: "GBR".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter".into(), // NB: Covers all winter - value: 0.5, - }; - assert!(read_commodity_costs_iter( - [cost1.clone(), cost2].into_iter(), - &commodity_ids, - ®ion_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: Bad commodity - let cost = CommodityCostRaw { - commodity_id: "commodity2".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - ®ion_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: Bad region - let cost = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "USA".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - ®ion_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: Bad time slice selection - let cost = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "summer.evening".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - ®ion_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: non-milestone year - let cost2 = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Consumption, - year: 2011, // NB: Non-milestone year - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - [cost1, cost2].into_iter(), - &commodity_ids, - ®ion_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: Milestone year 2020 is not covered - let milestone_years = [2010, 2020]; - let cost = 
CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Consumption, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - ®ion_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - } } diff --git a/src/input.rs b/src/input.rs index dfc9e9504..5ce1f09e4 100644 --- a/src/input.rs +++ b/src/input.rs @@ -9,6 +9,8 @@ use std::rc::Rc; pub mod agent; pub use agent::read_agents; +pub mod commodity; +pub use commodity::read_commodities; pub mod region; pub use region::read_regions; diff --git a/src/input/commodity.rs b/src/input/commodity.rs new file mode 100644 index 000000000..828ebba0f --- /dev/null +++ b/src/input/commodity.rs @@ -0,0 +1,370 @@ +//! Code for reading in commodity-related data from CSV files. +use crate::commodity::{BalanceType, Commodity, CommodityCost, CommodityCostMap}; +use crate::demand::read_demand; +use crate::input::*; +use crate::time_slice::TimeSliceInfo; +use anyhow::{ensure, Context, Result}; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const COMMODITY_FILE_NAME: &str = "commodities.csv"; +const COMMODITY_COSTS_FILE_NAME: &str = "commodity_costs.csv"; + +/// Cost parameters for each commodity +#[derive(PartialEq, Debug, Deserialize, Clone)] +struct CommodityCostRaw { + /// Unique identifier for the commodity (e.g. "ELC") + pub commodity_id: String, + /// The region to which the commodity cost applies. + pub region_id: String, + /// Type of balance for application of cost. + pub balance_type: BalanceType, + /// The year to which the cost applies. + pub year: u32, + /// The time slice to which the cost applies. + pub time_slice: String, + /// Cost per unit commodity. For example, if a CO2 price is specified in input data, it can be applied to net CO2 via this value. 
+ pub value: f64, +} + +fn read_commodity_costs_iter( + iter: I, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + time_slice_info: &TimeSliceInfo, + milestone_years: &[u32], +) -> Result, CommodityCostMap>> +where + I: Iterator, +{ + let mut map = HashMap::new(); + + // Keep track of milestone years used for each commodity + region combo. If a user provides an + // entry with a given commodity + region combo for one milestone year, they must also provide + // entries for all the other milestone years. + let mut used_milestone_years = HashMap::new(); + + for cost in iter { + let commodity_id = commodity_ids.get_id(&cost.commodity_id)?; + let region_id = region_ids.get_id(&cost.region_id)?; + let ts_selection = time_slice_info.get_selection(&cost.time_slice)?; + + ensure!( + milestone_years.binary_search(&cost.year).is_ok(), + "Year {} is not a milestone year. \ + Input of non-milestone years is currently not supported.", + cost.year + ); + + // Get or create CommodityCostMap for this commodity + let map = map + .entry(commodity_id.clone()) + .or_insert_with(CommodityCostMap::new); + + for time_slice in time_slice_info.iter_selection(&ts_selection) { + let value = CommodityCost { + balance_type: cost.balance_type.clone(), + value: cost.value, + }; + + ensure!( + map.insert(Rc::clone(®ion_id), cost.year, time_slice.clone(), value) + .is_none(), + "Commodity cost entry covered by more than one time slice \ + (region: {}, year: {}, time slice: {})", + region_id, + cost.year, + time_slice + ); + } + + // Keep track of milestone years used for each commodity + region combo + used_milestone_years + .entry((commodity_id, region_id)) + .or_insert_with(|| HashSet::with_capacity(1)) + .insert(cost.year); + } + + let milestone_years = HashSet::from_iter(milestone_years.iter().cloned()); + for ((commodity_id, region_id), years) in used_milestone_years.iter() { + ensure!( + years == &milestone_years, + "Commodity costs missing for some milestone years (commodity: {}, 
region: {})", + commodity_id, + region_id + ); + } + + Ok(map) +} + +/// Read costs associated with each commodity from commodity costs CSV file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible commodity IDs +/// * `region_ids` - All possible region IDs +/// * `time_slice_info` - Information about time slices +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// A map containing commodity costs, grouped by commodity ID. +fn read_commodity_costs( + model_dir: &Path, + commodity_ids: &HashSet>, + region_ids: &HashSet>, + time_slice_info: &TimeSliceInfo, + milestone_years: &[u32], +) -> Result, CommodityCostMap>> { + let file_path = model_dir.join(COMMODITY_COSTS_FILE_NAME); + let commodity_costs_csv = read_csv::(&file_path)?; + read_commodity_costs_iter( + commodity_costs_csv, + commodity_ids, + region_ids, + time_slice_info, + milestone_years, + ) + .with_context(|| input_err_msg(&file_path)) +} + +/// Read commodity data from the specified model directory. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `region_ids` - All possible region IDs +/// * `time_slice_info` - Information about time slices +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// A map containing commodities, grouped by commodity ID or an error. 
+pub fn read_commodities( + model_dir: &Path, + region_ids: &HashSet>, + time_slice_info: &TimeSliceInfo, + milestone_years: &[u32], +) -> Result, Rc>> { + let commodities = read_csv_id_file::(&model_dir.join(COMMODITY_FILE_NAME))?; + let commodity_ids = commodities.keys().cloned().collect(); + let mut costs = read_commodity_costs( + model_dir, + &commodity_ids, + region_ids, + time_slice_info, + milestone_years, + )?; + + let year_range = *milestone_years.first().unwrap()..=*milestone_years.last().unwrap(); + let mut demand = read_demand( + model_dir, + &commodity_ids, + region_ids, + time_slice_info, + &year_range, + )?; + + // Populate Vecs for each Commodity + Ok(commodities + .into_iter() + .map(|(id, mut commodity)| { + if let Some(costs) = costs.remove(&id) { + commodity.costs = costs; + } + if let Some(demand) = demand.remove(&id) { + commodity.demand_by_region = demand; + } + + (id, commodity.into()) + }) + .collect()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::time_slice::TimeSliceID; + use std::iter; + + #[test] + fn test_read_commodity_costs_iter() { + let commodity_ids = ["commodity".into()].into_iter().collect(); + let region_ids = ["GBR".into(), "FRA".into()].into_iter().collect(); + let slices = [ + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + ]; + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: ["day".into(), "night".into()].into_iter().collect(), + fractions: [(slices[0].clone(), 0.5), (slices[1].clone(), 0.5)] + .into_iter() + .collect(), + }; + let time_slice = time_slice_info + .get_time_slice_id_from_str("winter.day") + .unwrap(); + let milestone_years = [2010]; + + // Valid + let cost1 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Consumption, + year: 2010, + time_slice: 
"winter.day".into(), + value: 0.5, + }; + let cost2 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "FRA".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + let value1 = CommodityCost { + balance_type: cost1.balance_type.clone(), + value: cost1.value, + }; + let value2 = CommodityCost { + balance_type: cost2.balance_type.clone(), + value: cost2.value, + }; + let mut map = CommodityCostMap::new(); + map.insert("GBR".into(), cost1.year, time_slice.clone(), value1); + map.insert("FRA".into(), cost2.year, time_slice.clone(), value2); + let expected = HashMap::from_iter([("commodity".into(), map)]); + assert_eq!( + read_commodity_costs_iter( + [cost1.clone(), cost2].into_iter(), + &commodity_ids, + ®ion_ids, + &time_slice_info, + &milestone_years, + ) + .unwrap(), + expected + ); + + // Invalid: Overlapping time slices + let cost2 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter".into(), // NB: Covers all winter + value: 0.5, + }; + assert!(read_commodity_costs_iter( + [cost1.clone(), cost2].into_iter(), + &commodity_ids, + ®ion_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Bad commodity + let cost = CommodityCostRaw { + commodity_id: "commodity2".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + ®ion_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Bad region + let cost = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "USA".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + 
®ion_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Bad time slice selection + let cost = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "summer.evening".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + ®ion_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: non-milestone year + let cost2 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Consumption, + year: 2011, // NB: Non-milestone year + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + [cost1, cost2].into_iter(), + &commodity_ids, + ®ion_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Milestone year 2020 is not covered + let milestone_years = [2010, 2020]; + let cost = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Consumption, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + ®ion_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + } +} diff --git a/src/model.rs b/src/model.rs index 58264778a..14e1a9865 100644 --- a/src/model.rs +++ b/src/model.rs @@ -1,7 +1,7 @@ //! Code for simulation models. 
#![allow(missing_docs)] use crate::agent::Agent; -use crate::commodity::{read_commodities, Commodity}; +use crate::commodity::Commodity; use crate::input::*; use crate::process::{read_processes, Process}; use crate::region::Region; From 04d4f430c73c00267c7a2e8a844ea69dcae803aa Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Thu, 19 Dec 2024 15:22:58 +0000 Subject: [PATCH 08/10] Split out cost-related code into `input/commodity/cost.rs` --- src/input/commodity.rs | 313 +---------------------------------- src/input/commodity/cost.rs | 315 ++++++++++++++++++++++++++++++++++++ 2 files changed, 320 insertions(+), 308 deletions(-) create mode 100644 src/input/commodity/cost.rs diff --git a/src/input/commodity.rs b/src/input/commodity.rs index 828ebba0f..e4a5b1f54 100644 --- a/src/input/commodity.rs +++ b/src/input/commodity.rs @@ -1,136 +1,17 @@ //! Code for reading in commodity-related data from CSV files. -use crate::commodity::{BalanceType, Commodity, CommodityCost, CommodityCostMap}; +use crate::commodity::Commodity; use crate::demand::read_demand; use crate::input::*; use crate::time_slice::TimeSliceInfo; -use anyhow::{ensure, Context, Result}; -use serde::Deserialize; +use anyhow::Result; use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; -const COMMODITY_FILE_NAME: &str = "commodities.csv"; -const COMMODITY_COSTS_FILE_NAME: &str = "commodity_costs.csv"; - -/// Cost parameters for each commodity -#[derive(PartialEq, Debug, Deserialize, Clone)] -struct CommodityCostRaw { - /// Unique identifier for the commodity (e.g. "ELC") - pub commodity_id: String, - /// The region to which the commodity cost applies. - pub region_id: String, - /// Type of balance for application of cost. - pub balance_type: BalanceType, - /// The year to which the cost applies. - pub year: u32, - /// The time slice to which the cost applies. - pub time_slice: String, - /// Cost per unit commodity. 
For example, if a CO2 price is specified in input data, it can be applied to net CO2 via this value. - pub value: f64, -} - -fn read_commodity_costs_iter( - iter: I, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - time_slice_info: &TimeSliceInfo, - milestone_years: &[u32], -) -> Result, CommodityCostMap>> -where - I: Iterator, -{ - let mut map = HashMap::new(); - - // Keep track of milestone years used for each commodity + region combo. If a user provides an - // entry with a given commodity + region combo for one milestone year, they must also provide - // entries for all the other milestone years. - let mut used_milestone_years = HashMap::new(); - - for cost in iter { - let commodity_id = commodity_ids.get_id(&cost.commodity_id)?; - let region_id = region_ids.get_id(&cost.region_id)?; - let ts_selection = time_slice_info.get_selection(&cost.time_slice)?; - - ensure!( - milestone_years.binary_search(&cost.year).is_ok(), - "Year {} is not a milestone year. \ - Input of non-milestone years is currently not supported.", - cost.year - ); - - // Get or create CommodityCostMap for this commodity - let map = map - .entry(commodity_id.clone()) - .or_insert_with(CommodityCostMap::new); - - for time_slice in time_slice_info.iter_selection(&ts_selection) { - let value = CommodityCost { - balance_type: cost.balance_type.clone(), - value: cost.value, - }; +pub mod cost; +use cost::read_commodity_costs; - ensure!( - map.insert(Rc::clone(®ion_id), cost.year, time_slice.clone(), value) - .is_none(), - "Commodity cost entry covered by more than one time slice \ - (region: {}, year: {}, time slice: {})", - region_id, - cost.year, - time_slice - ); - } - - // Keep track of milestone years used for each commodity + region combo - used_milestone_years - .entry((commodity_id, region_id)) - .or_insert_with(|| HashSet::with_capacity(1)) - .insert(cost.year); - } - - let milestone_years = HashSet::from_iter(milestone_years.iter().cloned()); - for ((commodity_id, region_id), years) 
in used_milestone_years.iter() { - ensure!( - years == &milestone_years, - "Commodity costs missing for some milestone years (commodity: {}, region: {})", - commodity_id, - region_id - ); - } - - Ok(map) -} - -/// Read costs associated with each commodity from commodity costs CSV file. -/// -/// # Arguments -/// -/// * `model_dir` - Folder containing model configuration files -/// * `commodity_ids` - All possible commodity IDs -/// * `region_ids` - All possible region IDs -/// * `time_slice_info` - Information about time slices -/// * `milestone_years` - All milestone years -/// -/// # Returns -/// -/// A map containing commodity costs, grouped by commodity ID. -fn read_commodity_costs( - model_dir: &Path, - commodity_ids: &HashSet>, - region_ids: &HashSet>, - time_slice_info: &TimeSliceInfo, - milestone_years: &[u32], -) -> Result, CommodityCostMap>> { - let file_path = model_dir.join(COMMODITY_COSTS_FILE_NAME); - let commodity_costs_csv = read_csv::(&file_path)?; - read_commodity_costs_iter( - commodity_costs_csv, - commodity_ids, - region_ids, - time_slice_info, - milestone_years, - ) - .with_context(|| input_err_msg(&file_path)) -} +const COMMODITY_FILE_NAME: &str = "commodities.csv"; /// Read commodity data from the specified model directory. 
/// @@ -184,187 +65,3 @@ pub fn read_commodities( }) .collect()) } - -#[cfg(test)] -mod tests { - use super::*; - use crate::time_slice::TimeSliceID; - use std::iter; - - #[test] - fn test_read_commodity_costs_iter() { - let commodity_ids = ["commodity".into()].into_iter().collect(); - let region_ids = ["GBR".into(), "FRA".into()].into_iter().collect(); - let slices = [ - TimeSliceID { - season: "winter".into(), - time_of_day: "day".into(), - }, - TimeSliceID { - season: "summer".into(), - time_of_day: "night".into(), - }, - ]; - let time_slice_info = TimeSliceInfo { - seasons: ["winter".into(), "summer".into()].into_iter().collect(), - times_of_day: ["day".into(), "night".into()].into_iter().collect(), - fractions: [(slices[0].clone(), 0.5), (slices[1].clone(), 0.5)] - .into_iter() - .collect(), - }; - let time_slice = time_slice_info - .get_time_slice_id_from_str("winter.day") - .unwrap(); - let milestone_years = [2010]; - - // Valid - let cost1 = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Consumption, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - let cost2 = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "FRA".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - let value1 = CommodityCost { - balance_type: cost1.balance_type.clone(), - value: cost1.value, - }; - let value2 = CommodityCost { - balance_type: cost2.balance_type.clone(), - value: cost2.value, - }; - let mut map = CommodityCostMap::new(); - map.insert("GBR".into(), cost1.year, time_slice.clone(), value1); - map.insert("FRA".into(), cost2.year, time_slice.clone(), value2); - let expected = HashMap::from_iter([("commodity".into(), map)]); - assert_eq!( - read_commodity_costs_iter( - [cost1.clone(), cost2].into_iter(), - &commodity_ids, - &region_ids, - &time_slice_info, - &milestone_years, - ) - .unwrap(), - expected - ); - - // 
Invalid: Overlapping time slices - let cost2 = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter".into(), // NB: Covers all winter - value: 0.5, - }; - assert!(read_commodity_costs_iter( - [cost1.clone(), cost2].into_iter(), - &commodity_ids, - &region_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: Bad commodity - let cost = CommodityCostRaw { - commodity_id: "commodity2".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - &region_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: Bad region - let cost = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "USA".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - &region_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: Bad time slice selection - let cost = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Production, - year: 2010, - time_slice: "summer.evening".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - &region_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - // Invalid: non-milestone year - let cost2 = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Consumption, - year: 2011, // NB: Non-milestone year - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - [cost1, cost2].into_iter(), - &commodity_ids, - &region_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - - 
// Invalid: Milestone year 2020 is not covered - let milestone_years = [2010, 2020]; - let cost = CommodityCostRaw { - commodity_id: "commodity".into(), - region_id: "GBR".into(), - balance_type: BalanceType::Consumption, - year: 2010, - time_slice: "winter.day".into(), - value: 0.5, - }; - assert!(read_commodity_costs_iter( - iter::once(cost), - &commodity_ids, - &region_ids, - &time_slice_info, - &milestone_years, - ) - .is_err()); - } -} diff --git a/src/input/commodity/cost.rs b/src/input/commodity/cost.rs new file mode 100644 index 000000000..4a5e5903c --- /dev/null +++ b/src/input/commodity/cost.rs @@ -0,0 +1,315 @@ +//! Code for reading in the commodity cost CSV file. +use crate::commodity::{BalanceType, CommodityCost, CommodityCostMap}; +use crate::input::*; +use crate::time_slice::TimeSliceInfo; +use anyhow::{ensure, Context, Result}; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const COMMODITY_COSTS_FILE_NAME: &str = "commodity_costs.csv"; + +/// Cost parameters for each commodity +#[derive(PartialEq, Debug, Deserialize, Clone)] +struct CommodityCostRaw { + /// Unique identifier for the commodity (e.g. "ELC") + pub commodity_id: String, + /// The region to which the commodity cost applies. + pub region_id: String, + /// Type of balance for application of cost. + pub balance_type: BalanceType, + /// The year to which the cost applies. + pub year: u32, + /// The time slice to which the cost applies. + pub time_slice: String, + /// Cost per unit commodity. For example, if a CO2 price is specified in input data, it can be applied to net CO2 via this value. + pub value: f64, +} + +/// Read costs associated with each commodity from commodity costs CSV file. 
+/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `commodity_ids` - All possible commodity IDs +/// * `region_ids` - All possible region IDs +/// * `time_slice_info` - Information about time slices +/// * `milestone_years` - All milestone years +/// +/// # Returns +/// +/// A map containing commodity costs, grouped by commodity ID. +pub fn read_commodity_costs( + model_dir: &Path, + commodity_ids: &HashSet<Rc<str>>, + region_ids: &HashSet<Rc<str>>, + time_slice_info: &TimeSliceInfo, + milestone_years: &[u32], +) -> Result<HashMap<Rc<str>, CommodityCostMap>> { + let file_path = model_dir.join(COMMODITY_COSTS_FILE_NAME); + let commodity_costs_csv = read_csv::<CommodityCostRaw>(&file_path)?; + read_commodity_costs_iter( + commodity_costs_csv, + commodity_ids, + region_ids, + time_slice_info, + milestone_years, + ) + .with_context(|| input_err_msg(&file_path)) +} + +fn read_commodity_costs_iter<I>( + iter: I, + commodity_ids: &HashSet<Rc<str>>, + region_ids: &HashSet<Rc<str>>, + time_slice_info: &TimeSliceInfo, + milestone_years: &[u32], +) -> Result<HashMap<Rc<str>, CommodityCostMap>> +where + I: Iterator<Item = CommodityCostRaw>, +{ + let mut map = HashMap::new(); + + // Keep track of milestone years used for each commodity + region combo. If a user provides an + // entry with a given commodity + region combo for one milestone year, they must also provide + // entries for all the other milestone years. + let mut used_milestone_years = HashMap::new(); + + for cost in iter { + let commodity_id = commodity_ids.get_id(&cost.commodity_id)?; + let region_id = region_ids.get_id(&cost.region_id)?; + let ts_selection = time_slice_info.get_selection(&cost.time_slice)?; + + ensure!( + milestone_years.binary_search(&cost.year).is_ok(), + "Year {} is not a milestone year. 
\ + Input of non-milestone years is currently not supported.", + cost.year + ); + + // Get or create CommodityCostMap for this commodity + let map = map + .entry(commodity_id.clone()) + .or_insert_with(CommodityCostMap::new); + + for time_slice in time_slice_info.iter_selection(&ts_selection) { + let value = CommodityCost { + balance_type: cost.balance_type.clone(), + value: cost.value, + }; + + ensure!( + map.insert(Rc::clone(&region_id), cost.year, time_slice.clone(), value) + .is_none(), + "Commodity cost entry covered by more than one time slice \ + (region: {}, year: {}, time slice: {})", + region_id, + cost.year, + time_slice + ); + } + + // Keep track of milestone years used for each commodity + region combo + used_milestone_years + .entry((commodity_id, region_id)) + .or_insert_with(|| HashSet::with_capacity(1)) + .insert(cost.year); + } + + let milestone_years = HashSet::from_iter(milestone_years.iter().cloned()); + for ((commodity_id, region_id), years) in used_milestone_years.iter() { + ensure!( + years == &milestone_years, + "Commodity costs missing for some milestone years (commodity: {}, region: {})", + commodity_id, + region_id + ); + } + + Ok(map) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::time_slice::TimeSliceID; + use std::iter; + + #[test] + fn test_read_commodity_costs_iter() { + let commodity_ids = ["commodity".into()].into_iter().collect(); + let region_ids = ["GBR".into(), "FRA".into()].into_iter().collect(); + let slices = [ + TimeSliceID { + season: "winter".into(), + time_of_day: "day".into(), + }, + TimeSliceID { + season: "summer".into(), + time_of_day: "night".into(), + }, + ]; + let time_slice_info = TimeSliceInfo { + seasons: ["winter".into(), "summer".into()].into_iter().collect(), + times_of_day: ["day".into(), "night".into()].into_iter().collect(), + fractions: [(slices[0].clone(), 0.5), (slices[1].clone(), 0.5)] + .into_iter() + .collect(), + }; + let time_slice = time_slice_info + 
.get_time_slice_id_from_str("winter.day") + .unwrap(); + let milestone_years = [2010]; + + // Valid + let cost1 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Consumption, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + let cost2 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "FRA".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + let value1 = CommodityCost { + balance_type: cost1.balance_type.clone(), + value: cost1.value, + }; + let value2 = CommodityCost { + balance_type: cost2.balance_type.clone(), + value: cost2.value, + }; + let mut map = CommodityCostMap::new(); + map.insert("GBR".into(), cost1.year, time_slice.clone(), value1); + map.insert("FRA".into(), cost2.year, time_slice.clone(), value2); + let expected = HashMap::from_iter([("commodity".into(), map)]); + assert_eq!( + read_commodity_costs_iter( + [cost1.clone(), cost2].into_iter(), + &commodity_ids, + &region_ids, + &time_slice_info, + &milestone_years, + ) + .unwrap(), + expected + ); + + // Invalid: Overlapping time slices + let cost2 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter".into(), // NB: Covers all winter + value: 0.5, + }; + assert!(read_commodity_costs_iter( + [cost1.clone(), cost2].into_iter(), + &commodity_ids, + &region_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Bad commodity + let cost = CommodityCostRaw { + commodity_id: "commodity2".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + &region_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Bad region + let cost = 
CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "USA".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + &region_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Bad time slice selection + let cost = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Production, + year: 2010, + time_slice: "summer.evening".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + &region_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: non-milestone year + let cost2 = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Consumption, + year: 2011, // NB: Non-milestone year + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + [cost1, cost2].into_iter(), + &commodity_ids, + &region_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + + // Invalid: Milestone year 2020 is not covered + let milestone_years = [2010, 2020]; + let cost = CommodityCostRaw { + commodity_id: "commodity".into(), + region_id: "GBR".into(), + balance_type: BalanceType::Consumption, + year: 2010, + time_slice: "winter.day".into(), + value: 0.5, + }; + assert!(read_commodity_costs_iter( + iter::once(cost), + &commodity_ids, + &region_ids, + &time_slice_info, + &milestone_years, + ) + .is_err()); + } +} From 4ded88497d8e146f50fb8af1eef1d03fbe9aedf6 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Fri, 20 Dec 2024 15:43:40 +0000 Subject: [PATCH 09/10] Move agent regions code to own submodule --- src/input/agent.rs | 27 +++++------------------ src/input/agent/region.rs | 45 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 50 insertions(+), 22 deletions(-) create mode 100644 
src/input/agent/region.rs diff --git a/src/input/agent.rs b/src/input/agent.rs index 33671347c..4bd36a025 100644 --- a/src/input/agent.rs +++ b/src/input/agent.rs @@ -3,33 +3,18 @@ use super::*; use crate::agent::{Agent, SearchSpace}; use crate::process::Process; use anyhow::{bail, ensure, Context, Result}; -use region::{define_region_id_getter, read_regions_for_entity}; -use serde::Deserialize; use std::collections::{HashMap, HashSet}; use std::path::Path; use std::rc::Rc; -pub mod objective; -use objective::read_agent_objectives; pub mod asset; use asset::read_assets; +pub mod objective; +use objective::read_agent_objectives; +pub mod region; +use region::read_agent_regions; const AGENT_FILE_NAME: &str = "agents.csv"; -const AGENT_REGIONS_FILE_NAME: &str = "agent_regions.csv"; - -#[derive(Debug, Deserialize, PartialEq)] -struct AgentRegion { - agent_id: String, - /// The region to which an agent belongs. - region_id: String, -} -define_region_id_getter!(AgentRegion); - -impl HasID for AgentRegion { - fn get_id(&self) -> &str { - &self.agent_id - } -} /// Read agents info from various CSV files. /// @@ -51,9 +36,7 @@ pub fn read_agents( let mut agents = read_agents_file(model_dir, &process_ids)?; let agent_ids = agents.keys().cloned().collect(); - let file_path = model_dir.join(AGENT_REGIONS_FILE_NAME); - let mut agent_regions = - read_regions_for_entity::<AgentRegion>(&file_path, &agent_ids, region_ids)?; + let mut agent_regions = read_agent_regions(model_dir, &agent_ids, region_ids)?; let mut objectives = read_agent_objectives(model_dir, &agents)?; let mut assets = read_assets(model_dir, &agent_ids, processes, region_ids)?; diff --git a/src/input/agent/region.rs b/src/input/agent/region.rs new file mode 100644 index 000000000..b0d2e8069 --- /dev/null +++ b/src/input/agent/region.rs @@ -0,0 +1,45 @@ +//! Code for loading the agent regions CSV file. 
+use crate::input::region::{define_region_id_getter, read_regions_for_entity}; +use crate::input::HasID; +use crate::region::RegionSelection; +use anyhow::Result; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::path::Path; +use std::rc::Rc; + +const AGENT_REGIONS_FILE_NAME: &str = "agent_regions.csv"; + +#[derive(Debug, Deserialize, PartialEq)] +struct AgentRegion { + agent_id: String, + /// The region to which an agent belongs. + region_id: String, +} +define_region_id_getter!(AgentRegion); + +impl HasID for AgentRegion { + fn get_id(&self) -> &str { + &self.agent_id + } +} + +/// Read the agent regions file. +/// +/// # Arguments +/// +/// * `model_dir` - Folder containing model configuration files +/// * `agent_ids` - The possible valid agent IDs +/// * `region_ids` - The possible valid region IDs +/// +/// # Returns +/// +/// A map of [`RegionSelection`]s, with the agent ID as the key. +pub fn read_agent_regions( + model_dir: &Path, + agent_ids: &HashSet<Rc<str>>, + region_ids: &HashSet<Rc<str>>, +) -> Result<HashMap<Rc<str>, RegionSelection>> { + let file_path = model_dir.join(AGENT_REGIONS_FILE_NAME); + read_regions_for_entity::<AgentRegion>(&file_path, agent_ids, region_ids) +} From 15850dcfd66c91f028e966bebfcee316714cf27d Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Fri, 20 Dec 2024 15:44:13 +0000 Subject: [PATCH 10/10] Rename: read_assets => read_agent_assets --- src/input/agent.rs | 4 ++-- src/input/agent/asset.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/input/agent.rs b/src/input/agent.rs index 4bd36a025..8557c8504 100644 --- a/src/input/agent.rs +++ b/src/input/agent.rs @@ -8,7 +8,7 @@ use std::path::Path; use std::rc::Rc; pub mod asset; -use asset::read_assets; +use asset::read_agent_assets; pub mod objective; use objective::read_agent_objectives; pub mod region; use region::read_agent_regions; @@ -38,7 +38,7 @@ pub fn read_agents( let mut agent_regions = read_agent_regions(model_dir, &agent_ids, region_ids)?; let mut objectives = read_agent_objectives(model_dir, 
&agents)?; - let mut assets = read_assets(model_dir, &agent_ids, processes, region_ids)?; + let mut assets = read_agent_assets(model_dir, &agent_ids, processes, region_ids)?; // Populate each Agent's Vecs for (id, agent) in agents.iter_mut() { diff --git a/src/input/agent/asset.rs b/src/input/agent/asset.rs index b650761c9..a9ec27f2b 100644 --- a/src/input/agent/asset.rs +++ b/src/input/agent/asset.rs @@ -32,7 +32,7 @@ struct AssetRaw { /// # Returns /// /// A `HashMap` containing assets grouped by agent ID. -pub fn read_assets( +pub fn read_agent_assets( model_dir: &Path, agent_ids: &HashSet<Rc<str>>, processes: &HashMap<Rc<str>, Rc<Process>>,