Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions src/asset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -975,7 +975,7 @@ mod tests {
parameters: process_parameter_map,
regions: indexset! {region_id.clone()},
primary_output: Some(commodity_id.clone()),
years: vec![2020],
years: 2020..=2020,
activity_limits,
capacity_to_activity: ActivityPerCapacity(1.0),
});
Expand Down Expand Up @@ -1066,7 +1066,7 @@ mod tests {
let process = Rc::new(Process {
id: "process1".into(),
description: "Description".into(),
years: vec![2010, 2020],
years: 2010..=2020,
activity_limits,
flows,
parameters: process_parameter_map,
Expand Down Expand Up @@ -1121,7 +1121,7 @@ mod tests {
Process {
id: "process1".into(),
description: "Description".into(),
years: vec![2010, 2020],
years: 2010..=2020,
activity_limits,
flows,
parameters: process_parameter_map,
Expand Down
6 changes: 1 addition & 5 deletions src/fixture.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,11 +150,7 @@ pub fn process(
) -> Process {
let milestone_years = vec![2010, 2015, 2020];
// The process start year is before the base year
let years = vec![2008, 2009]
.iter()
.chain(&milestone_years)
.cloned()
.collect();
let years = 2008..=*milestone_years.last().unwrap();

// Create maps with (empty) entries for every region/year combo
let activity_limits = iproduct!(region_ids.iter(), milestone_years.iter())
Expand Down
2 changes: 1 addition & 1 deletion src/input/agent/search_space.rs
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ mod tests {
let process = Process {
id: id.clone(),
description: "Description".into(),
years: vec![2010, 2020],
years: 2010..=2020,
activity_limits: ProcessActivityLimitsMap::new(),
flows: ProcessFlowsMap::new(),
parameters: ProcessParameterMap::new(),
Expand Down
27 changes: 26 additions & 1 deletion src/input/asset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use crate::id::IDCollection;
use crate::process::ProcessMap;
use crate::region::RegionID;
use crate::units::Capacity;
use anyhow::{Context, Result};
use anyhow::{Context, Result, ensure};
use indexmap::IndexSet;
use itertools::Itertools;
use serde::Deserialize;
Expand Down Expand Up @@ -78,6 +78,31 @@ where
.with_context(|| format!("Invalid process ID: {}", &asset.process_id))?;
let region_id = region_ids.get_id(&asset.region_id)?;

// Validate commission year. It should be within the process valid range...
ensure!(
process.years.contains(&asset.commission_year),
"Agent {} has asset with commission year {}, not within process {} commission years: {:?}",
asset.agent_id,
asset.commission_year,
asset.process_id,
process.years
);
// ... and also have associated process parameters and flows
ensure!(
process.parameters.contains_key(&(region_id.clone(), asset.commission_year)),
"Parameters for process {} do not contain entry for year {}, required for asset in agent {}",
asset.process_id,
asset.commission_year,
asset.agent_id,
);
ensure!(
process.flows.contains_key(&(region_id.clone(), asset.commission_year)),
"Flows for process {} do not contain entry for year {}, required for asset in agent {}",
asset.process_id,
asset.commission_year,
asset.agent_id,
);
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I like this block a lot, but aren't we already doing these checks elsewhere?

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, sort of, when building the commodity graph for the model. I just thought it made sense to catch issues even earlier, when processing the file. Happy to remove it if you think it is redundant, but I don't think it does any harm.


Asset::new_future_with_max_decommission(
agent_id.clone(),
Rc::clone(process),
Expand Down
39 changes: 19 additions & 20 deletions src/input/process.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use crate::time_slice::TimeSliceInfo;
use crate::units::ActivityPerCapacity;
use anyhow::{Context, Ok, Result, ensure};
use indexmap::IndexSet;
use itertools::chain;
use log::warn;
use serde::Deserialize;
use std::path::Path;
use std::rc::Rc;
Expand Down Expand Up @@ -57,12 +57,11 @@ pub fn read_processes(
time_slice_info: &TimeSliceInfo,
milestone_years: &[u32],
) -> Result<ProcessMap> {
let base_year = milestone_years[0];
let mut processes = read_processes_file(model_dir, milestone_years, region_ids, commodities)?;
let mut activity_limits =
read_process_availabilities(model_dir, &processes, time_slice_info, base_year)?;
read_process_availabilities(model_dir, &processes, time_slice_info, milestone_years)?;
let mut flows = read_process_flows(model_dir, &mut processes, commodities, milestone_years)?;
let mut parameters = read_process_parameters(model_dir, &processes, base_year)?;
let mut parameters = read_process_parameters(model_dir, &processes, milestone_years)?;

// Add data to Process objects
for (id, process) in &mut processes {
Expand Down Expand Up @@ -101,29 +100,29 @@ where
{
let mut processes = ProcessMap::new();
for process_raw in iter {
let start_year = process_raw.start_year.unwrap_or(milestone_years[0]);
let end_year = process_raw
.end_year
.unwrap_or(*milestone_years.last().unwrap());
let start_year = process_raw.start_year.unwrap_or_else(|| {
warn!(
"Using default start year {} for process {}.",
milestone_years[0], process_raw.id
);
milestone_years[0]
});
let end_year = process_raw.end_year.unwrap_or_else(|| {
warn!(
"Using default end year {} for process {}.",
milestone_years.last().unwrap(),
process_raw.id
);
*milestone_years.last().unwrap()
});

// Check year range is valid
ensure!(
start_year <= end_year,
"Error in parameter for process {}: start_year > end_year",
process_raw.id
);

// Select process years. It is possible for assets to have been commissioned before the
// simulation's time horizon, so assume that all years >=start_year and <base year are valid
// too.
let years = chain(
start_year..milestone_years[0],
milestone_years
.iter()
.copied()
.filter(|year| (start_year..=end_year).contains(year)),
)
.collect();
let years = start_year..=end_year;

// Parse region ID
let regions = parse_region_str(&process_raw.regions, region_ids)?;
Expand Down
29 changes: 14 additions & 15 deletions src/input/process/availability.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ enum LimitType {
/// * `model_dir` - Folder containing model configuration files
/// * `processes` - Map of processes
/// * `time_slice_info` - Information about seasons and times of day
/// * `base_year` - First milestone year of simulation
/// * `milestone_years` - Milestone years of simulation
///
/// # Returns
///
Expand All @@ -85,15 +85,15 @@ pub fn read_process_availabilities(
model_dir: &Path,
processes: &ProcessMap,
time_slice_info: &TimeSliceInfo,
base_year: u32,
milestone_years: &[u32],
) -> Result<HashMap<ProcessID, ProcessActivityLimitsMap>> {
let file_path = model_dir.join(PROCESS_AVAILABILITIES_FILE_NAME);
let process_availabilities_csv = read_csv(&file_path)?;
read_process_availabilities_from_iter(
process_availabilities_csv,
processes,
time_slice_info,
base_year,
milestone_years,
)
.with_context(|| input_err_msg(&file_path))
}
Expand All @@ -105,7 +105,7 @@ pub fn read_process_availabilities(
/// * `iter` - Iterator of raw process availability records
/// * `processes` - Map of processes
/// * `time_slice_info` - Information about seasons and times of day
/// * `base_year` - First milestone year of simulation
/// * `milestone_years` - Milestone years of simulation
///
/// # Returns
///
Expand All @@ -115,7 +115,7 @@ fn read_process_availabilities_from_iter<I>(
iter: I,
processes: &ProcessMap,
time_slice_info: &TimeSliceInfo,
base_year: u32,
milestone_years: &[u32],
) -> Result<HashMap<ProcessID, ProcessActivityLimitsMap>>
where
I: Iterator<Item = ProcessAvailabilityRaw>,
Expand All @@ -137,9 +137,9 @@ where
})?;

// Get years
let process_years = &process.years;
let process_years: Vec<u32> = process.years.clone().collect();
let record_years =
parse_year_str(&record.commission_years, process_years).with_context(|| {
parse_year_str(&record.commission_years, &process_years).with_context(|| {
format!("Invalid year for process {id}. Valid years are {process_years:?}")
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Probably fine, although this could be a lot of years, which will make for a long message. Maybe better to give the range of years rather than the full list?

I also wonder if parse_year_str could be simplified by taking a range instead of a list of years?

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I thought about this, but parse_year_str is used in other places with the milestone years passed as the valid years, which is a discrete vector. In Python, handling these two possible inputs would be a piece of cake, but here I don't see a clear way forward other than duplicating the function — one version taking a vector as input and another taking a range — updating the internal logic as needed.

Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hmm, good point. I'd say it's best as it is, then.

})?;

Expand All @@ -162,7 +162,7 @@ where
}
}

validate_activity_limits_maps(&map, processes, time_slice_info, base_year)?;
validate_activity_limits_maps(&map, processes, time_slice_info, milestone_years)?;

Ok(map)
}
Expand All @@ -172,15 +172,15 @@ fn validate_activity_limits_maps(
all_availabilities: &HashMap<ProcessID, ProcessActivityLimitsMap>,
processes: &ProcessMap,
time_slice_info: &TimeSliceInfo,
base_year: u32,
milestone_years: &[u32],
) -> Result<()> {
for (process_id, process) in processes {
// A map of maps: the outer map is keyed by region and year; the inner one by time slice
let map_for_process = all_availabilities
.get(process_id)
.with_context(|| format!("Missing availabilities for process {process_id}"))?;

check_missing_milestone_years(process, map_for_process, base_year)?;
check_missing_milestone_years(process, map_for_process, milestone_years)?;
check_missing_time_slices(process, map_for_process, time_slice_info)?;
}

Expand All @@ -195,13 +195,12 @@ fn validate_activity_limits_maps(
fn check_missing_milestone_years(
process: &Process,
map_for_process: &ProcessActivityLimitsMap,
base_year: u32,
milestone_years: &[u32],
) -> Result<()> {
let process_milestone_years = process
.years
.iter()
.copied()
.filter(|&year| year >= base_year);
.clone()
.filter(|year| milestone_years.contains(year));
let mut missing = Vec::new();
for (region_id, year) in iproduct!(&process.regions, process_milestone_years) {
if !map_for_process.contains_key(&(region_id.clone(), year)) {
Expand All @@ -227,7 +226,7 @@ fn check_missing_time_slices(
time_slice_info: &TimeSliceInfo,
) -> Result<()> {
let mut missing = Vec::new();
for (region_id, &year) in iproduct!(&process.regions, &process.years) {
for (region_id, year) in iproduct!(&process.regions, process.years.clone()) {
if let Some(map_for_region_year) = map_for_process.get(&(region_id.clone(), year)) {
// There are at least some entries for this region/year combo; check if there are
// any time slices not covered
Expand Down
34 changes: 17 additions & 17 deletions src/input/process/flow.rs
Original file line number Diff line number Diff line change
Expand Up @@ -97,9 +97,9 @@ where
})?;

// Get years
let process_years = &process.years;
let process_years: Vec<u32> = process.years.clone().collect();
let record_years =
parse_year_str(&record.commission_years, process_years).with_context(|| {
parse_year_str(&record.commission_years, &process_years).with_context(|| {
format!("Invalid year for process {id}. Valid years are {process_years:?}")
})?;

Expand Down Expand Up @@ -266,21 +266,21 @@ fn validate_secondary_flows(
.collect();

// Get the non-primary io flows for all years, if any, arranged by (commodity, region)
let iter = iproduct!(process.years.iter(), process.regions.iter());
let iter = iproduct!(process.years.clone(), process.regions.iter());
let mut flows: HashMap<(CommodityID, RegionID), Vec<&ProcessFlow>> = HashMap::new();
let mut number_of_years: HashMap<(CommodityID, RegionID), u32> = HashMap::new();
for (&year, region_id) in iter {
let flow = map[&(region_id.clone(), year)]
.iter()
.filter_map(|(commodity_id, flow)| {
for (year, region_id) in iter {
if let Some(commodity_map) = map.get(&(region_id.clone(), year)) {
let flow = commodity_map.iter().filter_map(|(commodity_id, flow)| {
(Some(commodity_id) != process.primary_output.as_ref())
.then_some(((commodity_id.clone(), region_id.clone()), flow))
});

for (key, value) in flow {
flows.entry(key.clone()).or_default().push(value);
if required_years.contains(&&year) {
*number_of_years.entry(key).or_default() += 1;
for (key, value) in flow {
flows.entry(key.clone()).or_default().push(value);
if required_years.contains(&&year) {
*number_of_years.entry(key).or_default() += 1;
}
}
}
}
Expand Down Expand Up @@ -338,15 +338,15 @@ mod tests {
fn build_maps<I>(
process: Process,
flows: I,
years: Option<&Vec<u32>>,
years: Option<Vec<u32>>,
) -> (ProcessMap, HashMap<ProcessID, ProcessFlowsMap>)
where
I: Clone + Iterator<Item = (CommodityID, ProcessFlow)>,
{
let years = years.unwrap_or(&process.years);
let years = years.unwrap_or(process.years.clone().collect());
let map: Rc<IndexMap<_, _>> = Rc::new(flows.clone().collect());
let flows_inner = iproduct!(&process.regions, years)
.map(|(region_id, year)| ((region_id.clone(), *year), map.clone()))
.map(|(region_id, year)| ((region_id.clone(), year), map.clone()))
.collect();
let flows = hash_map! {process.id.clone() => flows_inner};
let processes = iter::once((process.id.clone(), process.into())).collect();
Expand Down Expand Up @@ -379,7 +379,7 @@ mod tests {
#[from(sed_commodity)] commodity2: Commodity,
process: Process,
) {
let milestone_years = vec![2010, 2020];
let milestone_years: Vec<u32> = vec![2010, 2020];
let commodity1 = Rc::new(commodity1);
let commodity2 = Rc::new(commodity2);
let (mut processes, flows_map) = build_maps(
Expand Down Expand Up @@ -471,7 +471,7 @@ mod tests {
(commodity2.id.clone(), flow(commodity2.clone(), 2.0)),
]
.into_iter(),
Some(&flow_years),
Some(flow_years),
);
let res =
validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years);
Expand All @@ -497,7 +497,7 @@ mod tests {
(commodity2.id.clone(), flow(commodity2.clone(), -2.0)),
]
.into_iter(),
Some(&milestone_years),
Some(milestone_years.clone()),
);
assert!(
validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
Expand Down
Loading