From ff1c5abee81d12359215e99d864c1fd79bad8803 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 21:37:33 +0800 Subject: [PATCH 01/15] =?UTF-8?q?refactor:=20trim=20MaximumIndependentSet?= =?UTF-8?q?=20API=20=E2=80=94=20remove=20delegation=20methods?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove delegation methods (num_vertices, num_edges, edges, has_edge, set_weights, from_graph_unit_weights) from MaximumIndependentSet so callers go through .graph() directly. Rename weights_ref() to weights() returning &[W] instead of &Vec. Co-Authored-By: Claude Opus 4.6 --- .../reduction_maximumindependentset_to_ilp.rs | 6 +-- ...imumindependentset_to_maximumsetpacking.rs | 6 +-- ...mumindependentset_to_minimumvertexcover.rs | 6 +-- ...reduction_maximumindependentset_to_qubo.rs | 6 +-- ...mumvertexcover_to_maximumindependentset.rs | 8 ++-- ...satisfiability_to_maximumindependentset.rs | 10 ++--- src/models/graph/maximum_independent_set.rs | 44 +------------------ src/rules/maximumindependentset_gridgraph.rs | 10 ++--- src/rules/maximumindependentset_ilp.rs | 7 +-- ...maximumindependentset_maximumsetpacking.rs | 8 ++-- src/rules/maximumindependentset_qubo.rs | 8 ++-- src/rules/maximumindependentset_triangular.rs | 6 +-- ...inimumvertexcover_maximumindependentset.rs | 8 ++-- src/topology/mod.rs | 2 +- src/unit_tests/graph_models.rs | 25 +++++------ src/unit_tests/io.rs | 10 ++--- .../models/graph/maximum_independent_set.rs | 39 ++++++++-------- .../rules/maximumindependentset_gridgraph.rs | 6 +-- ...maximumindependentset_maximumsetpacking.rs | 4 +- .../rules/maximumindependentset_triangular.rs | 4 +- .../rules/sat_maximumindependentset.rs | 23 +++++----- tests/suites/integration.rs | 3 +- tests/suites/reductions.rs | 16 +++---- 23 files changed, 112 insertions(+), 153 deletions(-) diff --git a/examples/reduction_maximumindependentset_to_ilp.rs b/examples/reduction_maximumindependentset_to_ilp.rs index 
a7e96ba63..3b1d54b2a 100644 --- a/examples/reduction_maximumindependentset_to_ilp.rs +++ b/examples/reduction_maximumindependentset_to_ilp.rs @@ -16,7 +16,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // 1. Create IS instance: Petersen graph @@ -85,8 +85,8 @@ pub fn run() { problem: MaximumIndependentSet::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": is.num_vertices(), - "num_edges": is.num_edges(), + "num_vertices": is.graph().num_vertices(), + "num_edges": is.graph().num_edges(), "edges": edges, }), }, diff --git a/examples/reduction_maximumindependentset_to_maximumsetpacking.rs b/examples/reduction_maximumindependentset_to_maximumsetpacking.rs index e7c64f194..0ba36348a 100644 --- a/examples/reduction_maximumindependentset_to_maximumsetpacking.rs +++ b/examples/reduction_maximumindependentset_to_maximumsetpacking.rs @@ -18,7 +18,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { println!("\n=== Independent Set -> Set Packing Reduction ===\n"); @@ -108,8 +108,8 @@ pub fn run() { problem: MaximumIndependentSet::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": source.num_vertices(), - "num_edges": source.num_edges(), + "num_vertices": source.graph().num_vertices(), + "num_edges": source.graph().num_edges(), "edges": edges, }), }, diff --git a/examples/reduction_maximumindependentset_to_minimumvertexcover.rs b/examples/reduction_maximumindependentset_to_minimumvertexcover.rs index 22adca730..987d08d0b 100644 --- a/examples/reduction_maximumindependentset_to_minimumvertexcover.rs +++ 
b/examples/reduction_maximumindependentset_to_minimumvertexcover.rs @@ -17,7 +17,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // 1. Create IS instance: Petersen graph @@ -76,8 +76,8 @@ pub fn run() { problem: MaximumIndependentSet::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": is.num_vertices(), - "num_edges": is.num_edges(), + "num_vertices": is.graph().num_vertices(), + "num_edges": is.graph().num_edges(), "edges": edges, }), }, diff --git a/examples/reduction_maximumindependentset_to_qubo.rs b/examples/reduction_maximumindependentset_to_qubo.rs index 22e2c934b..30b4bc078 100644 --- a/examples/reduction_maximumindependentset_to_qubo.rs +++ b/examples/reduction_maximumindependentset_to_qubo.rs @@ -27,7 +27,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { println!("=== Independent Set -> QUBO Reduction ===\n"); @@ -95,8 +95,8 @@ pub fn run() { problem: MaximumIndependentSet::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": is.num_vertices(), - "num_edges": is.num_edges(), + "num_vertices": is.graph().num_vertices(), + "num_edges": is.graph().num_edges(), "edges": edges, }), }, diff --git a/examples/reduction_minimumvertexcover_to_maximumindependentset.rs b/examples/reduction_minimumvertexcover_to_maximumindependentset.rs index f48d72fbc..3f0256ba4 100644 --- a/examples/reduction_minimumvertexcover_to_maximumindependentset.rs +++ b/examples/reduction_minimumvertexcover_to_maximumindependentset.rs @@ -18,7 +18,7 @@ use problemreductions::export::*; use 
problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // Petersen graph: 10 vertices, 15 edges, VC=6 @@ -67,7 +67,7 @@ pub fn run() { // Export JSON let vc_edges = vc.edges(); - let is_edges = is.edges(); + let is_edges = is.graph().edges(); let source_variant = variant_to_map(MinimumVertexCover::::variant()); let target_variant = variant_to_map(MaximumIndependentSet::::variant()); let overhead = lookup_overhead( @@ -92,8 +92,8 @@ pub fn run() { problem: MaximumIndependentSet::::NAME.to_string(), variant: target_variant, instance: serde_json::json!({ - "num_vertices": is.num_vertices(), - "num_edges": is.num_edges(), + "num_vertices": is.graph().num_vertices(), + "num_edges": is.graph().num_edges(), "edges": is_edges, }), }, diff --git a/examples/reduction_satisfiability_to_maximumindependentset.rs b/examples/reduction_satisfiability_to_maximumindependentset.rs index 74ed27d01..6b99d0777 100644 --- a/examples/reduction_satisfiability_to_maximumindependentset.rs +++ b/examples/reduction_satisfiability_to_maximumindependentset.rs @@ -15,7 +15,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // 1. 
Create SAT instance: 5-variable, 7-clause 3-SAT formula @@ -53,8 +53,8 @@ pub fn run() { ); println!( "Target: MaximumIndependentSet with {} vertices, {} edges", - is.num_vertices(), - is.num_edges() + is.graph().num_vertices(), + is.graph().num_edges() ); println!(" Each literal occurrence becomes a vertex."); println!(" Edges connect literals within the same clause (clique)"); @@ -127,8 +127,8 @@ pub fn run() { problem: MaximumIndependentSet::::NAME.to_string(), variant: target_variant, instance: serde_json::json!({ - "num_vertices": is.num_vertices(), - "num_edges": is.num_edges(), + "num_vertices": is.graph().num_vertices(), + "num_edges": is.graph().num_edges(), }), }, overhead: overhead_to_json(&overhead), diff --git a/src/models/graph/maximum_independent_set.rs b/src/models/graph/maximum_independent_set.rs index 2453e590d..64c3742c4 100644 --- a/src/models/graph/maximum_independent_set.rs +++ b/src/models/graph/maximum_independent_set.rs @@ -97,56 +97,16 @@ impl MaximumIndependentSet { Self { graph, weights } } - /// Create an Independent Set problem from an existing graph with unit weights. - pub fn from_graph_unit_weights(graph: G) -> Self - where - W: From, - { - let weights = vec![W::from(1); graph.num_vertices()]; - Self { graph, weights } - } - /// Get a reference to the underlying graph. pub fn graph(&self) -> &G { &self.graph } - /// Get the number of vertices. - pub fn num_vertices(&self) -> usize { - self.graph.num_vertices() - } - - /// Get the number of edges. - pub fn num_edges(&self) -> usize { - self.graph.num_edges() - } - - /// Get the edges as a list of (u, v) pairs. - pub fn edges(&self) -> Vec<(usize, usize)> { - self.graph.edges() - } - - /// Check if two vertices are adjacent. - pub fn has_edge(&self, u: usize, v: usize) -> bool { - self.graph.has_edge(u, v) - } - - /// Get a reference to the weights vector. - pub fn weights_ref(&self) -> &Vec { + /// Get a reference to the weights. 
+ pub fn weights(&self) -> &[W] { &self.weights } - /// Set new weights for the problem. - pub fn set_weights(&mut self, weights: Vec) { - assert_eq!(weights.len(), self.graph.num_vertices()); - self.weights = weights; - } - - /// Get the weights for the problem. - pub fn weights(&self) -> Vec { - self.weights.clone() - } - /// Check if the problem has non-uniform weights. pub fn is_weighted(&self) -> bool where diff --git a/src/rules/maximumindependentset_gridgraph.rs b/src/rules/maximumindependentset_gridgraph.rs index 92cc6cf40..e96ecda41 100644 --- a/src/rules/maximumindependentset_gridgraph.rs +++ b/src/rules/maximumindependentset_gridgraph.rs @@ -9,7 +9,7 @@ use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::rules::unitdiskmapping::ksg; -use crate::topology::{KingsSubgraph, SimpleGraph, UnitDiskGraph}; +use crate::topology::{Graph, KingsSubgraph, SimpleGraph, UnitDiskGraph}; /// Result of reducing MIS on SimpleGraph to MIS on KingsSubgraph. 
#[derive(Debug, Clone)] @@ -45,8 +45,8 @@ impl ReduceTo> type Result = ReductionISSimpleToGrid; fn reduce_to(&self) -> Self::Result { - let n = self.num_vertices(); - let edges = self.edges(); + let n = self.graph().num_vertices(); + let edges = self.graph().edges(); let result = ksg::map_unweighted(n, &edges); let weights = result.node_weights.clone(); let grid = result.to_kings_subgraph(); @@ -92,8 +92,8 @@ impl ReduceTo> type Result = ReductionISUnitDiskToGrid; fn reduce_to(&self) -> Self::Result { - let n = self.num_vertices(); - let edges = self.edges(); + let n = self.graph().num_vertices(); + let edges = Graph::edges(self.graph()); let result = ksg::map_unweighted(n, &edges); let weights = result.node_weights.clone(); let grid = result.to_kings_subgraph(); diff --git a/src/rules/maximumindependentset_ilp.rs b/src/rules/maximumindependentset_ilp.rs index df53f0245..220cd7e76 100644 --- a/src/rules/maximumindependentset_ilp.rs +++ b/src/rules/maximumindependentset_ilp.rs @@ -11,7 +11,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; /// Result of reducing MaximumIndependentSet to ILP. 
/// @@ -53,7 +53,7 @@ impl ReduceTo for MaximumIndependentSet { type Result = ReductionISToILP; fn reduce_to(&self) -> Self::Result { - let num_vars = self.num_vertices(); + let num_vars = self.graph().num_vertices(); // All variables are binary (0 or 1) let bounds = vec![VarBounds::binary(); num_vars]; @@ -61,6 +61,7 @@ impl ReduceTo for MaximumIndependentSet { // Constraints: x_u + x_v <= 1 for each edge (u, v) // This ensures at most one endpoint of each edge is selected let constraints: Vec = self + .graph() .edges() .into_iter() .map(|(u, v)| LinearConstraint::le(vec![(u, 1.0), (v, 1.0)], 1.0)) @@ -68,7 +69,7 @@ impl ReduceTo for MaximumIndependentSet { // Objective: maximize sum of w_i * x_i (weighted sum of selected vertices) let objective: Vec<(usize, f64)> = self - .weights_ref() + .weights() .iter() .enumerate() .map(|(i, &w)| (i, w as f64)) diff --git a/src/rules/maximumindependentset_maximumsetpacking.rs b/src/rules/maximumindependentset_maximumsetpacking.rs index 13d79bfe8..0d3ab17a4 100644 --- a/src/rules/maximumindependentset_maximumsetpacking.rs +++ b/src/rules/maximumindependentset_maximumsetpacking.rs @@ -9,7 +9,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; use crate::types::WeightElement; use std::collections::HashSet; @@ -48,8 +48,8 @@ impl ReduceTo> for MaximumIndependentSet; fn reduce_to(&self) -> Self::Result { - let edges = self.edges(); - let n = self.num_vertices(); + let edges = self.graph().edges(); + let n = self.graph().num_vertices(); // For each vertex, collect the indices of its incident edges let mut sets: Vec> = vec![Vec::new(); n]; @@ -58,7 +58,7 @@ impl ReduceTo> for MaximumIndependentSet> for MaximumIndependentSet { type Result = ReductionISToQUBO; fn reduce_to(&self) -> Self::Result { - let n = self.num_vertices(); - let edges = self.edges(); - let 
weights = self.weights_ref(); + let n = self.graph().num_vertices(); + let edges = self.graph().edges(); + let weights = self.weights(); let total_weight: f64 = weights.iter().map(|&w| w as f64).sum(); let penalty = 1.0 + total_weight; diff --git a/src/rules/maximumindependentset_triangular.rs b/src/rules/maximumindependentset_triangular.rs index acf1a2b7b..c4a4524e2 100644 --- a/src/rules/maximumindependentset_triangular.rs +++ b/src/rules/maximumindependentset_triangular.rs @@ -11,7 +11,7 @@ use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::rules::unitdiskmapping::ksg; use crate::rules::unitdiskmapping::triangular; -use crate::topology::{SimpleGraph, TriangularSubgraph}; +use crate::topology::{Graph, SimpleGraph, TriangularSubgraph}; /// Result of reducing MIS on SimpleGraph to MIS on TriangularSubgraph. #[derive(Debug, Clone)] @@ -47,8 +47,8 @@ impl ReduceTo> type Result = ReductionISSimpleToTriangular; fn reduce_to(&self) -> Self::Result { - let n = self.num_vertices(); - let edges = self.edges(); + let n = self.graph().num_vertices(); + let edges = self.graph().edges(); let result = triangular::map_weighted(n, &edges); let weights = result.node_weights.clone(); let grid = result.to_triangular_subgraph(); diff --git a/src/rules/minimumvertexcover_maximumindependentset.rs b/src/rules/minimumvertexcover_maximumindependentset.rs index baf706e5d..3f62aae84 100644 --- a/src/rules/minimumvertexcover_maximumindependentset.rs +++ b/src/rules/minimumvertexcover_maximumindependentset.rs @@ -7,7 +7,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; use crate::types::WeightElement; /// Result of reducing MaximumIndependentSet to MinimumVertexCover. 
@@ -47,9 +47,9 @@ impl ReduceTo> for MaximumIndependentSet Self::Result { let target = MinimumVertexCover::with_weights( - self.num_vertices(), - self.edges(), - self.weights_ref().clone(), + self.graph().num_vertices(), + self.graph().edges(), + self.weights().to_vec(), ); ReductionISToVC { target } } diff --git a/src/topology/mod.rs b/src/topology/mod.rs index 7d1eaf631..6935e525e 100644 --- a/src/topology/mod.rs +++ b/src/topology/mod.rs @@ -16,7 +16,7 @@ //! //! // Problems work with any graph type - SimpleGraph by default //! let simple_graph_problem: MaximumIndependentSet = MaximumIndependentSet::new(3, vec![(0, 1)]); -//! assert_eq!(simple_graph_problem.num_vertices(), 3); +//! assert_eq!(simple_graph_problem.graph().num_vertices(), 3); //! //! // Different graph topologies enable different reduction algorithms //! // (UnitDiskGraph example would require specific constructors) diff --git a/src/unit_tests/graph_models.rs b/src/unit_tests/graph_models.rs index 193b8f261..c32877023 100644 --- a/src/unit_tests/graph_models.rs +++ b/src/unit_tests/graph_models.rs @@ -8,7 +8,7 @@ use crate::models::graph::{ MinimumVertexCover, }; use crate::prelude::*; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; use crate::types::{Direction, SolutionSize}; use crate::variant::{K1, K2, K3, K4}; @@ -24,15 +24,15 @@ mod maximum_independent_set { fn test_creation() { let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - assert_eq!(problem.num_vertices(), 4); - assert_eq!(problem.num_edges(), 3); + assert_eq!(problem.graph().num_vertices(), 4); + assert_eq!(problem.graph().num_edges(), 3); assert_eq!(problem.num_variables(), 4); } #[test] fn test_with_weights() { let problem = MaximumIndependentSet::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); 
assert!(problem.is_weighted()); } @@ -45,10 +45,10 @@ mod maximum_independent_set { #[test] fn test_has_edge() { let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); - assert!(problem.has_edge(0, 1)); - assert!(problem.has_edge(1, 0)); // Undirected - assert!(problem.has_edge(1, 2)); - assert!(!problem.has_edge(0, 2)); + assert!(problem.graph().has_edge(0, 1)); + assert!(problem.graph().has_edge(1, 0)); // Undirected + assert!(problem.graph().has_edge(1, 2)); + assert!(!problem.graph().has_edge(0, 2)); } #[test] @@ -161,17 +161,16 @@ mod maximum_independent_set { #[test] fn test_edges() { let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); - let edges = problem.edges(); + let edges = problem.graph().edges(); assert_eq!(edges.len(), 2); assert!(edges.contains(&(0, 1)) || edges.contains(&(1, 0))); assert!(edges.contains(&(2, 3)) || edges.contains(&(3, 2))); } #[test] - fn test_set_weights() { - let mut problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); - problem.set_weights(vec![5, 10, 15]); - assert_eq!(problem.weights(), vec![5, 10, 15]); + fn test_with_custom_weights() { + let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + assert_eq!(problem.weights().to_vec(), vec![5, 10, 15]); } #[test] diff --git a/src/unit_tests/io.rs b/src/unit_tests/io.rs index 83a7eb4e2..3ae403796 100644 --- a/src/unit_tests/io.rs +++ b/src/unit_tests/io.rs @@ -1,6 +1,6 @@ use super::*; use crate::models::graph::MaximumIndependentSet; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; use std::fs; use std::time::{SystemTime, UNIX_EPOCH}; @@ -18,8 +18,8 @@ fn test_from_json() { let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); let json = to_json(&problem).unwrap(); let restored: MaximumIndependentSet = from_json(&json).unwrap(); - assert_eq!(restored.num_vertices(), 3); - assert_eq!(restored.num_edges(), 2); + assert_eq!(restored.graph().num_vertices(), 3); + 
assert_eq!(restored.graph().num_edges(), 2); } #[test] @@ -47,8 +47,8 @@ fn test_file_roundtrip() { // Read back let restored: MaximumIndependentSet = read_problem(path, FileFormat::Json).unwrap(); - assert_eq!(restored.num_vertices(), 4); - assert_eq!(restored.num_edges(), 3); + assert_eq!(restored.graph().num_vertices(), 4); + assert_eq!(restored.graph().num_edges(), 3); // Cleanup fs::remove_file(path).ok(); diff --git a/src/unit_tests/models/graph/maximum_independent_set.rs b/src/unit_tests/models/graph/maximum_independent_set.rs index db313070a..4daed6bf8 100644 --- a/src/unit_tests/models/graph/maximum_independent_set.rs +++ b/src/unit_tests/models/graph/maximum_independent_set.rs @@ -7,8 +7,8 @@ include!("../../jl_helpers.rs"); #[test] fn test_independent_set_creation() { let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - assert_eq!(problem.num_vertices(), 4); - assert_eq!(problem.num_edges(), 3); + assert_eq!(problem.graph().num_vertices(), 4); + assert_eq!(problem.graph().num_edges(), 3); assert_eq!(problem.dims().len(), 4); } @@ -16,7 +16,7 @@ fn test_independent_set_creation() { fn test_independent_set_with_weights() { let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); assert!(problem.is_weighted()); } @@ -29,10 +29,10 @@ fn test_independent_set_unweighted() { #[test] fn test_has_edge() { let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); - assert!(problem.has_edge(0, 1)); - assert!(problem.has_edge(1, 0)); // Undirected - assert!(problem.has_edge(1, 2)); - assert!(!problem.has_edge(0, 2)); + assert!(problem.graph().has_edge(0, 1)); + assert!(problem.graph().has_edge(1, 0)); // Undirected + assert!(problem.graph().has_edge(1, 2)); + assert!(!problem.graph().has_edge(0, 2)); } #[test] @@ -61,17 +61,16 @@ fn test_direction() { #[test] fn test_edges() { let problem = 
MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); - let edges = problem.edges(); + let edges = problem.graph().edges(); assert_eq!(edges.len(), 2); assert!(edges.contains(&(0, 1)) || edges.contains(&(1, 0))); assert!(edges.contains(&(2, 3)) || edges.contains(&(3, 2))); } #[test] -fn test_set_weights() { - let mut problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); - problem.set_weights(vec![5, 10, 15]); - assert_eq!(problem.weights(), vec![5, 10, 15]); +fn test_with_custom_weights() { + let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + assert_eq!(problem.weights().to_vec(), vec![5, 10, 15]); } #[test] @@ -79,16 +78,16 @@ fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); let problem = MaximumIndependentSet::::from_graph(graph.clone(), vec![1, 2, 3]); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.graph().num_vertices(), 3); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); } #[test] -fn test_from_graph_unit_weights() { +fn test_from_graph_with_unit_weights() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = MaximumIndependentSet::::from_graph_unit_weights(graph); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.weights(), vec![1, 1, 1]); + let problem = MaximumIndependentSet::::from_graph(graph, vec![1, 1, 1]); + assert_eq!(problem.graph().num_vertices(), 3); + assert_eq!(problem.weights().to_vec(), vec![1, 1, 1]); } #[test] @@ -100,10 +99,10 @@ fn test_graph_accessor() { } #[test] -fn test_weights_ref() { +fn test_weights() { let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); - assert_eq!(problem.weights_ref(), &vec![5, 10, 15]); + assert_eq!(problem.weights(), &[5, 10, 15]); } #[test] diff --git a/src/unit_tests/rules/maximumindependentset_gridgraph.rs b/src/unit_tests/rules/maximumindependentset_gridgraph.rs index 93a0eda22..f2d681b38 100644 --- 
a/src/unit_tests/rules/maximumindependentset_gridgraph.rs +++ b/src/unit_tests/rules/maximumindependentset_gridgraph.rs @@ -1,7 +1,7 @@ use super::*; use crate::models::graph::MaximumIndependentSet; use crate::solvers::BruteForce; -use crate::topology::{KingsSubgraph, SimpleGraph, UnitDiskGraph}; +use crate::topology::{Graph, KingsSubgraph, SimpleGraph, UnitDiskGraph}; #[test] fn test_mis_simple_to_grid_closed_loop() { @@ -11,7 +11,7 @@ fn test_mis_simple_to_grid_closed_loop() { let target = result.target_problem(); // The grid graph should have more vertices than the original - assert!(target.num_vertices() > 3); + assert!(target.graph().num_vertices() > 3); // Find best solution on the grid graph using brute force let solver = BruteForce::new(); @@ -56,7 +56,7 @@ fn test_mis_unitdisk_to_grid_closed_loop() { let result = ReduceTo::>::reduce_to(&problem); let target = result.target_problem(); - assert!(target.num_vertices() >= 3); + assert!(target.graph().num_vertices() >= 3); let solver = BruteForce::new(); let grid_solutions = solver.find_all_best(target); diff --git a/src/unit_tests/rules/maximumindependentset_maximumsetpacking.rs b/src/unit_tests/rules/maximumindependentset_maximumsetpacking.rs index 9d7c0f661..7743a5f55 100644 --- a/src/unit_tests/rules/maximumindependentset_maximumsetpacking.rs +++ b/src/unit_tests/rules/maximumindependentset_maximumsetpacking.rs @@ -39,7 +39,7 @@ fn test_disjoint_sets() { let is_problem = reduction.target_problem(); // No edges in the intersection graph - assert_eq!(is_problem.num_edges(), 0); + assert_eq!(is_problem.graph().num_edges(), 0); } #[test] @@ -60,7 +60,7 @@ fn test_reduction_structure() { let is = reduction2.target_problem(); // IS should have same number of vertices as sets in SP - assert_eq!(is.num_vertices(), 2); + assert_eq!(is.graph().num_vertices(), 2); } #[test] diff --git a/src/unit_tests/rules/maximumindependentset_triangular.rs b/src/unit_tests/rules/maximumindependentset_triangular.rs index 
dd240be36..1b11f1c2c 100644 --- a/src/unit_tests/rules/maximumindependentset_triangular.rs +++ b/src/unit_tests/rules/maximumindependentset_triangular.rs @@ -10,10 +10,10 @@ fn test_mis_simple_to_triangular_closed_loop() { let target = result.target_problem(); // The triangular graph should have more vertices than the original - assert!(target.num_vertices() > 3); + assert!(target.graph().num_vertices() > 3); // Map a trivial zero solution back to verify dimensions - let zero_config = vec![0; target.num_vertices()]; + let zero_config = vec![0; target.graph().num_vertices()]; let original_solution = result.extract_solution(&zero_config); assert_eq!(original_solution.len(), 3); } diff --git a/src/unit_tests/rules/sat_maximumindependentset.rs b/src/unit_tests/rules/sat_maximumindependentset.rs index 494e590a9..5fded92aa 100644 --- a/src/unit_tests/rules/sat_maximumindependentset.rs +++ b/src/unit_tests/rules/sat_maximumindependentset.rs @@ -1,6 +1,7 @@ use super::*; use crate::models::satisfiability::CNFClause; use crate::solvers::BruteForce; +use crate::topology::Graph; use crate::traits::Problem; include!("../jl_helpers.rs"); @@ -48,9 +49,9 @@ fn test_simple_sat_to_is() { let is_problem = reduction.target_problem(); // Should have 1 vertex (one literal) - assert_eq!(is_problem.num_vertices(), 1); + assert_eq!(is_problem.graph().num_vertices(), 1); // No edges (single vertex can't form a clique) - assert_eq!(is_problem.num_edges(), 0); + assert_eq!(is_problem.graph().num_edges(), 0); } #[test] @@ -62,9 +63,9 @@ fn test_two_clause_sat_to_is() { let is_problem = reduction.target_problem(); // Should have 2 vertices - assert_eq!(is_problem.num_vertices(), 2); + assert_eq!(is_problem.graph().num_vertices(), 2); // Should have 1 edge (between x1 and NOT x1) - assert_eq!(is_problem.num_edges(), 1); + assert_eq!(is_problem.graph().num_edges(), 1); // Maximum IS should have size 1 (can't select both) let solver = BruteForce::new(); @@ -110,8 +111,8 @@ fn 
test_clique_edges_in_clause() { let is_problem = reduction.target_problem(); // 3 vertices, 3 edges (complete graph K3) - assert_eq!(is_problem.num_vertices(), 3); - assert_eq!(is_problem.num_edges(), 3); + assert_eq!(is_problem.graph().num_vertices(), 3); + assert_eq!(is_problem.graph().num_edges(), 3); } #[test] @@ -130,8 +131,8 @@ fn test_complement_edges_across_clauses() { let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); - assert_eq!(is_problem.num_vertices(), 3); - assert_eq!(is_problem.num_edges(), 1); // Only the complement edge + assert_eq!(is_problem.graph().num_vertices(), 3); + assert_eq!(is_problem.graph().num_edges(), 1); // Only the complement edge } #[test] @@ -144,7 +145,7 @@ fn test_is_structure() { let is_problem = reduction.target_problem(); // IS should have vertices for literals in clauses - assert_eq!(is_problem.num_vertices(), 4); // 2 + 2 literals + assert_eq!(is_problem.graph().num_vertices(), 4); // 2 + 2 literals } #[test] @@ -154,8 +155,8 @@ fn test_empty_sat() { let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); - assert_eq!(is_problem.num_vertices(), 0); - assert_eq!(is_problem.num_edges(), 0); + assert_eq!(is_problem.graph().num_vertices(), 0); + assert_eq!(is_problem.graph().num_edges(), 0); assert_eq!(reduction.num_clauses(), 0); } diff --git a/tests/suites/integration.rs b/tests/suites/integration.rs index a6c975c01..7bc5fb73d 100644 --- a/tests/suites/integration.rs +++ b/tests/suites/integration.rs @@ -401,8 +401,7 @@ mod weighted_problems { #[test] fn test_weighted_independent_set() { - let mut problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); - problem.set_weights(vec![10, 1, 1]); + let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![10, 1, 1]); let solver = BruteForce::new(); let solutions = solver.find_all_best(&problem); diff --git a/tests/suites/reductions.rs b/tests/suites/reductions.rs index f93485e8d..520c5b0c0 
100644 --- a/tests/suites/reductions.rs +++ b/tests/suites/reductions.rs @@ -4,7 +4,7 @@ //! solutions can be properly extracted through the reduction pipeline. use problemreductions::prelude::*; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; /// Tests for MaximumIndependentSet <-> MinimumVertexCover reductions. mod is_vc_reductions { @@ -46,8 +46,8 @@ mod is_vc_reductions { let is_problem = result.target_problem(); // Same graph structure - assert_eq!(is_problem.num_vertices(), 4); - assert_eq!(is_problem.num_edges(), 3); + assert_eq!(is_problem.graph().num_vertices(), 4); + assert_eq!(is_problem.graph().num_edges(), 3); // Solve the target IS problem let solver = BruteForce::new(); @@ -74,8 +74,8 @@ mod is_vc_reductions { let final_is = back_to_is.target_problem(); // Should have same structure - assert_eq!(final_is.num_vertices(), original.num_vertices()); - assert_eq!(final_is.num_edges(), original.num_edges()); + assert_eq!(final_is.graph().num_vertices(), original.graph().num_vertices()); + assert_eq!(final_is.graph().num_edges(), original.graph().num_edges()); // Solve the final problem let solver = BruteForce::new(); @@ -158,7 +158,7 @@ mod is_sp_reductions { let is_problem = result.target_problem(); // Should have an edge for each pair of overlapping sets (none here) - assert_eq!(is_problem.num_edges(), 0); + assert_eq!(is_problem.graph().num_edges(), 0); // Solve let solver = BruteForce::new(); @@ -792,8 +792,8 @@ mod io_tests { let restored: MaximumIndependentSet = from_json(&json).unwrap(); // Should have same structure - assert_eq!(restored.num_vertices(), original.num_vertices()); - assert_eq!(restored.num_edges(), original.num_edges()); + assert_eq!(restored.graph().num_vertices(), original.graph().num_vertices()); + assert_eq!(restored.graph().num_edges(), original.graph().num_edges()); // Reduce the restored problem let result = ReduceTo::>::reduce_to(&restored); From 
f8a8862ece84307d54b537fb5b7577ee2d54a5db Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 21:50:02 +0800 Subject: [PATCH 02/15] =?UTF-8?q?refactor:=20trim=20MinimumVertexCover=20A?= =?UTF-8?q?PI=20=E2=80=94=20remove=20delegation=20methods?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove num_vertices(), num_edges(), edges(), has_edge(), set_weights(), from_graph_unit_weights(), and cloning weights() from MinimumVertexCover. Rename weights_ref() to weights() returning &[W]. Callers now access graph topology via .graph() and get weights by reference. Co-Authored-By: Claude Opus 4.6 --- ...mumindependentset_to_minimumvertexcover.rs | 6 +-- .../reduction_minimumvertexcover_to_ilp.rs | 8 ++-- ...mumvertexcover_to_maximumindependentset.rs | 6 +-- ...inimumvertexcover_to_minimumsetcovering.rs | 6 +-- .../reduction_minimumvertexcover_to_qubo.rs | 6 +-- src/models/graph/minimum_vertex_cover.rs | 44 +------------------ src/rules/minimumvertexcover_ilp.rs | 7 +-- ...inimumvertexcover_maximumindependentset.rs | 6 +-- .../minimumvertexcover_minimumsetcovering.rs | 8 ++-- src/rules/minimumvertexcover_qubo.rs | 8 ++-- src/unit_tests/graph_models.rs | 14 +++--- .../models/graph/minimum_vertex_cover.rs | 22 +++++----- ...inimumvertexcover_maximumindependentset.rs | 4 +- tests/suites/integration.rs | 3 +- tests/suites/reductions.rs | 8 ++-- 15 files changed, 57 insertions(+), 99 deletions(-) diff --git a/examples/reduction_maximumindependentset_to_minimumvertexcover.rs b/examples/reduction_maximumindependentset_to_minimumvertexcover.rs index 987d08d0b..85d8d0976 100644 --- a/examples/reduction_maximumindependentset_to_minimumvertexcover.rs +++ b/examples/reduction_maximumindependentset_to_minimumvertexcover.rs @@ -69,7 +69,7 @@ pub fn run() { &target_variant, ) .expect("MaximumIndependentSet -> MinimumVertexCover overhead not found"); - let vc_edges = vc.edges(); + let vc_edges = vc.graph().edges(); let data = 
ReductionData { source: ProblemSide { @@ -85,8 +85,8 @@ pub fn run() { problem: MinimumVertexCover::::NAME.to_string(), variant: target_variant, instance: serde_json::json!({ - "num_vertices": vc.num_vertices(), - "num_edges": vc.num_edges(), + "num_vertices": vc.graph().num_vertices(), + "num_edges": vc.graph().num_edges(), "edges": vc_edges, }), }, diff --git a/examples/reduction_minimumvertexcover_to_ilp.rs b/examples/reduction_minimumvertexcover_to_ilp.rs index 70eff45c0..7c765193c 100644 --- a/examples/reduction_minimumvertexcover_to_ilp.rs +++ b/examples/reduction_minimumvertexcover_to_ilp.rs @@ -16,7 +16,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // 1. Create VC instance: Petersen graph (10 vertices, 15 edges), VC=6 @@ -87,9 +87,9 @@ pub fn run() { problem: MinimumVertexCover::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": vc.num_vertices(), - "num_edges": vc.num_edges(), - "edges": vc.edges(), + "num_vertices": vc.graph().num_vertices(), + "num_edges": vc.graph().num_edges(), + "edges": vc.graph().edges(), }), }, target: ProblemSide { diff --git a/examples/reduction_minimumvertexcover_to_maximumindependentset.rs b/examples/reduction_minimumvertexcover_to_maximumindependentset.rs index 3f0256ba4..bbca9f4e2 100644 --- a/examples/reduction_minimumvertexcover_to_maximumindependentset.rs +++ b/examples/reduction_minimumvertexcover_to_maximumindependentset.rs @@ -66,7 +66,7 @@ pub fn run() { println!("\nReduction verified successfully"); // Export JSON - let vc_edges = vc.edges(); + let vc_edges = vc.graph().edges(); let is_edges = is.graph().edges(); let source_variant = variant_to_map(MinimumVertexCover::::variant()); let target_variant = variant_to_map(MaximumIndependentSet::::variant()); @@ -83,8 +83,8 @@ pub fn 
run() { problem: MinimumVertexCover::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": vc.num_vertices(), - "num_edges": vc.num_edges(), + "num_vertices": vc.graph().num_vertices(), + "num_edges": vc.graph().num_edges(), "edges": vc_edges, }), }, diff --git a/examples/reduction_minimumvertexcover_to_minimumsetcovering.rs b/examples/reduction_minimumvertexcover_to_minimumsetcovering.rs index b7ca1b255..07f042a61 100644 --- a/examples/reduction_minimumvertexcover_to_minimumsetcovering.rs +++ b/examples/reduction_minimumvertexcover_to_minimumsetcovering.rs @@ -18,7 +18,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { println!("\n=== Vertex Cover -> Set Covering Reduction ===\n"); @@ -110,8 +110,8 @@ pub fn run() { problem: MinimumVertexCover::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": source.num_vertices(), - "num_edges": source.num_edges(), + "num_vertices": source.graph().num_vertices(), + "num_edges": source.graph().num_edges(), "edges": edges, }), }, diff --git a/examples/reduction_minimumvertexcover_to_qubo.rs b/examples/reduction_minimumvertexcover_to_qubo.rs index 09ada7c14..3ee7293dc 100644 --- a/examples/reduction_minimumvertexcover_to_qubo.rs +++ b/examples/reduction_minimumvertexcover_to_qubo.rs @@ -27,7 +27,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { println!("=== Vertex Covering -> QUBO Reduction ===\n"); @@ -104,8 +104,8 @@ pub fn run() { problem: MinimumVertexCover::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": 
vc.num_vertices(), - "num_edges": vc.num_edges(), + "num_vertices": vc.graph().num_vertices(), + "num_edges": vc.graph().num_edges(), "edges": edges, }), }, diff --git a/src/models/graph/minimum_vertex_cover.rs b/src/models/graph/minimum_vertex_cover.rs index 1025927f7..6ca4b5078 100644 --- a/src/models/graph/minimum_vertex_cover.rs +++ b/src/models/graph/minimum_vertex_cover.rs @@ -80,56 +80,16 @@ impl MinimumVertexCover { Self { graph, weights } } - /// Create a Vertex Covering problem from a graph with unit weights. - pub fn from_graph_unit_weights(graph: G) -> Self - where - W: From, - { - let weights = vec![W::from(1); graph.num_vertices()]; - Self { graph, weights } - } - /// Get a reference to the underlying graph. pub fn graph(&self) -> &G { &self.graph } - /// Get the number of vertices. - pub fn num_vertices(&self) -> usize { - self.graph.num_vertices() - } - - /// Get the number of edges. - pub fn num_edges(&self) -> usize { - self.graph.num_edges() - } - - /// Get the edges as a list of (u, v) pairs. - pub fn edges(&self) -> Vec<(usize, usize)> { - self.graph.edges() - } - - /// Check if two vertices are adjacent. - pub fn has_edge(&self, u: usize, v: usize) -> bool { - self.graph.has_edge(u, v) - } - - /// Get a reference to the weights vector. - pub fn weights_ref(&self) -> &Vec { + /// Get a reference to the weights. + pub fn weights(&self) -> &[W] { &self.weights } - /// Set new weights for the problem. - pub fn set_weights(&mut self, weights: Vec) { - assert_eq!(weights.len(), self.graph.num_vertices()); - self.weights = weights; - } - - /// Get the weights for the problem. - pub fn weights(&self) -> Vec { - self.weights.clone() - } - /// Check if the problem has non-uniform weights. 
pub fn is_weighted(&self) -> bool where diff --git a/src/rules/minimumvertexcover_ilp.rs b/src/rules/minimumvertexcover_ilp.rs index 89c712077..18780fd57 100644 --- a/src/rules/minimumvertexcover_ilp.rs +++ b/src/rules/minimumvertexcover_ilp.rs @@ -11,7 +11,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; /// Result of reducing MinimumVertexCover to ILP. /// @@ -53,7 +53,7 @@ impl ReduceTo for MinimumVertexCover { type Result = ReductionVCToILP; fn reduce_to(&self) -> Self::Result { - let num_vars = self.num_vertices(); + let num_vars = self.graph().num_vertices(); // All variables are binary (0 or 1) let bounds = vec![VarBounds::binary(); num_vars]; @@ -61,6 +61,7 @@ impl ReduceTo for MinimumVertexCover { // Constraints: x_u + x_v >= 1 for each edge (u, v) // This ensures at least one endpoint of each edge is selected let constraints: Vec = self + .graph() .edges() .into_iter() .map(|(u, v)| LinearConstraint::ge(vec![(u, 1.0), (v, 1.0)], 1.0)) @@ -68,7 +69,7 @@ impl ReduceTo for MinimumVertexCover { // Objective: minimize sum of w_i * x_i (weighted sum of selected vertices) let objective: Vec<(usize, f64)> = self - .weights_ref() + .weights() .iter() .enumerate() .map(|(i, &w)| (i, w as f64)) diff --git a/src/rules/minimumvertexcover_maximumindependentset.rs b/src/rules/minimumvertexcover_maximumindependentset.rs index 3f62aae84..2a3e9f2bf 100644 --- a/src/rules/minimumvertexcover_maximumindependentset.rs +++ b/src/rules/minimumvertexcover_maximumindependentset.rs @@ -91,9 +91,9 @@ impl ReduceTo> for MinimumVertexCover Self::Result { let target = MaximumIndependentSet::with_weights( - self.num_vertices(), - self.edges(), - self.weights_ref().clone(), + self.graph().num_vertices(), + self.graph().edges(), + self.weights().to_vec(), ); ReductionVCToIS { target } } diff --git 
a/src/rules/minimumvertexcover_minimumsetcovering.rs b/src/rules/minimumvertexcover_minimumsetcovering.rs index 0e6b82e0d..11e187f91 100644 --- a/src/rules/minimumvertexcover_minimumsetcovering.rs +++ b/src/rules/minimumvertexcover_minimumsetcovering.rs @@ -9,7 +9,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; use crate::types::WeightElement; /// Result of reducing MinimumVertexCover to MinimumSetCovering. @@ -48,9 +48,9 @@ impl ReduceTo> for MinimumVertexCover type Result = ReductionVCToSC; fn reduce_to(&self) -> Self::Result { - let edges = self.edges(); + let edges = self.graph().edges(); let num_edges = edges.len(); - let num_vertices = self.num_vertices(); + let num_vertices = self.graph().num_vertices(); // For each vertex, create a set of edge indices that it covers. // An edge (u, v) with index i is covered by vertex j if j == u or j == v. @@ -65,7 +65,7 @@ impl ReduceTo> for MinimumVertexCover }) .collect(); - let target = MinimumSetCovering::with_weights(num_edges, sets, self.weights_ref().clone()); + let target = MinimumSetCovering::with_weights(num_edges, sets, self.weights().to_vec()); ReductionVCToSC { target } } diff --git a/src/rules/minimumvertexcover_qubo.rs b/src/rules/minimumvertexcover_qubo.rs index 24e176293..b0422e57a 100644 --- a/src/rules/minimumvertexcover_qubo.rs +++ b/src/rules/minimumvertexcover_qubo.rs @@ -12,7 +12,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; /// Result of reducing MinimumVertexCover to QUBO. 
#[derive(Debug, Clone)] @@ -40,9 +40,9 @@ impl ReduceTo> for MinimumVertexCover { type Result = ReductionVCToQUBO; fn reduce_to(&self) -> Self::Result { - let n = self.num_vertices(); - let edges = self.edges(); - let weights = self.weights_ref(); + let n = self.graph().num_vertices(); + let edges = self.graph().edges(); + let weights = self.weights(); let total_weight: f64 = weights.iter().map(|&w| w as f64).sum(); let penalty = 1.0 + total_weight; diff --git a/src/unit_tests/graph_models.rs b/src/unit_tests/graph_models.rs index c32877023..1d96b1d63 100644 --- a/src/unit_tests/graph_models.rs +++ b/src/unit_tests/graph_models.rs @@ -207,15 +207,15 @@ mod minimum_vertex_cover { #[test] fn test_creation() { let problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - assert_eq!(problem.num_vertices(), 4); - assert_eq!(problem.num_edges(), 3); + assert_eq!(problem.graph().num_vertices(), 4); + assert_eq!(problem.graph().num_edges(), 3); assert_eq!(problem.num_variables(), 4); } #[test] fn test_with_weights() { let problem = MinimumVertexCover::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); assert!(problem.is_weighted()); } @@ -354,12 +354,10 @@ mod minimum_vertex_cover { } #[test] - fn test_set_weights() { - let mut problem = MinimumVertexCover::::new(3, vec![(0, 1)]); - assert!(!problem.is_weighted()); // Initially uniform - problem.set_weights(vec![1, 2, 3]); + fn test_with_custom_weights() { + let problem = MinimumVertexCover::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); assert!(problem.is_weighted()); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); } #[test] diff --git a/src/unit_tests/models/graph/minimum_vertex_cover.rs b/src/unit_tests/models/graph/minimum_vertex_cover.rs index e95c1b18f..64bd117d4 100644 --- a/src/unit_tests/models/graph/minimum_vertex_cover.rs +++ 
b/src/unit_tests/models/graph/minimum_vertex_cover.rs @@ -7,8 +7,8 @@ include!("../../jl_helpers.rs"); #[test] fn test_vertex_cover_creation() { let problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - assert_eq!(problem.num_vertices(), 4); - assert_eq!(problem.num_edges(), 3); + assert_eq!(problem.graph().num_vertices(), 4); + assert_eq!(problem.graph().num_edges(), 3); assert_eq!(problem.num_variables(), 4); } @@ -16,7 +16,7 @@ fn test_vertex_cover_creation() { fn test_vertex_cover_with_weights() { let problem = MinimumVertexCover::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); } #[test] @@ -70,16 +70,16 @@ fn test_is_vertex_cover_wrong_len() { #[test] fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = MinimumVertexCover::::from_graph_unit_weights(graph); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.num_edges(), 2); + let problem = MinimumVertexCover::::from_graph(graph, vec![1, 1, 1]); + assert_eq!(problem.graph().num_vertices(), 3); + assert_eq!(problem.graph().num_edges(), 2); } #[test] fn test_from_graph_with_weights() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); let problem = MinimumVertexCover::::from_graph(graph, vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); } #[test] @@ -93,10 +93,10 @@ fn test_graph_accessor() { #[test] fn test_has_edge() { let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); - assert!(problem.has_edge(0, 1)); - assert!(problem.has_edge(1, 0)); // Undirected - assert!(problem.has_edge(1, 2)); - assert!(!problem.has_edge(0, 2)); + assert!(problem.graph().has_edge(0, 1)); + assert!(problem.graph().has_edge(1, 0)); // Undirected + assert!(problem.graph().has_edge(1, 2)); + assert!(!problem.graph().has_edge(0, 2)); } #[test] diff --git 
a/src/unit_tests/rules/minimumvertexcover_maximumindependentset.rs b/src/unit_tests/rules/minimumvertexcover_maximumindependentset.rs index 502c4d636..fcdac7764 100644 --- a/src/unit_tests/rules/minimumvertexcover_maximumindependentset.rs +++ b/src/unit_tests/rules/minimumvertexcover_maximumindependentset.rs @@ -10,7 +10,7 @@ fn test_weighted_reduction() { let vc_problem = reduction.target_problem(); // Weights should be preserved - assert_eq!(vc_problem.weights_ref(), &vec![10, 20, 30]); + assert_eq!(vc_problem.weights().to_vec(), vec![10, 20, 30]); } #[test] @@ -21,7 +21,7 @@ fn test_reduction_structure() { let vc = reduction.target_problem(); // Same number of vertices in both problems - assert_eq!(vc.num_vertices(), 5); + assert_eq!(vc.graph().num_vertices(), 5); } #[test] diff --git a/tests/suites/integration.rs b/tests/suites/integration.rs index 7bc5fb73d..80a45bc5e 100644 --- a/tests/suites/integration.rs +++ b/tests/suites/integration.rs @@ -418,8 +418,7 @@ mod weighted_problems { #[test] fn test_weighted_vertex_cover() { - let mut problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); - problem.set_weights(vec![1, 10, 1]); + let problem = MinimumVertexCover::::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 10, 1]); let solver = BruteForce::new(); let solutions = solver.find_all_best(&problem); diff --git a/tests/suites/reductions.rs b/tests/suites/reductions.rs index 520c5b0c0..1e3285ddc 100644 --- a/tests/suites/reductions.rs +++ b/tests/suites/reductions.rs @@ -21,8 +21,8 @@ mod is_vc_reductions { let vc_problem = result.target_problem(); // Same graph structure - assert_eq!(vc_problem.num_vertices(), 3); - assert_eq!(vc_problem.num_edges(), 3); + assert_eq!(vc_problem.graph().num_vertices(), 3); + assert_eq!(vc_problem.graph().num_edges(), 3); // Solve the target VC problem let solver = BruteForce::new(); @@ -799,8 +799,8 @@ mod io_tests { let result = ReduceTo::>::reduce_to(&restored); let vc = result.target_problem(); - 
assert_eq!(vc.num_vertices(), 4); - assert_eq!(vc.num_edges(), 3); + assert_eq!(vc.graph().num_vertices(), 4); + assert_eq!(vc.graph().num_edges(), 3); } #[test] From 2d33c81463f595320e469401079efc1bcb5174ad Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 21:59:15 +0800 Subject: [PATCH 03/15] =?UTF-8?q?refactor:=20trim=20MaximumClique=20API=20?= =?UTF-8?q?=E2=80=94=20remove=20delegation=20methods?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Claude Opus 4.6 --- examples/reduction_maximumclique_to_ilp.rs | 8 ++-- src/models/graph/maximum_clique.rs | 44 +------------------ src/rules/maximumclique_ilp.rs | 6 +-- src/unit_tests/models/graph/maximum_clique.rs | 37 +++++----------- src/unit_tests/rules/maximumclique_ilp.rs | 7 ++- 5 files changed, 23 insertions(+), 79 deletions(-) diff --git a/examples/reduction_maximumclique_to_ilp.rs b/examples/reduction_maximumclique_to_ilp.rs index a431f21ae..b1bef1dcb 100644 --- a/examples/reduction_maximumclique_to_ilp.rs +++ b/examples/reduction_maximumclique_to_ilp.rs @@ -17,7 +17,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::octahedral; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // 1. 
Create MaximumClique instance: Octahedron (K_{2,2,2}), 6 vertices, 12 edges, clique number 3 @@ -81,9 +81,9 @@ pub fn run() { problem: MaximumClique::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": clique.num_vertices(), - "num_edges": clique.num_edges(), - "edges": clique.edges(), + "num_vertices": clique.graph().num_vertices(), + "num_edges": clique.graph().num_edges(), + "edges": clique.graph().edges(), }), }, target: ProblemSide { diff --git a/src/models/graph/maximum_clique.rs b/src/models/graph/maximum_clique.rs index 02927a80b..d1d10f4ae 100644 --- a/src/models/graph/maximum_clique.rs +++ b/src/models/graph/maximum_clique.rs @@ -97,56 +97,16 @@ impl MaximumClique { Self { graph, weights } } - /// Create a MaximumClique problem from an existing graph with unit weights. - pub fn from_graph_unit_weights(graph: G) -> Self - where - W: From, - { - let weights = vec![W::from(1); graph.num_vertices()]; - Self { graph, weights } - } - /// Get a reference to the underlying graph. pub fn graph(&self) -> &G { &self.graph } - /// Get the number of vertices. - pub fn num_vertices(&self) -> usize { - self.graph.num_vertices() - } - - /// Get the number of edges. - pub fn num_edges(&self) -> usize { - self.graph.num_edges() - } - - /// Get the edges as a list of (u, v) pairs. - pub fn edges(&self) -> Vec<(usize, usize)> { - self.graph.edges() - } - - /// Check if two vertices are adjacent. - pub fn has_edge(&self, u: usize, v: usize) -> bool { - self.graph.has_edge(u, v) - } - - /// Get a reference to the weights vector. - pub fn weights_ref(&self) -> &Vec { + /// Get a reference to the weights. + pub fn weights(&self) -> &[W] { &self.weights } - /// Set new weights for the problem. - pub fn set_weights(&mut self, weights: Vec) { - assert_eq!(weights.len(), self.graph.num_vertices()); - self.weights = weights; - } - - /// Get the weights for the problem. 
- pub fn weights(&self) -> Vec { - self.weights.clone() - } - /// Check if the problem has non-uniform weights. pub fn is_weighted(&self) -> bool where diff --git a/src/rules/maximumclique_ilp.rs b/src/rules/maximumclique_ilp.rs index ec65aa70c..08f5026e1 100644 --- a/src/rules/maximumclique_ilp.rs +++ b/src/rules/maximumclique_ilp.rs @@ -12,7 +12,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; /// Result of reducing MaximumClique to ILP. /// @@ -54,7 +54,7 @@ impl ReduceTo for MaximumClique { type Result = ReductionCliqueToILP; fn reduce_to(&self) -> Self::Result { - let num_vars = self.num_vertices(); + let num_vars = self.graph().num_vertices(); // All variables are binary (0 or 1) let bounds = vec![VarBounds::binary(); num_vars]; @@ -65,7 +65,7 @@ impl ReduceTo for MaximumClique { let mut constraints: Vec = Vec::new(); for u in 0..num_vars { for v in (u + 1)..num_vars { - if !self.has_edge(u, v) { + if !self.graph().has_edge(u, v) { constraints.push(LinearConstraint::le(vec![(u, 1.0), (v, 1.0)], 1.0)); } } diff --git a/src/unit_tests/models/graph/maximum_clique.rs b/src/unit_tests/models/graph/maximum_clique.rs index d863c99d6..5e5d45a8a 100644 --- a/src/unit_tests/models/graph/maximum_clique.rs +++ b/src/unit_tests/models/graph/maximum_clique.rs @@ -7,15 +7,15 @@ fn test_clique_creation() { use crate::traits::Problem; let problem = MaximumClique::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - assert_eq!(problem.num_vertices(), 4); - assert_eq!(problem.num_edges(), 3); + assert_eq!(problem.graph().num_vertices(), 4); + assert_eq!(problem.graph().num_edges(), 3); assert_eq!(problem.dims(), vec![2, 2, 2, 2]); } #[test] fn test_clique_with_weights() { let problem = MaximumClique::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + 
assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); assert!(problem.is_weighted()); } @@ -28,10 +28,10 @@ fn test_clique_unweighted() { #[test] fn test_has_edge() { let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2)]); - assert!(problem.has_edge(0, 1)); - assert!(problem.has_edge(1, 0)); // Undirected - assert!(problem.has_edge(1, 2)); - assert!(!problem.has_edge(0, 2)); + assert!(problem.graph().has_edge(0, 1)); + assert!(problem.graph().has_edge(1, 0)); // Undirected + assert!(problem.graph().has_edge(1, 2)); + assert!(!problem.graph().has_edge(0, 2)); } #[test] @@ -161,17 +161,10 @@ fn test_direction() { #[test] fn test_edges() { let problem = MaximumClique::::new(4, vec![(0, 1), (2, 3)]); - let edges = problem.edges(); + let edges = problem.graph().edges(); assert_eq!(edges.len(), 2); } -#[test] -fn test_set_weights() { - let mut problem = MaximumClique::::new(3, vec![(0, 1)]); - problem.set_weights(vec![5, 10, 15]); - assert_eq!(problem.weights(), vec![5, 10, 15]); -} - #[test] fn test_empty_graph() { // No edges means any single vertex is a max clique @@ -203,16 +196,8 @@ fn test_is_clique_method() { fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); let problem = MaximumClique::::from_graph(graph.clone(), vec![1, 2, 3]); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.weights(), vec![1, 2, 3]); -} - -#[test] -fn test_from_graph_unit_weights() { - let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = MaximumClique::::from_graph_unit_weights(graph); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.weights(), vec![1, 1, 1]); + assert_eq!(problem.graph().num_vertices(), 3); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); } #[test] @@ -226,7 +211,7 @@ fn test_graph_accessor() { #[test] fn test_weights_ref() { let problem = MaximumClique::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); - assert_eq!(problem.weights_ref(), &vec![5, 10, 15]); + assert_eq!(problem.weights(), 
&[5, 10, 15]); } #[test] diff --git a/src/unit_tests/rules/maximumclique_ilp.rs b/src/unit_tests/rules/maximumclique_ilp.rs index 88b96e154..110909a72 100644 --- a/src/unit_tests/rules/maximumclique_ilp.rs +++ b/src/unit_tests/rules/maximumclique_ilp.rs @@ -14,7 +14,7 @@ fn is_valid_clique(problem: &MaximumClique, config: &[usize]) // Check all pairs of selected vertices are adjacent for i in 0..selected.len() { for j in (i + 1)..selected.len() { - if !problem.has_edge(selected[i], selected[j]) { + if !problem.graph().has_edge(selected[i], selected[j]) { return false; } } @@ -24,18 +24,17 @@ fn is_valid_clique(problem: &MaximumClique, config: &[usize]) /// Compute the clique size (sum of weights of selected vertices). fn clique_size(problem: &MaximumClique, config: &[usize]) -> i32 { - let weights = problem.weights(); config .iter() .enumerate() .filter(|(_, &v)| v == 1) - .map(|(i, _)| weights[i]) + .map(|(i, _)| problem.weights()[i]) .sum() } /// Find maximum clique size by brute force enumeration. 
fn brute_force_max_clique(problem: &MaximumClique) -> i32 { - let n = problem.num_vertices(); + let n = problem.graph().num_vertices(); let mut max_size = 0; for mask in 0..(1 << n) { let config: Vec = (0..n).map(|i| (mask >> i) & 1).collect(); From 4efcc6c65c043f366c1c8581761ac702a7a54093 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:03:48 +0800 Subject: [PATCH 04/15] =?UTF-8?q?refactor:=20trim=20MaximalIS=20API=20?= =?UTF-8?q?=E2=80=94=20remove=20delegation=20methods?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Claude Opus 4.6 --- src/models/graph/maximal_is.rs | 44 ++--------------------- src/unit_tests/models/graph/maximal_is.rs | 40 +++++++-------------- 2 files changed, 14 insertions(+), 70 deletions(-) diff --git a/src/models/graph/maximal_is.rs b/src/models/graph/maximal_is.rs index 524ca9edd..85a14ee58 100644 --- a/src/models/graph/maximal_is.rs +++ b/src/models/graph/maximal_is.rs @@ -82,56 +82,16 @@ impl MaximalIS { Self { graph, weights } } - /// Create a new Maximal Independent Set problem from a graph with unit weights. - pub fn from_graph_unit_weights(graph: G) -> Self - where - W: From, - { - let weights = vec![W::from(1); graph.num_vertices()]; - Self { graph, weights } - } - /// Get a reference to the underlying graph. pub fn graph(&self) -> &G { &self.graph } - /// Get the number of vertices. - pub fn num_vertices(&self) -> usize { - self.graph.num_vertices() - } - - /// Get the number of edges. - pub fn num_edges(&self) -> usize { - self.graph.num_edges() - } - - /// Get edges as a list of (u, v) pairs. - pub fn edges(&self) -> Vec<(usize, usize)> { - self.graph.edges() - } - - /// Check if two vertices are adjacent. - pub fn has_edge(&self, u: usize, v: usize) -> bool { - self.graph.has_edge(u, v) - } - - /// Get a reference to the weights vector. - pub fn weights_ref(&self) -> &Vec { + /// Get a reference to the weights. 
+ pub fn weights(&self) -> &[W] { &self.weights } - /// Set new weights for the problem. - pub fn set_weights(&mut self, weights: Vec) { - assert_eq!(weights.len(), self.graph.num_vertices()); - self.weights = weights; - } - - /// Get the weights for the problem. - pub fn weights(&self) -> Vec { - self.weights.clone() - } - /// Check if the problem has non-uniform weights. pub fn is_weighted(&self) -> bool where diff --git a/src/unit_tests/models/graph/maximal_is.rs b/src/unit_tests/models/graph/maximal_is.rs index ebc2f51f4..2f52c330f 100644 --- a/src/unit_tests/models/graph/maximal_is.rs +++ b/src/unit_tests/models/graph/maximal_is.rs @@ -5,14 +5,14 @@ include!("../../jl_helpers.rs"); #[test] fn test_maximal_is_creation() { let problem = MaximalIS::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - assert_eq!(problem.num_vertices(), 4); - assert_eq!(problem.num_edges(), 3); + assert_eq!(problem.graph().num_vertices(), 4); + assert_eq!(problem.graph().num_edges(), 3); } #[test] fn test_maximal_is_with_weights() { let problem = MaximalIS::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); assert!(problem.is_weighted()); } @@ -20,16 +20,8 @@ fn test_maximal_is_with_weights() { fn test_maximal_is_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); let problem = MaximalIS::::from_graph(graph, vec![1, 2, 3]); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.weights(), vec![1, 2, 3]); -} - -#[test] -fn test_maximal_is_from_graph_unit_weights() { - let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = MaximalIS::::from_graph_unit_weights(graph); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.weights(), vec![1, 1, 1]); + assert_eq!(problem.graph().num_vertices(), 3); + assert_eq!(problem.weights().to_vec(), vec![1, 2, 3]); } #[test] @@ -84,15 +76,7 @@ fn test_direction() { #[test] fn test_weights() { let problem = 
MaximalIS::::new(3, vec![(0, 1)]); - let weights = problem.weights(); - assert_eq!(weights, vec![1, 1, 1]); // Unit weights -} - -#[test] -fn test_set_weights() { - let mut problem = MaximalIS::::new(3, vec![(0, 1)]); - problem.set_weights(vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights().to_vec(), vec![1, 1, 1]); // Unit weights } #[test] @@ -123,23 +107,23 @@ fn test_graph_ref() { #[test] fn test_edges() { let problem = MaximalIS::::new(3, vec![(0, 1), (1, 2)]); - let edges = problem.edges(); + let edges = problem.graph().edges(); assert_eq!(edges.len(), 2); } #[test] fn test_has_edge() { let problem = MaximalIS::::new(3, vec![(0, 1), (1, 2)]); - assert!(problem.has_edge(0, 1)); - assert!(problem.has_edge(1, 0)); // Undirected - assert!(problem.has_edge(1, 2)); - assert!(!problem.has_edge(0, 2)); + assert!(problem.graph().has_edge(0, 1)); + assert!(problem.graph().has_edge(1, 0)); // Undirected + assert!(problem.graph().has_edge(1, 2)); + assert!(!problem.graph().has_edge(0, 2)); } #[test] fn test_weights_ref() { let problem = MaximalIS::::new(3, vec![(0, 1)]); - assert_eq!(problem.weights_ref(), &vec![1, 1, 1]); + assert_eq!(problem.weights(), &[1, 1, 1]); } #[test] From d0201c7e9ca6209bbecfa19bbcfa3245e8497b5a Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:12:54 +0800 Subject: [PATCH 05/15] =?UTF-8?q?refactor:=20trim=20MinimumDominatingSet?= =?UTF-8?q?=20API=20=E2=80=94=20remove=20delegation=20methods?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Claude Opus 4.6 --- .../reduction_minimumdominatingset_to_ilp.rs | 8 +-- ..._satisfiability_to_minimumdominatingset.rs | 10 ++-- src/models/graph/minimum_dominating_set.rs | 56 +------------------ src/rules/minimumdominatingset_ilp.rs | 4 +- .../models/graph/minimum_dominating_set.rs | 35 +++++------- .../rules/sat_minimumdominatingset.rs | 23 ++++---- 6 files changed, 40 insertions(+), 96 
deletions(-) diff --git a/examples/reduction_minimumdominatingset_to_ilp.rs b/examples/reduction_minimumdominatingset_to_ilp.rs index 6a90c6888..c41c6ecd5 100644 --- a/examples/reduction_minimumdominatingset_to_ilp.rs +++ b/examples/reduction_minimumdominatingset_to_ilp.rs @@ -16,7 +16,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::topology::small_graphs::petersen; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // 1. Create MinimumDominatingSet instance: Petersen graph @@ -87,9 +87,9 @@ pub fn run() { problem: MinimumDominatingSet::::NAME.to_string(), variant: source_variant, instance: serde_json::json!({ - "num_vertices": ds.num_vertices(), - "num_edges": ds.num_edges(), - "edges": ds.edges(), + "num_vertices": ds.graph().num_vertices(), + "num_edges": ds.graph().num_edges(), + "edges": ds.graph().edges(), }), }, target: ProblemSide { diff --git a/examples/reduction_satisfiability_to_minimumdominatingset.rs b/examples/reduction_satisfiability_to_minimumdominatingset.rs index 6f87f59ac..78e7dafc8 100644 --- a/examples/reduction_satisfiability_to_minimumdominatingset.rs +++ b/examples/reduction_satisfiability_to_minimumdominatingset.rs @@ -15,7 +15,7 @@ use problemreductions::export::*; use problemreductions::prelude::*; -use problemreductions::topology::SimpleGraph; +use problemreductions::topology::{Graph, SimpleGraph}; pub fn run() { // 1. 
Create SAT instance: 5-variable, 7-clause 3-SAT formula @@ -53,8 +53,8 @@ pub fn run() { ); println!( "Target: MinimumDominatingSet with {} vertices, {} edges", - ds.num_vertices(), - ds.num_edges() + ds.graph().num_vertices(), + ds.graph().num_edges() ); println!(" Variable gadgets: 3 vertices per variable (pos, neg, dummy) forming triangles"); println!(" Clause vertices: 1 per clause, connected to relevant literal vertices"); @@ -141,8 +141,8 @@ pub fn run() { problem: MinimumDominatingSet::::NAME.to_string(), variant: target_variant, instance: serde_json::json!({ - "num_vertices": ds.num_vertices(), - "num_edges": ds.num_edges(), + "num_vertices": ds.graph().num_vertices(), + "num_edges": ds.graph().num_edges(), }), }, overhead: overhead_to_json(&overhead), diff --git a/src/models/graph/minimum_dominating_set.rs b/src/models/graph/minimum_dominating_set.rs index a45aa32fe..540db57f8 100644 --- a/src/models/graph/minimum_dominating_set.rs +++ b/src/models/graph/minimum_dominating_set.rs @@ -80,40 +80,11 @@ impl MinimumDominatingSet { Self { graph, weights } } - /// Create a Dominating Set problem from a graph with unit weights. - pub fn from_graph_unit_weights(graph: G) -> Self - where - W: From, - { - let weights = vec![W::from(1); graph.num_vertices()]; - Self { graph, weights } - } - /// Get a reference to the underlying graph. pub fn graph(&self) -> &G { &self.graph } - /// Get the number of vertices. - pub fn num_vertices(&self) -> usize { - self.graph.num_vertices() - } - - /// Get the number of edges. - pub fn num_edges(&self) -> usize { - self.graph.num_edges() - } - - /// Get edges as a list of (u, v) pairs. - pub fn edges(&self) -> Vec<(usize, usize)> { - self.graph.edges() - } - - /// Check if two vertices are adjacent. - pub fn has_edge(&self, u: usize, v: usize) -> bool { - self.graph.has_edge(u, v) - } - /// Get neighbors of a vertex. 
pub fn neighbors(&self, v: usize) -> Vec { self.graph.neighbors(v) @@ -126,34 +97,11 @@ impl MinimumDominatingSet { neighborhood } - /// Get a reference to the weights vector. - pub fn weights_ref(&self) -> &Vec { + /// Get a reference to the weights slice. + pub fn weights(&self) -> &[W] { &self.weights } - /// Set new weights for the problem. - pub fn set_weights(&mut self, weights: Vec) { - assert_eq!(weights.len(), self.graph.num_vertices()); - self.weights = weights; - } - - /// Get the weights for the problem. - pub fn weights(&self) -> Vec { - self.weights.clone() - } - - /// Check if the problem has non-uniform weights. - pub fn is_weighted(&self) -> bool - where - W: PartialEq, - { - if self.weights.is_empty() { - return false; - } - let first = &self.weights[0]; - !self.weights.iter().all(|w| w == first) - } - /// Check if a set of vertices is a dominating set. fn is_dominating(&self, config: &[usize]) -> bool { let n = self.graph.num_vertices(); diff --git a/src/rules/minimumdominatingset_ilp.rs b/src/rules/minimumdominatingset_ilp.rs index 51dbfe80a..b6c526b8a 100644 --- a/src/rules/minimumdominatingset_ilp.rs +++ b/src/rules/minimumdominatingset_ilp.rs @@ -12,7 +12,7 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::SimpleGraph; +use crate::topology::{Graph, SimpleGraph}; /// Result of reducing MinimumDominatingSet to ILP. 
/// @@ -55,7 +55,7 @@ impl ReduceTo for MinimumDominatingSet { type Result = ReductionDSToILP; fn reduce_to(&self) -> Self::Result { - let num_vars = self.num_vertices(); + let num_vars = self.graph().num_vertices(); // All variables are binary (0 or 1) let bounds = vec![VarBounds::binary(); num_vars]; diff --git a/src/unit_tests/models/graph/minimum_dominating_set.rs b/src/unit_tests/models/graph/minimum_dominating_set.rs index 838a2e7ba..dc77d41bb 100644 --- a/src/unit_tests/models/graph/minimum_dominating_set.rs +++ b/src/unit_tests/models/graph/minimum_dominating_set.rs @@ -7,15 +7,15 @@ include!("../../jl_helpers.rs"); #[test] fn test_dominating_set_creation() { let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - assert_eq!(problem.num_vertices(), 4); - assert_eq!(problem.num_edges(), 3); + assert_eq!(problem.graph().num_vertices(), 4); + assert_eq!(problem.graph().num_edges(), 3); } #[test] fn test_dominating_set_with_weights() { let problem = MinimumDominatingSet::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); - assert_eq!(problem.weights(), vec![1, 2, 3]); + assert_eq!(problem.weights(), &[1, 2, 3]); } #[test] @@ -81,44 +81,39 @@ fn test_is_dominating_set_wrong_len() { fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); let problem = - MinimumDominatingSet::::from_graph(graph.clone(), vec![1, 2, 3]); - assert_eq!(problem.num_vertices(), 3); - assert_eq!(problem.weights(), vec![1, 2, 3]); - - let problem2 = MinimumDominatingSet::::from_graph_unit_weights(graph); - assert_eq!(problem2.num_vertices(), 3); - assert_eq!(problem2.weights(), vec![1, 1, 1]); + MinimumDominatingSet::::from_graph(graph, vec![1, 2, 3]); + assert_eq!(problem.graph().num_vertices(), 3); + assert_eq!(problem.weights(), &[1, 2, 3]); } #[test] fn test_graph_accessor() { let problem = MinimumDominatingSet::::new(3, vec![(0, 1)]); - let graph = problem.graph(); - assert_eq!(graph.num_vertices(), 3); - assert_eq!(graph.num_edges(), 1); + 
assert_eq!(problem.graph().num_vertices(), 3); + assert_eq!(problem.graph().num_edges(), 1); } #[test] -fn test_weights_ref() { +fn test_weights() { let problem = MinimumDominatingSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); - assert_eq!(problem.weights_ref(), &vec![5, 10, 15]); + assert_eq!(problem.weights(), &[5, 10, 15]); } #[test] fn test_edges() { let problem = MinimumDominatingSet::::new(3, vec![(0, 1), (1, 2)]); - let edges = problem.edges(); + let edges = problem.graph().edges(); assert_eq!(edges.len(), 2); } #[test] fn test_has_edge() { let problem = MinimumDominatingSet::::new(3, vec![(0, 1), (1, 2)]); - assert!(problem.has_edge(0, 1)); - assert!(problem.has_edge(1, 0)); // Undirected - assert!(problem.has_edge(1, 2)); - assert!(!problem.has_edge(0, 2)); + assert!(problem.graph().has_edge(0, 1)); + assert!(problem.graph().has_edge(1, 0)); // Undirected + assert!(problem.graph().has_edge(1, 2)); + assert!(!problem.graph().has_edge(0, 2)); } #[test] diff --git a/src/unit_tests/rules/sat_minimumdominatingset.rs b/src/unit_tests/rules/sat_minimumdominatingset.rs index fd16b4745..1e56439ec 100644 --- a/src/unit_tests/rules/sat_minimumdominatingset.rs +++ b/src/unit_tests/rules/sat_minimumdominatingset.rs @@ -1,6 +1,7 @@ use super::*; use crate::models::satisfiability::CNFClause; use crate::solvers::BruteForce; +use crate::topology::Graph; use crate::traits::Problem; include!("../jl_helpers.rs"); @@ -12,12 +13,12 @@ fn test_simple_sat_to_ds() { let ds_problem = reduction.target_problem(); // Should have 3 vertices (variable gadget) + 1 clause vertex = 4 vertices - assert_eq!(ds_problem.num_vertices(), 4); + assert_eq!(ds_problem.graph().num_vertices(), 4); // Edges: 3 for triangle + 1 from positive literal to clause = 4 // Triangle edges: (0,1), (0,2), (1,2) // Clause edge: (0, 3) since x1 positive connects to clause vertex - assert_eq!(ds_problem.num_edges(), 4); + assert_eq!(ds_problem.graph().num_edges(), 4); } #[test] @@ -28,13 +29,13 @@ fn 
test_two_variable_sat_to_ds() { let ds_problem = reduction.target_problem(); // 2 variables * 3 = 6 gadget vertices + 1 clause vertex = 7 - assert_eq!(ds_problem.num_vertices(), 7); + assert_eq!(ds_problem.graph().num_vertices(), 7); // Edges: // - 3 edges for first triangle: (0,1), (0,2), (1,2) // - 3 edges for second triangle: (3,4), (3,5), (4,5) // - 2 edges from literals to clause: (0,6), (3,6) - assert_eq!(ds_problem.num_edges(), 8); + assert_eq!(ds_problem.graph().num_edges(), 8); } #[test] @@ -87,7 +88,7 @@ fn test_ds_structure() { let ds_problem = reduction.target_problem(); // 3 vars * 3 = 9 gadget vertices + 2 clause vertices = 11 - assert_eq!(ds_problem.num_vertices(), 11); + assert_eq!(ds_problem.graph().num_vertices(), 11); } #[test] @@ -97,8 +98,8 @@ fn test_empty_sat() { let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); - assert_eq!(ds_problem.num_vertices(), 0); - assert_eq!(ds_problem.num_edges(), 0); + assert_eq!(ds_problem.graph().num_vertices(), 0); + assert_eq!(ds_problem.graph().num_edges(), 0); assert_eq!(reduction.num_clauses(), 0); assert_eq!(reduction.num_literals(), 0); } @@ -111,12 +112,12 @@ fn test_multiple_literals_same_variable() { let ds_problem = reduction.target_problem(); // 3 gadget vertices + 1 clause vertex = 4 - assert_eq!(ds_problem.num_vertices(), 4); + assert_eq!(ds_problem.graph().num_vertices(), 4); // Edges: // - 3 for triangle // - 2 from literals to clause (both positive and negative literals connect) - assert_eq!(ds_problem.num_edges(), 5); + assert_eq!(ds_problem.graph().num_edges(), 5); } #[test] @@ -149,13 +150,13 @@ fn test_negated_variable_connection() { let ds_problem = reduction.target_problem(); // 2 * 3 = 6 gadget vertices + 1 clause = 7 - assert_eq!(ds_problem.num_vertices(), 7); + assert_eq!(ds_problem.graph().num_vertices(), 7); // Edges: // - 3 for first triangle: (0,1), (0,2), (1,2) // - 3 for second triangle: (3,4), (3,5), (4,5) // - 2 from negated literals to 
clause: (1,6), (4,6) - assert_eq!(ds_problem.num_edges(), 8); + assert_eq!(ds_problem.graph().num_edges(), 8); } #[test] From 1dda66d99eab2c460b03d4157509a5f6a85ed743 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:18:00 +0800 Subject: [PATCH 06/15] refactor: extract classify_problem_category from to_json() Co-Authored-By: Claude Opus 4.6 --- src/rules/graph.rs | 23 +++++++++++++++-------- src/unit_tests/rules/graph.rs | 25 +++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 8 deletions(-) diff --git a/src/rules/graph.rs b/src/rules/graph.rs index 6f6b65556..3bb5f078f 100644 --- a/src/rules/graph.rs +++ b/src/rules/graph.rs @@ -188,6 +188,20 @@ impl ResolvedPath { } } +/// Classify a problem's category from its module path. +/// Expected format: "problemreductions::models::::" +pub(crate) fn classify_problem_category(module_path: &str) -> &str { + let parts: Vec<&str> = module_path.split("::").collect(); + if parts.len() >= 3 { + if let Some(pos) = parts.iter().position(|&p| p == "models") { + if pos + 1 < parts.len() { + return parts[pos + 1]; + } + } + } + "other" +} + /// Edge data for a reduction. #[derive(Clone, Debug)] pub struct ReductionEdge { @@ -1006,14 +1020,7 @@ impl ReductionGraph { /// /// E.g., `"problemreductions::models::graph::maximum_independent_set"` → `"graph"`. fn category_from_module_path(module_path: &str) -> String { - // Expected format: "problemreductions::models::::" - let parts: Vec<&str> = module_path.split("::").collect(); - // parts = ["problemreductions", "models", "graph", "maximum_independent_set"] - if parts.len() >= 3 { - parts[2].to_string() - } else { - "other".to_string() - } + classify_problem_category(module_path).to_string() } /// Build the rustdoc path from a module path and problem name. 
diff --git a/src/unit_tests/rules/graph.rs b/src/unit_tests/rules/graph.rs index 6e7efdc0f..67f945f63 100644 --- a/src/unit_tests/rules/graph.rs +++ b/src/unit_tests/rules/graph.rs @@ -2,6 +2,7 @@ use super::*; use crate::models::graph::{MaximumIndependentSet, MinimumVertexCover}; use crate::models::set::MaximumSetPacking; use crate::rules::cost::MinimizeSteps; +use crate::rules::graph::classify_problem_category; use crate::topology::SimpleGraph; #[test] @@ -1157,3 +1158,27 @@ fn test_resolve_path_incompatible_returns_none() { let resolved = graph.resolve_path(&name_path, &source, &target); assert!(resolved.is_none()); } + +#[test] +fn test_classify_problem_category() { + assert_eq!( + classify_problem_category("problemreductions::models::graph::maximum_independent_set"), + "graph" + ); + assert_eq!( + classify_problem_category("problemreductions::models::sat::satisfiability"), + "sat" + ); + assert_eq!( + classify_problem_category("problemreductions::models::set::maximum_set_packing"), + "set" + ); + assert_eq!( + classify_problem_category("problemreductions::models::optimization::qubo"), + "optimization" + ); + assert_eq!( + classify_problem_category("unknown::path"), + "other" + ); +} From 2d549900c5b5300bd4db77acf0fdab6406b9505d Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:21:23 +0800 Subject: [PATCH 07/15] refactor: extract filter_redundant_base_nodes from to_json() Co-Authored-By: Claude Opus 4.6 --- src/rules/graph.rs | 20 +++++++++++++------- src/unit_tests/rules/graph.rs | 24 ++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 7 deletions(-) diff --git a/src/rules/graph.rs b/src/rules/graph.rs index 3bb5f078f..8bdde373f 100644 --- a/src/rules/graph.rs +++ b/src/rules/graph.rs @@ -188,6 +188,18 @@ impl ResolvedPath { } } +/// Remove base nodes (empty variant) when a variant-specific sibling exists. 
+pub(crate) fn filter_redundant_base_nodes(
+    node_set: &mut HashSet<(String, std::collections::BTreeMap<String, String>)>,
+) {
+    let names_with_variants: HashSet<String> = node_set
+        .iter()
+        .filter(|(_, variant)| !variant.is_empty())
+        .map(|(name, _)| name.clone())
+        .collect();
+    node_set.retain(|(name, variant)| !variant.is_empty() || !names_with_variants.contains(name));
+}
+
 /// Classify a problem's category from its module path.
 /// Expected format: "problemreductions::models::<category>::<problem>"
 pub(crate) fn classify_problem_category(module_path: &str) -> &str {
@@ -834,13 +846,7 @@ impl ReductionGraph {
         }
 
         // Remove empty-variant base nodes that are redundant (same name already has specific variants)
-        let names_with_variants: HashSet<String> = node_set
-            .iter()
-            .filter(|(_, variant)| !variant.is_empty())
-            .map(|(name, _)| name.clone())
-            .collect();
-        node_set
-            .retain(|(name, variant)| !variant.is_empty() || !names_with_variants.contains(name));
+        filter_redundant_base_nodes(&mut node_set);
 
         // Build nodes with categories and doc paths derived from ProblemSchemaEntry.module_path
         let mut nodes: Vec = node_set
diff --git a/src/unit_tests/rules/graph.rs b/src/unit_tests/rules/graph.rs
index 67f945f63..e2c468b0d 100644
--- a/src/unit_tests/rules/graph.rs
+++ b/src/unit_tests/rules/graph.rs
@@ -1159,6 +1159,30 @@ fn test_resolve_path_incompatible_returns_none() {
     assert!(resolved.is_none());
 }
 
+#[test]
+fn test_filter_redundant_base_nodes() {
+    use std::collections::{BTreeMap, HashSet};
+
+    let mut node_set: HashSet<(String, BTreeMap<String, String>)> = HashSet::new();
+
+    // Base node (empty variant) — should be removed because variant-specific sibling exists
+    node_set.insert(("MIS".to_string(), BTreeMap::new()));
+
+    // Variant-specific node
+    let mut variant = BTreeMap::new();
+    variant.insert("graph".to_string(), "GridGraph".to_string());
+    node_set.insert(("MIS".to_string(), variant));
+
+    // Base node with no siblings — should be kept
+    node_set.insert(("QUBO".to_string(), BTreeMap::new()));
+
+    filter_redundant_base_nodes(&mut node_set);
+
+    assert_eq!(node_set.len(), 2);
+    assert!(!node_set.iter().any(|(name, v)| name == "MIS" && v.is_empty()));
+    assert!(node_set.iter().any(|(name, _)| name == "QUBO"));
+}
+
 #[test]
 fn test_classify_problem_category() {
     assert_eq!(

From 7069d4fef76ea6e047a991b7ae9b138015ff1769 Mon Sep 17 00:00:00 2001
From: GiggleLiu
Date: Sun, 15 Feb 2026 22:25:42 +0800
Subject: [PATCH 08/15] refactor: extract is_natural_edge from to_json()

Co-Authored-By: Claude Opus 4.6
---
 src/rules/graph.rs            |  18 +++++-
 src/unit_tests/rules/graph.rs | 109 ++++++++++++++++++++++++++++++++++
 2 files changed, 126 insertions(+), 1 deletion(-)

diff --git a/src/rules/graph.rs b/src/rules/graph.rs
index 8bdde373f..c5a6c3e9f 100644
--- a/src/rules/graph.rs
+++ b/src/rules/graph.rs
@@ -200,6 +200,22 @@ pub(crate) fn filter_redundant_base_nodes(
     node_set.retain(|(name, variant)| !variant.is_empty() || !names_with_variants.contains(name));
 }
 
+/// Determine whether a natural (subtype) edge should exist from variant `a` to variant `b`.
+///
+/// A natural edge exists when all variant fields of `a` are at least as restrictive as `b`'s
+/// (i.e., each field of `a` is a subtype of or equal to the corresponding field of `b`),
+/// and at least one field is strictly more restrictive. This means `a` is a strict subtype of `b`.
+///
+/// Returns `true` if a natural edge from `a` to `b` should exist, `false` otherwise.
+/// Returns `false` when `a == b` (no self-edges).
+pub(crate) fn is_natural_edge(
+    a: &std::collections::BTreeMap<String, String>,
+    b: &std::collections::BTreeMap<String, String>,
+    graph: &ReductionGraph,
+) -> bool {
+    graph.is_variant_reducible(a, b)
+}
+
 /// Classify a problem's category from its module path.
/// Expected format: "problemreductions::models::::" pub(crate) fn classify_problem_category(module_path: &str) -> &str { @@ -937,7 +953,7 @@ impl ReductionGraph { for (name, variants) in &nodes_by_name { for a in variants { for b in variants { - if self.is_variant_reducible(a, b) { + if is_natural_edge(a, b, self) { let src_ref = VariantRef { name: name.to_string(), variant: (*a).clone(), diff --git a/src/unit_tests/rules/graph.rs b/src/unit_tests/rules/graph.rs index e2c468b0d..5870cc6ed 100644 --- a/src/unit_tests/rules/graph.rs +++ b/src/unit_tests/rules/graph.rs @@ -3,6 +3,7 @@ use crate::models::graph::{MaximumIndependentSet, MinimumVertexCover}; use crate::models::set::MaximumSetPacking; use crate::rules::cost::MinimizeSteps; use crate::rules::graph::classify_problem_category; +use crate::rules::graph::is_natural_edge; use crate::topology::SimpleGraph; #[test] @@ -1206,3 +1207,111 @@ fn test_classify_problem_category() { "other" ); } + +#[test] +fn test_is_natural_edge_same_variant() { + use std::collections::BTreeMap; + let graph = ReductionGraph::new(); + + // Same variant — no edge (is_variant_reducible returns false for equal variants) + let a = BTreeMap::from([("graph".to_string(), "SimpleGraph".to_string())]); + let b = a.clone(); + assert!(!is_natural_edge(&a, &b, &graph)); +} + +#[test] +fn test_is_natural_edge_subtype_forward() { + use std::collections::BTreeMap; + let graph = ReductionGraph::new(); + + // KingsSubgraph is subtype of SimpleGraph — natural edge from sub to sup + let sub = BTreeMap::from([ + ("graph".to_string(), "KingsSubgraph".to_string()), + ("weight".to_string(), "i32".to_string()), + ]); + let sup = BTreeMap::from([ + ("graph".to_string(), "SimpleGraph".to_string()), + ("weight".to_string(), "i32".to_string()), + ]); + assert!(is_natural_edge(&sub, &sup, &graph)); +} + +#[test] +fn test_is_natural_edge_not_reverse() { + use std::collections::BTreeMap; + let graph = ReductionGraph::new(); + + // SimpleGraph is NOT a subtype of 
KingsSubgraph — no natural edge in this direction + let sup = BTreeMap::from([ + ("graph".to_string(), "SimpleGraph".to_string()), + ("weight".to_string(), "i32".to_string()), + ]); + let sub = BTreeMap::from([ + ("graph".to_string(), "KingsSubgraph".to_string()), + ("weight".to_string(), "i32".to_string()), + ]); + assert!(!is_natural_edge(&sup, &sub, &graph)); +} + +#[test] +fn test_is_natural_edge_different_weight() { + use std::collections::BTreeMap; + let graph = ReductionGraph::new(); + + // One is subtype of i32 — natural edge from One to i32 + let sub = BTreeMap::from([ + ("graph".to_string(), "SimpleGraph".to_string()), + ("weight".to_string(), "One".to_string()), + ]); + let sup = BTreeMap::from([ + ("graph".to_string(), "SimpleGraph".to_string()), + ("weight".to_string(), "i32".to_string()), + ]); + assert!(is_natural_edge(&sub, &sup, &graph)); +} + +#[test] +fn test_is_natural_edge_incompatible_fields() { + use std::collections::BTreeMap; + let graph = ReductionGraph::new(); + + // Graph field goes more specific but weight goes more general — not a natural edge + let a = BTreeMap::from([ + ("graph".to_string(), "SimpleGraph".to_string()), + ("weight".to_string(), "One".to_string()), + ]); + let b = BTreeMap::from([ + ("graph".to_string(), "KingsSubgraph".to_string()), + ("weight".to_string(), "i32".to_string()), + ]); + assert!(!is_natural_edge(&a, &b, &graph)); +} + +#[test] +fn test_is_natural_edge_transitive_subtype() { + use std::collections::BTreeMap; + let graph = ReductionGraph::new(); + + // KingsSubgraph -> UnitDiskGraph -> SimpleGraph (transitive) + // With weight One -> f64 (transitive) + let sub = BTreeMap::from([ + ("graph".to_string(), "KingsSubgraph".to_string()), + ("weight".to_string(), "One".to_string()), + ]); + let sup = BTreeMap::from([ + ("graph".to_string(), "SimpleGraph".to_string()), + ("weight".to_string(), "f64".to_string()), + ]); + assert!(is_natural_edge(&sub, &sup, &graph)); +} + +#[test] +fn 
test_is_natural_edge_empty_variants() { + use std::collections::BTreeMap; + let graph = ReductionGraph::new(); + + // Both empty — same variant, no edge + let a: BTreeMap = BTreeMap::new(); + let b: BTreeMap = BTreeMap::new(); + assert!(!is_natural_edge(&a, &b, &graph)); +} From c1b26bed22c8bda34ab9a2e5d61c9bebed253d1d Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:31:05 +0800 Subject: [PATCH 09/15] feat: implement BipartiteGraph with standard bipartite representation Replace the ZST marker in graph_types.rs with a real BipartiteGraph implementation in src/topology/ that stores left/right partition sizes and edges in bipartite-local coordinates. The Graph trait maps to a unified vertex space where right vertices are offset by left_size. Co-Authored-By: Claude Opus 4.6 --- src/graph_types.rs | 17 --- src/topology/bipartite_graph.rs | 144 +++++++++++++++++++++ src/topology/mod.rs | 2 + src/unit_tests/graph_types.rs | 4 +- src/unit_tests/topology/bipartite_graph.rs | 60 +++++++++ 5 files changed, 208 insertions(+), 19 deletions(-) create mode 100644 src/topology/bipartite_graph.rs create mode 100644 src/unit_tests/topology/bipartite_graph.rs diff --git a/src/graph_types.rs b/src/graph_types.rs index 26ecf9c8a..97f1f23cc 100644 --- a/src/graph_types.rs +++ b/src/graph_types.rs @@ -28,23 +28,6 @@ inventory::submit! { #[derive(Debug, Clone, Copy, Default)] pub struct UnitDiskGraph; -/// Bipartite graph - vertices can be partitioned into two sets with edges only between sets. -#[derive(Debug, Clone, Copy, Default)] -pub struct BipartiteGraph; - -impl crate::variant::VariantParam for BipartiteGraph { - const CATEGORY: &'static str = "graph"; - const VALUE: &'static str = "BipartiteGraph"; - const PARENT_VALUE: Option<&'static str> = Some("SimpleGraph"); -} -inventory::submit! 
{ - crate::variant::VariantTypeEntry { - category: "graph", - value: "BipartiteGraph", - parent: Some("SimpleGraph"), - } -} - /// King's subgraph - a unit disk graph on a square grid with king's move connectivity. #[derive(Debug, Clone, Copy, Default)] pub struct KingsSubgraph; diff --git a/src/topology/bipartite_graph.rs b/src/topology/bipartite_graph.rs new file mode 100644 index 000000000..a56c97e34 --- /dev/null +++ b/src/topology/bipartite_graph.rs @@ -0,0 +1,144 @@ +//! Bipartite graph with explicit left/right partitions. + +use super::graph::{Graph, SimpleGraph}; +use serde::{Deserialize, Serialize}; + +/// Bipartite graph with explicit left/right partitions. +/// +/// Vertices are split into left (indices `0..left_size`) and right (`0..right_size`). +/// Edges connect left vertices to right vertices using bipartite-local coordinates. +/// The [`Graph`] trait maps to a unified vertex space where right vertices are offset +/// by `left_size`. +/// +/// # Example +/// +/// ``` +/// use problemreductions::topology::{BipartiteGraph, Graph}; +/// +/// // K_{2,2}: complete bipartite graph +/// let g = BipartiteGraph::new(2, 2, vec![(0, 0), (0, 1), (1, 0), (1, 1)]); +/// assert_eq!(g.num_vertices(), 4); +/// assert_eq!(g.num_edges(), 4); +/// assert!(g.has_edge(0, 2)); // left 0 -> right 0 (unified index 2) +/// ``` +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct BipartiteGraph { + left_size: usize, + right_size: usize, + /// Edges in bipartite-local coordinates: (left_index, right_index). + edges: Vec<(usize, usize)>, +} + +impl BipartiteGraph { + /// Create a new bipartite graph. + /// + /// # Arguments + /// + /// * `left_size` - Number of vertices in the left partition + /// * `right_size` - Number of vertices in the right partition + /// * `edges` - Edges as `(left_index, right_index)` pairs in bipartite-local coordinates + /// + /// # Panics + /// + /// Panics if any edge references an out-of-bounds left or right vertex index. 
+ pub fn new(left_size: usize, right_size: usize, edges: Vec<(usize, usize)>) -> Self { + for &(u, v) in &edges { + assert!( + u < left_size, + "left vertex {} out of bounds (left_size={})", + u, + left_size + ); + assert!( + v < right_size, + "right vertex {} out of bounds (right_size={})", + v, + right_size + ); + } + Self { + left_size, + right_size, + edges, + } + } + + /// Returns the number of vertices in the left partition. + pub fn left_size(&self) -> usize { + self.left_size + } + + /// Returns the number of vertices in the right partition. + pub fn right_size(&self) -> usize { + self.right_size + } + + /// Returns the edges in bipartite-local coordinates. + pub fn left_edges(&self) -> &[(usize, usize)] { + &self.edges + } +} + +impl Graph for BipartiteGraph { + const NAME: &'static str = "BipartiteGraph"; + + fn num_vertices(&self) -> usize { + self.left_size + self.right_size + } + + fn num_edges(&self) -> usize { + self.edges.len() + } + + fn edges(&self) -> Vec<(usize, usize)> { + self.edges + .iter() + .map(|&(u, v)| { + let a = u; + let b = self.left_size + v; + if a < b { + (a, b) + } else { + (b, a) + } + }) + .collect() + } + + fn has_edge(&self, u: usize, v: usize) -> bool { + let (u, v) = if u < v { (u, v) } else { (v, u) }; + // u must be a left vertex and v must be a right vertex (in unified space) + if u >= self.left_size || v < self.left_size { + return false; + } + let local_v = v - self.left_size; + self.edges.contains(&(u, local_v)) + } + + fn neighbors(&self, v: usize) -> Vec { + if v < self.left_size { + // Left vertex: find all right neighbors + self.edges + .iter() + .filter(|(u, _)| *u == v) + .map(|(_, rv)| self.left_size + rv) + .collect() + } else { + // Right vertex: find all left neighbors + let local_v = v - self.left_size; + self.edges + .iter() + .filter(|(_, rv)| *rv == local_v) + .map(|(u, _)| *u) + .collect() + } + } +} + +use crate::impl_variant_param; +impl_variant_param!(BipartiteGraph, "graph", parent: SimpleGraph, + 
cast: |g| SimpleGraph::new(g.num_vertices(), g.edges())); + +#[cfg(test)] +#[path = "../unit_tests/topology/bipartite_graph.rs"] +mod tests; diff --git a/src/topology/mod.rs b/src/topology/mod.rs index 6935e525e..e661fed63 100644 --- a/src/topology/mod.rs +++ b/src/topology/mod.rs @@ -22,6 +22,7 @@ //! // (UnitDiskGraph example would require specific constructors) //! ``` +mod bipartite_graph; mod graph; mod hypergraph; mod kings_subgraph; @@ -29,6 +30,7 @@ pub mod small_graphs; mod triangular_subgraph; mod unit_disk_graph; +pub use bipartite_graph::BipartiteGraph; pub use graph::{Graph, GraphCast, SimpleGraph}; pub use hypergraph::HyperGraph; pub use kings_subgraph::KingsSubgraph; diff --git a/src/unit_tests/graph_types.rs b/src/unit_tests/graph_types.rs index a462665e9..a42e64d60 100644 --- a/src/unit_tests/graph_types.rs +++ b/src/unit_tests/graph_types.rs @@ -7,7 +7,6 @@ fn test_graph_type_traits() { let _: SimpleGraph = Default::default(); let _: PlanarGraph = Default::default(); let _: UnitDiskGraph = Default::default(); - let _: BipartiteGraph = Default::default(); let _: KingsSubgraph = Default::default(); let _: TriangularSubgraph = Default::default(); let _: HyperGraph = Default::default(); @@ -29,6 +28,7 @@ fn test_planargraph_variant_param() { #[test] fn test_bipartitegraph_variant_param() { + use crate::topology::BipartiteGraph; assert_eq!(BipartiteGraph::CATEGORY, "graph"); assert_eq!(BipartiteGraph::VALUE, "BipartiteGraph"); assert_eq!(BipartiteGraph::PARENT_VALUE, Some("SimpleGraph")); @@ -120,10 +120,10 @@ fn test_unitdiskgraph_to_planargraph_not_parent() { #[test] fn test_marker_structs_exist() { // Verify that all ZST marker structs still exist and can be instantiated + // Note: BipartiteGraph is now a real topology type in src/topology/bipartite_graph.rs let _ = SimpleGraph; let _ = PlanarGraph; let _ = UnitDiskGraph; - let _ = BipartiteGraph; let _ = KingsSubgraph; let _ = TriangularSubgraph; let _ = HyperGraph; diff --git 
a/src/unit_tests/topology/bipartite_graph.rs b/src/unit_tests/topology/bipartite_graph.rs new file mode 100644 index 000000000..46e3b381a --- /dev/null +++ b/src/unit_tests/topology/bipartite_graph.rs @@ -0,0 +1,60 @@ +use crate::topology::{BipartiteGraph, Graph}; + +#[test] +fn test_bipartite_graph_basic() { + // K_{2,3}: left={0,1}, right={0,1,2}, all edges + let edges = vec![(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)]; + let g = BipartiteGraph::new(2, 3, edges); + assert_eq!(g.num_vertices(), 5); + assert_eq!(g.num_edges(), 6); + assert_eq!(g.left_size(), 2); + assert_eq!(g.right_size(), 3); +} + +#[test] +fn test_bipartite_graph_edges_unified() { + let g = BipartiteGraph::new(1, 2, vec![(0, 0), (0, 1)]); + let edges = g.edges(); + assert!(edges.contains(&(0, 1))); + assert!(edges.contains(&(0, 2))); + assert_eq!(edges.len(), 2); +} + +#[test] +fn test_bipartite_graph_has_edge() { + let g = BipartiteGraph::new(2, 2, vec![(0, 0), (1, 1)]); + assert!(g.has_edge(0, 2)); + assert!(g.has_edge(1, 3)); + assert!(!g.has_edge(0, 1)); + assert!(!g.has_edge(0, 3)); +} + +#[test] +fn test_bipartite_graph_neighbors() { + let g = BipartiteGraph::new(2, 2, vec![(0, 0), (0, 1), (1, 1)]); + let mut n0 = g.neighbors(0); + n0.sort(); + assert_eq!(n0, vec![2, 3]); + let mut n3 = g.neighbors(3); + n3.sort(); + assert_eq!(n3, vec![0, 1]); +} + +#[test] +fn test_bipartite_graph_left_edges() { + let edges = vec![(0, 0), (1, 1)]; + let g = BipartiteGraph::new(2, 2, edges.clone()); + assert_eq!(g.left_edges(), &edges); +} + +#[test] +#[should_panic] +fn test_bipartite_graph_invalid_left_index() { + BipartiteGraph::new(2, 2, vec![(2, 0)]); +} + +#[test] +#[should_panic] +fn test_bipartite_graph_invalid_right_index() { + BipartiteGraph::new(2, 2, vec![(0, 2)]); +} From 8d5ca74c70af6391db34834a9bd1cc991deb9f0c Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:33:00 +0800 Subject: [PATCH 10/15] feat: expand "Chaining Reductions" with ResolvedPath example MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Allow KSatisfiability construction by skipping clause-length validation when K::K is None, enabling the natural K3→KN widening step. Replace the short chaining example in getting-started.md with a richer 3-SAT→SAT→MIS pipeline that demonstrates ReductionGraph path planning, variant casts, and ILPSolver extraction. Co-Authored-By: Claude Opus 4.6 --- docs/src/getting-started.md | 66 +++++++++++++++++--- src/models/satisfiability/ksat.rs | 48 +++++++------- src/unit_tests/models/satisfiability/ksat.rs | 35 ++++++++++- 3 files changed, 116 insertions(+), 33 deletions(-) diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index f37e29e20..4249b22cf 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -55,25 +55,71 @@ assert!(metric.is_valid()); ### Chaining Reductions -Reductions can be chained. Each step preserves the solution mapping: +Reductions compose into multi-step chains. A `ResolvedPath` describes the plan — +each step carries the problem name and variant, each edge is either a `Reduction` +(with overhead) or a `NaturalCast` (free subtype relaxation). 
+Here we solve a 3-SAT formula by chaining through Satisfiability +and MaximumIndependentSet: ```rust +use std::collections::BTreeMap; use problemreductions::prelude::*; use problemreductions::topology::SimpleGraph; +use problemreductions::rules::{ReductionGraph, EdgeKind}; +use problemreductions::solvers::ILPSolver; -// SetPacking -> IndependentSet -> VertexCover -let sp = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); +// --- Plan: obtain a ResolvedPath --- -let r1 = ReduceTo::>::reduce_to(&sp); -let r2 = ReduceTo::>::reduce_to(r1.target_problem()); +let graph = ReductionGraph::new(); +let path = graph.find_shortest_path_by_name("KSatisfiability", "MaximumIndependentSet").unwrap(); +let source = BTreeMap::from([("k".to_string(), "K3".to_string())]); +let resolved = graph.resolve_path(&path, &source, &BTreeMap::new()).unwrap(); -// Solve final target, extract back through chain -let solver = BruteForce::new(); -let vc_sol = solver.find_best(r2.target_problem()).unwrap(); -let is_sol = r2.extract_solution(&vc_sol); -let sp_sol = r1.extract_solution(&is_sol); +// The resolved path: +// step 0: KSatisfiability {k: "K3"} +// step 1: Satisfiability {} +// step 2: MaximumIndependentSet {graph: "SimpleGraph", weight: "i32"} +// edge 0: Reduction (K3-SAT → SAT, trivial embedding) +// edge 1: Reduction (SAT → MIS, Karp 1972) + +// --- Execute: create, reduce, solve, extract --- + +// Create: 3-SAT formula (a∨b∨¬c)∧(¬a∨¬b∨¬c)∧(¬a∨b∨c)∧(a∨¬b∨c) +let ksat = KSatisfiability::::new(3, vec![ + CNFClause::new(vec![1, 2, -3]), // a ∨ b ∨ ¬c + CNFClause::new(vec![-1, -2, -3]), // ¬a ∨ ¬b ∨ ¬c + CNFClause::new(vec![-1, 2, 3]), // ¬a ∨ b ∨ c + CNFClause::new(vec![1, -2, 3]), // a ∨ ¬b ∨ c +]); + +// Widen: 3-SAT → N-SAT (natural variant cast, KN accepts any clause size) +let nsat = KSatisfiability::::new(ksat.num_vars(), ksat.clauses().to_vec()); + +// Reduce: N-SAT → Satisfiability (trivial embedding) +let r1 = ReduceTo::::reduce_to(&nsat); + +// Reduce: 
Satisfiability → MaximumIndependentSet (Karp reduction) +let r2 = ReduceTo::>::reduce_to(r1.target_problem()); + +// Solve: MIS via ILP (internally: MIS → ILP → solve → extract) +let ilp = ILPSolver::new(); +let mis_solution = ilp.solve_reduced(r2.target_problem()).unwrap(); + +// Extract: trace back through the reduction chain +let sat_solution = r2.extract_solution(&mis_solution); +let nsat_solution = r1.extract_solution(&sat_solution); + +// Verify: satisfies the original 3-SAT formula +assert!(ksat.evaluate(&nsat_solution)); ``` +The `ILPSolver::solve_reduced()` handles the final MIS → ILP reduction, +solve, and extraction internally. The caller traces back the explicit chain +with `extract_solution()` at each step, recovering a satisfying assignment +for the original formula. + +> **Note:** `ILPSolver` requires the `ilp` feature flag (see [Solvers](#solvers)). + ## Solvers Two solvers for testing purposes are available: diff --git a/src/models/satisfiability/ksat.rs b/src/models/satisfiability/ksat.rs index 937eeb9d4..0383349c7 100644 --- a/src/models/satisfiability/ksat.rs +++ b/src/models/satisfiability/ksat.rs @@ -69,18 +69,20 @@ impl KSatisfiability { /// Create a new K-SAT problem. /// /// # Panics - /// Panics if any clause does not have exactly K literals, - /// or if K is KN (generic K cannot be instantiated). + /// Panics if any clause does not have exactly K literals (when K is a + /// concrete value like K2, K3). When K is KN (arbitrary), no clause-length + /// validation is performed. 
pub fn new(num_vars: usize, clauses: Vec) -> Self { - let k = K::K.expect("KN cannot be instantiated"); - for (i, clause) in clauses.iter().enumerate() { - assert!( - clause.len() == k, - "Clause {} has {} literals, expected {}", - i, - clause.len(), - k - ); + if let Some(k) = K::K { + for (i, clause) in clauses.iter().enumerate() { + assert!( + clause.len() == k, + "Clause {} has {} literals, expected {}", + i, + clause.len(), + k + ); + } } Self { num_vars, @@ -95,18 +97,20 @@ impl KSatisfiability { /// fewer literals (e.g., when allow_less is true in the Julia implementation). /// /// # Panics - /// Panics if any clause has more than K literals, - /// or if K is KN (generic K cannot be instantiated). + /// Panics if any clause has more than K literals (when K is a concrete + /// value like K2, K3). When K is KN (arbitrary), no clause-length + /// validation is performed. pub fn new_allow_less(num_vars: usize, clauses: Vec) -> Self { - let k = K::K.expect("KN cannot be instantiated"); - for (i, clause) in clauses.iter().enumerate() { - assert!( - clause.len() <= k, - "Clause {} has {} literals, expected at most {}", - i, - clause.len(), - k - ); + if let Some(k) = K::K { + for (i, clause) in clauses.iter().enumerate() { + assert!( + clause.len() <= k, + "Clause {} has {} literals, expected at most {}", + i, + clause.len(), + k + ); + } } Self { num_vars, diff --git a/src/unit_tests/models/satisfiability/ksat.rs b/src/unit_tests/models/satisfiability/ksat.rs index 8e9786e16..fe025f978 100644 --- a/src/unit_tests/models/satisfiability/ksat.rs +++ b/src/unit_tests/models/satisfiability/ksat.rs @@ -1,7 +1,7 @@ use super::*; use crate::solvers::BruteForce; use crate::traits::Problem; -use crate::variant::{K2, K3}; +use crate::variant::{K2, K3, KN}; include!("../../jl_helpers.rs"); #[test] @@ -179,3 +179,36 @@ fn test_jl_parity_evaluation() { assert_eq!(rust_best_set, jl_best, "KSat best solutions mismatch"); } } + +#[test] +fn test_kn_creation() { + // KN accepts 
clauses of any length without validation + let problem = KSatisfiability::<KN>::new( + 3, + vec![ + CNFClause::new(vec![1, 2, 3]), // 3 literals + CNFClause::new(vec![-1, -2]), // 2 literals + CNFClause::new(vec![1]), // 1 literal + ], + ); + assert_eq!(problem.num_vars(), 3); + assert_eq!(problem.num_clauses(), 3); + assert!(problem.evaluate(&[1, 0, 0])); // x1=T, x2=F, x3=F +} + +#[test] +fn test_kn_from_k3_clauses() { + // KN can be constructed from clauses originally built for K3 + let k3 = KSatisfiability::<K3>::new( + 3, + vec![ + CNFClause::new(vec![1, 2, -3]), + CNFClause::new(vec![-1, -2, -3]), + ], + ); + let kn = KSatisfiability::<KN>::new(k3.num_vars(), k3.clauses().to_vec()); + // Both should agree on evaluations + for config in &[[1, 0, 0], [0, 1, 0], [1, 1, 1]] { + assert_eq!(k3.evaluate(config), kn.evaluate(config)); + } +} From 7b182f25f0619de68dbaa384dd08009eaf0fcc91 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:37:06 +0800 Subject: [PATCH 11/15] feat: implement PlanarGraph as validated SimpleGraph wrapper Replace the ZST marker PlanarGraph in graph_types.rs with a real topology type that wraps SimpleGraph and validates the necessary planarity condition |E| <= 3|V| - 6 for |V| >= 3. Co-Authored-By: Claude Opus 4.6 --- src/graph_types.rs | 17 ----- src/topology/mod.rs | 2 + src/topology/planar_graph.rs | 83 +++++++++++++++++++++++++ src/unit_tests/graph_types.rs | 5 +- src/unit_tests/topology/planar_graph.rs | 46 ++++++++++++++ 5 files changed, 133 insertions(+), 20 deletions(-) create mode 100644 src/topology/planar_graph.rs create mode 100644 src/unit_tests/topology/planar_graph.rs diff --git a/src/graph_types.rs b/src/graph_types.rs index 97f1f23cc..170435167 100644 --- a/src/graph_types.rs +++ b/src/graph_types.rs @@ -7,23 +7,6 @@ #[derive(Debug, Clone, Copy, Default)] pub struct SimpleGraph; -/// Planar graph - can be drawn on a plane without edge crossings. 
-#[derive(Debug, Clone, Copy, Default)] -pub struct PlanarGraph; - -impl crate::variant::VariantParam for PlanarGraph { - const CATEGORY: &'static str = "graph"; - const VALUE: &'static str = "PlanarGraph"; - const PARENT_VALUE: Option<&'static str> = Some("SimpleGraph"); -} -inventory::submit! { - crate::variant::VariantTypeEntry { - category: "graph", - value: "PlanarGraph", - parent: Some("SimpleGraph"), - } -} - /// Unit disk graph - vertices are points, edges connect points within unit distance. #[derive(Debug, Clone, Copy, Default)] pub struct UnitDiskGraph; diff --git a/src/topology/mod.rs b/src/topology/mod.rs index e661fed63..195458f27 100644 --- a/src/topology/mod.rs +++ b/src/topology/mod.rs @@ -26,6 +26,7 @@ mod bipartite_graph; mod graph; mod hypergraph; mod kings_subgraph; +mod planar_graph; pub mod small_graphs; mod triangular_subgraph; mod unit_disk_graph; @@ -34,6 +35,7 @@ pub use bipartite_graph::BipartiteGraph; pub use graph::{Graph, GraphCast, SimpleGraph}; pub use hypergraph::HyperGraph; pub use kings_subgraph::KingsSubgraph; +pub use planar_graph::PlanarGraph; pub use small_graphs::{available_graphs, smallgraph}; pub use triangular_subgraph::TriangularSubgraph; pub use unit_disk_graph::UnitDiskGraph; diff --git a/src/topology/planar_graph.rs b/src/topology/planar_graph.rs new file mode 100644 index 000000000..712ff1027 --- /dev/null +++ b/src/topology/planar_graph.rs @@ -0,0 +1,83 @@ +//! Planar graph — validated wrapper around SimpleGraph. + +use super::graph::{Graph, SimpleGraph}; +use serde::{Deserialize, Serialize}; + +/// Planar graph — validated wrapper around SimpleGraph. +/// +/// Construction validates the necessary planarity condition: |E| <= 3|V| - 6 for |V| >= 3. +/// This is a necessary but not sufficient condition. 
+/// +/// # Example +/// +/// ``` +/// use problemreductions::topology::{PlanarGraph, Graph}; +/// +/// // K4 is planar: 4 vertices, 6 edges, 6 <= 3*4 - 6 = 6 +/// let edges = vec![(0,1),(0,2),(0,3),(1,2),(1,3),(2,3)]; +/// let g = PlanarGraph::new(4, edges); +/// assert_eq!(g.num_vertices(), 4); +/// assert_eq!(g.num_edges(), 6); +/// ``` +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PlanarGraph { + inner: SimpleGraph, +} + +impl PlanarGraph { + /// Create a new planar graph. + /// + /// # Panics + /// Panics if the graph violates the necessary planarity condition |E| <= 3|V| - 6. + pub fn new(num_vertices: usize, edges: Vec<(usize, usize)>) -> Self { + let inner = SimpleGraph::new(num_vertices, edges); + if num_vertices >= 3 { + let max_edges = 3 * num_vertices - 6; + assert!( + inner.num_edges() <= max_edges, + "graph has {} edges but a planar graph on {} vertices can have at most {} edges", + inner.num_edges(), + num_vertices, + max_edges + ); + } + Self { inner } + } + + /// Get a reference to the underlying SimpleGraph. 
+ pub fn inner(&self) -> &SimpleGraph { + &self.inner + } +} + +impl Graph for PlanarGraph { + const NAME: &'static str = "PlanarGraph"; + + fn num_vertices(&self) -> usize { + self.inner.num_vertices() + } + + fn num_edges(&self) -> usize { + self.inner.num_edges() + } + + fn edges(&self) -> Vec<(usize, usize)> { + self.inner.edges() + } + + fn has_edge(&self, u: usize, v: usize) -> bool { + self.inner.has_edge(u, v) + } + + fn neighbors(&self, v: usize) -> Vec { + self.inner.neighbors(v) + } +} + +use crate::impl_variant_param; +impl_variant_param!(PlanarGraph, "graph", parent: SimpleGraph, + cast: |g| g.inner.clone()); + +#[cfg(test)] +#[path = "../unit_tests/topology/planar_graph.rs"] +mod tests; diff --git a/src/unit_tests/graph_types.rs b/src/unit_tests/graph_types.rs index a42e64d60..75c4f0d09 100644 --- a/src/unit_tests/graph_types.rs +++ b/src/unit_tests/graph_types.rs @@ -5,7 +5,6 @@ use crate::variant::{VariantParam, VariantTypeEntry}; fn test_graph_type_traits() { // Test Default let _: SimpleGraph = Default::default(); - let _: PlanarGraph = Default::default(); let _: UnitDiskGraph = Default::default(); let _: KingsSubgraph = Default::default(); let _: TriangularSubgraph = Default::default(); @@ -21,6 +20,7 @@ fn test_graph_type_traits() { #[test] fn test_planargraph_variant_param() { + use crate::topology::PlanarGraph; assert_eq!(PlanarGraph::CATEGORY, "graph"); assert_eq!(PlanarGraph::VALUE, "PlanarGraph"); assert_eq!(PlanarGraph::PARENT_VALUE, Some("SimpleGraph")); @@ -120,9 +120,8 @@ fn test_unitdiskgraph_to_planargraph_not_parent() { #[test] fn test_marker_structs_exist() { // Verify that all ZST marker structs still exist and can be instantiated - // Note: BipartiteGraph is now a real topology type in src/topology/bipartite_graph.rs + // Note: BipartiteGraph and PlanarGraph are now real topology types in src/topology/ let _ = SimpleGraph; - let _ = PlanarGraph; let _ = UnitDiskGraph; let _ = KingsSubgraph; let _ = TriangularSubgraph; diff --git 
a/src/unit_tests/topology/planar_graph.rs b/src/unit_tests/topology/planar_graph.rs new file mode 100644 index 000000000..7fae09cb6 --- /dev/null +++ b/src/unit_tests/topology/planar_graph.rs @@ -0,0 +1,46 @@ +use crate::topology::{Graph, PlanarGraph}; + +#[test] +fn test_planar_graph_basic() { + // K4 is planar: 4 vertices, 6 edges, 6 <= 3*4 - 6 = 6 + let edges = vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]; + let g = PlanarGraph::new(4, edges); + assert_eq!(g.num_vertices(), 4); + assert_eq!(g.num_edges(), 6); +} + +#[test] +fn test_planar_graph_delegates_to_inner() { + let g = PlanarGraph::new(3, vec![(0, 1), (1, 2)]); + assert!(g.has_edge(0, 1)); + assert!(!g.has_edge(0, 2)); + let mut n1 = g.neighbors(1); + n1.sort(); + assert_eq!(n1, vec![0, 2]); +} + +#[test] +#[should_panic] +fn test_planar_graph_rejects_k5() { + // K5 has 10 edges, but 3*5 - 6 = 9. Fails necessary condition. + let mut edges = Vec::new(); + for i in 0..5 { + for j in (i + 1)..5 { + edges.push((i, j)); + } + } + PlanarGraph::new(5, edges); +} + +#[test] +fn test_planar_graph_empty() { + let g = PlanarGraph::new(3, vec![]); + assert_eq!(g.num_vertices(), 3); + assert_eq!(g.num_edges(), 0); +} + +#[test] +fn test_planar_graph_tree() { + let g = PlanarGraph::new(4, vec![(0, 1), (1, 2), (2, 3)]); + assert_eq!(g.num_edges(), 3); +} From 6ef7d5ad93d84351d303cba5c3934bece5dbecec Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:40:02 +0800 Subject: [PATCH 12/15] style: apply rustfmt formatting Co-Authored-By: Claude Opus 4.6 --- examples/export_mapping_stages.rs | 10 +++- examples/export_petersen_mapping.rs | 5 +- src/rules/sat_ksat.rs | 10 ++-- src/rules/unitdiskmapping/weighted.rs | 6 +-- src/topology/kings_subgraph.rs | 4 +- src/unit_tests/graph_models.rs | 6 ++- .../models/graph/maximum_independent_set.rs | 3 +- .../models/graph/minimum_dominating_set.rs | 3 +- src/unit_tests/models/satisfiability/ksat.rs | 6 +-- src/unit_tests/rules/graph.rs | 9 ++-- 
.../unitdiskmapping/triangular/mapping.rs | 5 +- .../rules/unitdiskmapping/triangular/mod.rs | 5 +- .../gadgets_ground_truth.rs | 47 ++++++++++++++++--- .../julia_comparison.rs | 6 +-- .../mapping_result.rs | 4 +- .../unitdiskmapping_algorithms/weighted.rs | 26 ++-------- tests/suites/integration.rs | 12 ++++- tests/suites/reductions.rs | 10 +++- 18 files changed, 101 insertions(+), 76 deletions(-) diff --git a/examples/export_mapping_stages.rs b/examples/export_mapping_stages.rs index a8bdead78..46d34e713 100644 --- a/examples/export_mapping_stages.rs +++ b/examples/export_mapping_stages.rs @@ -377,7 +377,10 @@ fn export_square( .map(|line| mis_overhead_copyline(line, spacing, padding) as i32) .sum(); let crossing_overhead: i32 = crossing_tape.iter().map(ksg::tape_entry_mis_overhead).sum(); - let simplifier_overhead: i32 = simplifier_tape.iter().map(ksg::tape_entry_mis_overhead).sum(); + let simplifier_overhead: i32 = simplifier_tape + .iter() + .map(ksg::tape_entry_mis_overhead) + .sum(); let copy_lines_export = export_copylines_square(©lines, padding, spacing); let crossing_tape_export = export_square_tape(&crossing_tape, 0); @@ -588,7 +591,10 @@ fn export_copylines_square( } // IMPORTANT: Tape positions are 0-indexed. DO NOT add +1 to row/col! 
-fn export_triangular_tape(tape: &[triangular::WeightedTriTapeEntry], offset: usize) -> Vec { +fn export_triangular_tape( + tape: &[triangular::WeightedTriTapeEntry], + offset: usize, +) -> Vec { tape.iter() .enumerate() .map(|(i, e)| TapeEntryExport { diff --git a/examples/export_petersen_mapping.rs b/examples/export_petersen_mapping.rs index 0eeb5e397..d8a9ec56f 100644 --- a/examples/export_petersen_mapping.rs +++ b/examples/export_petersen_mapping.rs @@ -146,10 +146,7 @@ fn main() { square_weighted_viz.nodes.len(), square_weighted_viz.edges.len() ); - println!( - " MIS overhead Δ: {}", - square_weighted_result.mis_overhead - ); + println!(" MIS overhead Δ: {}", square_weighted_result.mis_overhead); println!( " MIS(grid) = MIS(source) + Δ = {} + {} = {}", petersen_mis, diff --git a/src/rules/sat_ksat.rs b/src/rules/sat_ksat.rs index cb7e87c3a..c3fee040d 100644 --- a/src/rules/sat_ksat.rs +++ b/src/rules/sat_ksat.rs @@ -187,11 +187,11 @@ fn reduce_ksat_to_sat(ksat: &KSatisfiability) -> ReductionKSATToSA macro_rules! 
impl_ksat_to_sat { ($ktype:ty) => { #[reduction(overhead = { - ReductionOverhead::new(vec![ - ("num_clauses", poly!(num_clauses)), - ("num_vars", poly!(num_vars)), - ]) - })] + ReductionOverhead::new(vec![ + ("num_clauses", poly!(num_clauses)), + ("num_vars", poly!(num_vars)), + ]) + })] impl ReduceTo for KSatisfiability<$ktype> { type Result = ReductionKSATToSAT<$ktype>; diff --git a/src/rules/unitdiskmapping/weighted.rs b/src/rules/unitdiskmapping/weighted.rs index 5fb279858..aca42a7c0 100644 --- a/src/rules/unitdiskmapping/weighted.rs +++ b/src/rules/unitdiskmapping/weighted.rs @@ -457,11 +457,7 @@ pub fn map_weights(result: &MappingResult, source_weights: &[f64]) -> Vec { ); // Start with base weights from grid nodes - let mut weights: Vec = result - .node_weights - .iter() - .map(|&w| w as f64) - .collect(); + let mut weights: Vec = result.node_weights.iter().map(|&w| w as f64).collect(); // Get center locations for each original vertex let centers = trace_centers(result); diff --git a/src/topology/kings_subgraph.rs b/src/topology/kings_subgraph.rs index 49b557303..3f5effa3a 100644 --- a/src/topology/kings_subgraph.rs +++ b/src/topology/kings_subgraph.rs @@ -93,7 +93,9 @@ impl Graph for KingsSubgraph { return Vec::new(); } (0..self.positions.len()) - .filter(|&u| u != v && Self::distance(self.positions[v], self.positions[u]) < KINGS_RADIUS) + .filter(|&u| { + u != v && Self::distance(self.positions[v], self.positions[u]) < KINGS_RADIUS + }) .collect() } } diff --git a/src/unit_tests/graph_models.rs b/src/unit_tests/graph_models.rs index 1d96b1d63..eb213123d 100644 --- a/src/unit_tests/graph_models.rs +++ b/src/unit_tests/graph_models.rs @@ -169,7 +169,11 @@ mod maximum_independent_set { #[test] fn test_with_custom_weights() { - let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem = MaximumIndependentSet::::with_weights( + 3, + vec![(0, 1)], + vec![5, 10, 15], + ); assert_eq!(problem.weights().to_vec(), vec![5, 
10, 15]); } diff --git a/src/unit_tests/models/graph/maximum_independent_set.rs b/src/unit_tests/models/graph/maximum_independent_set.rs index 4daed6bf8..2e15b501f 100644 --- a/src/unit_tests/models/graph/maximum_independent_set.rs +++ b/src/unit_tests/models/graph/maximum_independent_set.rs @@ -69,7 +69,8 @@ fn test_edges() { #[test] fn test_with_custom_weights() { - let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem = + MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); assert_eq!(problem.weights().to_vec(), vec![5, 10, 15]); } diff --git a/src/unit_tests/models/graph/minimum_dominating_set.rs b/src/unit_tests/models/graph/minimum_dominating_set.rs index dc77d41bb..da5c204e0 100644 --- a/src/unit_tests/models/graph/minimum_dominating_set.rs +++ b/src/unit_tests/models/graph/minimum_dominating_set.rs @@ -80,8 +80,7 @@ fn test_is_dominating_set_wrong_len() { #[test] fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = - MinimumDominatingSet::::from_graph(graph, vec![1, 2, 3]); + let problem = MinimumDominatingSet::::from_graph(graph, vec![1, 2, 3]); assert_eq!(problem.graph().num_vertices(), 3); assert_eq!(problem.weights(), &[1, 2, 3]); } diff --git a/src/unit_tests/models/satisfiability/ksat.rs b/src/unit_tests/models/satisfiability/ksat.rs index fe025f978..e5c4cf2e2 100644 --- a/src/unit_tests/models/satisfiability/ksat.rs +++ b/src/unit_tests/models/satisfiability/ksat.rs @@ -186,9 +186,9 @@ fn test_kn_creation() { let problem = KSatisfiability::::new( 3, vec![ - CNFClause::new(vec![1, 2, 3]), // 3 literals - CNFClause::new(vec![-1, -2]), // 2 literals - CNFClause::new(vec![1]), // 1 literal + CNFClause::new(vec![1, 2, 3]), // 3 literals + CNFClause::new(vec![-1, -2]), // 2 literals + CNFClause::new(vec![1]), // 1 literal ], ); assert_eq!(problem.num_vars(), 3); diff --git a/src/unit_tests/rules/graph.rs b/src/unit_tests/rules/graph.rs index 
5870cc6ed..0e51f0f96 100644 --- a/src/unit_tests/rules/graph.rs +++ b/src/unit_tests/rules/graph.rs @@ -1180,7 +1180,9 @@ fn test_filter_redundant_base_nodes() { filter_redundant_base_nodes(&mut node_set); assert_eq!(node_set.len(), 2); - assert!(!node_set.iter().any(|(name, v)| name == "MIS" && v.is_empty())); + assert!(!node_set + .iter() + .any(|(name, v)| name == "MIS" && v.is_empty())); assert!(node_set.iter().any(|(name, _)| name == "QUBO")); } @@ -1202,10 +1204,7 @@ fn test_classify_problem_category() { classify_problem_category("problemreductions::models::optimization::qubo"), "optimization" ); - assert_eq!( - classify_problem_category("unknown::path"), - "other" - ); + assert_eq!(classify_problem_category("unknown::path"), "other"); } #[test] diff --git a/src/unit_tests/rules/unitdiskmapping/triangular/mapping.rs b/src/unit_tests/rules/unitdiskmapping/triangular/mapping.rs index a95086883..104aff3ae 100644 --- a/src/unit_tests/rules/unitdiskmapping/triangular/mapping.rs +++ b/src/unit_tests/rules/unitdiskmapping/triangular/mapping.rs @@ -6,10 +6,7 @@ fn test_map_weighted_basic() { let result = map_weighted(3, &edges); assert!(!result.positions.is_empty()); - assert!(matches!( - result.kind, - GridKind::Triangular - )); + assert!(matches!(result.kind, GridKind::Triangular)); } #[test] diff --git a/src/unit_tests/rules/unitdiskmapping/triangular/mod.rs b/src/unit_tests/rules/unitdiskmapping/triangular/mod.rs index 62b286b66..8c099061a 100644 --- a/src/unit_tests/rules/unitdiskmapping/triangular/mod.rs +++ b/src/unit_tests/rules/unitdiskmapping/triangular/mod.rs @@ -13,10 +13,7 @@ fn test_map_graph_triangular() { let result = map_graph_triangular(3, &edges); assert!(!result.positions.is_empty()); - assert!(matches!( - result.kind, - GridKind::Triangular - )); + assert!(matches!(result.kind, GridKind::Triangular)); } #[test] diff --git a/src/unit_tests/unitdiskmapping_algorithms/gadgets_ground_truth.rs 
b/src/unit_tests/unitdiskmapping_algorithms/gadgets_ground_truth.rs index 726560416..00fdb3a49 100644 --- a/src/unit_tests/unitdiskmapping_algorithms/gadgets_ground_truth.rs +++ b/src/unit_tests/unitdiskmapping_algorithms/gadgets_ground_truth.rs @@ -288,14 +288,22 @@ fn test_unweighted_square_danglingleg() { fn test_triangular_cross_false() { let gt = load_ground_truth(); let map = to_map(>.triangular); - check_gadget!("TriCross_false", WeightedTriCross::, map["TriCross_false"]); + check_gadget!( + "TriCross_false", + WeightedTriCross::, + map["TriCross_false"] + ); } #[test] fn test_triangular_cross_true() { let gt = load_ground_truth(); let map = to_map(>.triangular); - check_gadget!("TriCross_true", WeightedTriCross::, map["TriCross_true"]); + check_gadget!( + "TriCross_true", + WeightedTriCross::, + map["TriCross_true"] + ); } #[test] @@ -544,9 +552,24 @@ test_rotated!(test_rotated_branch_rot2, KsgBranch, 2, "Branch_rot2"); test_rotated!(test_rotated_branch_rot3, KsgBranch, 3, "Branch_rot3"); // BranchFix rotations -test_rotated!(test_rotated_branchfix_rot1, KsgBranchFix, 1, "BranchFix_rot1"); -test_rotated!(test_rotated_branchfix_rot2, KsgBranchFix, 2, "BranchFix_rot2"); -test_rotated!(test_rotated_branchfix_rot3, KsgBranchFix, 3, "BranchFix_rot3"); +test_rotated!( + test_rotated_branchfix_rot1, + KsgBranchFix, + 1, + "BranchFix_rot1" +); +test_rotated!( + test_rotated_branchfix_rot2, + KsgBranchFix, + 2, + "BranchFix_rot2" +); +test_rotated!( + test_rotated_branchfix_rot3, + KsgBranchFix, + 3, + "BranchFix_rot3" +); // BranchFixB rotations test_rotated!( @@ -705,8 +728,18 @@ test_reflected!( ); // Branch reflections -test_reflected!(test_reflected_branch_x, KsgBranch, Mirror::X, "Branch_ref_x"); -test_reflected!(test_reflected_branch_y, KsgBranch, Mirror::Y, "Branch_ref_y"); +test_reflected!( + test_reflected_branch_x, + KsgBranch, + Mirror::X, + "Branch_ref_x" +); +test_reflected!( + test_reflected_branch_y, + KsgBranch, + Mirror::Y, + "Branch_ref_y" +); 
test_reflected!( test_reflected_branch_diag, KsgBranch, diff --git a/src/unit_tests/unitdiskmapping_algorithms/julia_comparison.rs b/src/unit_tests/unitdiskmapping_algorithms/julia_comparison.rs index 3d1605a34..9157e49b3 100644 --- a/src/unit_tests/unitdiskmapping_algorithms/julia_comparison.rs +++ b/src/unit_tests/unitdiskmapping_algorithms/julia_comparison.rs @@ -158,8 +158,7 @@ fn compare_square_unweighted(name: &str) { // Assertions assert_eq!( - julia.grid_size, - rust_result.grid_dimensions, + julia.grid_size, rust_result.grid_dimensions, "{} square: Grid size mismatch", name ); @@ -395,8 +394,7 @@ fn compare_triangular(name: &str) { // Assertions assert_eq!( - julia.grid_size, - rust_result.grid_dimensions, + julia.grid_size, rust_result.grid_dimensions, "{} triangular: Grid size mismatch", name ); diff --git a/src/unit_tests/unitdiskmapping_algorithms/mapping_result.rs b/src/unit_tests/unitdiskmapping_algorithms/mapping_result.rs index d3ebed65c..d7ccea211 100644 --- a/src/unit_tests/unitdiskmapping_algorithms/mapping_result.rs +++ b/src/unit_tests/unitdiskmapping_algorithms/mapping_result.rs @@ -434,10 +434,10 @@ fn test_full_pipeline_triangular_house() { #[test] fn test_apply_and_unapply_gadget() { + use crate::rules::unitdiskmapping::ksg::KsgTurn; use crate::rules::unitdiskmapping::{ apply_gadget, unapply_gadget, CellState, MappingGrid, Pattern, }; - use crate::rules::unitdiskmapping::ksg::KsgTurn; // Create a small grid with spacing 4 let mut grid = MappingGrid::new(10, 10, 4); @@ -467,8 +467,8 @@ fn test_apply_and_unapply_gadget() { #[test] fn test_apply_gadget_at_various_positions() { - use crate::rules::unitdiskmapping::{apply_gadget, CellState, MappingGrid, Pattern}; use crate::rules::unitdiskmapping::ksg::KsgTurn; + use crate::rules::unitdiskmapping::{apply_gadget, CellState, MappingGrid, Pattern}; let mut grid = MappingGrid::new(20, 20, 4); let turn = KsgTurn; diff --git a/src/unit_tests/unitdiskmapping_algorithms/weighted.rs 
b/src/unit_tests/unitdiskmapping_algorithms/weighted.rs index 838cb3729..17189cc65 100644 --- a/src/unit_tests/unitdiskmapping_algorithms/weighted.rs +++ b/src/unit_tests/unitdiskmapping_algorithms/weighted.rs @@ -81,11 +81,7 @@ fn test_map_weights_one() { assert!(mapped.iter().all(|&w| w > 0.0)); // Mapped weights should equal base weights plus original weights at centers - let base_total: f64 = result - .node_weights - .iter() - .map(|&w| w as f64) - .sum(); + let base_total: f64 = result.node_weights.iter().map(|&w| w as f64).sum(); let original_total: f64 = weights.iter().sum(); let mapped_total: f64 = mapped.iter().sum(); @@ -285,11 +281,7 @@ fn test_map_weights_preserves_total_weight() { let mapped = map_weights(&result, &original_weights); // Sum of mapped weights should be base_sum + original_sum - let base_sum: f64 = result - .node_weights - .iter() - .map(|&w| w as f64) - .sum(); + let base_sum: f64 = result.node_weights.iter().map(|&w| w as f64).sum(); let original_sum: f64 = original_weights.iter().sum(); let mapped_sum: f64 = mapped.iter().sum(); @@ -317,18 +309,8 @@ fn test_trace_centers_consistency_with_config_back() { // Each center should be within grid bounds let (rows, cols) = { - let max_row = result - .positions - .iter() - .map(|&(r, _)| r) - .max() - .unwrap_or(0); - let max_col = result - .positions - .iter() - .map(|&(_, c)| c) - .max() - .unwrap_or(0); + let max_row = result.positions.iter().map(|&(r, _)| r).max().unwrap_or(0); + let max_col = result.positions.iter().map(|&(_, c)| c).max().unwrap_or(0); (max_row as usize + 1, max_col as usize + 1) }; diff --git a/tests/suites/integration.rs b/tests/suites/integration.rs index 80a45bc5e..3db133681 100644 --- a/tests/suites/integration.rs +++ b/tests/suites/integration.rs @@ -401,7 +401,11 @@ mod weighted_problems { #[test] fn test_weighted_independent_set() { - let problem = MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![10, 1, 1]); + let problem = 
MaximumIndependentSet::::with_weights( + 3, + vec![(0, 1)], + vec![10, 1, 1], + ); let solver = BruteForce::new(); let solutions = solver.find_all_best(&problem); @@ -418,7 +422,11 @@ mod weighted_problems { #[test] fn test_weighted_vertex_cover() { - let problem = MinimumVertexCover::::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 10, 1]); + let problem = MinimumVertexCover::::with_weights( + 3, + vec![(0, 1), (1, 2)], + vec![1, 10, 1], + ); let solver = BruteForce::new(); let solutions = solver.find_all_best(&problem); diff --git a/tests/suites/reductions.rs b/tests/suites/reductions.rs index 1e3285ddc..572089dc5 100644 --- a/tests/suites/reductions.rs +++ b/tests/suites/reductions.rs @@ -74,7 +74,10 @@ mod is_vc_reductions { let final_is = back_to_is.target_problem(); // Should have same structure - assert_eq!(final_is.graph().num_vertices(), original.graph().num_vertices()); + assert_eq!( + final_is.graph().num_vertices(), + original.graph().num_vertices() + ); assert_eq!(final_is.graph().num_edges(), original.graph().num_edges()); // Solve the final problem @@ -792,7 +795,10 @@ mod io_tests { let restored: MaximumIndependentSet = from_json(&json).unwrap(); // Should have same structure - assert_eq!(restored.graph().num_vertices(), original.graph().num_vertices()); + assert_eq!( + restored.graph().num_vertices(), + original.graph().num_vertices() + ); assert_eq!(restored.graph().num_edges(), original.graph().num_edges()); // Reduce the restored problem From c3257ca59d84b7e54f9c4aedb963ec2f7ba57614 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 22:49:54 +0800 Subject: [PATCH 13/15] docs: polish design.md and update diagrams Rewrite design.md with clearer structure: add variant system section, lattice diagrams, and improved trait hierarchy visuals. Update Makefile diagram target to support --root flag. 
Co-Authored-By: Claude Opus 4.6 --- Makefile | 11 +- .../2026-02-15-design-md-polish-design.md | 80 ++ .../plans/2026-02-15-design-md-polish-impl.md | 651 ++++++++++++++ .../2026-02-15-issue70-refactoring-design.md | 137 +++ .../2026-02-15-issue70-refactoring-impl.md | 823 ++++++++++++++++++ docs/src/design.md | 443 ++++++---- docs/src/static/lattices-dark.svg | 687 +++++++++++++++ docs/src/static/lattices.svg | 687 +++++++++++++++ docs/src/static/lattices.typ | 212 +++++ docs/src/static/trait-hierarchy-dark.svg | 388 ++++++--- docs/src/static/trait-hierarchy.svg | 388 ++++++--- docs/src/static/trait-hierarchy.typ | 33 +- docs/src/static/variant-hierarchy-dark.svg | 182 ++-- docs/src/static/variant-hierarchy.svg | 182 ++-- docs/src/static/variant-hierarchy.typ | 2 +- 15 files changed, 4252 insertions(+), 654 deletions(-) create mode 100644 docs/plans/2026-02-15-design-md-polish-design.md create mode 100644 docs/plans/2026-02-15-design-md-polish-impl.md create mode 100644 docs/plans/2026-02-15-issue70-refactoring-design.md create mode 100644 docs/plans/2026-02-15-issue70-refactoring-impl.md create mode 100644 docs/src/static/lattices-dark.svg create mode 100644 docs/src/static/lattices.svg create mode 100644 docs/src/static/lattices.typ diff --git a/Makefile b/Makefile index 1d250a09b..08cb517af 100644 --- a/Makefile +++ b/Makefile @@ -56,13 +56,14 @@ doc: cp -r target/doc docs/book/api # Generate SVG diagrams from Typst sources (light + dark themes) -TYPST_DIAGRAMS := $(wildcard docs/src/static/*.typ) +TYPST_DOC_DIAGRAMS := $(wildcard docs/src/static/*.typ) +TYPST_PAPER_DIAGRAMS := $(wildcard docs/paper/static/*.typ) diagrams: - @for src in $(TYPST_DIAGRAMS); do \ + @for src in $(TYPST_DOC_DIAGRAMS); do \ base=$$(basename $$src .typ); \ - echo "Compiling $$base..."; \ - typst compile $$src --input dark=false docs/src/static/$$base.svg; \ - typst compile $$src --input dark=true docs/src/static/$$base-dark.svg; \ + echo "Compiling $$base (doc)..."; \ + typst compile 
$$src --root=. --input dark=false docs/src/static/$$base.svg; \ + typst compile $$src --root=. --input dark=true docs/src/static/$$base-dark.svg; \ done # Build and serve mdBook with API docs diff --git a/docs/plans/2026-02-15-design-md-polish-design.md b/docs/plans/2026-02-15-design-md-polish-design.md new file mode 100644 index 000000000..8ed50574e --- /dev/null +++ b/docs/plans/2026-02-15-design-md-polish-design.md @@ -0,0 +1,80 @@ +# Design: Polish design.md + +## Context + +The current `docs/src/design.md` has empty sections (Overhead Evaluation, Reduction Execution), outdated content that doesn't reflect recent variant system changes, unclear section flow, and no unifying narrative for contributors. + +## Audience + +Library contributors who need to understand the internals to add new problems and reductions. + +## Approach + +**"Follow the Data"** — organize sections by the lifecycle of a reduction, from problem definition through graph construction to path resolution and execution. No overlap with `getting-started.md`. + +## Proposed Structure + +### 1. Module Overview (keep existing) +- Keep diagram + table +- Update opening line: "This guide covers the library internals for contributors." + +### 2. Problem Model (renamed from "Models") +- Keep `Problem`, `OptimizationProblem`, `SatisfactionProblem` explanations +- Keep trait hierarchy diagram +- Minor tightening of examples + +### 3. Variant System (expanded from "Problem variants") +- Keep concept intro, variant-hierarchy diagram, lattices diagram +- Add `VariantParam` trait definition (`CATEGORY`, `VALUE`, `PARENT_VALUE`) +- Add `impl_variant_param!` macro — 4 forms (root, with parent, KValue root, KValue with parent) +- Add `CastToParent` trait — runtime conversion for natural casts +- Keep `variant_params!` macro example + +### 4. 
Reduction Rules (restructured) +- Keep `ReductionResult` struct + trait pattern +- Keep `ReduceTo` impl with `#[reduction]` macro +- Add: what `#[reduction]` expands to (the `inventory::submit!(ReductionEntry { ... })` call) +- Add: `ReductionOverhead` declaration with `poly!` macro example + +### 5. Reduction Graph (renamed from "Reduction") +- Construction: `ReductionGraph::new()` iterates inventory entries, builds `petgraph::DiGraph` + variant hierarchy with transitive closure +- Natural edges: auto-generated between same-name variant nodes via subtype check, identity overhead +- JSON export: `to_json()` produces `ReductionGraphJson` + +### 6. Path Finding (keep and extend) +- Keep `resolve_path` algorithm steps and examples (MIS casting, KSat disambiguation) +- Keep `ResolvedPath` struct +- Add `find_cheapest_path` with Dijkstra + set-theoretic validation +- Add `PathCostFn` trait and built-in cost functions: `Minimize`, `MinimizeWeighted`, `MinimizeMax`, `MinimizeLexicographic`, `MinimizeSteps`, `CustomCost` + +### 7. Overhead Evaluation (fill empty section) +- `ProblemSize`: named size components +- `Polynomial` / `Monomial`: overhead formula representation + `poly!` macro +- `ReductionOverhead::evaluate_output_size(input) -> ProblemSize` +- Composition: chain output of step N as input of step N+1 +- Example: multi-step size propagation + +### 8. Reduction Execution (fill empty section) +- `ResolvedPath` is a plan, not an executor +- Dispatch model: `Reduction` → `reduce_to()`, `NaturalCast` → `cast_to_parent()` +- Solution extraction: walk chain in reverse, `extract_solution()` at each Reduction step, natural casts preserve solution +- Design rationale: concrete types (no `dyn Problem`) for type safety + +### 9. 
Solvers (expanded) +- `BruteForce`: enumerate all configs from `dims()`, `find_best`/`find_all_best`, `find_satisfying`/`find_all_satisfying` +- `ILPSolver`: feature-gated (`ilp`), HiGHS via `good_lp`, `solve_reduced()` +- Note: primarily for testing/verification + +### 10. JSON Serialization (keep, minor polish) + +### 11. Contributing (keep as-is) + +## Removals +- The "Reduction" H2 header (line 132) — content redistributed into sections 5-8 +- Duplicated `#[reduction]` example in "Reduction Graph" subsection + +## Additions +- Sections 7 and 8 get real content +- Variant system gets `VariantParam`/`impl_variant_param!`/`CastToParent` machinery +- Path finding gets `find_cheapest_path` + `PathCostFn` +- Overhead gets `Polynomial`/`poly!` + size propagation diff --git a/docs/plans/2026-02-15-design-md-polish-impl.md b/docs/plans/2026-02-15-design-md-polish-impl.md new file mode 100644 index 000000000..b551151ca --- /dev/null +++ b/docs/plans/2026-02-15-design-md-polish-impl.md @@ -0,0 +1,651 @@ +# Polish design.md Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Rewrite `docs/src/design.md` following the "Follow the Data" structure: fill empty sections, update outdated content, and create a coherent contributor-oriented narrative. + +**Architecture:** Single-file rewrite of `docs/src/design.md`. Organized as 11 sections tracing the lifecycle of a reduction. All diagrams are preserved as-is. No code changes, only documentation. + +**Tech Stack:** Markdown (mdBook), existing SVG diagrams, code snippets from the Rust source. 
+ +--- + +### Task 1: Rewrite sections 1-2 (Module Overview + Problem Model) + +**Files:** +- Modify: `docs/src/design.md:1-44` + +**Step 1: Replace lines 1-44 with updated Module Overview and Problem Model** + +Replace the entire file content from line 1 through line 44 (end of trait hierarchy diagram) with: + +```markdown +# Design + +This guide covers the library internals for contributors. + +## Module Overview + +
+ +![Module Overview](static/module-overview.svg) + +
+
+ +![Module Overview](static/module-overview-dark.svg) + +
+ +| Module | Purpose | +|--------|---------| +| [`src/models/`](#problem-model) | Problem type implementations (SAT, Graph, Set, Optimization) | +| [`src/rules/`](#reduction-rules) | Reduction rules with `ReduceTo` implementations | +| [`src/registry/`](#reduction-graph) | Compile-time reduction graph metadata | +| [`src/solvers/`](#solvers) | BruteForce and ILP solvers | +| `src/traits.rs` | Core `Problem` and `OptimizationProblem` traits (see [Problem Model](#problem-model)) | +| `src/types.rs` | Shared types: `SolutionSize`, `Direction`, `ProblemSize` (see [Problem Model](#problem-model)) | +| `src/variant.rs` | Variant parameter system (see [Variant System](#variant-system)) | + +## Problem Model + +Every problem implements `Problem`. Optimization problems additionally implement `OptimizationProblem`; satisfaction problems implement `SatisfactionProblem`. + +- **`Problem`** — the base trait. Every problem declares a `NAME` (e.g., `"MaximumIndependentSet"`). The solver explores the configuration space defined by `dims()` and scores each configuration with `evaluate()`. For example, a 4-vertex MIS has `dims() = [2, 2, 2, 2]` (each vertex is selected or not); `evaluate(&[1, 0, 1, 0])` returns `Valid(2)` if vertices 0 and 2 form an independent set, or `Invalid` if they share an edge. +- **`OptimizationProblem`** — extends `Problem` with a comparable `Value` type and a `direction()` (`Maximize` or `Minimize`). +- **`SatisfactionProblem`** — constrains `Metric = bool`: `true` if all constraints are satisfied, `false` otherwise. + +
+ +![Trait Hierarchy](static/trait-hierarchy.svg) + +
+
+ +![Trait Hierarchy](static/trait-hierarchy-dark.svg) + +
+``` + +**Step 2: Verify the mdbook builds** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` +Expected: Build succeeds, no broken links. + +**Step 3: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: rewrite design.md sections 1-2 (module overview + problem model)" +``` + +--- + +### Task 2: Rewrite section 3 (Variant System) + +**Files:** +- Modify: `docs/src/design.md` — replace the old "Problem variants" subsection (lines 46-93 in the original) with the new "Variant System" section. + +**Step 1: Write the Variant System section** + +This section replaces everything from `### Problem variants` through the end of the `variant_params!` code block. It should appear immediately after the trait hierarchy diagram. + +```markdown +## Variant System + +A single problem name like `MaximumIndependentSet` can have multiple **variants** — carrying weights on vertices, or defined on a grid. Some variants are more specific than others: the grid graph is a special case of the unit-disk graph, which is a special case of the simple graph. + +In **set** language, variants form **subsets**: independent sets on grid graphs are a subset of independent sets on unit-disk graphs. The reduction from a more specific variant to a less specific one is a **natural reduction** (identity mapping). To avoid repeating the same rule for each variant pair, the library provides an auto-casting mechanism. + +
+ +![Variant Hierarchy](static/variant-hierarchy.svg) + +
+
+ +![Variant Hierarchy](static/variant-hierarchy-dark.svg) + +
+ +Arrows indicate the **subset** (subtype) direction. Variant types fall into three categories: + +- **Graph type** — e.g., `SimpleGraph`, `UnitDiskGraph`, `KingsSubgraph`. Available graph variants: +- **Weight type** — `One` (unweighted), `i32`, `f64`. +- **K value** — e.g., `K3` for 3-SAT, `KN` for arbitrary K. + +
+ +![Lattices](static/lattices.svg) + +
+
+ +![Lattices](static/lattices-dark.svg) + +
+ +### VariantParam trait + +Each variant parameter type implements `VariantParam`, which declares its category, value, and optional parent: + +```rust +pub trait VariantParam: 'static { + const CATEGORY: &'static str; // e.g., "graph", "weight", "k" + const VALUE: &'static str; // e.g., "SimpleGraph", "i32" + const PARENT_VALUE: Option<&'static str>; // None for root types +} +``` + +Types with a parent also implement `CastToParent`, providing the runtime conversion for natural casts: + +```rust +pub trait CastToParent: VariantParam { + type Parent: VariantParam; + fn cast_to_parent(&self) -> Self::Parent; +} +``` + +### Registration with `impl_variant_param!` + +The `impl_variant_param!` macro implements `VariantParam` (and optionally `CastToParent` / `KValue`) and registers a `VariantTypeEntry` via `inventory` for compile-time hierarchy discovery: + +```rust +// Root type (no parent): +impl_variant_param!(SimpleGraph, "graph"); + +// Type with parent (cast closure required): +impl_variant_param!(UnitDiskGraph, "graph", + parent: SimpleGraph, + cast: |g| SimpleGraph::new(g.num_vertices(), g.edges())); + +// K root (arbitrary K): +impl_variant_param!(KN, "k", k: None); + +// Specific K with parent: +impl_variant_param!(K3, "k", parent: KN, cast: |_| KN, k: Some(3)); +``` + +At startup, the `ReductionGraph` collects all `VariantTypeEntry` registrations and computes the **transitive closure** of the parent relationships, so `KingsSubgraph` is recognized as a subtype of `SimpleGraph` even though it declares `UnitDiskGraph` as its direct parent. 
+ +### Composing `Problem::variant()` + +The `variant_params!` macro composes the `Problem::variant()` body from type parameter names: + +```rust +// MaximumIndependentSet<G, W> +fn variant() -> Vec<(&'static str, &'static str)> { + crate::variant_params![G, W] + // e.g., MaximumIndependentSet<UnitDiskGraph, One> + // → vec![("graph", "UnitDiskGraph"), ("weight", "One")] +} +``` +``` + +**Step 2: Verify mdbook builds** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` + +**Step 3: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: rewrite design.md section 3 (variant system)" +``` + +--- + +### Task 3: Rewrite section 4 (Reduction Rules) + +**Files:** +- Modify: `docs/src/design.md` — replace the old "Reduction Rules" section. + +**Step 1: Write the Reduction Rules section** + +This replaces the old section (lines 95-129 in the original). Place it after the Variant System section. + +```markdown +## Reduction Rules + +A reduction requires two pieces: a **result struct** and a **`ReduceTo` impl**. + +### Result struct + +Holds the target problem and the logic to map solutions back: + +```rust +#[derive(Clone)] +pub struct ReductionISToVC { + target: MinimumVertexCover<SimpleGraph, i32>, +} + +impl ReductionResult for ReductionISToVC { + type Source = MaximumIndependentSet<SimpleGraph, i32>; + type Target = MinimumVertexCover<SimpleGraph, i32>; + + fn target_problem(&self) -> &Self::Target { &self.target } + fn extract_solution(&self, target_sol: &[usize]) -> Vec<usize> { + target_sol.iter().map(|&x| 1 - x).collect() // complement + } +} +``` + +### `ReduceTo` impl with the `#[reduction]` macro + +```rust +#[reduction( + overhead = { + ReductionOverhead::new(vec![ + ("num_vertices", poly!(num_vertices)), + ("num_edges", poly!(num_edges)), + ]) + } +)] +impl ReduceTo<MinimumVertexCover<SimpleGraph, i32>> + for MaximumIndependentSet<SimpleGraph, i32> +{ + type Result = ReductionISToVC; + fn reduce_to(&self) -> Self::Result { /* ... 
*/ } +} +``` + +### What the macro generates + +The `#[reduction]` attribute expands to the original `impl` block plus an `inventory::submit!` call: + +```rust +inventory::submit! { + ReductionEntry { + source_name: "MaximumIndependentSet", + target_name: "MinimumVertexCover", + source_variant_fn: || <MaximumIndependentSet<SimpleGraph, i32> as Problem>::variant(), + target_variant_fn: || <MinimumVertexCover<SimpleGraph, i32> as Problem>::variant(), + overhead_fn: || ReductionOverhead::new(vec![ + ("num_vertices", poly!(num_vertices)), + ("num_edges", poly!(num_edges)), + ]), + module_path: module_path!(), + } +} +``` + +This `ReductionEntry` is collected at compile time by `inventory`, making the reduction discoverable by the `ReductionGraph` without any manual registration. + +See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) for the full implementation guide. +``` + +**Step 2: Verify mdbook builds** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` + +**Step 3: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: rewrite design.md section 4 (reduction rules)" +``` + +--- + +### Task 4: Rewrite section 5 (Reduction Graph) + +**Files:** +- Modify: `docs/src/design.md` — replace the old "Reduction" H2 and "Reduction Graph" H3 (lines 132-149 in the original). + +**Step 1: Write the Reduction Graph section** + +Place after Reduction Rules. This replaces the old "Reduction" section header and its "Reduction Graph" subsection. + +```markdown +## Reduction Graph + +The `ReductionGraph` is the central runtime data structure. It collects all registered reductions and variant hierarchies to enable path finding and overhead evaluation. + +### Construction + +`ReductionGraph::new()` performs two `inventory` scans: + +1. **`ReductionEntry` items** — each registered reduction becomes a directed edge in a `petgraph::DiGraph`. Nodes are type-erased base names (e.g., `"MaxCut"`, not `"MaxCut<SimpleGraph, f64>"`), so path finding works regardless of type parameters. 
+ +2. **`VariantTypeEntry` items** — parent declarations are collected per category and transitively closed, building a `variant_hierarchy` nested map (category → variant value → set of transitive ancestors). + +### Natural edges + +When exporting the graph (via `to_json()`), the graph auto-generates **natural edges** between same-name variant nodes. A natural edge from variant A to variant B exists when every field of A is at least as restrictive as B's (i.e., A is a subtype of B). Natural edges carry **identity overhead** — the problem size is unchanged. + +For example, `MaximumIndependentSet{KingsSubgraph, i32}` gets a natural edge to `MaximumIndependentSet{SimpleGraph, i32}` because `KingsSubgraph` is a subtype of `SimpleGraph`. + +### JSON export + +`ReductionGraph::to_json()` produces a `ReductionGraphJson` with fully expanded variant nodes and both reduction + natural edges: + +- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges +- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type +``` + +**Step 2: Verify mdbook builds** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` + +**Step 3: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: rewrite design.md section 5 (reduction graph)" +``` + +--- + +### Task 5: Rewrite section 6 (Path Finding) + +**Files:** +- Modify: `docs/src/design.md` — replace the old "Path Finding" H3 (lines 153-208 in the original). + +**Step 1: Write the Path Finding section** + +Place after Reduction Graph. Keep the existing resolve_path content and examples, add the Dijkstra/cost-function content. + +```markdown +## Path Finding + +Path finding operates at two levels: **name-level** paths (which problem types to traverse) and **variant-level** resolved paths (with concrete variant and overhead at each step). + +### Name-level paths + +`find_paths_by_name(src, dst)` enumerates all simple paths in the type-erased graph. 
`find_shortest_path_by_name()` returns the one with fewest hops. + +For cost-aware routing, `find_cheapest_path()` uses **Dijkstra's algorithm** with set-theoretic validation: + +```rust +pub fn find_cheapest_path<C: PathCostFn>( + &self, + source: (&str, &str), // (problem_name, graph_type) + target: (&str, &str), + input_size: &ProblemSize, + cost_fn: &C, +) -> Option<ResolvedPath> +``` + +At each edge, Dijkstra checks `rule_applicable()` — the source graph must be a subtype of the rule's expected source, and the rule's target graph must be a subtype of the desired target. This ensures the chosen path respects variant constraints. + +### Cost functions + +The `PathCostFn` trait computes edge cost from overhead and current problem size: + +```rust +pub trait PathCostFn { + fn edge_cost(&self, overhead: &ReductionOverhead, current_size: &ProblemSize) -> f64; +} +``` + +Built-in implementations: + +| Cost function | Strategy | +|--------------|----------| +| `Minimize("field")` | Minimize a single output field | +| `MinimizeWeighted([(field, w)])` | Weighted sum of output fields | +| `MinimizeMax([fields])` | Minimize the maximum of fields | +| `MinimizeLexicographic([fields])` | Lexicographic: minimize first, break ties with rest | +| `MinimizeSteps` | Minimize number of hops (unit edge cost) | +| `CustomCost(closure)` | User-defined cost function | + +### Variant-level resolution: `resolve_path` + +Given a name-level `ReductionPath`, `resolve_path` threads variant state through each step to produce a `ResolvedPath`: + +```rust +pub fn resolve_path( + &self, + path: &ReductionPath, // name-level plan + source_variant: &BTreeMap<String, String>, // caller's concrete variant + target_variant: &BTreeMap<String, String>, // desired target variant +) -> Option<ResolvedPath> +``` + +The algorithm: + +1. **Find candidates** — all `ReductionEntry` items matching `(src_name, dst_name)`. +2. **Filter compatible** — keep entries where the current variant is equal-or-more-specific than the entry's source variant on every axis. +3. 
**Pick most specific** — among compatible entries, choose the tightest fit. +4. **Insert natural cast** — if the current variant is more specific than the chosen entry's source, emit a `NaturalCast` edge. +5. **Advance** — update current variant to the entry's target variant, emit a `Reduction` edge with the correct overhead. + +The result is a `ResolvedPath`: + +```rust +pub struct ResolvedPath { + pub steps: Vec<(String, BTreeMap<String, String>)>, // (name, variant) at each node + pub edges: Vec<EdgeKind>, // Reduction{overhead} | NaturalCast +} +``` + +#### Example: MIS on KingsSubgraph to MinimumVertexCover + +Resolving `MIS(KingsSubgraph, i32) → VC(SimpleGraph, i32)` through name-path `["MIS", "VC"]`: + +``` +steps: MIS{KingsSubgraph,i32} → MIS{SimpleGraph,i32} → VC{SimpleGraph,i32} +edges: NaturalCast Reduction{overhead} +``` + +The resolver finds that the `MIS → VC` reduction expects `SimpleGraph`, so it inserts a `NaturalCast` to relax `KingsSubgraph` to `SimpleGraph` first. + +#### Example: KSat Disambiguation + +Resolving `KSat(k=3) → QUBO` through name-path `["KSatisfiability", "QUBO"]`: + +- Candidates: `KSat<2> → QUBO` (overhead: `num_vars`) and `KSat<3> → QUBO` (overhead: `num_vars + num_clauses`). +- Filter with `k=3`: only `KSat<3>` is compatible (`3` is not a subtype of `2`). +- Result: the k=3-specific overhead is returned. +``` + +**Step 2: Verify mdbook builds** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` + +**Step 3: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: rewrite design.md section 6 (path finding)" +``` + +--- + +### Task 6: Write sections 7-8 (Overhead Evaluation + Reduction Execution) + +**Files:** +- Modify: `docs/src/design.md` — replace the empty "Overhead Evaluation" and "Reduction Execution" headers (lines 210-212 in the original). + +**Step 1: Write the Overhead Evaluation and Reduction Execution sections** + +Place after Path Finding. 
+ +```markdown +## Overhead Evaluation + +Each reduction declares how the output problem size relates to the input size, expressed as polynomials. + +### ProblemSize + +A `ProblemSize` holds named size components — the dimensions that characterize a problem instance: + +```rust +let size = ProblemSize::new(vec![("num_vertices", 10), ("num_edges", 15)]); +assert_eq!(size.get("num_vertices"), Some(10)); +``` + +### Polynomials + +Output size formulas use `Polynomial` (a sum of `Monomial` terms). The `poly!` macro provides a concise syntax: + +```rust +poly!(num_vertices) // p(x) = num_vertices +poly!(num_vertices ^ 2) // p(x) = num_vertices² +poly!(3 * num_edges) // p(x) = 3 · num_edges +poly!(num_vertices * num_edges) // p(x) = num_vertices · num_edges +``` + +A `ReductionOverhead` pairs output field names with their polynomials: + +```rust +ReductionOverhead::new(vec![ + ("num_vars", poly!(num_vertices) + poly!(num_edges)), + ("num_clauses", poly!(3 * num_edges)), +]) +``` + +### Evaluating overhead + +`ReductionOverhead::evaluate_output_size(input)` substitutes input values into the polynomials and returns a new `ProblemSize`: + +``` +Input: ProblemSize { num_vertices: 10, num_edges: 15 } +Output: ProblemSize { num_vars: 25, num_clauses: 45 } +``` + +### Composing through a path + +For a multi-step reduction path, overhead composes: the output of step $N$ becomes the input of step $N+1$. Each `ResolvedPath` edge carries its own `ReductionOverhead` (or `NaturalCast` with identity overhead), so the total output size is computed by chaining `evaluate_output_size` calls through the path. + +## Reduction Execution + +A `ResolvedPath` is a **plan**, not an executor. It provides variant and overhead information at each step, but callers dispatch the actual transformations themselves. 
+ +### Dispatching steps + +Walk the `edges` array and dispatch based on `EdgeKind`: + +- **`EdgeKind::Reduction`** — call `ReduceTo::reduce_to()` on the current problem to produce a `ReductionResult`, then call `target_problem()` to get the next problem. +- **`EdgeKind::NaturalCast`** — call `CastToParent::cast_to_parent()` (for graph casts) or the equivalent weight cast. The problem data is preserved; only the type changes. + +### Extracting solutions + +After solving the final target problem, walk the chain **in reverse**: + +- At each `Reduction` edge, call `extract_solution(&target_solution)` on the corresponding `ReductionResult` to map the solution back to the source space. +- At each `NaturalCast` edge, the solution passes through unchanged (identity mapping). + +### Why concrete types (no type erasure) + +The library uses concrete types at each step rather than `dyn Problem`. This preserves full type safety and avoids boxing overhead, at the cost of requiring callers to know the types at each step. This design choice keeps the reduction pipeline zero-cost and makes the compiler verify correctness at each transformation boundary. +``` + +**Step 2: Verify mdbook builds** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` + +**Step 3: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: write design.md sections 7-8 (overhead evaluation + execution)" +``` + +--- + +### Task 7: Rewrite sections 9-11 (Solvers + JSON + Contributing) + +**Files:** +- Modify: `docs/src/design.md` — replace the old "Solvers", "JSON Serialization", and "Contributing" sections (lines 237-252 in the original). + +**Step 1: Write the Solvers, JSON Serialization, and Contributing sections** + +Place after Reduction Execution. 
+ +```markdown +## Solvers + +Solvers implement the `Solver` trait: + +```rust +pub trait Solver { + fn find_best<P: OptimizationProblem>(&self, problem: &P) -> Option<Vec<usize>>; + fn find_satisfying<P: SatisfactionProblem>(&self, problem: &P) -> Option<Vec<usize>>; +} +``` + +### BruteForce + +Enumerates every configuration in the space defined by `dims()`. Suitable for small instances (<20 variables). In addition to the `Solver` trait methods, provides: + +- `find_all_best(problem)` — returns all tied-optimal configurations. +- `find_all_satisfying(problem)` — returns all satisfying configurations. + +Primarily used for **testing and verification** of reductions via closed-loop tests. + +### ILPSolver + +Feature-gated behind `ilp`. Uses the HiGHS solver via the `good_lp` crate. Additionally provides `solve_reduced()` for problems that implement `ReduceTo` — it reduces, solves the ILP, and extracts the solution in one call. + +## JSON Serialization + +All problem types support JSON serialization via serde: + +```rust +use problemreductions::io::{to_json, from_json}; + +let json = to_json(&problem)?; +let restored: MaximumIndependentSet<SimpleGraph, i32> = from_json(&json)?; +``` + +**Exported JSON files:** +- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges +- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type + +Regenerate exports: + +```bash +cargo run --example export_graph # docs/src/reductions/reduction_graph.json (default) +cargo run --example export_graph -- output.json # custom output path +cargo run --example export_schemas # docs/src/reductions/problem_schemas.json +``` + +## Contributing + +See [Call for Contributions](./introduction.md#call-for-contributions) for the recommended issue-based workflow (no coding required). 
+``` + +**Step 2: Verify mdbook builds** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` + +**Step 3: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: rewrite design.md sections 9-11 (solvers + JSON + contributing)" +``` + +--- + +### Task 8: Update internal anchor links in module table + +**Files:** +- Modify: `docs/src/design.md` — the Module Overview table links. + +**Step 1: Verify all anchor links resolve correctly** + +Check that the `#problem-model`, `#reduction-rules`, `#reduction-graph`, `#solvers`, `#variant-system` anchors match the actual section headers. mdBook generates anchors from headers by lowercasing and replacing spaces with hyphens. + +Expected mappings: +- `## Problem Model` → `#problem-model` +- `## Variant System` → `#variant-system` +- `## Reduction Rules` → `#reduction-rules` +- `## Reduction Graph` → `#reduction-graph` +- `## Solvers` → `#solvers` + +**Step 2: Fix any broken anchors** + +If needed, update the table links in the Module Overview section. + +**Step 3: Final mdbook build** + +Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` + +**Step 4: Commit** + +```bash +git add docs/src/design.md +git commit -m "docs: fix internal anchor links in design.md" +``` diff --git a/docs/plans/2026-02-15-issue70-refactoring-design.md b/docs/plans/2026-02-15-issue70-refactoring-design.md new file mode 100644 index 000000000..de4afad03 --- /dev/null +++ b/docs/plans/2026-02-15-issue70-refactoring-design.md @@ -0,0 +1,137 @@ +# Issue #70: KISS and DRY Refactoring Design + +**Date:** 2026-02-15 +**Issue:** [#70](https://github.com/GiggleLiu/problemreductions/issues/70) + +## Scope + +Three high-impact items from the issue, selected by priority: + +1. **DRY #1** — Trim vertex-weighted graph problem API +2. **KISS #4** — Extract testable functions from `to_json()` +3. 
**DRY #2 (expanded)** — Implement real `PlanarGraph` and `BipartiteGraph` + +## Item 1: Trim Vertex-Weighted Graph Problem API + +### Problem + +Five vertex-weighted graph problems share ~65 lines of identical delegation methods each. These convenience methods (`num_vertices()`, `num_edges()`, `edges()`, etc.) duplicate `Graph` trait methods already available via `problem.graph()`. + +**Affected files:** +- `src/models/graph/maximum_independent_set.rs` +- `src/models/graph/minimum_vertex_cover.rs` +- `src/models/graph/maximum_clique.rs` +- `src/models/graph/maximal_is.rs` +- `src/models/graph/minimum_dominating_set.rs` + +### Design + +**Remove these methods from all 5 problems:** +- `num_vertices()` — callers use `problem.graph().num_vertices()` +- `num_edges()` — callers use `problem.graph().num_edges()` +- `edges()` — callers use `problem.graph().edges()` +- `has_edge(u, v)` — callers use `problem.graph().has_edge(u, v)` +- `set_weights()` — 0 external call sites +- `from_graph_unit_weights()` — 0 external call sites +- `weights()` (the cloning version) — replaced by renaming `weights_ref()` + +**Rename:** +- `weights_ref() -> &Vec` becomes `weights() -> &[W]` + +**Keep:** +- `graph() -> &G` +- `weights() -> &[W]` (the renamed borrow version) +- `is_weighted() -> bool` +- `new(num_vertices, edges)` — 14 call sites +- `with_weights(num_vertices, edges, weights)` — 25 call sites +- `from_graph(graph, weights)` — 3 call sites + +### Call site migration + +| Old call | New call | Sites | +|----------|----------|-------| +| `problem.num_vertices()` | `problem.graph().num_vertices()` | ~49 | +| `problem.num_edges()` | `problem.graph().num_edges()` | ~36 | +| `problem.edges()` | `problem.graph().edges()` | ~29 | +| `problem.has_edge(u, v)` | `problem.graph().has_edge(u, v)` | 0 | +| `problem.weights_ref()` | `problem.weights()` | ~12 | +| `problem.weights()` (clone) | `problem.weights().to_vec()` | ~8 | + +## Item 2: Extract Testable Functions from `to_json()` + +### 
Problem + +`ReductionGraph::to_json()` (`src/rules/graph.rs`, ~194 lines) is a monolith doing 5+ distinct things. Complex logic is embedded inline and untestable in isolation. + +### Design + +Extract three pure, testable utility functions while keeping `to_json()` as the orchestrator: + +1. **`is_natural_edge(variant_a, variant_b, hierarchy) -> Option`** + Given two variant maps for the same problem name, determine if one is a subtype of the other. Core logic from the 65-line natural edge generation loop. Pure function. + +2. **`classify_problem_category(module_path: &str) -> &str`** + Map module path to category: `"graph"`, `"sat"`, `"set"`, or `"optimization"`. Currently inline in node-building phase. + +3. **`filter_redundant_base_nodes(node_set) -> filtered_set`** + Remove base nodes (empty variant) when a variant-specific sibling exists. ~15 lines of inline logic. + +Each function gets its own unit test. `to_json()` calls these helpers but retains the orchestration flow. + +## Item 3: Implement PlanarGraph and BipartiteGraph + +### Problem + +`PlanarGraph` and `BipartiteGraph` are currently ZST markers with no data or graph behavior. They manually implement `VariantParam` (12 lines each) instead of using `impl_variant_param!` because they have no cast closure. + +### Design + +Replace the ZST markers with real graph types. + +#### PlanarGraph — Validated wrapper + +```rust +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PlanarGraph { + inner: SimpleGraph, +} +``` + +- **Constructor:** `PlanarGraph::new(num_vertices, edges)` — validates planarity via `|E| <= 3|V| - 6` (necessary condition). Panics on non-planar input. +- **Graph trait:** All methods delegate to `inner`. 
+- **Variant:** `impl_variant_param!(PlanarGraph, "graph", parent: SimpleGraph, cast: |g| g.inner.clone())` + +#### BipartiteGraph — Standard bipartite representation + +```rust +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BipartiteGraph { + left_size: usize, + right_size: usize, + edges: Vec<(usize, usize)>, // (u, v) with u in [0, left_size), v in [0, right_size) +} +``` + +- **Constructor:** `BipartiteGraph::new(left_size, right_size, edges)` — validates that edges are within bounds. Edges use bipartite-local coordinates. +- **Graph trait:** Maps to unified vertex space: left vertices `0..left_size`, right vertices `left_size..left_size+right_size`. `edges()` returns `(u, left_size + v)` for each stored `(u, v)`. +- **Accessors:** `left_size()`, `right_size()`, `left_edges()` (local coords). +- **Variant:** `impl_variant_param!(BipartiteGraph, "graph", parent: SimpleGraph, cast: |g| SimpleGraph::new(g.num_vertices(), g.edges()))` + +### Follow-up issue + +File a separate issue for full data structure implementations: +- PlanarGraph: half-edge (DCEL) data structure for proper planar embedding +- BipartiteGraph: additional bipartite-specific algorithms + +## Testing + +- **Item 1:** Update all ~114 call sites. Run `make test clippy` to verify nothing breaks. +- **Item 2:** Add unit tests for each extracted function. +- **Item 3:** Add tests for PlanarGraph (construction, planarity validation, graph trait) and BipartiteGraph (construction, edge mapping, partition accessors). 
+ +## Non-goals + +- No macro extraction for constructor/trait boilerplate (accept remaining duplication as cost of explicitness) +- No changes to the cost function zoo (KISS #2) +- No changes to `find_shortest_path` (KISS #1) +- No full DCEL or bipartite algorithm implementation (deferred) diff --git a/docs/plans/2026-02-15-issue70-refactoring-impl.md b/docs/plans/2026-02-15-issue70-refactoring-impl.md new file mode 100644 index 000000000..0d97101ff --- /dev/null +++ b/docs/plans/2026-02-15-issue70-refactoring-impl.md @@ -0,0 +1,823 @@ +# Issue #70 Refactoring Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Address top 3 high-impact KISS/DRY violations from issue #70: trim graph problem APIs, extract testable functions from `to_json()`, and implement real PlanarGraph/BipartiteGraph types. + +**Architecture:** Remove delegation methods from 5 graph problem structs so callers go through `.graph()` directly. Extract pure utility functions from the `to_json()` monolith for independent testability. Replace PlanarGraph/BipartiteGraph ZST markers with validated wrapper types. 
+ +**Tech Stack:** Rust, petgraph (for SimpleGraph internals), serde, inventory crate + +--- + +## Task 1: Remove delegation methods from MaximumIndependentSet + +**Files:** +- Modify: `src/models/graph/maximum_independent_set.rs:100-148` (remove methods) +- Modify: `src/rules/maximumindependentset_qubo.rs` (update call sites) +- Modify: `src/rules/maximumindependentset_ilp.rs` (update call sites) +- Modify: `src/rules/maximumindependentset_maximumsetpacking.rs` (update call sites) +- Modify: `src/rules/maximumindependentset_gridgraph.rs` (update call sites) +- Modify: `src/rules/maximumindependentset_triangular.rs` (update call sites) +- Modify: `src/rules/sat_maximumindependentset.rs` (update call sites) +- Modify: `src/rules/minimumvertexcover_maximumindependentset.rs` (update MIS call sites) +- Test: `src/unit_tests/models/graph/maximum_independent_set.rs` + +**Step 1: Edit MaximumIndependentSet — remove methods and rename** + +In `src/models/graph/maximum_independent_set.rs`: +- Delete `from_graph_unit_weights()` (lines 100-107) +- Delete `num_vertices()` (lines 114-117) +- Delete `num_edges()` (lines 119-122) +- Delete `edges()` (lines 124-127) +- Delete `has_edge()` (lines 129-132) +- Delete `set_weights()` (lines 139-143) +- Delete `weights()` clone version (lines 145-148) +- Rename `weights_ref()` to `weights()`, change return type from `&Vec` to `&[W]` + +Internal code in the same file that uses `self.graph.num_vertices()` etc. should already work since it accesses the field directly. 
+ +**Step 2: Update rule call sites** + +Replace in each rule file: +- `self.num_vertices()` → `self.graph().num_vertices()` +- `self.num_edges()` → `self.graph().num_edges()` +- `self.edges()` → `self.graph().edges()` +- `self.weights_ref()` → `self.weights()` +- `self.weights()` (where clone was intended) → `self.weights().to_vec()` + +Files to update: +- `src/rules/maximumindependentset_qubo.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` +- `src/rules/maximumindependentset_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` +- `src/rules/maximumindependentset_maximumsetpacking.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` +- `src/rules/maximumindependentset_gridgraph.rs`: `self.num_vertices()` → `self.graph().num_vertices()` +- `src/rules/maximumindependentset_triangular.rs`: `self.num_vertices()` → `self.graph().num_vertices()` +- `src/rules/sat_maximumindependentset.rs`: update any MIS delegation calls +- `src/rules/minimumvertexcover_maximumindependentset.rs`: uses `self.num_vertices()` and `self.weights_ref()` on MVC (handled in Task 2), but also constructs MIS + +**Step 3: Update test call sites** + +In `src/unit_tests/models/graph/maximum_independent_set.rs` and `src/unit_tests/graph_models.rs`: +- `problem.num_vertices()` → `problem.graph().num_vertices()` +- `problem.num_edges()` → `problem.graph().num_edges()` +- `problem.edges()` → `problem.graph().edges()` +- `problem.weights()` (cloning) → `problem.weights().to_vec()` +- `problem.weights_ref()` → `problem.weights()` + +**Step 4: Run tests** + +Run: `cargo test --all-features -- --include-ignored 2>&1 | head -50` +Expected: All tests pass for MaximumIndependentSet + +**Step 5: Commit** + +```bash +git add src/models/graph/maximum_independent_set.rs src/rules/maximumindependentset_*.rs src/rules/sat_maximumindependentset.rs 
src/rules/minimumvertexcover_maximumindependentset.rs src/unit_tests/ +git commit -m "refactor: trim MaximumIndependentSet API — remove delegation methods" +``` + +--- + +## Task 2: Remove delegation methods from MinimumVertexCover + +**Files:** +- Modify: `src/models/graph/minimum_vertex_cover.rs:84-143` (remove methods) +- Modify: `src/rules/minimumvertexcover_qubo.rs` +- Modify: `src/rules/minimumvertexcover_ilp.rs` +- Modify: `src/rules/minimumvertexcover_maximumindependentset.rs` +- Modify: `src/rules/minimumvertexcover_minimumsetcovering.rs` +- Test: `src/unit_tests/models/graph/minimum_vertex_cover.rs` + +**Step 1: Edit MinimumVertexCover — same removal pattern as Task 1** + +In `src/models/graph/minimum_vertex_cover.rs`: +- Delete `from_graph_unit_weights()`, `num_vertices()`, `num_edges()`, `edges()`, `has_edge()`, `set_weights()`, `weights()` (clone) +- Rename `weights_ref()` → `weights()` returning `&[W]` + +**Step 2: Update rule call sites** + +- `src/rules/minimumvertexcover_qubo.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` +- `src/rules/minimumvertexcover_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` +- `src/rules/minimumvertexcover_maximumindependentset.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` +- `src/rules/minimumvertexcover_minimumsetcovering.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` + +**Step 3: Update test call sites** + +- `src/unit_tests/models/graph/minimum_vertex_cover.rs` +- `src/unit_tests/graph_models.rs` (MVC sections) +- `src/unit_tests/rules/minimumvertexcover_*.rs` + +**Step 4: Run tests** + +Run: `cargo test --all-features -- --include-ignored 2>&1 | head -50` + +**Step 5: Commit** + +```bash +git add src/models/graph/minimum_vertex_cover.rs src/rules/minimumvertexcover_*.rs src/unit_tests/ +git commit -m 
"refactor: trim MinimumVertexCover API — remove delegation methods" +``` + +--- + +## Task 3: Remove delegation methods from MaximumClique + +**Files:** +- Modify: `src/models/graph/maximum_clique.rs:101-148` +- Modify: `src/rules/maximumclique_ilp.rs` +- Test: `src/unit_tests/models/graph/maximum_clique.rs` + +**Step 1-5: Same pattern as Tasks 1-2** + +- Delete the same set of methods, rename `weights_ref()` → `weights()` +- Update `src/rules/maximumclique_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights()` → `self.weights().to_vec()` +- Update test files +- Run tests, commit + +```bash +git commit -m "refactor: trim MaximumClique API — remove delegation methods" +``` + +--- + +## Task 4: Remove delegation methods from MaximalIS + +**Files:** +- Modify: `src/models/graph/maximal_is.rs:86-133` +- Test: `src/unit_tests/models/graph/maximal_is.rs` + +**Step 1-5: Same pattern** + +Note: MaximalIS has no reduction rules that call delegation methods directly. Only tests need updating. 
+ +```bash +git commit -m "refactor: trim MaximalIS API — remove delegation methods" +``` + +--- + +## Task 5: Remove delegation methods from MinimumDominatingSet + +**Files:** +- Modify: `src/models/graph/minimum_dominating_set.rs:84-143` +- Modify: `src/rules/minimumdominatingset_ilp.rs` +- Modify: `src/rules/sat_minimumdominatingset.rs` +- Test: `src/unit_tests/models/graph/minimum_dominating_set.rs` + +**Step 1-5: Same pattern** + +- Update `src/rules/minimumdominatingset_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights()` → `self.weights().to_vec()` +- Update `src/rules/sat_minimumdominatingset.rs`: update call sites +- Update tests +- Run tests, commit + +```bash +git commit -m "refactor: trim MinimumDominatingSet API — remove delegation methods" +``` + +--- + +## Task 6: Update remaining shared call sites + +**Files:** +- Modify: `src/rules/mod.rs` (the `impl_natural_reduction!` macro uses `.weights()`) +- Modify: `src/rules/spinglass_maxcut.rs` (uses `.num_vertices()` on SpinGlass/MaxCut — check if affected) +- Modify: `src/rules/coloring_qubo.rs`, `src/rules/coloring_ilp.rs` (uses `.num_vertices()` on KColoring) +- Modify: `src/rules/maximummatching_maximumsetpacking.rs` (uses `.weights()`, `.edges()`, `.num_edges()` on MaximumMatching) +- Modify: `src/rules/maximummatching_ilp.rs` (uses `.num_edges()`, `.weights()` on MaximumMatching) +- Modify: `src/rules/travelingsalesman_ilp.rs` (uses `.num_vertices()` on TravelingSalesman) +- Modify: remaining test files in `src/unit_tests/` + +**Step 1: Check which non-target problems also have delegation methods** + +The 5 problems above are not the only ones with these methods. Other graph problems (MaxCut, SpinGlass, KColoring, MaximumMatching, TravelingSalesman, MaximumSetPacking, MinimumSetCovering) may also have delegation methods. These are OUT OF SCOPE for this PR — only update call sites that break because they called methods on the 5 target problem types. 
+ +**Step 2: Fix any remaining compilation errors** + +Run: `cargo check --all-features 2>&1` +Fix any remaining call sites that the compiler identifies. + +**Step 3: Run full test suite** + +Run: `cargo test --all-features -- --include-ignored` +Expected: All tests pass + +**Step 4: Run clippy** + +Run: `cargo clippy --all-features -- -D warnings` +Expected: No warnings + +**Step 5: Commit** + +```bash +git commit -m "refactor: update remaining call sites for trimmed graph problem APIs" +``` + +--- + +## Task 7: Extract `classify_problem_category` from `to_json()` + +**Files:** +- Modify: `src/rules/graph.rs` (extract function, update to_json) +- Test: `src/unit_tests/rules/graph.rs` (add unit test) + +**Step 1: Write the failing test** + +In the test file for graph.rs, add: + +```rust +#[test] +fn test_classify_problem_category() { + assert_eq!( + classify_problem_category("problemreductions::models::graph::maximum_independent_set"), + "graph" + ); + assert_eq!( + classify_problem_category("problemreductions::models::sat::satisfiability"), + "sat" + ); + assert_eq!( + classify_problem_category("problemreductions::models::set::maximum_set_packing"), + "set" + ); + assert_eq!( + classify_problem_category("problemreductions::models::optimization::qubo"), + "optimization" + ); + assert_eq!( + classify_problem_category("unknown::path"), + "other" + ); +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cargo test --all-features test_classify_problem_category` +Expected: FAIL — function not found + +**Step 3: Extract the function** + +In `src/rules/graph.rs`, extract the existing inline logic (around the `category_from_module_path` helper) into a standalone `pub(crate) fn classify_problem_category(module_path: &str) -> &str`. Replace the inline usage in `to_json()` with a call to this function. + +```rust +/// Classify a problem's category from its module path. 
+/// Expected format: "problemreductions::models::<category>::<problem>"
+pub(crate) fn classify_problem_category(module_path: &str) -> &str {
+    let parts: Vec<&str> = module_path.split("::").collect();
+    if parts.len() >= 3 {
+        // Return the segment after "models"
+        if let Some(pos) = parts.iter().position(|&p| p == "models") {
+            if pos + 1 < parts.len() {
+                return parts[pos + 1];
+            }
+        }
+    }
+    "other"
+}
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `cargo test --all-features test_classify_problem_category`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git commit -m "refactor: extract classify_problem_category from to_json()"
+```
+
+---
+
+## Task 8: Extract `filter_redundant_base_nodes` from `to_json()`
+
+**Files:**
+- Modify: `src/rules/graph.rs`
+- Test: `src/unit_tests/rules/graph.rs`
+
+**Step 1: Write the failing test**
+
+```rust
+#[test]
+fn test_filter_redundant_base_nodes() {
+    use std::collections::{BTreeMap, HashSet};
+
+    let mut node_set: HashSet<(String, BTreeMap<String, String>)> = HashSet::new();
+
+    // Base node (empty variant) — should be removed because variant-specific sibling exists
+    node_set.insert(("MIS".to_string(), BTreeMap::new()));
+
+    // Variant-specific node
+    let mut variant = BTreeMap::new();
+    variant.insert("graph".to_string(), "GridGraph".to_string());
+    node_set.insert(("MIS".to_string(), variant));
+
+    // Base node with no siblings — should be kept
+    node_set.insert(("QUBO".to_string(), BTreeMap::new()));
+
+    filter_redundant_base_nodes(&mut node_set);
+
+    assert_eq!(node_set.len(), 2);
+    assert!(!node_set.iter().any(|(name, v)| name == "MIS" && v.is_empty()));
+    assert!(node_set.iter().any(|(name, _)| name == "QUBO"));
+}
+```
+
+**Step 2: Run test to verify it fails**
+
+**Step 3: Extract the function**
+
+```rust
+/// Remove base nodes (empty variant) when a variant-specific sibling exists.
+pub(crate) fn filter_redundant_base_nodes(
+    node_set: &mut HashSet<(String, BTreeMap<String, String>)>,
+) {
+    let names_with_variants: HashSet<String> = node_set
+        .iter()
+        .filter(|(_, variant)| !variant.is_empty())
+        .map(|(name, _)| name.clone())
+        .collect();
+    node_set.retain(|(name, variant)| !variant.is_empty() || !names_with_variants.contains(name));
+}
+```
+
+Replace the inline logic in `to_json()` with `filter_redundant_base_nodes(&mut node_set);`.
+
+**Step 4: Run test, verify pass**
+
+**Step 5: Commit**
+
+```bash
+git commit -m "refactor: extract filter_redundant_base_nodes from to_json()"
+```
+
+---
+
+## Task 9: Extract `is_natural_edge` from `to_json()`
+
+**Files:**
+- Modify: `src/rules/graph.rs`
+- Test: `src/unit_tests/rules/graph.rs`
+
+**Step 1: Write the failing test**
+
+```rust
+#[test]
+fn test_is_natural_edge() {
+    use std::collections::BTreeMap;
+
+    let graph = ReductionGraph::new();
+
+    // Same variant — no edge
+    let mut a = BTreeMap::new();
+    a.insert("graph".to_string(), "SimpleGraph".to_string());
+    let b = a.clone();
+    assert!(is_natural_edge(&a, &b, &graph).is_none());
+
+    // a is subtype of b — edge from a to b
+    let mut sub = BTreeMap::new();
+    sub.insert("graph".to_string(), "GridGraph".to_string());
+    let mut sup = BTreeMap::new();
+    sup.insert("graph".to_string(), "SimpleGraph".to_string());
+    // Direction depends on hierarchy — GridGraph is subtype of SimpleGraph
+    let result = is_natural_edge(&sub, &sup, &graph);
+    assert!(result.is_some());
+}
+```
+
+Note: The exact test depends on how the natural edge determination works in the existing code. Read the inline logic at lines 917-950 of `src/rules/graph.rs` carefully before writing the extraction.
+
+**Step 2: Extract the function**
+
+Extract the inner loop body from lines 917-950 into:
+```rust
+/// Determine if there is a natural (subtype) edge between two variant nodes.
+/// Returns Some(...) with edge data if a→b is a valid natural edge, None otherwise.
+pub(crate) fn is_natural_edge( + variant_a: &BTreeMap, + variant_b: &BTreeMap, + graph: &ReductionGraph, +) -> Option { + // ... extracted logic +} +``` + +Replace the inline logic in `to_json()` with a call to this function. + +**Step 3: Run tests** + +Run: `cargo test --all-features -- --include-ignored` + +**Step 4: Commit** + +```bash +git commit -m "refactor: extract is_natural_edge from to_json()" +``` + +--- + +## Task 10: Implement BipartiteGraph + +**Files:** +- Create: `src/topology/bipartite_graph.rs` +- Modify: `src/topology/mod.rs` (add module + export) +- Modify: `src/graph_types.rs` (remove ZST BipartiteGraph + manual VariantParam impl) +- Test: `src/unit_tests/topology/bipartite_graph.rs` + +**Step 1: Write the failing test** + +Create `src/unit_tests/topology/bipartite_graph.rs`: + +```rust +use crate::topology::{BipartiteGraph, Graph}; + +#[test] +fn test_bipartite_graph_basic() { + // K_{2,3}: left={0,1}, right={0,1,2}, all edges + let edges = vec![(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)]; + let g = BipartiteGraph::new(2, 3, edges); + + assert_eq!(g.num_vertices(), 5); + assert_eq!(g.num_edges(), 6); + assert_eq!(g.left_size(), 2); + assert_eq!(g.right_size(), 3); +} + +#[test] +fn test_bipartite_graph_edges_unified() { + // Left={0}, Right={0,1}, edges: (0,0), (0,1) + let g = BipartiteGraph::new(1, 2, vec![(0, 0), (0, 1)]); + let edges = g.edges(); + // Unified: left vertex 0, right vertices 1 and 2 + assert!(edges.contains(&(0, 1))); + assert!(edges.contains(&(0, 2))); + assert_eq!(edges.len(), 2); +} + +#[test] +fn test_bipartite_graph_has_edge() { + let g = BipartiteGraph::new(2, 2, vec![(0, 0), (1, 1)]); + // Unified: edges (0, 2) and (1, 3) + assert!(g.has_edge(0, 2)); + assert!(g.has_edge(1, 3)); + assert!(!g.has_edge(0, 1)); // both left — no edge + assert!(!g.has_edge(0, 3)); // not in edge list +} + +#[test] +fn test_bipartite_graph_neighbors() { + let g = BipartiteGraph::new(2, 2, vec![(0, 0), (0, 1), (1, 1)]); + // Unified: 
(0,2), (0,3), (1,3) + let mut n0 = g.neighbors(0); + n0.sort(); + assert_eq!(n0, vec![2, 3]); + + let mut n3 = g.neighbors(3); // right vertex 1 + n3.sort(); + assert_eq!(n3, vec![0, 1]); +} + +#[test] +fn test_bipartite_graph_left_edges() { + let edges = vec![(0, 0), (1, 1)]; + let g = BipartiteGraph::new(2, 2, edges.clone()); + assert_eq!(g.left_edges(), &edges); +} + +#[test] +#[should_panic] +fn test_bipartite_graph_invalid_left_index() { + BipartiteGraph::new(2, 2, vec![(2, 0)]); // left index out of bounds +} + +#[test] +#[should_panic] +fn test_bipartite_graph_invalid_right_index() { + BipartiteGraph::new(2, 2, vec![(0, 2)]); // right index out of bounds +} +``` + +Wire up test module: add `#[path]` reference in the appropriate unit test module file. + +**Step 2: Run tests to verify they fail** + +Run: `cargo test --all-features test_bipartite_graph` +Expected: FAIL — module not found + +**Step 3: Implement BipartiteGraph** + +Create `src/topology/bipartite_graph.rs`: + +```rust +use serde::{Deserialize, Serialize}; +use super::graph::{Graph, SimpleGraph}; + +/// Bipartite graph with explicit left/right partitions. +/// +/// Vertices are split into left (indices `0..left_size`) and right (`0..right_size`). +/// Edges connect left vertices to right vertices using bipartite-local coordinates. +/// The `Graph` trait maps to a unified vertex space where right vertices are offset by `left_size`. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct BipartiteGraph { + left_size: usize, + right_size: usize, + /// Edges in bipartite-local coordinates: (u, v) with u < left_size, v < right_size. + edges: Vec<(usize, usize)>, +} + +impl BipartiteGraph { + /// Create a new bipartite graph. 
+ /// + /// # Arguments + /// * `left_size` - Number of vertices in the left partition + /// * `right_size` - Number of vertices in the right partition + /// * `edges` - Edges in bipartite-local coordinates: (u, v) with u < left_size, v < right_size + /// + /// # Panics + /// Panics if any edge index is out of bounds. + pub fn new(left_size: usize, right_size: usize, edges: Vec<(usize, usize)>) -> Self { + for &(u, v) in &edges { + assert!( + u < left_size, + "left vertex {} out of bounds (left_size={})", + u, left_size + ); + assert!( + v < right_size, + "right vertex {} out of bounds (right_size={})", + v, right_size + ); + } + Self { left_size, right_size, edges } + } + + /// Number of left-partition vertices. + pub fn left_size(&self) -> usize { + self.left_size + } + + /// Number of right-partition vertices. + pub fn right_size(&self) -> usize { + self.right_size + } + + /// Edges in bipartite-local coordinates. + pub fn left_edges(&self) -> &[(usize, usize)] { + &self.edges + } +} + +impl Graph for BipartiteGraph { + const NAME: &'static str = "BipartiteGraph"; + + fn num_vertices(&self) -> usize { + self.left_size + self.right_size + } + + fn num_edges(&self) -> usize { + self.edges.len() + } + + fn edges(&self) -> Vec<(usize, usize)> { + self.edges + .iter() + .map(|&(u, v)| { + let a = u; + let b = self.left_size + v; + if a < b { (a, b) } else { (b, a) } + }) + .collect() + } + + fn has_edge(&self, u: usize, v: usize) -> bool { + let (u, v) = if u < v { (u, v) } else { (v, u) }; + // u must be left, v must be right (in unified space) + if u >= self.left_size || v < self.left_size { + return false; + } + let local_v = v - self.left_size; + self.edges.contains(&(u, local_v)) + } + + fn neighbors(&self, v: usize) -> Vec { + if v < self.left_size { + // Left vertex: find all right neighbors + self.edges + .iter() + .filter(|(u, _)| *u == v) + .map(|(_, rv)| self.left_size + rv) + .collect() + } else { + // Right vertex: find all left neighbors + let local_v = 
v - self.left_size; + self.edges + .iter() + .filter(|(_, rv)| *rv == local_v) + .map(|(u, _)| *u) + .collect() + } + } +} +``` + +**Step 4: Register with variant system** + +Add at the bottom of `src/topology/bipartite_graph.rs`: + +```rust +use crate::impl_variant_param; +impl_variant_param!(BipartiteGraph, "graph", parent: SimpleGraph, + cast: |g| SimpleGraph::new(g.num_vertices(), g.edges())); +``` + +**Step 5: Wire up module** + +In `src/topology/mod.rs`, add: +```rust +mod bipartite_graph; +pub use bipartite_graph::BipartiteGraph; +``` + +Remove the `BipartiteGraph` ZST and its manual `VariantParam` impl from `src/graph_types.rs` (lines 31-46). + +**Step 6: Run tests** + +Run: `cargo test --all-features test_bipartite_graph` +Expected: All pass + +**Step 7: Commit** + +```bash +git commit -m "feat: implement BipartiteGraph with standard bipartite representation" +``` + +--- + +## Task 11: Implement PlanarGraph + +**Files:** +- Create: `src/topology/planar_graph.rs` +- Modify: `src/topology/mod.rs` (add module + export) +- Modify: `src/graph_types.rs` (remove ZST PlanarGraph + manual VariantParam impl) +- Test: `src/unit_tests/topology/planar_graph.rs` + +**Step 1: Write the failing test** + +```rust +use crate::topology::{PlanarGraph, Graph}; + +#[test] +fn test_planar_graph_basic() { + // K4 is planar: 4 vertices, 6 edges, 6 <= 3*4 - 6 = 6 + let edges = vec![(0,1),(0,2),(0,3),(1,2),(1,3),(2,3)]; + let g = PlanarGraph::new(4, edges); + assert_eq!(g.num_vertices(), 4); + assert_eq!(g.num_edges(), 6); +} + +#[test] +fn test_planar_graph_delegates_to_inner() { + let g = PlanarGraph::new(3, vec![(0,1),(1,2)]); + assert!(g.has_edge(0, 1)); + assert!(!g.has_edge(0, 2)); + let mut n1 = g.neighbors(1); + n1.sort(); + assert_eq!(n1, vec![0, 2]); +} + +#[test] +#[should_panic] +fn test_planar_graph_rejects_k5() { + // K5 has 10 edges, but 3*5 - 6 = 9. Fails necessary condition. 
+ let mut edges = Vec::new(); + for i in 0..5 { + for j in (i+1)..5 { + edges.push((i, j)); + } + } + PlanarGraph::new(5, edges); +} + +#[test] +fn test_planar_graph_empty() { + let g = PlanarGraph::new(3, vec![]); + assert_eq!(g.num_vertices(), 3); + assert_eq!(g.num_edges(), 0); +} + +#[test] +fn test_planar_graph_tree() { + // Trees are always planar + let g = PlanarGraph::new(4, vec![(0,1),(1,2),(2,3)]); + assert_eq!(g.num_edges(), 3); +} +``` + +**Step 2: Implement PlanarGraph** + +Create `src/topology/planar_graph.rs`: + +```rust +use serde::{Deserialize, Serialize}; +use super::graph::{Graph, SimpleGraph}; + +/// Planar graph — validated wrapper around SimpleGraph. +/// +/// Construction validates the necessary planarity condition: |E| <= 3|V| - 6 for |V| >= 3. +/// This is a necessary but not sufficient condition. A follow-up issue will add +/// full planarity testing and half-edge (DCEL) representation. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PlanarGraph { + inner: SimpleGraph, +} + +impl PlanarGraph { + /// Create a new planar graph. + /// + /// # Panics + /// Panics if the graph violates the necessary planarity condition |E| <= 3|V| - 6. + pub fn new(num_vertices: usize, edges: Vec<(usize, usize)>) -> Self { + let inner = SimpleGraph::new(num_vertices, edges); + if num_vertices >= 3 { + let max_edges = 3 * num_vertices - 6; + assert!( + inner.num_edges() <= max_edges, + "graph has {} edges but a planar graph on {} vertices can have at most {} edges", + inner.num_edges(), num_vertices, max_edges + ); + } + Self { inner } + } + + /// Get a reference to the underlying SimpleGraph. 
+ pub fn inner(&self) -> &SimpleGraph { + &self.inner + } +} + +impl Graph for PlanarGraph { + const NAME: &'static str = "PlanarGraph"; + + fn num_vertices(&self) -> usize { self.inner.num_vertices() } + fn num_edges(&self) -> usize { self.inner.num_edges() } + fn edges(&self) -> Vec<(usize, usize)> { self.inner.edges() } + fn has_edge(&self, u: usize, v: usize) -> bool { self.inner.has_edge(u, v) } + fn neighbors(&self, v: usize) -> Vec { self.inner.neighbors(v) } +} + +use crate::impl_variant_param; +impl_variant_param!(PlanarGraph, "graph", parent: SimpleGraph, + cast: |g| g.inner.clone()); +``` + +**Step 3: Wire up module and remove ZST** + +In `src/topology/mod.rs`: +```rust +mod planar_graph; +pub use planar_graph::PlanarGraph; +``` + +Remove PlanarGraph ZST and manual VariantParam impl from `src/graph_types.rs` (lines 10-25). + +**Step 4: Run tests** + +Run: `cargo test --all-features -- --include-ignored` + +**Step 5: Commit** + +```bash +git commit -m "feat: implement PlanarGraph as validated SimpleGraph wrapper" +``` + +--- + +## Task 12: Final verification and cleanup + +**Step 1: Run full test suite** + +Run: `cargo test --all-features -- --include-ignored` +Expected: All tests pass + +**Step 2: Run clippy** + +Run: `cargo clippy --all-features -- -D warnings` +Expected: No warnings + +**Step 3: Run format check** + +Run: `cargo fmt -- --check` +Expected: No formatting issues + +**Step 4: File follow-up issue** + +Create a GitHub issue for full data structure implementations: +- PlanarGraph: half-edge (DCEL) data structure +- BipartiteGraph: bipartite-specific algorithms + +**Step 5: Final commit if any cleanup needed** + +```bash +git commit -m "chore: final cleanup for issue #70 refactoring" +``` diff --git a/docs/src/design.md b/docs/src/design.md index 45f21a5c0..38d7289d8 100644 --- a/docs/src/design.md +++ b/docs/src/design.md @@ -1,6 +1,6 @@ # Design -This guide covers the library internals for contributors and developers. 
See [Getting Started](./getting-started.md) for usage examples. +This guide covers the library internals for contributors. ## Module Overview @@ -17,16 +17,21 @@ This guide covers the library internals for contributors and developers. See [Ge | Module | Purpose | |--------|---------| -| [`src/models/`](#models) | Problem type implementations (SAT, Graph, Set, Optimization) | -| [`src/rules/`](#rules) | Reduction rules with `ReduceTo` implementations | -| [`src/registry/`](#registry) | Compile-time reduction graph metadata | +| [`src/models/`](#problem-model) | Problem type implementations (SAT, Graph, Set, Optimization) | +| [`src/rules/`](#reduction-rules) | Reduction rules with `ReduceTo` implementations | +| [`src/registry/`](#reduction-graph) | Compile-time reduction graph metadata | | [`src/solvers/`](#solvers) | BruteForce and ILP solvers | -| `src/traits.rs` | Core `Problem` and `OptimizationProblem` traits (see [Models](#models)) | -| `src/types.rs` | Shared types: `SolutionSize`, `Direction`, `ProblemSize` (see [Models](#models)) | +| `src/traits.rs` | Core `Problem` and `OptimizationProblem` traits (see [Problem Model](#problem-model)) | +| `src/types.rs` | Shared types: `SolutionSize`, `Direction`, `ProblemSize` (see [Problem Model](#problem-model)) | +| `src/variant.rs` | Variant parameter system (see [Variant System](#variant-system)) | -## Models +## Problem Model -Every problem implements `Problem`. Optimization problems additionally implement `OptimizationProblem`. +Every problem implements `Problem`. Optimization problems additionally implement `OptimizationProblem`; satisfaction problems implement `SatisfactionProblem`. + +- **`Problem`** — the base trait. Every problem declares a `NAME` (e.g., `"MaximumIndependentSet"`). The solver explores the configuration space defined by `dims()` and scores each configuration with `evaluate()`. 
For example, a 4-vertex MIS has `dims() = [2, 2, 2, 2]` (each vertex is selected or not); `evaluate(&[1, 0, 1, 0])` returns `Valid(2)` if vertices 0 and 2 form an independent set, or `Invalid` if they share an edge. +- **`OptimizationProblem`** — extends `Problem` with a comparable `Value` type and a `direction()` (`Maximize` or `Minimize`). +- **`SatisfactionProblem`** — constrains `Metric = bool`: `true` if all constraints are satisfied, `false` otherwise.
@@ -39,192 +44,235 @@ Every problem implements `Problem`. Optimization problems additionally implement
-```rust -pub trait Problem: Clone { - const NAME: &'static str; // e.g., "MaximumIndependentSet" - type Metric: Clone; // SolutionSize or bool - fn dims(&self) -> Vec; // config space: [2, 2, 2] for 3 binary vars - fn evaluate(&self, config: &[usize]) -> Self::Metric; - fn variant() -> Vec<(&'static str, &'static str)>; -} - -pub trait OptimizationProblem: Problem> { - type Value: PartialOrd + Clone; // i32, f64, etc. - fn direction(&self) -> Direction; // Maximize or Minimize -} -``` - -**Key types:** -- `SolutionSize`: `Valid(T)` for feasible solutions, `Invalid` for constraint violations -- `Direction`: `Maximize` or `Minimize` - -Problems are parameterized by graph type and weight type: +## Variant System -- `MaximumIndependentSet` — graph type `G`, weight type `W` -- `Satisfiability` — CNF formula (concrete type, no parameters) -- `QUBO` — parameterized by weight type only +A single problem name like `MaximumIndependentSet` can have multiple **variants** — carrying weights on vertices, or defined on a restricted topology (e.g., king's subgraph). Some variants are more specific than others: the king's subgraph is a special case of the unit-disk graph, which is a special case of the simple graph. -**Graph types:** +In **set** language, variants form **subsets**: independent sets on king's subgraphs are a subset of independent sets on unit-disk graphs. The reduction from a more specific variant to a less specific one is a **natural reduction** (identity mapping). To avoid repeating the same rule for each variant pair, the library provides an auto-casting mechanism. -| Type | Description | -|------|-------------| -| `SimpleGraph` | Standard adjacency-based graph | -| `UnitDiskGraph` | Edges connect vertices within a distance threshold | -| `KingsSubgraph` | King's subgraph on a square grid (subtype of UnitDiskGraph) | -| `TriangularSubgraph` | Triangular lattice subgraph (subtype of UnitDiskGraph) | -| `HyperGraph` | Edges connecting any number of vertices | +
-All problem types support JSON serialization via serde: +![Variant Hierarchy](static/variant-hierarchy.svg) -```rust -use problemreductions::io::{to_json, from_json}; +
+
-let json = to_json(&problem)?; -let restored: MaximumIndependentSet = from_json(&json)?; -``` +![Variant Hierarchy](static/variant-hierarchy-dark.svg) -See [adding-models.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-models.md) for the full implementation guide. +
-## Variant System +Arrows indicate the **subset** (subtype) direction. Variant types fall into three categories: -A single problem name like `MaximumIndependentSet` can have multiple **variants** — concrete instantiations that differ in graph topology, weight type, or other parameters. The variant system tracks these distinctions in the reduction graph so that reductions between specific instantiations are represented precisely. +- **Graph type** — `HyperGraph` (root), `SimpleGraph`, `PlanarGraph`, `BipartiteGraph`, `UnitDiskGraph`, `KingsSubgraph`, `TriangularSubgraph`. +- **Weight type** — `One` (unweighted), `i32`, `f64`. +- **K value** — e.g., `K3` for 3-SAT, `KN` for arbitrary K.
-![Variant Hierarchy](static/variant-hierarchy.svg) +![Lattices](static/lattices.svg)
-![Variant Hierarchy](static/variant-hierarchy-dark.svg) +![Lattices](static/lattices-dark.svg)
-Each variant is identified by a set of key-value pairs returned by `Problem::variant()`: +### VariantParam trait + +Each variant parameter type implements `VariantParam`, which declares its category, value, and optional parent: ```rust -// MaximumIndependentSet -fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "UnitDiskGraph"), ("weight", "One")] +pub trait VariantParam: 'static { + const CATEGORY: &'static str; // e.g., "graph", "weight", "k" + const VALUE: &'static str; // e.g., "SimpleGraph", "i32" + const PARENT_VALUE: Option<&'static str>; // None for root types } +``` -// KSatisfiability<3> -fn variant() -> Vec<(&'static str, &'static str)> { - vec![("k", "3")] +Types with a parent also implement `CastToParent`, providing the runtime conversion for natural casts: + +```rust +pub trait CastToParent: VariantParam { + type Parent: VariantParam; + fn cast_to_parent(&self) -> Self::Parent; } ``` -Variant nodes in the reduction graph are discovered automatically from `#[reduction]` registrations — each reduction's source and target types become nodes. Natural edges between same-name variants are inferred from the graph/weight subtype partial order (e.g., `MIS/KingsSubgraph → MIS/SimpleGraph`). In the visualization, nodes are labeled with only the non-default fields for brevity (e.g. `MaximumIndependentSet (KingsSubgraph)` omits the default `One`). 
+### Registration with `impl_variant_param!` -### Graph Hierarchy +The `impl_variant_param!` macro implements `VariantParam` (and optionally `CastToParent` / `KValue`) and registers a `VariantTypeEntry` via `inventory` for compile-time hierarchy discovery: -Graph types form a subtype hierarchy declared in `src/graph_types.rs`: +```rust +// Root type (no parent): +impl_variant_param!(HyperGraph, "graph"); -``` -HyperGraph (most general) -└── SimpleGraph - ├── PlanarGraph - ├── BipartiteGraph - └── UnitDiskGraph - ├── KingsSubgraph - └── TriangularSubgraph -``` +// Type with parent (cast closure required): +impl_variant_param!(SimpleGraph, "graph", parent: HyperGraph, + cast: |g| { + let edges: Vec> = g.edges().into_iter().map(|(u, v)| vec![u, v]).collect(); + HyperGraph::new(g.num_vertices(), edges) + }); -A problem on a more specific graph type can always be treated as a problem on a more general one — a `KingsSubgraph` *is* a `SimpleGraph`. This subtype relationship is registered at compile time: +// K root (arbitrary K): +impl_variant_param!(KN, "k", k: None); -```rust -declare_graph_subtype!(KingsSubgraph => UnitDiskGraph); -declare_graph_subtype!(UnitDiskGraph => SimpleGraph); -// ... +// Specific K with parent: +impl_variant_param!(K3, "k", parent: KN, cast: |_| KN, k: Some(3)); ``` -The runtime builds a transitive closure: `KingsSubgraph` is a subtype of `UnitDiskGraph`, `SimpleGraph`, and `HyperGraph`. +At startup, the `ReductionGraph` collects all `VariantTypeEntry` registrations and computes the **transitive closure** of the parent relationships, so `KingsSubgraph` is recognized as a subtype of `SimpleGraph` even though it declares `UnitDiskGraph` as its direct parent. -**Example: natural edge for TriangularSubgraph MIS.** Suppose we have a `MaximumIndependentSet` instance — an independent set problem on a triangular lattice. 
Because `TriangularSubgraph` is a subtype of `SimpleGraph` in the graph hierarchy, the reduction graph contains a natural edge: +### Composing `Problem::variant()` -``` -MIS → MIS +The `variant_params!` macro composes the `Problem::variant()` body from type parameter names: + +```rust +// MaximumIndependentSet +fn variant() -> Vec<(&'static str, &'static str)> { + crate::variant_params![G, W] + // e.g., MaximumIndependentSet + // -> vec![("graph", "UnitDiskGraph"), ("weight", "One")] +} ``` -This edge has identity overhead (the problem size is unchanged) and requires no code — the triangular lattice graph *is* a simple graph, so any MIS algorithm for general graphs applies directly. Combined with the explicit reduction `MIS → MIS` (unit disk mapping), the system can automatically chain: +## Reduction Rules -``` -MIS → MIS → MIS - (natural edge) (explicit reduction) -``` +A reduction requires two pieces: a **result struct** and a **`ReduceTo` impl**. + +### Result struct -### Weight Hierarchy +Holds the target problem and the logic to map solutions back: + +```rust +#[derive(Debug, Clone)] +pub struct ReductionISToVC { + target: MinimumVertexCover, +} -Weight types form a linear promotion chain: +impl ReductionResult for ReductionISToVC { + type Source = MaximumIndependentSet; + type Target = MinimumVertexCover; + fn target_problem(&self) -> &Self::Target { &self.target } + fn extract_solution(&self, target_sol: &[usize]) -> Vec { + target_sol.iter().map(|&x| 1 - x).collect() // complement + } +} ``` -One → i32 → f64 + +### `ReduceTo` impl with the `#[reduction]` macro + +```rust +#[reduction( + overhead = { + ReductionOverhead::new(vec![ + ("num_vertices", poly!(num_vertices)), + ("num_edges", poly!(num_edges)), + ]) + } +)] +impl ReduceTo> + for MaximumIndependentSet +{ + type Result = ReductionISToVC; + fn reduce_to(&self) -> Self::Result { /* ... 
*/ } +} ``` -An unweighted problem (using `One`, the unit-weight type) is a special case of a weighted one (all weights equal to 1), and an integer-weighted problem embeds naturally into real-weighted. This is declared in `src/graph_types.rs`: +### What the macro generates + +The `#[reduction]` attribute expands to the original `impl` block plus an `inventory::submit!` call: ```rust -declare_weight_subtype!("One" => "i32"); -declare_weight_subtype!("i32" => "f64"); +inventory::submit! { + ReductionEntry { + source_name: "MaximumIndependentSet", + target_name: "MinimumVertexCover", + source_variant_fn: || as Problem>::variant(), + target_variant_fn: || as Problem>::variant(), + overhead_fn: || ReductionOverhead::new(vec![ + ("num_vertices", poly!(num_vertices)), + ("num_edges", poly!(num_edges)), + ]), + module_path: module_path!(), + } +} ``` -### K Parameter +This `ReductionEntry` is collected at compile time by `inventory`, making the reduction discoverable by the `ReductionGraph` without any manual registration. -`KSatisfiability` and `KColoring` use type-level K values: +## Reduction Graph -| Rust type | Variant `k` | -|-----------|-------------| -| `KSatisfiability` | `"K2"` | -| `KSatisfiability` | `"K3"` | -| Generic `KSatisfiability` | `"KN"` | +The `ReductionGraph` is the central runtime data structure. It collects all registered reductions and variant hierarchies to enable path finding and overhead evaluation. -K values form a **flat hierarchy**: each specific K value (K1, K2, K3, K4, K5) is a direct child of the generic KN, with no chain between them. This reflects the fact that k-SAT and k-coloring problems with different k are independent problem classes — a 2-SAT instance is not a 3-SAT instance, and vice versa. 
+### Construction -### Natural Edges +`ReductionGraph::new()` performs two `inventory` scans: -When two variants of the same problem differ only in that one is "more specific" than the other, a **natural edge** is auto-generated in the reduction graph. The edge represents the trivial identity reduction — the problem instance doesn't change, only its type annotation relaxes. +1. **`ReductionEntry` items** — each registered reduction becomes a directed edge in a `petgraph::DiGraph`. Nodes are type-erased base names (e.g., `"MaxCut"`, not `"MaxCut"`), so path finding works regardless of type parameters. -A variant A is reducible to variant B when every field of A is at least as specific as the corresponding field of B: +2. **`VariantTypeEntry` items** — parent declarations are collected per category and transitively closed, building a `variant_hierarchy: HashMap>>`. -- **graph:** `is_graph_subtype(A.graph, B.graph)` — e.g. `UnitDiskGraph` ≤ `SimpleGraph` -- **weight:** `is_weight_subtype(A.weight, B.weight)` — e.g. `Unweighted` ≤ `i32` -- **k:** a concrete value is a subtype of `"N"` +### Natural edges -Natural edges have identity overhead: the output size equals the input size. +When exporting the graph (via `to_json()`), the graph auto-generates **natural edges** between same-name variant nodes. A natural edge from variant A to variant B exists when every field of A is at least as restrictive as B's (i.e., A is a subtype of B). Natural edges carry **identity overhead** — the problem size is unchanged. -### Example: Unweighted MIS on UnitDiskGraph → Weighted MIS on SimpleGraph +For example, `MaximumIndependentSet{KingsSubgraph, i32}` gets a natural edge to `MaximumIndependentSet{SimpleGraph, i32}` because `KingsSubgraph` is a subtype of `SimpleGraph`. -Consider reducing `MaximumIndependentSet` to `MaximumIndependentSet`. 
These are two variants of the same problem, so the reduction graph connects them via natural edges: +### JSON export -``` -MIS (UnitDiskGraph, Unweighted) - │ - │ graph relaxation: UnitDiskGraph ≤ SimpleGraph - ▼ -MIS (SimpleGraph, Unweighted) - │ - │ weight promotion: Unweighted ≤ i32 - ▼ -MIS (SimpleGraph, i32) -``` +`ReductionGraph::to_json()` produces a `ReductionGraphJson` with fully expanded variant nodes and both reduction + natural edges: + +- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges +- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type + +## Path Finding -**Step 1 — Graph relaxation.** A unit disk graph is a simple graph (it just happens to have geometric structure). The MIS instance is unchanged; we simply forget the geometric embedding and treat it as a generic graph. +Path finding operates at two levels: **name-level** paths (which problem types to traverse) and **variant-level** resolved paths (with concrete variant and overhead at each step). -**Step 2 — Weight promotion.** An unweighted MIS asks for the largest independent set (all vertices have equal value). This is equivalent to a weighted MIS where every vertex has weight 1. The instance gains uniform weights and becomes `MaximumIndependentSet`. +### Name-level paths -Both steps are identity reductions with zero overhead — no new variables or constraints are introduced. The variant system generates these edges automatically from the declared hierarchies. +`find_paths_by_name(src, dst)` enumerates all simple paths in the type-erased graph. `find_shortest_path_by_name()` returns the one with fewest hops. -### Variant-Aware Path Resolution +For cost-aware routing, `find_cheapest_path()` uses **Dijkstra's algorithm** with set-theoretic validation: -The `ReductionGraph` performs path-finding at the **name level** — nodes are `"MaximumIndependentSet"`, not `"MaximumIndependentSet"`. 
This keeps path discovery fast (one node per problem name), but it means a `ReductionPath` like `["KSatisfiability", "QUBO"]` carries no variant information. Two issues follow: +```rust +pub fn find_cheapest_path( + &self, + source: (&str, &str), // (problem_name, graph_type) + target: (&str, &str), + input_size: &ProblemSize, + cost_fn: &C, +) -> Option +``` + +At each edge, Dijkstra checks `rule_applicable()` — the source graph must be a subtype of the rule's expected source, and the rule's target graph must be a subtype of the desired target. This ensures the chosen path respects variant constraints. -1. **Overhead ambiguity.** `KSatisfiability<2> → QUBO` and `KSatisfiability<3> → QUBO` have different overheads (k=3 introduces auxiliary variables via Rosenberg quadratization), but a name-level path can't distinguish them. +### Cost functions -2. **Natural edge execution.** The path `MIS(KingsSubgraph) → VC(SimpleGraph)` needs an implicit graph-relaxation step, but the name-level path only says `["MaximumIndependentSet", "MinimumVertexCover"]`. +The `PathCostFn` trait computes edge cost from overhead and current problem size: + +```rust +pub trait PathCostFn { + fn edge_cost(&self, overhead: &ReductionOverhead, current_size: &ProblemSize) -> f64; +} +``` -The solution is **two-phase resolution**: name-level discovery followed by variant-level resolution. 
+Built-in implementations: -#### `resolve_path` +| Cost function | Strategy | +|--------------|----------| +| `Minimize("field")` | Minimize a single output field | +| `MinimizeWeighted([(field, w)])` | Weighted sum of output fields | +| `MinimizeMax([fields])` | Minimize the maximum of fields | +| `MinimizeLexicographic([fields])` | Lexicographic: minimize first, break ties with rest | +| `MinimizeSteps` | Minimize number of hops (unit edge cost) | +| `CustomCost(closure)` | User-defined cost function | + +### Variant-level resolution: `resolve_path` + +Given a name-level `ReductionPath`, `resolve_path` threads variant state through each step to produce a `ResolvedPath`: ```rust pub fn resolve_path( @@ -235,7 +283,7 @@ pub fn resolve_path( ) -> Option ``` -The resolver walks the name-level path, threading variant state through each step: +The algorithm: 1. **Find candidates** — all `ReductionEntry` items matching `(src_name, dst_name)`. 2. **Filter compatible** — keep entries where the current variant is equal-or-more-specific than the entry's source variant on every axis. @@ -254,91 +302,90 @@ pub struct ResolvedPath { #### Example: MIS on KingsSubgraph to MinimumVertexCover -Resolving `MIS(KingsSubgraph, i32) → VC(SimpleGraph, i32)` through name-path `["MIS", "VC"]`: +Resolving `MIS(KingsSubgraph, i32) -> VC(SimpleGraph, i32)` through name-path `["MIS", "VC"]`: ``` -steps: MIS{KingsSubgraph,i32} → MIS{SimpleGraph,i32} → VC{SimpleGraph,i32} +steps: MIS{KingsSubgraph,i32} -> MIS{SimpleGraph,i32} -> VC{SimpleGraph,i32} edges: NaturalCast Reduction{overhead} ``` -The resolver finds that the `MIS → VC` reduction expects `SimpleGraph`, so it inserts a `NaturalCast` to relax `KingsSubgraph` to `SimpleGraph` first. +The resolver finds that the `MIS -> VC` reduction expects `SimpleGraph`, so it inserts a `NaturalCast` to relax `KingsSubgraph` to `SimpleGraph` first. 
#### Example: KSat Disambiguation -Resolving `KSat(k=3) → QUBO` through name-path `["KSatisfiability", "QUBO"]`: +Resolving `KSat(k=3) -> QUBO` through name-path `["KSatisfiability", "QUBO"]`: -- Candidates: `KSat<2> → QUBO` (overhead: `num_vars`) and `KSat<3> → QUBO` (overhead: `num_vars + num_clauses`). +- Candidates: `KSat<2> -> QUBO` (overhead: `num_vars`) and `KSat<3> -> QUBO` (overhead: `num_vars + num_clauses`). - Filter with `k=3`: only `KSat<3>` is compatible (`3` is not a subtype of `2`). - Result: the k=3-specific overhead is returned. -#### Execution Model +## Overhead Evaluation -`ResolvedPath` is a **plan**, not an executor. Callers dispatch each step themselves: +Each reduction declares how the output problem size relates to the input size, expressed as polynomials. -- `EdgeKind::Reduction` → call `ReduceTo::reduce_to()` -- `EdgeKind::NaturalCast` → call `GraphCast::cast_graph()` or equivalent weight cast +### ProblemSize -This avoids type-erasure complexity while giving callers precise variant and overhead information at each step. +A `ProblemSize` holds named size components — the dimensions that characterize a problem instance: -## Rules +```rust +let size = ProblemSize::new(vec![("num_vertices", 10), ("num_edges", 15)]); +assert_eq!(size.get("num_vertices"), Some(10)); +``` -A reduction requires two pieces: +### Polynomials -**1. Result struct** — holds the target problem and extraction logic: +Output size formulas use `Polynomial` (a sum of `Monomial` terms). The `poly!` macro provides a concise syntax: ```rust -#[derive(Clone)] -pub struct ReductionAToB { - target: B, - // ... 
mapping data for extraction -} +poly!(num_vertices) // p(x) = num_vertices +poly!(num_vertices ^ 2) // p(x) = num_vertices^2 +poly!(3 * num_edges) // p(x) = 3 * num_edges +poly!(num_vertices * num_edges) // p(x) = num_vertices * num_edges +``` -impl ReductionResult for ReductionAToB { - type Source = A; - type Target = B; +A `ReductionOverhead` pairs output field names with their polynomials: - fn target_problem(&self) -> &B { &self.target } - fn extract_solution(&self, target_sol: &[usize]) -> Vec { /* ... */ } -} +```rust +ReductionOverhead::new(vec![ + ("num_vars", poly!(num_vertices) + poly!(num_edges)), + ("num_clauses", poly!(3 * num_edges)), +]) ``` -**2. `ReduceTo` impl** with the `#[reduction]` macro: +### Evaluating overhead -```rust -#[reduction(A -> B)] -impl ReduceTo for A { - type Result = ReductionAToB; - fn reduce_to(&self) -> Self::Result { /* ... */ } -} +`ReductionOverhead::evaluate_output_size(input)` substitutes input values into the polynomials and returns a new `ProblemSize`: + +``` +Input: ProblemSize { num_vertices: 10, num_edges: 15 } +Output: ProblemSize { num_vars: 25, num_clauses: 45 } ``` -The macro generates `inventory::submit!` calls for compile-time reduction graph registration. +### Composing through a path -See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) for the full implementation guide. +For a multi-step reduction path, overhead composes: the output of step $N$ becomes the input of step $N+1$. Each `ResolvedPath` edge carries its own `ReductionOverhead` (or `NaturalCast` with identity overhead), so the total output size is computed by chaining `evaluate_output_size` calls through the path. -## Registry +## Reduction Execution -The reduction graph is built at compile time using the `inventory` crate: +A `ResolvedPath` is a **plan**, not an executor. 
It provides variant and overhead information at each step, but callers dispatch the actual transformations themselves. -```rust -#[reduction(A -> B)] -impl ReduceTo for A { /* ... */ } +### Dispatching steps -// Expands to include: -// inventory::submit! { ReductionMeta { source: "A", target: "B", ... } } -``` +Walk the `edges` array and dispatch based on `EdgeKind`: -**JSON exports:** -- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges -- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type +- **`EdgeKind::Reduction`** — call `ReduceTo::reduce_to()` on the current problem to produce a `ReductionResult`, then call `target_problem()` to get the next problem. +- **`EdgeKind::NaturalCast`** — call `CastToParent::cast_to_parent()` (for graph casts) or the equivalent weight cast. The problem data is preserved; only the type changes. -Regenerate exports: +### Extracting solutions -```bash -cargo run --example export_graph # docs/src/reductions/reduction_graph.json (default) -cargo run --example export_graph -- output.json # custom output path -cargo run --example export_schemas # docs/src/reductions/problem_schemas.json -``` +After solving the final target problem, walk the chain **in reverse**: + +- At each `Reduction` edge, call `extract_solution(&target_solution)` on the corresponding `ReductionResult` to map the solution back to the source space. +- At each `NaturalCast` edge, the solution passes through unchanged (identity mapping). + +### Why concrete types (no type erasure) + +The library uses concrete types at each step rather than `dyn Problem`. This preserves full type safety and avoids boxing overhead, at the cost of requiring callers to know the types at each step. This design choice keeps the reduction pipeline zero-cost and makes the compiler verify correctness at each transformation boundary. 
## Solvers @@ -351,16 +398,42 @@ pub trait Solver { } ``` -`ILPSolver` additionally provides `solve_reduced()` for problems implementing `ReduceTo`. +### BruteForce -## Contributing +Enumerates every configuration in the space defined by `dims()`. Suitable for small instances (<20 variables). In addition to the `Solver` trait methods, provides: -See [Call for Contributions](./introduction.md#call-for-contributions) for the recommended issue-based workflow (no coding required). +- `find_all_best(problem)` — returns all tied-optimal configurations. +- `find_all_satisfying(problem)` — returns all satisfying configurations. + +Primarily used for **testing and verification** of reductions via closed-loop tests. -For manual implementation: +### ILPSolver -- **Adding a problem:** See [adding-models.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-models.md) -- **Adding a reduction:** See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) -- **Testing requirements:** See [testing.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/testing.md) +Feature-gated behind `ilp`. Uses the HiGHS solver via the `good_lp` crate. Additionally provides `solve_reduced()` for problems that implement `ReduceTo` — it reduces, solves the ILP, and extracts the solution in one call. -Run `make test clippy` before submitting PRs. 
+## JSON Serialization + +All problem types support JSON serialization via serde: + +```rust +use problemreductions::io::{to_json, from_json}; + +let json = to_json(&problem)?; +let restored: MaximumIndependentSet = from_json(&json)?; +``` + +**Exported JSON files:** +- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges +- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type + +Regenerate exports: + +```bash +cargo run --example export_graph # docs/src/reductions/reduction_graph.json (default) +cargo run --example export_graph -- output.json # custom output path +cargo run --example export_schemas # docs/src/reductions/problem_schemas.json +``` + +## Contributing + +See [Call for Contributions](./introduction.md#call-for-contributions) for the recommended issue-based workflow (no coding required). diff --git a/docs/src/static/lattices-dark.svg b/docs/src/static/lattices-dark.svg new file mode 100644 index 000000000..783a79ee8 --- /dev/null +++ b/docs/src/static/lattices-dark.svg @@ -0,0 +1,687 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/src/static/lattices.svg b/docs/src/static/lattices.svg new file mode 100644 index 000000000..38c2a7e2e --- /dev/null +++ b/docs/src/static/lattices.svg @@ -0,0 +1,687 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + diff --git a/docs/src/static/lattices.typ b/docs/src/static/lattices.typ new file mode 100644 index 000000000..4d800f74b --- /dev/null +++ b/docs/src/static/lattices.typ @@ -0,0 +1,212 @@ +// Demonstration of graph types used in the problem-reductions library. +// Compile: +// typst compile lattices.typ --input dark=false lattices.svg +// typst compile lattices.typ --input dark=true lattices-dark.svg +#import "@preview/cetz:0.4.2": canvas, draw +#import "../../paper/lib.typ": g-node, g-edge + +#set page(width: auto, height: auto, margin: 12pt, fill: none) + +#let lattices(dark: false) = { + // ── Theme colors ──────────────────────────────────────────────── + let (fg, edge-color, secondary) = if dark { + (rgb("#e2e8f0"), rgb("#94a3b8"), rgb("#94a3b8")) + } else { + (rgb("#1e293b"), rgb("#64748b"), rgb("#64748b")) + } + + let (node-fill, node-highlight) = if dark { + (rgb("#1e3a5f"), rgb("#2563eb")) + } else { + (rgb("#dbeafe"), rgb("#93c5fd")) + } + + let hyper-colors = if dark { + ( + (fill: rgb("#1e3a5f").transparentize(30%), stroke: rgb("#60a5fa")), + (fill: rgb("#7f1d1d").transparentize(30%), stroke: rgb("#f87171")), + (fill: rgb("#064e3b").transparentize(30%), stroke: rgb("#34d399")), + ) + } else { + ( + (fill: rgb("#dbeafe").transparentize(40%), stroke: rgb("#4e79a7")), + (fill: rgb("#fecaca").transparentize(40%), stroke: rgb("#e15759")), + (fill: rgb("#d1fae5").transparentize(40%), stroke: rgb("#059669")), + ) + } + + let hyper-node-fill = if dark { rgb("#1e293b") } else { white } + + let disk-fill = if dark { + rgb("#1e3a5f").transparentize(70%) + } else { + rgb("#dbeafe").transparentize(70%) + } + + set text(fill: fg, size: 9pt) + + // ── (a) SimpleGraph ────────────────────────────────────────────── + let simple-graph-fig() = { + import draw: * + let vs = ((0, 0), (2, 0), (0, 1.5), (2, 1.5), (1, 2.5)) + let es = ((0,1),(0,2),(1,3),(2,3),(2,4),(3,4)) + for (u, v) in es { g-edge(vs.at(u), vs.at(v), stroke: 1pt + 
edge-color) } + for (k, pos) in vs.enumerate() { + g-node(pos, name: "s" + str(k), fill: node-fill, stroke: 0.5pt + edge-color, label: str(k)) + } + } + + // ── (b) HyperGraph ────────────────────────────────────────────── + let hypergraph-fig() = { + import draw: * + let vs = ((0, 0), (1.5, 0.3), (2.5, 0), (0.5, 1.5), (2, 1.5), (1.2, 2.5)) + + // Hyperedge A: {0, 1, 3} + draw.hobby( + (-.3, -.3), (0.8, -.2), (1.9, 0.2), (0.8, 1.8), (-.2, 1.8), (-.5, 0.5), + close: true, + fill: hyper-colors.at(0).fill, + stroke: 0.8pt + hyper-colors.at(0).stroke, + ) + // Hyperedge B: {1, 2, 4} + draw.hobby( + (1.1, -.2), (2.8, -.3), (2.6, 1.8), (1.6, 1.8), (0.9, 0.8), + close: true, + fill: hyper-colors.at(1).fill, + stroke: 0.8pt + hyper-colors.at(1).stroke, + ) + // Hyperedge C: {3, 4, 5} + draw.hobby( + (0.1, 1.2), (1.6, 1.0), (2.4, 1.8), (1.5, 2.9), (0.1, 2.2), + close: true, + fill: hyper-colors.at(2).fill, + stroke: 0.8pt + hyper-colors.at(2).stroke, + ) + + for (k, pos) in vs.enumerate() { + g-node(pos, name: "h" + str(k), fill: hyper-node-fill, stroke: 1pt + edge-color, label: str(k)) + } + } + + // ── (c) UnitDiskGraph ──────────────────────────────────────────── + let unit-disk-fig() = { + import draw: * + let vs = ((0.2, 0.2), (1.0, 0.0), (2.2, 0.3), (0.0, 1.2), (1.2, 1.5), (2.0, 1.1), (0.8, 2.3)) + let r = 1.25 + + // Radius disk around vertex 4 + draw.circle(vs.at(4), radius: r, fill: disk-fill, stroke: (dash: "dashed", paint: edge-color, thickness: 0.6pt)) + + // Compute edges: connect pairs within distance r + let es = () + for i in range(vs.len()) { + for j in range(i + 1, vs.len()) { + let dx = vs.at(i).at(0) - vs.at(j).at(0) + let dy = vs.at(i).at(1) - vs.at(j).at(1) + if calc.sqrt(dx * dx + dy * dy) <= r { + es.push((i, j)) + } + } + } + + for (u, v) in es { g-edge(vs.at(u), vs.at(v), stroke: 0.8pt + edge-color) } + for (k, pos) in vs.enumerate() { + let fill = if k == 4 { node-highlight } else { node-fill } + g-node(pos, name: "u" + str(k), fill: fill, stroke: 
0.5pt + edge-color, label: str(k)) + } + + // Radius label + draw.content((vs.at(4).at(0) + r + 0.15, vs.at(4).at(1) + 0.1), text(7pt, fill: secondary)[$r$]) + } + + // ── (d) KingsSubgraph ──────────────────────────────────────────── + let kings-fig() = { + import draw: * + let rows = 4 + let cols = 5 + let sp = 0.6 + let vs = () + for row in range(rows) { + for col in range(cols) { + vs.push((col * sp, -row * sp)) + } + } + + let es = () + for row in range(rows) { + for col in range(cols) { + let i = row * cols + col + if col + 1 < cols { es.push((i, i + 1)) } + if row + 1 < rows { es.push((i, i + cols)) } + if row + 1 < rows and col + 1 < cols { es.push((i, i + cols + 1)) } + if row + 1 < rows and col > 0 { es.push((i, i + cols - 1)) } + } + } + + for (u, v) in es { g-edge(vs.at(u), vs.at(v), stroke: 0.6pt + edge-color) } + for (k, pos) in vs.enumerate() { + g-node(pos, name: "k" + str(k), radius: 0.12, fill: node-fill, stroke: 0.5pt + edge-color) + } + } + + // ── (e) TriangularSubgraph ─────────────────────────────────────── + let triangular-fig() = { + import draw: * + let rows = 5 + let cols = 6 + let sp = 0.6 + let sqrt3_2 = calc.sqrt(3) / 2 + let vs = () + for row in range(rows) { + let offset = if calc.rem(row, 2) == 0 { 0 } else { 0.5 * sp } + for col in range(cols) { + vs.push((col * sp + offset, -row * sqrt3_2 * sp)) + } + } + + let es = () + for row in range(rows) { + for col in range(cols) { + let i = row * cols + col + if col + 1 < cols { es.push((i, i + 1)) } + if row + 1 < rows { + if calc.rem(row, 2) == 0 { + if col > 0 { es.push((i, (row + 1) * cols + col - 1)) } + es.push((i, (row + 1) * cols + col)) + } else { + es.push((i, (row + 1) * cols + col)) + if col + 1 < cols { es.push((i, (row + 1) * cols + col + 1)) } + } + } + } + } + + for (u, v) in es { g-edge(vs.at(u), vs.at(v), stroke: 0.5pt + edge-color) } + for (k, pos) in vs.enumerate() { + g-node(pos, name: "t" + str(k), radius: 0.1, fill: node-fill, stroke: 0.4pt + edge-color) + } + } + + 
// ── Layout ─────────────────────────────────────────────────────── + let font-size = 12pt + canvas({ + import draw: * + simple-graph-fig() + content((1, -1), text(font-size, [(a) SimpleGraph])) + set-origin((5, 0)) + hypergraph-fig() + content((1, -1), text(font-size, [(b) HyperGraph])) + set-origin((5, 0)) + unit-disk-fig() + content((1, -1), text(font-size, [(c) UnitDiskGraph])) + set-origin((-10, -2)) + kings-fig() + content((1, -2.6), text(font-size, [(d) KingsSubgraph])) + set-origin((5, 0)) + triangular-fig() + content((1, -2.6), text(font-size, [(e) TriangularSubgraph])) + }) +} + +#let standalone-dark = sys.inputs.at("dark", default: "false") == "true" +#lattices(dark: standalone-dark) diff --git a/docs/src/static/trait-hierarchy-dark.svg b/docs/src/static/trait-hierarchy-dark.svg index 29e6dde4d..e6df22f04 100644 --- a/docs/src/static/trait-hierarchy-dark.svg +++ b/docs/src/static/trait-hierarchy-dark.svg @@ -1,16 +1,16 @@ - + - - - - + + + + - + @@ -33,17 +33,40 @@ - - - - - - - - + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -187,11 +210,11 @@ - - + + - + @@ -277,80 +300,66 @@ - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + @@ -368,36 +377,18 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + - + @@ -424,6 +415,86 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -670,20 +741,41 @@ - - + + + + + + + + + + + + + + + + + + + + - - + + - - + + - - + + - - + + + + + @@ -694,14 +786,44 @@ - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/src/static/trait-hierarchy.svg b/docs/src/static/trait-hierarchy.svg index 1004628e1..e1e9bc3a8 100644 --- 
a/docs/src/static/trait-hierarchy.svg +++ b/docs/src/static/trait-hierarchy.svg @@ -1,16 +1,16 @@ - + - - - - + + + + - + @@ -33,17 +33,40 @@ - - - - - - - - + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -187,11 +210,11 @@ - - + + - + @@ -277,80 +300,66 @@ - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + @@ -368,36 +377,18 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + - + @@ -424,6 +415,86 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -670,20 +741,41 @@ - - + + + + + + + + + + + + + + + + + + + + - - + + - - + + - - + + - - + + + + + @@ -694,14 +786,44 @@ - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/src/static/trait-hierarchy.typ b/docs/src/static/trait-hierarchy.typ index 45a760cb5..74e6691e2 100644 --- a/docs/src/static/trait-hierarchy.typ +++ b/docs/src/static/trait-hierarchy.typ @@ -23,8 +23,8 @@ edge-stroke: 1.5pt + box-color, spacing: (8mm, 12mm), - // Problem trait (main) - node((0, 0), box(width: 55mm, align(left)[ + // Problem trait (top center) + node((0.5, 0), box(width: 55mm, align(left)[ #strong[trait Problem]\ #text(size: 8pt, fill: secondary)[ `const NAME: &str`\ @@ -35,35 +35,38 @@ ] ]), fill: trait-fill, corner-radius: 6pt, inset: 10pt, name: ), - // OptimizationProblem trait + // OptimizationProblem trait (bottom left) node((0, 1), box(width: 55mm, align(left)[ #strong[trait OptimizationProblem]\ #text(size: 8pt, fill: secondary)[ `type Value: PartialOrd + Clone`\ `fn direction() -> Direction`\ #text(style: "italic")[requires `Metric = SolutionSize`] - ] - ]), fill: trait-fill, corner-radius: 6pt, inset: 10pt, name: ), 
- // Type boxes on the right - node((1.3, 0), box(width: 38mm, align(left)[ #strong[SolutionSize\]\ #text(size: 8pt, fill: secondary)[`Valid(T) | Invalid`] - ]), fill: type-fill, corner-radius: 6pt, inset: 8pt, name: ), - node((1.3, 1), box(width: 38mm, align(left)[ #strong[Direction]\ #text(size: 8pt, fill: secondary)[`Maximize | Minimize`] - ]), fill: type-fill, corner-radius: 6pt, inset: 8pt, name: ), - // Inheritance arrow - edge(, , "->", label: text(size: 8pt)[extends], label-side: left, label-fill: none), + ] + ]), fill: trait-fill, corner-radius: 6pt, inset: 10pt, name: ), - // Type associations - edge(, , "->"), - edge(, , "->"), + // SatisfactionProblem trait (bottom right) + node((1.2, 1), box(width: 42mm, align(left)[ + #strong[trait SatisfactionProblem]\ + #text(size: 8pt, fill: secondary)[ + #text(style: "italic")[marker trait]\ + #text(style: "italic")[requires `Metric = bool`] + ] + ]), fill: trait-fill, corner-radius: 6pt, inset: 10pt, name: ), + + // Inheritance arrows + edge(, , "->", label: text(size: 8pt)[extends], label-side: left, label-fill: none), + edge(, , "->", label: text(size: 8pt)[extends], label-side: right, label-fill: none), ) } #let standalone-dark = sys.inputs.at("dark", default: "false") == "true" #trait-hierarchy(dark: standalone-dark) + diff --git a/docs/src/static/variant-hierarchy-dark.svg b/docs/src/static/variant-hierarchy-dark.svg index 462549997..fe8394aad 100644 --- a/docs/src/static/variant-hierarchy-dark.svg +++ b/docs/src/static/variant-hierarchy-dark.svg @@ -1,52 +1,52 @@ - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + @@ -68,10 +68,10 @@ - + - + @@ -90,10 +90,10 @@ - + - + @@ -112,11 +112,11 @@ - - + + - + @@ -138,11 +138,11 @@ - - + + - + @@ -192,11 +192,11 @@ - - + + - + @@ -222,11 +222,11 @@ - - + + - + @@ -251,11 +251,11 @@ - - + + - + @@ -280,11 +280,11 @@ - - + + - + @@ -314,11 +314,11 @@ - 
- + + - + @@ -333,11 +333,11 @@ - - + + - + @@ -352,11 +352,11 @@ - - + + - + @@ -371,11 +371,11 @@ - - + + - + @@ -389,11 +389,11 @@ - - + + - + @@ -407,11 +407,11 @@ - - + + - + @@ -425,11 +425,11 @@ - - + + - + @@ -443,11 +443,11 @@ - - + + - + @@ -461,11 +461,11 @@ - - + + - + diff --git a/docs/src/static/variant-hierarchy.svg b/docs/src/static/variant-hierarchy.svg index af4c9e247..9a466eec3 100644 --- a/docs/src/static/variant-hierarchy.svg +++ b/docs/src/static/variant-hierarchy.svg @@ -1,52 +1,52 @@ - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + @@ -68,10 +68,10 @@ - + - + @@ -90,10 +90,10 @@ - + - + @@ -112,11 +112,11 @@ - - + + - + @@ -138,11 +138,11 @@ - - + + - + @@ -192,11 +192,11 @@ - - + + - + @@ -222,11 +222,11 @@ - - + + - + @@ -251,11 +251,11 @@ - - + + - + @@ -280,11 +280,11 @@ - - + + - + @@ -314,11 +314,11 @@ - - + + - + @@ -333,11 +333,11 @@ - - + + - + @@ -352,11 +352,11 @@ - - + + - + @@ -371,11 +371,11 @@ - - + + - + @@ -389,11 +389,11 @@ - - + + - + @@ -407,11 +407,11 @@ - - + + - + @@ -425,11 +425,11 @@ - - + + - + @@ -443,11 +443,11 @@ - - + + - + @@ -461,11 +461,11 @@ - - + + - + diff --git a/docs/src/static/variant-hierarchy.typ b/docs/src/static/variant-hierarchy.typ index 5ae4cb7ce..e1df29872 100644 --- a/docs/src/static/variant-hierarchy.typ +++ b/docs/src/static/variant-hierarchy.typ @@ -24,7 +24,7 @@ spacing: (10mm, 8mm), // Section labels - node((-0.3, -0.5), text(size: 10pt, weight: "bold")[Graph Types], stroke: none, fill: none), + node((0.0, -0.5), text(size: 10pt, weight: "bold")[Graph Types], stroke: none, fill: none), node((3.2, -0.5), text(size: 10pt, weight: "bold")[Weights], stroke: none, fill: none), node((5, -0.5), text(size: 10pt, weight: "bold")[K Values], stroke: none, fill: none), From cb28377f982052c5cad8fc8a3b93a35df6f5f5a5 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 
22:54:03 +0800 Subject: [PATCH 14/15] chore: remove plan files Co-Authored-By: Claude Opus 4.6 --- .../2026-02-15-design-md-polish-design.md | 80 -- .../plans/2026-02-15-design-md-polish-impl.md | 651 -------------- .../2026-02-15-issue70-refactoring-design.md | 137 --- .../2026-02-15-issue70-refactoring-impl.md | 823 ------------------ 4 files changed, 1691 deletions(-) delete mode 100644 docs/plans/2026-02-15-design-md-polish-design.md delete mode 100644 docs/plans/2026-02-15-design-md-polish-impl.md delete mode 100644 docs/plans/2026-02-15-issue70-refactoring-design.md delete mode 100644 docs/plans/2026-02-15-issue70-refactoring-impl.md diff --git a/docs/plans/2026-02-15-design-md-polish-design.md b/docs/plans/2026-02-15-design-md-polish-design.md deleted file mode 100644 index 8ed50574e..000000000 --- a/docs/plans/2026-02-15-design-md-polish-design.md +++ /dev/null @@ -1,80 +0,0 @@ -# Design: Polish design.md - -## Context - -The current `docs/src/design.md` has empty sections (Overhead Evaluation, Reduction Execution), outdated content that doesn't reflect recent variant system changes, unclear section flow, and no unifying narrative for contributors. - -## Audience - -Library contributors who need to understand the internals to add new problems and reductions. - -## Approach - -**"Follow the Data"** — organize sections by the lifecycle of a reduction, from problem definition through graph construction to path resolution and execution. No overlap with `getting-started.md`. - -## Proposed Structure - -### 1. Module Overview (keep existing) -- Keep diagram + table -- Update opening line: "This guide covers the library internals for contributors." - -### 2. Problem Model (renamed from "Models") -- Keep `Problem`, `OptimizationProblem`, `SatisfactionProblem` explanations -- Keep trait hierarchy diagram -- Minor tightening of examples - -### 3. 
Variant System (expanded from "Problem variants") -- Keep concept intro, variant-hierarchy diagram, lattices diagram -- Add `VariantParam` trait definition (`CATEGORY`, `VALUE`, `PARENT_VALUE`) -- Add `impl_variant_param!` macro — 4 forms (root, with parent, KValue root, KValue with parent) -- Add `CastToParent` trait — runtime conversion for natural casts -- Keep `variant_params!` macro example - -### 4. Reduction Rules (restructured) -- Keep `ReductionResult` struct + trait pattern -- Keep `ReduceTo` impl with `#[reduction]` macro -- Add: what `#[reduction]` expands to (the `inventory::submit!(ReductionEntry { ... })` call) -- Add: `ReductionOverhead` declaration with `poly!` macro example - -### 5. Reduction Graph (renamed from "Reduction") -- Construction: `ReductionGraph::new()` iterates inventory entries, builds `petgraph::DiGraph` + variant hierarchy with transitive closure -- Natural edges: auto-generated between same-name variant nodes via subtype check, identity overhead -- JSON export: `to_json()` produces `ReductionGraphJson` - -### 6. Path Finding (keep and extend) -- Keep `resolve_path` algorithm steps and examples (MIS casting, KSat disambiguation) -- Keep `ResolvedPath` struct -- Add `find_cheapest_path` with Dijkstra + set-theoretic validation -- Add `PathCostFn` trait and built-in cost functions: `Minimize`, `MinimizeWeighted`, `MinimizeMax`, `MinimizeLexicographic`, `MinimizeSteps`, `CustomCost` - -### 7. Overhead Evaluation (fill empty section) -- `ProblemSize`: named size components -- `Polynomial` / `Monomial`: overhead formula representation + `poly!` macro -- `ReductionOverhead::evaluate_output_size(input) -> ProblemSize` -- Composition: chain output of step N as input of step N+1 -- Example: multi-step size propagation - -### 8. 
Reduction Execution (fill empty section) -- `ResolvedPath` is a plan, not an executor -- Dispatch model: `Reduction` → `reduce_to()`, `NaturalCast` → `cast_to_parent()` -- Solution extraction: walk chain in reverse, `extract_solution()` at each Reduction step, natural casts preserve solution -- Design rationale: concrete types (no `dyn Problem`) for type safety - -### 9. Solvers (expanded) -- `BruteForce`: enumerate all configs from `dims()`, `find_best`/`find_all_best`, `find_satisfying`/`find_all_satisfying` -- `ILPSolver`: feature-gated (`ilp`), HiGHS via `good_lp`, `solve_reduced()` -- Note: primarily for testing/verification - -### 10. JSON Serialization (keep, minor polish) - -### 11. Contributing (keep as-is) - -## Removals -- The "Reduction" H2 header (line 132) — content redistributed into sections 5-8 -- Duplicated `#[reduction]` example in "Reduction Graph" subsection - -## Additions -- Sections 7 and 8 get real content -- Variant system gets `VariantParam`/`impl_variant_param!`/`CastToParent` machinery -- Path finding gets `find_cheapest_path` + `PathCostFn` -- Overhead gets `Polynomial`/`poly!` + size propagation diff --git a/docs/plans/2026-02-15-design-md-polish-impl.md b/docs/plans/2026-02-15-design-md-polish-impl.md deleted file mode 100644 index b551151ca..000000000 --- a/docs/plans/2026-02-15-design-md-polish-impl.md +++ /dev/null @@ -1,651 +0,0 @@ -# Polish design.md Implementation Plan - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Rewrite `docs/src/design.md` following the "Follow the Data" structure: fill empty sections, update outdated content, and create a coherent contributor-oriented narrative. - -**Architecture:** Single-file rewrite of `docs/src/design.md`. Organized as 11 sections tracing the lifecycle of a reduction. All diagrams are preserved as-is. No code changes, only documentation. 
- -**Tech Stack:** Markdown (mdBook), existing SVG diagrams, code snippets from the Rust source. - ---- - -### Task 1: Rewrite sections 1-2 (Module Overview + Problem Model) - -**Files:** -- Modify: `docs/src/design.md:1-44` - -**Step 1: Replace lines 1-44 with updated Module Overview and Problem Model** - -Replace the entire file content from line 1 through line 44 (end of trait hierarchy diagram) with: - -```markdown -# Design - -This guide covers the library internals for contributors. - -## Module Overview - -
- -![Module Overview](static/module-overview.svg) - -
-
- -![Module Overview](static/module-overview-dark.svg) - -
- -| Module | Purpose | -|--------|---------| -| [`src/models/`](#problem-model) | Problem type implementations (SAT, Graph, Set, Optimization) | -| [`src/rules/`](#reduction-rules) | Reduction rules with `ReduceTo` implementations | -| [`src/registry/`](#reduction-graph) | Compile-time reduction graph metadata | -| [`src/solvers/`](#solvers) | BruteForce and ILP solvers | -| `src/traits.rs` | Core `Problem` and `OptimizationProblem` traits (see [Problem Model](#problem-model)) | -| `src/types.rs` | Shared types: `SolutionSize`, `Direction`, `ProblemSize` (see [Problem Model](#problem-model)) | -| `src/variant.rs` | Variant parameter system (see [Variant System](#variant-system)) | - -## Problem Model - -Every problem implements `Problem`. Optimization problems additionally implement `OptimizationProblem`; satisfaction problems implement `SatisfactionProblem`. - -- **`Problem`** — the base trait. Every problem declares a `NAME` (e.g., `"MaximumIndependentSet"`). The solver explores the configuration space defined by `dims()` and scores each configuration with `evaluate()`. For example, a 4-vertex MIS has `dims() = [2, 2, 2, 2]` (each vertex is selected or not); `evaluate(&[1, 0, 1, 0])` returns `Valid(2)` if vertices 0 and 2 form an independent set, or `Invalid` if they share an edge. -- **`OptimizationProblem`** — extends `Problem` with a comparable `Value` type and a `direction()` (`Maximize` or `Minimize`). -- **`SatisfactionProblem`** — constrains `Metric = bool`: `true` if all constraints are satisfied, `false` otherwise. - -
- -![Trait Hierarchy](static/trait-hierarchy.svg) - -
-
- -![Trait Hierarchy](static/trait-hierarchy-dark.svg) - -
-``` - -**Step 2: Verify the mdbook builds** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` -Expected: Build succeeds, no broken links. - -**Step 3: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: rewrite design.md sections 1-2 (module overview + problem model)" -``` - ---- - -### Task 2: Rewrite section 3 (Variant System) - -**Files:** -- Modify: `docs/src/design.md` — replace the old "Problem variants" subsection (lines 46-93 in the original) with the new "Variant System" section. - -**Step 1: Write the Variant System section** - -This section replaces everything from `### Problem variants` through the end of the `variant_params!` code block. It should appear immediately after the trait hierarchy diagram. - -```markdown -## Variant System - -A single problem name like `MaximumIndependentSet` can have multiple **variants** — carrying weights on vertices, or defined on a grid. Some variants are more specific than others: the grid graph is a special case of the unit-disk graph, which is a special case of the simple graph. - -In **set** language, variants form **subsets**: independent sets on grid graphs are a subset of independent sets on unit-disk graphs. The reduction from a more specific variant to a less specific one is a **natural reduction** (identity mapping). To avoid repeating the same rule for each variant pair, the library provides an auto-casting mechanism. - -
- -![Variant Hierarchy](static/variant-hierarchy.svg) - -
-
- -![Variant Hierarchy](static/variant-hierarchy-dark.svg) - -
- -Arrows indicate the **subset** (subtype) direction. Variant types fall into three categories: - -- **Graph type** — e.g., `SimpleGraph`, `UnitDiskGraph`, `KingsSubgraph`. Available graph variants: -- **Weight type** — `One` (unweighted), `i32`, `f64`. -- **K value** — e.g., `K3` for 3-SAT, `KN` for arbitrary K. - -
- -![Lattices](static/lattices.svg) - -
-
- -![Lattices](static/lattices-dark.svg) - -
- -### VariantParam trait - -Each variant parameter type implements `VariantParam`, which declares its category, value, and optional parent: - -```rust -pub trait VariantParam: 'static { - const CATEGORY: &'static str; // e.g., "graph", "weight", "k" - const VALUE: &'static str; // e.g., "SimpleGraph", "i32" - const PARENT_VALUE: Option<&'static str>; // None for root types -} -``` - -Types with a parent also implement `CastToParent`, providing the runtime conversion for natural casts: - -```rust -pub trait CastToParent: VariantParam { - type Parent: VariantParam; - fn cast_to_parent(&self) -> Self::Parent; -} -``` - -### Registration with `impl_variant_param!` - -The `impl_variant_param!` macro implements `VariantParam` (and optionally `CastToParent` / `KValue`) and registers a `VariantTypeEntry` via `inventory` for compile-time hierarchy discovery: - -```rust -// Root type (no parent): -impl_variant_param!(SimpleGraph, "graph"); - -// Type with parent (cast closure required): -impl_variant_param!(UnitDiskGraph, "graph", - parent: SimpleGraph, - cast: |g| SimpleGraph::new(g.num_vertices(), g.edges())); - -// K root (arbitrary K): -impl_variant_param!(KN, "k", k: None); - -// Specific K with parent: -impl_variant_param!(K3, "k", parent: KN, cast: |_| KN, k: Some(3)); -``` - -At startup, the `ReductionGraph` collects all `VariantTypeEntry` registrations and computes the **transitive closure** of the parent relationships, so `KingsSubgraph` is recognized as a subtype of `SimpleGraph` even though it declares `UnitDiskGraph` as its direct parent. 
- -### Composing `Problem::variant()` - -The `variant_params!` macro composes the `Problem::variant()` body from type parameter names: - -```rust -// MaximumIndependentSet -fn variant() -> Vec<(&'static str, &'static str)> { - crate::variant_params![G, W] - // e.g., MaximumIndependentSet - // → vec![("graph", "UnitDiskGraph"), ("weight", "One")] -} -``` -``` - -**Step 2: Verify mdbook builds** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` - -**Step 3: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: rewrite design.md section 3 (variant system)" -``` - ---- - -### Task 3: Rewrite section 4 (Reduction Rules) - -**Files:** -- Modify: `docs/src/design.md` — replace the old "Reduction Rules" section. - -**Step 1: Write the Reduction Rules section** - -This replaces the old section (lines 95-129 in the original). Place it after the Variant System section. - -```markdown -## Reduction Rules - -A reduction requires two pieces: a **result struct** and a **`ReduceTo` impl**. - -### Result struct - -Holds the target problem and the logic to map solutions back: - -```rust -#[derive(Clone)] -pub struct ReductionISToVC { - target: MinimumVertexCover, -} - -impl ReductionResult for ReductionISToVC { - type Source = MaximumIndependentSet; - type Target = MinimumVertexCover; - - fn target_problem(&self) -> &Self::Target { &self.target } - fn extract_solution(&self, target_sol: &[usize]) -> Vec { - target_sol.iter().map(|&x| 1 - x).collect() // complement - } -} -``` - -### `ReduceTo` impl with the `#[reduction]` macro - -```rust -#[reduction( - overhead = { - ReductionOverhead::new(vec![ - ("num_vertices", poly!(num_vertices)), - ("num_edges", poly!(num_edges)), - ]) - } -)] -impl ReduceTo> - for MaximumIndependentSet -{ - type Result = ReductionISToVC; - fn reduce_to(&self) -> Self::Result { /* ... 
*/ } -} -``` - -### What the macro generates - -The `#[reduction]` attribute expands to the original `impl` block plus an `inventory::submit!` call: - -```rust -inventory::submit! { - ReductionEntry { - source_name: "MaximumIndependentSet", - target_name: "MinimumVertexCover", - source_variant_fn: || as Problem>::variant(), - target_variant_fn: || as Problem>::variant(), - overhead_fn: || ReductionOverhead::new(vec![ - ("num_vertices", poly!(num_vertices)), - ("num_edges", poly!(num_edges)), - ]), - module_path: module_path!(), - } -} -``` - -This `ReductionEntry` is collected at compile time by `inventory`, making the reduction discoverable by the `ReductionGraph` without any manual registration. - -See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) for the full implementation guide. -``` - -**Step 2: Verify mdbook builds** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` - -**Step 3: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: rewrite design.md section 4 (reduction rules)" -``` - ---- - -### Task 4: Rewrite section 5 (Reduction Graph) - -**Files:** -- Modify: `docs/src/design.md` — replace the old "Reduction" H2 and "Reduction Graph" H3 (lines 132-149 in the original). - -**Step 1: Write the Reduction Graph section** - -Place after Reduction Rules. This replaces the old "Reduction" section header and its "Reduction Graph" subsection. - -```markdown -## Reduction Graph - -The `ReductionGraph` is the central runtime data structure. It collects all registered reductions and variant hierarchies to enable path finding and overhead evaluation. - -### Construction - -`ReductionGraph::new()` performs two `inventory` scans: - -1. **`ReductionEntry` items** — each registered reduction becomes a directed edge in a `petgraph::DiGraph`. Nodes are type-erased base names (e.g., `"MaxCut"`, not `"MaxCut"`), so path finding works regardless of type parameters. 
- -2. **`VariantTypeEntry` items** — parent declarations are collected per category and transitively closed, building a `variant_hierarchy: HashMap>>`. - -### Natural edges - -When exporting the graph (via `to_json()`), the graph auto-generates **natural edges** between same-name variant nodes. A natural edge from variant A to variant B exists when every field of A is at least as restrictive as B's (i.e., A is a subtype of B). Natural edges carry **identity overhead** — the problem size is unchanged. - -For example, `MaximumIndependentSet{KingsSubgraph, i32}` gets a natural edge to `MaximumIndependentSet{SimpleGraph, i32}` because `KingsSubgraph` is a subtype of `SimpleGraph`. - -### JSON export - -`ReductionGraph::to_json()` produces a `ReductionGraphJson` with fully expanded variant nodes and both reduction + natural edges: - -- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges -- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type -``` - -**Step 2: Verify mdbook builds** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` - -**Step 3: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: rewrite design.md section 5 (reduction graph)" -``` - ---- - -### Task 5: Rewrite section 6 (Path Finding) - -**Files:** -- Modify: `docs/src/design.md` — replace the old "Path Finding" H3 (lines 153-208 in the original). - -**Step 1: Write the Path Finding section** - -Place after Reduction Graph. Keep the existing resolve_path content and examples, add the Dijkstra/cost-function content. - -```markdown -## Path Finding - -Path finding operates at two levels: **name-level** paths (which problem types to traverse) and **variant-level** resolved paths (with concrete variant and overhead at each step). - -### Name-level paths - -`find_paths_by_name(src, dst)` enumerates all simple paths in the type-erased graph. 
`find_shortest_path_by_name()` returns the one with fewest hops. - -For cost-aware routing, `find_cheapest_path()` uses **Dijkstra's algorithm** with set-theoretic validation: - -```rust -pub fn find_cheapest_path( - &self, - source: (&str, &str), // (problem_name, graph_type) - target: (&str, &str), - input_size: &ProblemSize, - cost_fn: &C, -) -> Option -``` - -At each edge, Dijkstra checks `rule_applicable()` — the source graph must be a subtype of the rule's expected source, and the rule's target graph must be a subtype of the desired target. This ensures the chosen path respects variant constraints. - -### Cost functions - -The `PathCostFn` trait computes edge cost from overhead and current problem size: - -```rust -pub trait PathCostFn { - fn edge_cost(&self, overhead: &ReductionOverhead, current_size: &ProblemSize) -> f64; -} -``` - -Built-in implementations: - -| Cost function | Strategy | -|--------------|----------| -| `Minimize("field")` | Minimize a single output field | -| `MinimizeWeighted([(field, w)])` | Weighted sum of output fields | -| `MinimizeMax([fields])` | Minimize the maximum of fields | -| `MinimizeLexicographic([fields])` | Lexicographic: minimize first, break ties with rest | -| `MinimizeSteps` | Minimize number of hops (unit edge cost) | -| `CustomCost(closure)` | User-defined cost function | - -### Variant-level resolution: `resolve_path` - -Given a name-level `ReductionPath`, `resolve_path` threads variant state through each step to produce a `ResolvedPath`: - -```rust -pub fn resolve_path( - &self, - path: &ReductionPath, // name-level plan - source_variant: &BTreeMap, // caller's concrete variant - target_variant: &BTreeMap, // desired target variant -) -> Option -``` - -The algorithm: - -1. **Find candidates** — all `ReductionEntry` items matching `(src_name, dst_name)`. -2. **Filter compatible** — keep entries where the current variant is equal-or-more-specific than the entry's source variant on every axis. -3. 
**Pick most specific** — among compatible entries, choose the tightest fit. -4. **Insert natural cast** — if the current variant is more specific than the chosen entry's source, emit a `NaturalCast` edge. -5. **Advance** — update current variant to the entry's target variant, emit a `Reduction` edge with the correct overhead. - -The result is a `ResolvedPath`: - -```rust -pub struct ResolvedPath { - pub steps: Vec, // (name, variant) at each node - pub edges: Vec, // Reduction{overhead} | NaturalCast -} -``` - -#### Example: MIS on KingsSubgraph to MinimumVertexCover - -Resolving `MIS(KingsSubgraph, i32) → VC(SimpleGraph, i32)` through name-path `["MIS", "VC"]`: - -``` -steps: MIS{KingsSubgraph,i32} → MIS{SimpleGraph,i32} → VC{SimpleGraph,i32} -edges: NaturalCast Reduction{overhead} -``` - -The resolver finds that the `MIS → VC` reduction expects `SimpleGraph`, so it inserts a `NaturalCast` to relax `KingsSubgraph` to `SimpleGraph` first. - -#### Example: KSat Disambiguation - -Resolving `KSat(k=3) → QUBO` through name-path `["KSatisfiability", "QUBO"]`: - -- Candidates: `KSat<2> → QUBO` (overhead: `num_vars`) and `KSat<3> → QUBO` (overhead: `num_vars + num_clauses`). -- Filter with `k=3`: only `KSat<3>` is compatible (`3` is not a subtype of `2`). -- Result: the k=3-specific overhead is returned. -``` - -**Step 2: Verify mdbook builds** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` - -**Step 3: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: rewrite design.md section 6 (path finding)" -``` - ---- - -### Task 6: Write sections 7-8 (Overhead Evaluation + Reduction Execution) - -**Files:** -- Modify: `docs/src/design.md` — replace the empty "Overhead Evaluation" and "Reduction Execution" headers (lines 210-212 in the original). - -**Step 1: Write the Overhead Evaluation and Reduction Execution sections** - -Place after Path Finding. 
- -```markdown -## Overhead Evaluation - -Each reduction declares how the output problem size relates to the input size, expressed as polynomials. - -### ProblemSize - -A `ProblemSize` holds named size components — the dimensions that characterize a problem instance: - -```rust -let size = ProblemSize::new(vec![("num_vertices", 10), ("num_edges", 15)]); -assert_eq!(size.get("num_vertices"), Some(10)); -``` - -### Polynomials - -Output size formulas use `Polynomial` (a sum of `Monomial` terms). The `poly!` macro provides a concise syntax: - -```rust -poly!(num_vertices) // p(x) = num_vertices -poly!(num_vertices ^ 2) // p(x) = num_vertices² -poly!(3 * num_edges) // p(x) = 3 · num_edges -poly!(num_vertices * num_edges) // p(x) = num_vertices · num_edges -``` - -A `ReductionOverhead` pairs output field names with their polynomials: - -```rust -ReductionOverhead::new(vec![ - ("num_vars", poly!(num_vertices) + poly!(num_edges)), - ("num_clauses", poly!(3 * num_edges)), -]) -``` - -### Evaluating overhead - -`ReductionOverhead::evaluate_output_size(input)` substitutes input values into the polynomials and returns a new `ProblemSize`: - -``` -Input: ProblemSize { num_vertices: 10, num_edges: 15 } -Output: ProblemSize { num_vars: 25, num_clauses: 45 } -``` - -### Composing through a path - -For a multi-step reduction path, overhead composes: the output of step $N$ becomes the input of step $N+1$. Each `ResolvedPath` edge carries its own `ReductionOverhead` (or `NaturalCast` with identity overhead), so the total output size is computed by chaining `evaluate_output_size` calls through the path. - -## Reduction Execution - -A `ResolvedPath` is a **plan**, not an executor. It provides variant and overhead information at each step, but callers dispatch the actual transformations themselves. 
- -### Dispatching steps - -Walk the `edges` array and dispatch based on `EdgeKind`: - -- **`EdgeKind::Reduction`** — call `ReduceTo::reduce_to()` on the current problem to produce a `ReductionResult`, then call `target_problem()` to get the next problem. -- **`EdgeKind::NaturalCast`** — call `CastToParent::cast_to_parent()` (for graph casts) or the equivalent weight cast. The problem data is preserved; only the type changes. - -### Extracting solutions - -After solving the final target problem, walk the chain **in reverse**: - -- At each `Reduction` edge, call `extract_solution(&target_solution)` on the corresponding `ReductionResult` to map the solution back to the source space. -- At each `NaturalCast` edge, the solution passes through unchanged (identity mapping). - -### Why concrete types (no type erasure) - -The library uses concrete types at each step rather than `dyn Problem`. This preserves full type safety and avoids boxing overhead, at the cost of requiring callers to know the types at each step. This design choice keeps the reduction pipeline zero-cost and makes the compiler verify correctness at each transformation boundary. -``` - -**Step 2: Verify mdbook builds** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` - -**Step 3: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: write design.md sections 7-8 (overhead evaluation + execution)" -``` - ---- - -### Task 7: Rewrite sections 9-11 (Solvers + JSON + Contributing) - -**Files:** -- Modify: `docs/src/design.md` — replace the old "Solvers", "JSON Serialization", and "Contributing" sections (lines 237-252 in the original). - -**Step 1: Write the Solvers, JSON Serialization, and Contributing sections** - -Place after Reduction Execution. 
- -```markdown -## Solvers - -Solvers implement the `Solver` trait: - -```rust -pub trait Solver { - fn find_best(&self, problem: &P) -> Option>; - fn find_satisfying>(&self, problem: &P) -> Option>; -} -``` - -### BruteForce - -Enumerates every configuration in the space defined by `dims()`. Suitable for small instances (<20 variables). In addition to the `Solver` trait methods, provides: - -- `find_all_best(problem)` — returns all tied-optimal configurations. -- `find_all_satisfying(problem)` — returns all satisfying configurations. - -Primarily used for **testing and verification** of reductions via closed-loop tests. - -### ILPSolver - -Feature-gated behind `ilp`. Uses the HiGHS solver via the `good_lp` crate. Additionally provides `solve_reduced()` for problems that implement `ReduceTo` — it reduces, solves the ILP, and extracts the solution in one call. - -## JSON Serialization - -All problem types support JSON serialization via serde: - -```rust -use problemreductions::io::{to_json, from_json}; - -let json = to_json(&problem)?; -let restored: MaximumIndependentSet = from_json(&json)?; -``` - -**Exported JSON files:** -- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges -- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type - -Regenerate exports: - -```bash -cargo run --example export_graph # docs/src/reductions/reduction_graph.json (default) -cargo run --example export_graph -- output.json # custom output path -cargo run --example export_schemas # docs/src/reductions/problem_schemas.json -``` - -## Contributing - -See [Call for Contributions](./introduction.md#call-for-contributions) for the recommended issue-based workflow (no coding required). 
-``` - -**Step 2: Verify mdbook builds** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` - -**Step 3: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: rewrite design.md sections 9-11 (solvers + JSON + contributing)" -``` - ---- - -### Task 8: Update internal anchor links in module table - -**Files:** -- Modify: `docs/src/design.md` — the Module Overview table links. - -**Step 1: Verify all anchor links resolve correctly** - -Check that the `#problem-model`, `#reduction-rules`, `#reduction-graph`, `#solvers`, `#variant-system` anchors match the actual section headers. mdBook generates anchors from headers by lowercasing and replacing spaces with hyphens. - -Expected mappings: -- `## Problem Model` → `#problem-model` -- `## Variant System` → `#variant-system` -- `## Reduction Rules` → `#reduction-rules` -- `## Reduction Graph` → `#reduction-graph` -- `## Solvers` → `#solvers` - -**Step 2: Fix any broken anchors** - -If needed, update the table links in the Module Overview section. - -**Step 3: Final mdbook build** - -Run: `cd /Users/liujinguo/rcode/problemreductions && mdbook build` - -**Step 4: Commit** - -```bash -git add docs/src/design.md -git commit -m "docs: fix internal anchor links in design.md" -``` diff --git a/docs/plans/2026-02-15-issue70-refactoring-design.md b/docs/plans/2026-02-15-issue70-refactoring-design.md deleted file mode 100644 index de4afad03..000000000 --- a/docs/plans/2026-02-15-issue70-refactoring-design.md +++ /dev/null @@ -1,137 +0,0 @@ -# Issue #70: KISS and DRY Refactoring Design - -**Date:** 2026-02-15 -**Issue:** [#70](https://github.com/GiggleLiu/problemreductions/issues/70) - -## Scope - -Three high-impact items from the issue, selected by priority: - -1. **DRY #1** — Trim vertex-weighted graph problem API -2. **KISS #4** — Extract testable functions from `to_json()` -3. 
**DRY #2 (expanded)** — Implement real `PlanarGraph` and `BipartiteGraph` - -## Item 1: Trim Vertex-Weighted Graph Problem API - -### Problem - -Five vertex-weighted graph problems share ~65 lines of identical delegation methods each. These convenience methods (`num_vertices()`, `num_edges()`, `edges()`, etc.) duplicate `Graph` trait methods already available via `problem.graph()`. - -**Affected files:** -- `src/models/graph/maximum_independent_set.rs` -- `src/models/graph/minimum_vertex_cover.rs` -- `src/models/graph/maximum_clique.rs` -- `src/models/graph/maximal_is.rs` -- `src/models/graph/minimum_dominating_set.rs` - -### Design - -**Remove these methods from all 5 problems:** -- `num_vertices()` — callers use `problem.graph().num_vertices()` -- `num_edges()` — callers use `problem.graph().num_edges()` -- `edges()` — callers use `problem.graph().edges()` -- `has_edge(u, v)` — callers use `problem.graph().has_edge(u, v)` -- `set_weights()` — 0 external call sites -- `from_graph_unit_weights()` — 0 external call sites -- `weights()` (the cloning version) — replaced by renaming `weights_ref()` - -**Rename:** -- `weights_ref() -> &Vec` becomes `weights() -> &[W]` - -**Keep:** -- `graph() -> &G` -- `weights() -> &[W]` (the renamed borrow version) -- `is_weighted() -> bool` -- `new(num_vertices, edges)` — 14 call sites -- `with_weights(num_vertices, edges, weights)` — 25 call sites -- `from_graph(graph, weights)` — 3 call sites - -### Call site migration - -| Old call | New call | Sites | -|----------|----------|-------| -| `problem.num_vertices()` | `problem.graph().num_vertices()` | ~49 | -| `problem.num_edges()` | `problem.graph().num_edges()` | ~36 | -| `problem.edges()` | `problem.graph().edges()` | ~29 | -| `problem.has_edge(u, v)` | `problem.graph().has_edge(u, v)` | 0 | -| `problem.weights_ref()` | `problem.weights()` | ~12 | -| `problem.weights()` (clone) | `problem.weights().to_vec()` | ~8 | - -## Item 2: Extract Testable Functions from `to_json()` - -### 
Problem - -`ReductionGraph::to_json()` (`src/rules/graph.rs`, ~194 lines) is a monolith doing 5+ distinct things. Complex logic is embedded inline and untestable in isolation. - -### Design - -Extract three pure, testable utility functions while keeping `to_json()` as the orchestrator: - -1. **`is_natural_edge(variant_a, variant_b, hierarchy) -> Option`** - Given two variant maps for the same problem name, determine if one is a subtype of the other. Core logic from the 65-line natural edge generation loop. Pure function. - -2. **`classify_problem_category(module_path: &str) -> &str`** - Map module path to category: `"graph"`, `"sat"`, `"set"`, or `"optimization"`. Currently inline in node-building phase. - -3. **`filter_redundant_base_nodes(node_set) -> filtered_set`** - Remove base nodes (empty variant) when a variant-specific sibling exists. ~15 lines of inline logic. - -Each function gets its own unit test. `to_json()` calls these helpers but retains the orchestration flow. - -## Item 3: Implement PlanarGraph and BipartiteGraph - -### Problem - -`PlanarGraph` and `BipartiteGraph` are currently ZST markers with no data or graph behavior. They manually implement `VariantParam` (12 lines each) instead of using `impl_variant_param!` because they have no cast closure. - -### Design - -Replace the ZST markers with real graph types. - -#### PlanarGraph — Validated wrapper - -```rust -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct PlanarGraph { - inner: SimpleGraph, -} -``` - -- **Constructor:** `PlanarGraph::new(num_vertices, edges)` — validates planarity via `|E| <= 3|V| - 6` (necessary condition). Panics on non-planar input. -- **Graph trait:** All methods delegate to `inner`. 
-- **Variant:** `impl_variant_param!(PlanarGraph, "graph", parent: SimpleGraph, cast: |g| g.inner.clone())` - -#### BipartiteGraph — Standard bipartite representation - -```rust -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct BipartiteGraph { - left_size: usize, - right_size: usize, - edges: Vec<(usize, usize)>, // (u, v) with u in [0, left_size), v in [0, right_size) -} -``` - -- **Constructor:** `BipartiteGraph::new(left_size, right_size, edges)` — validates that edges are within bounds. Edges use bipartite-local coordinates. -- **Graph trait:** Maps to unified vertex space: left vertices `0..left_size`, right vertices `left_size..left_size+right_size`. `edges()` returns `(u, left_size + v)` for each stored `(u, v)`. -- **Accessors:** `left_size()`, `right_size()`, `left_edges()` (local coords). -- **Variant:** `impl_variant_param!(BipartiteGraph, "graph", parent: SimpleGraph, cast: |g| SimpleGraph::new(g.num_vertices(), g.edges()))` - -### Follow-up issue - -File a separate issue for full data structure implementations: -- PlanarGraph: half-edge (DCEL) data structure for proper planar embedding -- BipartiteGraph: additional bipartite-specific algorithms - -## Testing - -- **Item 1:** Update all ~114 call sites. Run `make test clippy` to verify nothing breaks. -- **Item 2:** Add unit tests for each extracted function. -- **Item 3:** Add tests for PlanarGraph (construction, planarity validation, graph trait) and BipartiteGraph (construction, edge mapping, partition accessors). 
- -## Non-goals - -- No macro extraction for constructor/trait boilerplate (accept remaining duplication as cost of explicitness) -- No changes to the cost function zoo (KISS #2) -- No changes to `find_shortest_path` (KISS #1) -- No full DCEL or bipartite algorithm implementation (deferred) diff --git a/docs/plans/2026-02-15-issue70-refactoring-impl.md b/docs/plans/2026-02-15-issue70-refactoring-impl.md deleted file mode 100644 index 0d97101ff..000000000 --- a/docs/plans/2026-02-15-issue70-refactoring-impl.md +++ /dev/null @@ -1,823 +0,0 @@ -# Issue #70 Refactoring Implementation Plan - -> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. - -**Goal:** Address top 3 high-impact KISS/DRY violations from issue #70: trim graph problem APIs, extract testable functions from `to_json()`, and implement real PlanarGraph/BipartiteGraph types. - -**Architecture:** Remove delegation methods from 5 graph problem structs so callers go through `.graph()` directly. Extract pure utility functions from the `to_json()` monolith for independent testability. Replace PlanarGraph/BipartiteGraph ZST markers with validated wrapper types. 
- -**Tech Stack:** Rust, petgraph (for SimpleGraph internals), serde, inventory crate - ---- - -## Task 1: Remove delegation methods from MaximumIndependentSet - -**Files:** -- Modify: `src/models/graph/maximum_independent_set.rs:100-148` (remove methods) -- Modify: `src/rules/maximumindependentset_qubo.rs` (update call sites) -- Modify: `src/rules/maximumindependentset_ilp.rs` (update call sites) -- Modify: `src/rules/maximumindependentset_maximumsetpacking.rs` (update call sites) -- Modify: `src/rules/maximumindependentset_gridgraph.rs` (update call sites) -- Modify: `src/rules/maximumindependentset_triangular.rs` (update call sites) -- Modify: `src/rules/sat_maximumindependentset.rs` (update call sites) -- Modify: `src/rules/minimumvertexcover_maximumindependentset.rs` (update MIS call sites) -- Test: `src/unit_tests/models/graph/maximum_independent_set.rs` - -**Step 1: Edit MaximumIndependentSet — remove methods and rename** - -In `src/models/graph/maximum_independent_set.rs`: -- Delete `from_graph_unit_weights()` (lines 100-107) -- Delete `num_vertices()` (lines 114-117) -- Delete `num_edges()` (lines 119-122) -- Delete `edges()` (lines 124-127) -- Delete `has_edge()` (lines 129-132) -- Delete `set_weights()` (lines 139-143) -- Delete `weights()` clone version (lines 145-148) -- Rename `weights_ref()` to `weights()`, change return type from `&Vec` to `&[W]` - -Internal code in the same file that uses `self.graph.num_vertices()` etc. should already work since it accesses the field directly. 
- -**Step 2: Update rule call sites** - -Replace in each rule file: -- `self.num_vertices()` → `self.graph().num_vertices()` -- `self.num_edges()` → `self.graph().num_edges()` -- `self.edges()` → `self.graph().edges()` -- `self.weights_ref()` → `self.weights()` -- `self.weights()` (where clone was intended) → `self.weights().to_vec()` - -Files to update: -- `src/rules/maximumindependentset_qubo.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` -- `src/rules/maximumindependentset_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` -- `src/rules/maximumindependentset_maximumsetpacking.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` -- `src/rules/maximumindependentset_gridgraph.rs`: `self.num_vertices()` → `self.graph().num_vertices()` -- `src/rules/maximumindependentset_triangular.rs`: `self.num_vertices()` → `self.graph().num_vertices()` -- `src/rules/sat_maximumindependentset.rs`: update any MIS delegation calls -- `src/rules/minimumvertexcover_maximumindependentset.rs`: uses `self.num_vertices()` and `self.weights_ref()` on MVC (handled in Task 2), but also constructs MIS - -**Step 3: Update test call sites** - -In `src/unit_tests/models/graph/maximum_independent_set.rs` and `src/unit_tests/graph_models.rs`: -- `problem.num_vertices()` → `problem.graph().num_vertices()` -- `problem.num_edges()` → `problem.graph().num_edges()` -- `problem.edges()` → `problem.graph().edges()` -- `problem.weights()` (cloning) → `problem.weights().to_vec()` -- `problem.weights_ref()` → `problem.weights()` - -**Step 4: Run tests** - -Run: `cargo test --all-features -- --include-ignored 2>&1 | head -50` -Expected: All tests pass for MaximumIndependentSet - -**Step 5: Commit** - -```bash -git add src/models/graph/maximum_independent_set.rs src/rules/maximumindependentset_*.rs src/rules/sat_maximumindependentset.rs 
src/rules/minimumvertexcover_maximumindependentset.rs src/unit_tests/ -git commit -m "refactor: trim MaximumIndependentSet API — remove delegation methods" -``` - ---- - -## Task 2: Remove delegation methods from MinimumVertexCover - -**Files:** -- Modify: `src/models/graph/minimum_vertex_cover.rs:84-143` (remove methods) -- Modify: `src/rules/minimumvertexcover_qubo.rs` -- Modify: `src/rules/minimumvertexcover_ilp.rs` -- Modify: `src/rules/minimumvertexcover_maximumindependentset.rs` -- Modify: `src/rules/minimumvertexcover_minimumsetcovering.rs` -- Test: `src/unit_tests/models/graph/minimum_vertex_cover.rs` - -**Step 1: Edit MinimumVertexCover — same removal pattern as Task 1** - -In `src/models/graph/minimum_vertex_cover.rs`: -- Delete `from_graph_unit_weights()`, `num_vertices()`, `num_edges()`, `edges()`, `has_edge()`, `set_weights()`, `weights()` (clone) -- Rename `weights_ref()` → `weights()` returning `&[W]` - -**Step 2: Update rule call sites** - -- `src/rules/minimumvertexcover_qubo.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` -- `src/rules/minimumvertexcover_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` -- `src/rules/minimumvertexcover_maximumindependentset.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` -- `src/rules/minimumvertexcover_minimumsetcovering.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights_ref()` → `self.weights()` - -**Step 3: Update test call sites** - -- `src/unit_tests/models/graph/minimum_vertex_cover.rs` -- `src/unit_tests/graph_models.rs` (MVC sections) -- `src/unit_tests/rules/minimumvertexcover_*.rs` - -**Step 4: Run tests** - -Run: `cargo test --all-features -- --include-ignored 2>&1 | head -50` - -**Step 5: Commit** - -```bash -git add src/models/graph/minimum_vertex_cover.rs src/rules/minimumvertexcover_*.rs src/unit_tests/ -git commit -m 
"refactor: trim MinimumVertexCover API — remove delegation methods" -``` - ---- - -## Task 3: Remove delegation methods from MaximumClique - -**Files:** -- Modify: `src/models/graph/maximum_clique.rs:101-148` -- Modify: `src/rules/maximumclique_ilp.rs` -- Test: `src/unit_tests/models/graph/maximum_clique.rs` - -**Step 1-5: Same pattern as Tasks 1-2** - -- Delete the same set of methods, rename `weights_ref()` → `weights()` -- Update `src/rules/maximumclique_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights()` → `self.weights().to_vec()` -- Update test files -- Run tests, commit - -```bash -git commit -m "refactor: trim MaximumClique API — remove delegation methods" -``` - ---- - -## Task 4: Remove delegation methods from MaximalIS - -**Files:** -- Modify: `src/models/graph/maximal_is.rs:86-133` -- Test: `src/unit_tests/models/graph/maximal_is.rs` - -**Step 1-5: Same pattern** - -Note: MaximalIS has no reduction rules that call delegation methods directly. Only tests need updating. 
- -```bash -git commit -m "refactor: trim MaximalIS API — remove delegation methods" -``` - ---- - -## Task 5: Remove delegation methods from MinimumDominatingSet - -**Files:** -- Modify: `src/models/graph/minimum_dominating_set.rs:84-143` -- Modify: `src/rules/minimumdominatingset_ilp.rs` -- Modify: `src/rules/sat_minimumdominatingset.rs` -- Test: `src/unit_tests/models/graph/minimum_dominating_set.rs` - -**Step 1-5: Same pattern** - -- Update `src/rules/minimumdominatingset_ilp.rs`: `self.num_vertices()` → `self.graph().num_vertices()`, `self.weights()` → `self.weights().to_vec()` -- Update `src/rules/sat_minimumdominatingset.rs`: update call sites -- Update tests -- Run tests, commit - -```bash -git commit -m "refactor: trim MinimumDominatingSet API — remove delegation methods" -``` - ---- - -## Task 6: Update remaining shared call sites - -**Files:** -- Modify: `src/rules/mod.rs` (the `impl_natural_reduction!` macro uses `.weights()`) -- Modify: `src/rules/spinglass_maxcut.rs` (uses `.num_vertices()` on SpinGlass/MaxCut — check if affected) -- Modify: `src/rules/coloring_qubo.rs`, `src/rules/coloring_ilp.rs` (uses `.num_vertices()` on KColoring) -- Modify: `src/rules/maximummatching_maximumsetpacking.rs` (uses `.weights()`, `.edges()`, `.num_edges()` on MaximumMatching) -- Modify: `src/rules/maximummatching_ilp.rs` (uses `.num_edges()`, `.weights()` on MaximumMatching) -- Modify: `src/rules/travelingsalesman_ilp.rs` (uses `.num_vertices()` on TravelingSalesman) -- Modify: remaining test files in `src/unit_tests/` - -**Step 1: Check which non-target problems also have delegation methods** - -The 5 problems above are not the only ones with these methods. Other graph problems (MaxCut, SpinGlass, KColoring, MaximumMatching, TravelingSalesman, MaximumSetPacking, MinimumSetCovering) may also have delegation methods. These are OUT OF SCOPE for this PR — only update call sites that break because they called methods on the 5 target problem types. 
- -**Step 2: Fix any remaining compilation errors** - -Run: `cargo check --all-features 2>&1` -Fix any remaining call sites that the compiler identifies. - -**Step 3: Run full test suite** - -Run: `cargo test --all-features -- --include-ignored` -Expected: All tests pass - -**Step 4: Run clippy** - -Run: `cargo clippy --all-features -- -D warnings` -Expected: No warnings - -**Step 5: Commit** - -```bash -git commit -m "refactor: update remaining call sites for trimmed graph problem APIs" -``` - ---- - -## Task 7: Extract `classify_problem_category` from `to_json()` - -**Files:** -- Modify: `src/rules/graph.rs` (extract function, update to_json) -- Test: `src/unit_tests/rules/graph.rs` (add unit test) - -**Step 1: Write the failing test** - -In the test file for graph.rs, add: - -```rust -#[test] -fn test_classify_problem_category() { - assert_eq!( - classify_problem_category("problemreductions::models::graph::maximum_independent_set"), - "graph" - ); - assert_eq!( - classify_problem_category("problemreductions::models::sat::satisfiability"), - "sat" - ); - assert_eq!( - classify_problem_category("problemreductions::models::set::maximum_set_packing"), - "set" - ); - assert_eq!( - classify_problem_category("problemreductions::models::optimization::qubo"), - "optimization" - ); - assert_eq!( - classify_problem_category("unknown::path"), - "other" - ); -} -``` - -**Step 2: Run test to verify it fails** - -Run: `cargo test --all-features test_classify_problem_category` -Expected: FAIL — function not found - -**Step 3: Extract the function** - -In `src/rules/graph.rs`, extract the existing inline logic (around the `category_from_module_path` helper) into a standalone `pub(crate) fn classify_problem_category(module_path: &str) -> &str`. Replace the inline usage in `to_json()` with a call to this function. - -```rust -/// Classify a problem's category from its module path. 
-/// Expected format: "problemreductions::models::::" -pub(crate) fn classify_problem_category(module_path: &str) -> &str { - let parts: Vec<&str> = module_path.split("::").collect(); - if parts.len() >= 3 { - // Return the segment after "models" - if let Some(pos) = parts.iter().position(|&p| p == "models") { - if pos + 1 < parts.len() { - return parts[pos + 1]; - } - } - } - "other" -} -``` - -**Step 4: Run test to verify it passes** - -Run: `cargo test --all-features test_classify_problem_category` -Expected: PASS - -**Step 5: Commit** - -```bash -git commit -m "refactor: extract classify_problem_category from to_json()" -``` - ---- - -## Task 8: Extract `filter_redundant_base_nodes` from `to_json()` - -**Files:** -- Modify: `src/rules/graph.rs` -- Test: `src/unit_tests/rules/graph.rs` - -**Step 1: Write the failing test** - -```rust -#[test] -fn test_filter_redundant_base_nodes() { - use std::collections::{BTreeMap, HashSet}; - - let mut node_set: HashSet<(String, BTreeMap)> = HashSet::new(); - - // Base node (empty variant) — should be removed because variant-specific sibling exists - node_set.insert(("MIS".to_string(), BTreeMap::new())); - - // Variant-specific node - let mut variant = BTreeMap::new(); - variant.insert("graph".to_string(), "GridGraph".to_string()); - node_set.insert(("MIS".to_string(), variant)); - - // Base node with no siblings — should be kept - node_set.insert(("QUBO".to_string(), BTreeMap::new())); - - filter_redundant_base_nodes(&mut node_set); - - assert_eq!(node_set.len(), 2); - assert!(!node_set.iter().any(|(name, v)| name == "MIS" && v.is_empty())); - assert!(node_set.iter().any(|(name, _)| name == "QUBO")); -} -``` - -**Step 2: Run test to verify it fails** - -**Step 3: Extract the function** - -```rust -/// Remove base nodes (empty variant) when a variant-specific sibling exists. 
-pub(crate) fn filter_redundant_base_nodes( - node_set: &mut HashSet<(String, BTreeMap)>, -) { - let names_with_variants: HashSet = node_set - .iter() - .filter(|(_, variant)| !variant.is_empty()) - .map(|(name, _)| name.clone()) - .collect(); - node_set.retain(|(name, variant)| !variant.is_empty() || !names_with_variants.contains(name)); -} -``` - -Replace the inline logic in `to_json()` with `filter_redundant_base_nodes(&mut node_set);`. - -**Step 4: Run test, verify pass** - -**Step 5: Commit** - -```bash -git commit -m "refactor: extract filter_redundant_base_nodes from to_json()" -``` - ---- - -## Task 9: Extract `is_natural_edge` from `to_json()` - -**Files:** -- Modify: `src/rules/graph.rs` -- Test: `src/unit_tests/rules/graph.rs` - -**Step 1: Write the failing test** - -```rust -#[test] -fn test_is_natural_edge() { - use std::collections::BTreeMap; - - let graph = ReductionGraph::new(); - - // Same variant — no edge - let mut a = BTreeMap::new(); - a.insert("graph".to_string(), "SimpleGraph".to_string()); - let b = a.clone(); - assert!(is_natural_edge(&a, &b, &graph).is_none()); - - // a is subtype of b — edge from a to b - let mut sub = BTreeMap::new(); - sub.insert("graph".to_string(), "GridGraph".to_string()); - let mut sup = BTreeMap::new(); - sup.insert("graph".to_string(), "SimpleGraph".to_string()); - // Direction depends on hierarchy — GridGraph is subtype of SimpleGraph - let result = is_natural_edge(&sub, &sup, &graph); - assert!(result.is_some()); -} -``` - -Note: The exact test depends on how the natural edge determination works in the existing code. Read the inline logic at lines 917-950 of `src/rules/graph.rs` carefully before writing the extraction. - -**Step 2: Extract the function** - -Extract the inner loop body from lines 917-950 into: -```rust -/// Determine if there is a natural (subtype) edge between two variant nodes. -/// Returns Some(...) with edge data if a→b is a valid natural edge, None otherwise. 
-pub(crate) fn is_natural_edge( - variant_a: &BTreeMap, - variant_b: &BTreeMap, - graph: &ReductionGraph, -) -> Option { - // ... extracted logic -} -``` - -Replace the inline logic in `to_json()` with a call to this function. - -**Step 3: Run tests** - -Run: `cargo test --all-features -- --include-ignored` - -**Step 4: Commit** - -```bash -git commit -m "refactor: extract is_natural_edge from to_json()" -``` - ---- - -## Task 10: Implement BipartiteGraph - -**Files:** -- Create: `src/topology/bipartite_graph.rs` -- Modify: `src/topology/mod.rs` (add module + export) -- Modify: `src/graph_types.rs` (remove ZST BipartiteGraph + manual VariantParam impl) -- Test: `src/unit_tests/topology/bipartite_graph.rs` - -**Step 1: Write the failing test** - -Create `src/unit_tests/topology/bipartite_graph.rs`: - -```rust -use crate::topology::{BipartiteGraph, Graph}; - -#[test] -fn test_bipartite_graph_basic() { - // K_{2,3}: left={0,1}, right={0,1,2}, all edges - let edges = vec![(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)]; - let g = BipartiteGraph::new(2, 3, edges); - - assert_eq!(g.num_vertices(), 5); - assert_eq!(g.num_edges(), 6); - assert_eq!(g.left_size(), 2); - assert_eq!(g.right_size(), 3); -} - -#[test] -fn test_bipartite_graph_edges_unified() { - // Left={0}, Right={0,1}, edges: (0,0), (0,1) - let g = BipartiteGraph::new(1, 2, vec![(0, 0), (0, 1)]); - let edges = g.edges(); - // Unified: left vertex 0, right vertices 1 and 2 - assert!(edges.contains(&(0, 1))); - assert!(edges.contains(&(0, 2))); - assert_eq!(edges.len(), 2); -} - -#[test] -fn test_bipartite_graph_has_edge() { - let g = BipartiteGraph::new(2, 2, vec![(0, 0), (1, 1)]); - // Unified: edges (0, 2) and (1, 3) - assert!(g.has_edge(0, 2)); - assert!(g.has_edge(1, 3)); - assert!(!g.has_edge(0, 1)); // both left — no edge - assert!(!g.has_edge(0, 3)); // not in edge list -} - -#[test] -fn test_bipartite_graph_neighbors() { - let g = BipartiteGraph::new(2, 2, vec![(0, 0), (0, 1), (1, 1)]); - // Unified: 
(0,2), (0,3), (1,3) - let mut n0 = g.neighbors(0); - n0.sort(); - assert_eq!(n0, vec![2, 3]); - - let mut n3 = g.neighbors(3); // right vertex 1 - n3.sort(); - assert_eq!(n3, vec![0, 1]); -} - -#[test] -fn test_bipartite_graph_left_edges() { - let edges = vec![(0, 0), (1, 1)]; - let g = BipartiteGraph::new(2, 2, edges.clone()); - assert_eq!(g.left_edges(), &edges); -} - -#[test] -#[should_panic] -fn test_bipartite_graph_invalid_left_index() { - BipartiteGraph::new(2, 2, vec![(2, 0)]); // left index out of bounds -} - -#[test] -#[should_panic] -fn test_bipartite_graph_invalid_right_index() { - BipartiteGraph::new(2, 2, vec![(0, 2)]); // right index out of bounds -} -``` - -Wire up test module: add `#[path]` reference in the appropriate unit test module file. - -**Step 2: Run tests to verify they fail** - -Run: `cargo test --all-features test_bipartite_graph` -Expected: FAIL — module not found - -**Step 3: Implement BipartiteGraph** - -Create `src/topology/bipartite_graph.rs`: - -```rust -use serde::{Deserialize, Serialize}; -use super::graph::{Graph, SimpleGraph}; - -/// Bipartite graph with explicit left/right partitions. -/// -/// Vertices are split into left (indices `0..left_size`) and right (`0..right_size`). -/// Edges connect left vertices to right vertices using bipartite-local coordinates. -/// The `Graph` trait maps to a unified vertex space where right vertices are offset by `left_size`. -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct BipartiteGraph { - left_size: usize, - right_size: usize, - /// Edges in bipartite-local coordinates: (u, v) with u < left_size, v < right_size. - edges: Vec<(usize, usize)>, -} - -impl BipartiteGraph { - /// Create a new bipartite graph. 
- /// - /// # Arguments - /// * `left_size` - Number of vertices in the left partition - /// * `right_size` - Number of vertices in the right partition - /// * `edges` - Edges in bipartite-local coordinates: (u, v) with u < left_size, v < right_size - /// - /// # Panics - /// Panics if any edge index is out of bounds. - pub fn new(left_size: usize, right_size: usize, edges: Vec<(usize, usize)>) -> Self { - for &(u, v) in &edges { - assert!( - u < left_size, - "left vertex {} out of bounds (left_size={})", - u, left_size - ); - assert!( - v < right_size, - "right vertex {} out of bounds (right_size={})", - v, right_size - ); - } - Self { left_size, right_size, edges } - } - - /// Number of left-partition vertices. - pub fn left_size(&self) -> usize { - self.left_size - } - - /// Number of right-partition vertices. - pub fn right_size(&self) -> usize { - self.right_size - } - - /// Edges in bipartite-local coordinates. - pub fn left_edges(&self) -> &[(usize, usize)] { - &self.edges - } -} - -impl Graph for BipartiteGraph { - const NAME: &'static str = "BipartiteGraph"; - - fn num_vertices(&self) -> usize { - self.left_size + self.right_size - } - - fn num_edges(&self) -> usize { - self.edges.len() - } - - fn edges(&self) -> Vec<(usize, usize)> { - self.edges - .iter() - .map(|&(u, v)| { - let a = u; - let b = self.left_size + v; - if a < b { (a, b) } else { (b, a) } - }) - .collect() - } - - fn has_edge(&self, u: usize, v: usize) -> bool { - let (u, v) = if u < v { (u, v) } else { (v, u) }; - // u must be left, v must be right (in unified space) - if u >= self.left_size || v < self.left_size { - return false; - } - let local_v = v - self.left_size; - self.edges.contains(&(u, local_v)) - } - - fn neighbors(&self, v: usize) -> Vec { - if v < self.left_size { - // Left vertex: find all right neighbors - self.edges - .iter() - .filter(|(u, _)| *u == v) - .map(|(_, rv)| self.left_size + rv) - .collect() - } else { - // Right vertex: find all left neighbors - let local_v = 
v - self.left_size; - self.edges - .iter() - .filter(|(_, rv)| *rv == local_v) - .map(|(u, _)| *u) - .collect() - } - } -} -``` - -**Step 4: Register with variant system** - -Add at the bottom of `src/topology/bipartite_graph.rs`: - -```rust -use crate::impl_variant_param; -impl_variant_param!(BipartiteGraph, "graph", parent: SimpleGraph, - cast: |g| SimpleGraph::new(g.num_vertices(), g.edges())); -``` - -**Step 5: Wire up module** - -In `src/topology/mod.rs`, add: -```rust -mod bipartite_graph; -pub use bipartite_graph::BipartiteGraph; -``` - -Remove the `BipartiteGraph` ZST and its manual `VariantParam` impl from `src/graph_types.rs` (lines 31-46). - -**Step 6: Run tests** - -Run: `cargo test --all-features test_bipartite_graph` -Expected: All pass - -**Step 7: Commit** - -```bash -git commit -m "feat: implement BipartiteGraph with standard bipartite representation" -``` - ---- - -## Task 11: Implement PlanarGraph - -**Files:** -- Create: `src/topology/planar_graph.rs` -- Modify: `src/topology/mod.rs` (add module + export) -- Modify: `src/graph_types.rs` (remove ZST PlanarGraph + manual VariantParam impl) -- Test: `src/unit_tests/topology/planar_graph.rs` - -**Step 1: Write the failing test** - -```rust -use crate::topology::{PlanarGraph, Graph}; - -#[test] -fn test_planar_graph_basic() { - // K4 is planar: 4 vertices, 6 edges, 6 <= 3*4 - 6 = 6 - let edges = vec![(0,1),(0,2),(0,3),(1,2),(1,3),(2,3)]; - let g = PlanarGraph::new(4, edges); - assert_eq!(g.num_vertices(), 4); - assert_eq!(g.num_edges(), 6); -} - -#[test] -fn test_planar_graph_delegates_to_inner() { - let g = PlanarGraph::new(3, vec![(0,1),(1,2)]); - assert!(g.has_edge(0, 1)); - assert!(!g.has_edge(0, 2)); - let mut n1 = g.neighbors(1); - n1.sort(); - assert_eq!(n1, vec![0, 2]); -} - -#[test] -#[should_panic] -fn test_planar_graph_rejects_k5() { - // K5 has 10 edges, but 3*5 - 6 = 9. Fails necessary condition. 
- let mut edges = Vec::new(); - for i in 0..5 { - for j in (i+1)..5 { - edges.push((i, j)); - } - } - PlanarGraph::new(5, edges); -} - -#[test] -fn test_planar_graph_empty() { - let g = PlanarGraph::new(3, vec![]); - assert_eq!(g.num_vertices(), 3); - assert_eq!(g.num_edges(), 0); -} - -#[test] -fn test_planar_graph_tree() { - // Trees are always planar - let g = PlanarGraph::new(4, vec![(0,1),(1,2),(2,3)]); - assert_eq!(g.num_edges(), 3); -} -``` - -**Step 2: Implement PlanarGraph** - -Create `src/topology/planar_graph.rs`: - -```rust -use serde::{Deserialize, Serialize}; -use super::graph::{Graph, SimpleGraph}; - -/// Planar graph — validated wrapper around SimpleGraph. -/// -/// Construction validates the necessary planarity condition: |E| <= 3|V| - 6 for |V| >= 3. -/// This is a necessary but not sufficient condition. A follow-up issue will add -/// full planarity testing and half-edge (DCEL) representation. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct PlanarGraph { - inner: SimpleGraph, -} - -impl PlanarGraph { - /// Create a new planar graph. - /// - /// # Panics - /// Panics if the graph violates the necessary planarity condition |E| <= 3|V| - 6. - pub fn new(num_vertices: usize, edges: Vec<(usize, usize)>) -> Self { - let inner = SimpleGraph::new(num_vertices, edges); - if num_vertices >= 3 { - let max_edges = 3 * num_vertices - 6; - assert!( - inner.num_edges() <= max_edges, - "graph has {} edges but a planar graph on {} vertices can have at most {} edges", - inner.num_edges(), num_vertices, max_edges - ); - } - Self { inner } - } - - /// Get a reference to the underlying SimpleGraph. 
- pub fn inner(&self) -> &SimpleGraph { - &self.inner - } -} - -impl Graph for PlanarGraph { - const NAME: &'static str = "PlanarGraph"; - - fn num_vertices(&self) -> usize { self.inner.num_vertices() } - fn num_edges(&self) -> usize { self.inner.num_edges() } - fn edges(&self) -> Vec<(usize, usize)> { self.inner.edges() } - fn has_edge(&self, u: usize, v: usize) -> bool { self.inner.has_edge(u, v) } - fn neighbors(&self, v: usize) -> Vec { self.inner.neighbors(v) } -} - -use crate::impl_variant_param; -impl_variant_param!(PlanarGraph, "graph", parent: SimpleGraph, - cast: |g| g.inner.clone()); -``` - -**Step 3: Wire up module and remove ZST** - -In `src/topology/mod.rs`: -```rust -mod planar_graph; -pub use planar_graph::PlanarGraph; -``` - -Remove PlanarGraph ZST and manual VariantParam impl from `src/graph_types.rs` (lines 10-25). - -**Step 4: Run tests** - -Run: `cargo test --all-features -- --include-ignored` - -**Step 5: Commit** - -```bash -git commit -m "feat: implement PlanarGraph as validated SimpleGraph wrapper" -``` - ---- - -## Task 12: Final verification and cleanup - -**Step 1: Run full test suite** - -Run: `cargo test --all-features -- --include-ignored` -Expected: All tests pass - -**Step 2: Run clippy** - -Run: `cargo clippy --all-features -- -D warnings` -Expected: No warnings - -**Step 3: Run format check** - -Run: `cargo fmt -- --check` -Expected: No formatting issues - -**Step 4: File follow-up issue** - -Create a GitHub issue for full data structure implementations: -- PlanarGraph: half-edge (DCEL) data structure -- BipartiteGraph: bipartite-specific algorithms - -**Step 5: Final commit if any cleanup needed** - -```bash -git commit -m "chore: final cleanup for issue #70 refactoring" -``` From 913521829221778b80cf00b4f31d558c6e45cc78 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 15 Feb 2026 23:07:41 +0800 Subject: [PATCH 15/15] fix: use correct module path in test_classify_problem_category The test used "models::sat" but the actual 
module is "models::satisfiability". Co-Authored-By: Claude Opus 4.6 --- src/unit_tests/rules/graph.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/unit_tests/rules/graph.rs b/src/unit_tests/rules/graph.rs index 0e51f0f96..1819b9af8 100644 --- a/src/unit_tests/rules/graph.rs +++ b/src/unit_tests/rules/graph.rs @@ -1193,8 +1193,8 @@ fn test_classify_problem_category() { "graph" ); assert_eq!( - classify_problem_category("problemreductions::models::sat::satisfiability"), - "sat" + classify_problem_category("problemreductions::models::satisfiability::satisfiability"), + "satisfiability" ); assert_eq!( classify_problem_category("problemreductions::models::set::maximum_set_packing"),