From 1f7890c54975edcb8e03aff0724b3fb3dbef2d63 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 16:24:09 +0800 Subject: [PATCH 01/14] docs: replace Rust code with JSON schema tables in reductions.typ MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 1. Replace render-struct (Rust code blocks) with render-schema (JSON field tables from problem_schemas.json) — more useful for interop users. 2. Classify reduction references as "Reduces to" / "Reduces from" instead of flat "Implemented reductions" lists. 3. Remove all Rust code blocks and "See ... .rs" links from the paper. Co-Authored-By: Claude Opus 4.6 --- docs/paper/reductions.typ | 328 ++++++-------------------------------- 1 file changed, 53 insertions(+), 275 deletions(-) diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index b4b4b794e..d682246bb 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -23,13 +23,17 @@ #let problem-schemas = json("problem_schemas.json") -// Render a problem's Rust struct from the JSON schema -#let render-struct(name) = { +// Render a problem's JSON schema as a field table +#let render-schema(name) = { let schema = problem-schemas.find(s => s.name == name) if schema == none { return } - let s = schema - let fields = s.fields.map(f => " " + f.name + ": " + f.type_name + ",").join("\n") - raw("pub struct " + name + " {\n" + fields + "\n}", lang: "rust", block: true) + table( + columns: (auto, auto, 1fr), + inset: 4pt, + align: left, + table.header([*Field*], [*Type*], [*Description*]), + ..schema.fields.map(f => (raw(f.name), raw(f.type_name), f.description)).flatten() + ) } // Extract primary variable count from an instance dict. 
@@ -113,67 +117,60 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Independent Set (IS)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ maximizing $sum_(v in S) w(v)$ such that no two vertices in $S$ are adjacent: $forall u, v in S: (u, v) in.not E$. - _Implemented reductions:_ IS→VC (@thm:is-to-vc), IS→SetPacking (@thm:is-to-setpacking), IS→QUBO (@thm:is-to-qubo), IS→ILP (@thm:is-to-ilp), IS→GridGraph IS (@thm:is-to-gridgraph), VC→IS (@thm:is-to-vc), SAT→IS (@thm:sat-to-is). - - #render-struct("IndependentSet") + _Reduces to:_ VC (@thm:is-to-vc), SetPacking (@thm:is-to-setpacking), QUBO (@thm:is-to-qubo), ILP (@thm:is-to-ilp), GridGraph IS (@thm:is-to-gridgraph). \ + _Reduces from:_ VC (@thm:is-to-vc), SAT (@thm:sat-to-is). - Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices. + #render-schema("IndependentSet") ] #definition("Vertex Cover (VC)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ such that every edge has at least one endpoint in $S$: $forall (u, v) in E: u in S or v in S$. - _Implemented reductions:_ VC→IS (@thm:is-to-vc), VC→SetCovering (@thm:vc-to-setcovering), VC→QUBO (@thm:vc-to-qubo), VC→ILP (@thm:vc-to-ilp), IS→VC (@thm:is-to-vc). + _Reduces to:_ IS (@thm:is-to-vc), SetCovering (@thm:vc-to-setcovering), QUBO (@thm:vc-to-qubo), ILP (@thm:vc-to-ilp). \ + _Reduces from:_ IS (@thm:is-to-vc). - #render-struct("VertexCovering") - - Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices. 
+ #render-schema("VertexCovering") ] #definition("Max-Cut")[ Given $G = (V, E)$ with weights $w: E -> RR$, find partition $(S, overline(S))$ maximizing $sum_((u,v) in E: u in S, v in overline(S)) w(u, v)$. - _Implemented reductions:_ MaxCut→SpinGlass (@thm:spinglass-maxcut), SpinGlass→MaxCut (@thm:spinglass-maxcut). - - #render-struct("MaxCut") + _Reduces to:_ SpinGlass (@thm:spinglass-maxcut). \ + _Reduces from:_ SpinGlass (@thm:spinglass-maxcut). - Where `graph` represents $G = (V, E)$, and `edge_weights` stores weights $w: E -> RR$ indexed by edge index. The solution is a partition $(S, overline(S))$ represented as a binary assignment `Vec` where 0/1 indicates partition membership. + #render-schema("MaxCut") ] #definition("Graph Coloring")[ Given $G = (V, E)$ and $k$ colors, find $c: V -> {1, ..., k}$ minimizing $|{(u, v) in E : c(u) = c(v)}|$. - _Implemented reductions:_ Coloring→ILP (@thm:coloring-to-ilp), Coloring→QUBO (@thm:coloring-to-qubo), SAT→Coloring (@thm:sat-to-coloring). - - #render-struct("KColoring") + _Reduces to:_ ILP (@thm:coloring-to-ilp), QUBO (@thm:coloring-to-qubo). \ + _Reduces from:_ SAT (@thm:sat-to-coloring). - Where $k$ is a const generic parameter (not a struct field), and `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$. The solution is a color assignment $c: V -> {0, ..., k-1}$ represented as a `Vec` indexed by vertex. + #render-schema("KColoring") ] #definition("Dominating Set")[ Given $G = (V, E)$ with weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ s.t. $forall v in V: v in S or exists u in S: (u, v) in E$. - _Implemented reductions:_ DominatingSet→ILP (@thm:dominatingset-to-ilp), SAT→DominatingSet (@thm:sat-to-dominatingset). + _Reduces to:_ ILP (@thm:dominatingset-to-ilp). \ + _Reduces from:_ SAT (@thm:sat-to-dominatingset). 
- #render-struct("DominatingSet") - - Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices. + #render-schema("DominatingSet") ] #definition("Matching")[ Given $G = (V, E)$ with weights $w: E -> RR$, find $M subset.eq E$ maximizing $sum_(e in M) w(e)$ s.t. $forall e_1, e_2 in M: e_1 inter e_2 = emptyset$. - _Implemented reductions:_ Matching→SetPacking (@thm:matching-to-setpacking), Matching→ILP (@thm:matching-to-ilp). - - #render-struct("Matching") + _Reduces to:_ SetPacking (@thm:matching-to-setpacking), ILP (@thm:matching-to-ilp). - Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `edge_weights` stores weights $w: E -> RR$ indexed by edge index. The solution is a subset $M subset.eq E$ represented as a `Vec` of edge indices. + #render-schema("Matching") ] #definition("Clique")[ Given a graph $G = (V, E)$ and an integer $k$, the *Clique* problem asks whether there exists a subset $K subset.eq V$ of size at least $k$ such that every pair of distinct vertices in $K$ is adjacent, i.e., $(u, v) in E$ for all distinct $u, v in K$. - _Implemented reductions:_ Clique→ILP (@thm:clique-to-ilp). + _Reduces to:_ ILP (@thm:clique-to-ilp). ] #definition("Unit Disk Graph (Grid Graph)")[ @@ -185,21 +182,19 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Set Packing")[ Given universe $U$, collection $cal(S) = {S_1, ..., S_m}$ with $S_i subset.eq U$, weights $w: cal(S) -> RR$, find $cal(P) subset.eq cal(S)$ maximizing $sum_(S in cal(P)) w(S)$ s.t. $forall S_i, S_j in cal(P): S_i inter S_j = emptyset$. - _Implemented reductions:_ IS→SetPacking (@thm:is-to-setpacking), SetPacking→QUBO (@thm:setpacking-to-qubo), SetPacking→ILP (@thm:setpacking-to-ilp), Matching→SetPacking (@thm:matching-to-setpacking). 
+ _Reduces to:_ QUBO (@thm:setpacking-to-qubo), ILP (@thm:setpacking-to-ilp). \ + _Reduces from:_ IS (@thm:is-to-setpacking), Matching (@thm:matching-to-setpacking). - #render-struct("SetPacking") - - Where `sets` represents the collection $cal(S) = {S_1, ..., S_m}$ where each `Vec` contains universe element indices, and `weights` stores set weights $w: cal(S) -> RR$ indexed by set index. The solution is a subset $cal(P) subset.eq cal(S)$ represented as a `Vec` of set indices. + #render-schema("SetPacking") ] #definition("Set Covering")[ Given universe $U$, collection $cal(S)$ with weights $w: cal(S) -> RR$, find $cal(C) subset.eq cal(S)$ minimizing $sum_(S in cal(C)) w(S)$ s.t. $union.big_(S in cal(C)) S = U$. - _Implemented reductions:_ SetCovering→ILP (@thm:setcovering-to-ilp), VC→SetCovering (@thm:vc-to-setcovering). - - #render-struct("SetCovering") + _Reduces to:_ ILP (@thm:setcovering-to-ilp). \ + _Reduces from:_ VC (@thm:vc-to-setcovering). - Where `universe_size` is $|U|$, `sets` represents the collection $cal(S)$ where each `Vec` contains universe element indices, and `weights` stores set weights $w: cal(S) -> RR$ indexed by set index. The solution is a subset $cal(C) subset.eq cal(S)$ represented as a `Vec` of set indices. + #render-schema("SetCovering") ] == Optimization Problems @@ -207,31 +202,28 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Spin Glass (Ising Model)")[ Given $n$ spin variables $s_i in {-1, +1}$, pairwise couplings $J_(i j) in RR$, and external fields $h_i in RR$, minimize the Hamiltonian (energy function): $H(bold(s)) = -sum_((i,j)) J_(i j) s_i s_j - sum_i h_i s_i$. - _Implemented reductions:_ SpinGlass→MaxCut (@thm:spinglass-maxcut), SpinGlass→QUBO (@thm:spinglass-qubo), Circuit→SpinGlass (@thm:circuit-to-spinglass), MaxCut→SpinGlass (@thm:spinglass-maxcut), QUBO→SpinGlass (@thm:spinglass-qubo). 
- - #render-struct("SpinGlass") + _Reduces to:_ MaxCut (@thm:spinglass-maxcut), QUBO (@thm:spinglass-qubo). \ + _Reduces from:_ CircuitSAT (@thm:circuit-to-spinglass), MaxCut (@thm:spinglass-maxcut), QUBO (@thm:spinglass-qubo). - Where `graph` encodes the interaction topology, `couplings` stores pairwise couplings $J_(i j)$ as `Vec` in `graph.edges()` order, and `fields` stores external fields $h_i$ as `Vec` indexed by spin. The solution is a spin assignment $bold(s) in {-1, +1}^n$ encoded as `Vec` where 0 maps to $s=-1$ and 1 maps to $s=+1$. + #render-schema("SpinGlass") ] #definition("QUBO")[ Given $n$ binary variables $x_i in {0, 1}$, upper-triangular matrix $Q in RR^(n times n)$, minimize $f(bold(x)) = sum_(i=1)^n Q_(i i) x_i + sum_(i < j) Q_(i j) x_i x_j$ (using $x_i^2 = x_i$ for binary variables). - _Implemented reductions:_ QUBO→SpinGlass (@thm:spinglass-qubo), IS→QUBO (@thm:is-to-qubo), VC→QUBO (@thm:vc-to-qubo), Coloring→QUBO (@thm:coloring-to-qubo), SetPacking→QUBO (@thm:setpacking-to-qubo), kSAT→QUBO (@thm:ksat-to-qubo), ILP→QUBO (@thm:ilp-to-qubo), SpinGlass→QUBO (@thm:spinglass-qubo). + _Reduces to:_ SpinGlass (@thm:spinglass-qubo). \ + _Reduces from:_ IS (@thm:is-to-qubo), VC (@thm:vc-to-qubo), KColoring (@thm:coloring-to-qubo), SetPacking (@thm:setpacking-to-qubo), $k$-SAT (@thm:ksat-to-qubo), ILP (@thm:ilp-to-qubo), SpinGlass (@thm:spinglass-qubo). - #render-struct("QUBO") - - Where `num_vars` is $n$, and `matrix` stores the upper-triangular $Q in RR^(n times n)$ as `Vec>` where `matrix[i][j]` ($i <= j$) stores $Q_(i j)$. The solution is a binary assignment $bold(x) in {0, 1}^n$ represented as `Vec`. + #render-schema("QUBO") ] #definition("Integer Linear Programming (ILP)")[ Given $n$ integer variables $bold(x) in ZZ^n$, constraint matrix $A in RR^(m times n)$, bounds $bold(b) in RR^m$, and objective $bold(c) in RR^n$, find $bold(x)$ minimizing $bold(c)^top bold(x)$ subject to $A bold(x) <= bold(b)$ and variable bounds. 
- _Implemented reductions:_ ILP→QUBO (@thm:ilp-to-qubo), Coloring→ILP (@thm:coloring-to-ilp), Factoring→ILP (@thm:factoring-to-ilp), IS→ILP (@thm:is-to-ilp), VC→ILP (@thm:vc-to-ilp), Matching→ILP (@thm:matching-to-ilp), SetPacking→ILP (@thm:setpacking-to-ilp), SetCovering→ILP (@thm:setcovering-to-ilp), DominatingSet→ILP (@thm:dominatingset-to-ilp), Clique→ILP (@thm:clique-to-ilp). - - #render-struct("ILP") + _Reduces to:_ QUBO (@thm:ilp-to-qubo). \ + _Reduces from:_ KColoring (@thm:coloring-to-ilp), Factoring (@thm:factoring-to-ilp), IS (@thm:is-to-ilp), VC (@thm:vc-to-ilp), Matching (@thm:matching-to-ilp), SetPacking (@thm:setpacking-to-ilp), SetCovering (@thm:setcovering-to-ilp), DominatingSet (@thm:dominatingset-to-ilp), Clique (@thm:clique-to-ilp). - Where `num_vars` is $n$, `bounds` stores per-variable bounds $x_i in [l_i, u_i]$ as `Vec`, `constraints` encodes $A bold(x) <= bold(b)$ as `Vec`, `objective` is the sparse objective $bold(c)$ as `Vec<(usize, f64)>`, and `sense` specifies maximize or minimize. The solution is $bold(x) in ZZ^n$ represented as `Vec`. + #render-schema("ILP") ] == Satisfiability Problems @@ -239,41 +231,36 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("SAT")[ Given a CNF formula $phi = and.big_(j=1)^m C_j$ with $m$ clauses over $n$ Boolean variables, where each clause $C_j = or.big_i ell_(j i)$ is a disjunction of literals, find an assignment $bold(x) in {0, 1}^n$ such that $phi(bold(x)) = 1$ (all clauses satisfied). - _Implemented reductions:_ SAT→IS (@thm:sat-to-is), SAT→Coloring (@thm:sat-to-coloring), SAT→DominatingSet (@thm:sat-to-dominatingset), SAT→kSAT (@thm:sat-ksat), kSAT→SAT (@thm:sat-ksat). + _Reduces to:_ IS (@thm:sat-to-is), KColoring (@thm:sat-to-coloring), DominatingSet (@thm:sat-to-dominatingset), $k$-SAT (@thm:sat-ksat). \ + _Reduces from:_ $k$-SAT (@thm:sat-ksat). 
- #render-struct("Satisfiability") - - Where `num_vars` is $n$, `clauses` stores CNF clauses $C_j$ as `Vec`, and `weights` stores clause weights for MAX-SAT as `Vec`. Each `CNFClause` has `literals: Vec` where $+i$ denotes $x_i$ and $-i$ denotes $not x_i$ (1-indexed). The solution is an assignment $bold(x) in {0, 1}^n$ represented as `Vec`. + #render-schema("Satisfiability") ] #definition([$k$-SAT])[ SAT with exactly $k$ literals per clause. - _Implemented reductions:_ kSAT→SAT (@thm:sat-ksat), kSAT→QUBO (@thm:ksat-to-qubo), SAT→kSAT (@thm:sat-ksat). - - #render-struct("KSatisfiability") + _Reduces to:_ SAT (@thm:sat-ksat), QUBO (@thm:ksat-to-qubo). \ + _Reduces from:_ SAT (@thm:sat-ksat). - Where `num_vars` is $n$, `clauses` stores clauses with exactly $k$ literals per clause as `Vec`, and `weights` stores clause weights as `Vec`. The solution is an assignment $bold(x) in {0, 1}^n$ represented as `Vec`. + #render-schema("KSatisfiability") ] #definition("Circuit-SAT")[ Given a Boolean circuit $C$ composed of logic gates (AND, OR, NOT, XOR) with $n$ input variables, find an input assignment $bold(x) in {0,1}^n$ such that $C(bold(x)) = 1$. - _Implemented reductions:_ Circuit→SpinGlass (@thm:circuit-to-spinglass), Factoring→Circuit (@thm:factoring-to-circuit). + _Reduces to:_ SpinGlass (@thm:circuit-to-spinglass). \ + _Reduces from:_ Factoring (@thm:factoring-to-circuit). - #render-struct("CircuitSAT") - - Where `circuit` is the Boolean circuit of logic gates (AND, OR, NOT, XOR), `variables` stores input variable names as `Vec`, and `weights` stores assignment weights as `Vec`. The solution is an input assignment $bold(x) in {0,1}^n$ represented as `Vec`. + #render-schema("CircuitSAT") ] #definition("Factoring")[ Given a composite integer $N$ and bit sizes $m, n$, find integers $p in [2, 2^m - 1]$ and $q in [2, 2^n - 1]$ such that $p times q = N$. Here $p$ has $m$ bits and $q$ has $n$ bits. 
- _Implemented reductions:_ Factoring→Circuit (@thm:factoring-to-circuit), Factoring→ILP (@thm:factoring-to-ilp). - - #render-struct("Factoring") + _Reduces to:_ CircuitSAT (@thm:factoring-to-circuit), ILP (@thm:factoring-to-ilp). - Where `m` is the number of bits for the first factor $p$, `n` is the number of bits for the second factor $q$, and `target` is the composite $N$ to factor. The solution is bit assignments for $p$ and $q$ represented as `Vec`. + #render-schema("Factoring") ] = Reductions @@ -288,20 +275,6 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| ($arrow.r.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint lies in $S$, so $V backslash S$ covers all edges. ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. _Variable mapping:_ Given IS instance $(G, w)$, create VC instance $(G, w)$ with identical graph and weights. Solution extraction: for VC solution $C$, return $S = V backslash C$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. ] -```rust -// Minimal example: IS -> VC -> extract solution -let is_problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); -let result = ReduceTo::>::reduce_to(&is_problem); -let vc_problem = result.target_problem(); - -let solver = BruteForce::new(); -let vc_solutions = solver.find_best(vc_problem); -let is_solution = result.extract_solution(&vc_solutions[0]); -assert!(is_problem.solution_size(&is_solution).is_valid); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_vc.rs")[`reduction_is_to_vc.rs`]. 
- #let is_vc = load-example("is_to_vc") #let is_vc_r = load-results("is_to_vc") #let is_vc_sol = is_vc_r.solutions.at(0) @@ -319,20 +292,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples Independence implies disjoint incident edge sets; conversely, disjoint edge sets imply no shared edges. _Variable mapping:_ Universe $U = E$ (edges), sets $S_v = {e in E : v in e}$ (edges incident to vertex $v$), weights $w(S_v) = w(v)$. Solution extraction: for packing ${S_v : v in P}$, return IS $= P$ (the vertices whose sets were packed). ] -```rust -// Minimal example: IS -> SetPacking -> extract solution -let is_problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); -let result = ReduceTo::>::reduce_to(&is_problem); -let sp_problem = result.target_problem(); - -let solver = BruteForce::new(); -let sp_solutions = solver.find_best(sp_problem); -let is_solution = result.extract_solution(&sp_solutions[0]); -assert!(is_problem.solution_size(&is_solution).is_valid); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_setpacking.rs")[`reduction_is_to_setpacking.rs`]. - #theorem[ *(VC $arrow.r$ Set Covering)* Construct $U = {0, ..., |E|-1}$, $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Then $C$ is a cover iff ${S_v : v in C}$ covers $U$. [_Problems:_ @def:vertex-cover, @def:set-covering.] ] @@ -341,8 +300,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples Each vertex's edge set becomes a subset; the cover condition (every edge covered) maps to the covering condition (every universe element in some selected set). _Variable mapping:_ Universe $U = {0, ..., |E|-1}$ (edge indices), $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Solution extraction: for covering ${S_v : v in C}$, return VC $= C$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_vc_to_setcovering.rs")[`reduction_vc_to_setcovering.rs`]. 
- #theorem[ *(Matching $arrow.r$ Set Packing)* Construct $U = V$, $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Then $M$ is a matching iff ${S_e : e in M}$ is a packing. [_Problems:_ @def:matching, @def:set-packing.] ] @@ -351,8 +308,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples Each edge becomes a set of its endpoints; disjoint edges have disjoint endpoint sets. _Variable mapping:_ Universe $U = V$ (vertices), $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Solution extraction: for packing ${S_e : e in P}$, return matching $= P$ (the edges whose endpoint sets were packed). ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_matching_to_setpacking.rs")[`reduction_matching_to_setpacking.rs`]. - #theorem[ *(Spin Glass $arrow.l.r$ QUBO)* The substitution $s_i = 2x_i - 1$ yields $H_"SG"(bold(s)) = H_"QUBO"(bold(x)) + "const"$. [_Problems:_ @def:spin-glass, @def:qubo.] ] @@ -361,20 +316,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples Expanding $-sum_(i,j) J_(i j) (2x_i - 1)(2x_j - 1) - sum_i h_i (2x_i - 1)$ gives $Q_(i j) = -4J_(i j)$, $Q_(i i) = 2sum_j J_(i j) - 2h_i$. _Variable mapping:_ Spin $s_i in {-1, +1}$ maps to binary $x_i in {0, 1}$ via $s_i = 2x_i - 1$. Solution extraction: for QUBO solution $bold(x)$, return spins $s_i = 2x_i - 1$. The reverse maps $x_i = (s_i + 1)/2$. ] -```rust -// Minimal example: SpinGlass -> QUBO -> extract solution -let sg = SpinGlass::new(2, vec![((0, 1), -1.0)], vec![0.5, -0.5]); -let result = ReduceTo::::reduce_to(&sg); -let qubo = result.target_problem(); - -let solver = BruteForce::new(); -let qubo_solutions = solver.find_best(qubo); -let sg_solution = result.extract_solution(&qubo_solutions[0]); -assert_eq!(sg_solution.len(), 2); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_spinglass_to_qubo.rs")[`reduction_spinglass_to_qubo.rs`]. 
- #let sg_qubo = load-example("spinglass_to_qubo") #reduction-example(sg_qubo, caption: [2-spin system with coupling $J_(01) = -1$, fields $h = (0.5, -0.5)$])[] @@ -396,20 +337,6 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Correctness._ If $bold(x)$ has any adjacent pair $(x_i = 1, x_j = 1)$ with $(i,j) in E$, the penalty $P > sum_i w_i >= -sum_i Q_(i i) x_i$ exceeds the maximum objective gain, so $bold(x)$ is not a minimizer. Among independent sets ($x_i x_j = 0$ for all edges), $f(bold(x)) = -sum_(i in S) w_i$, minimized exactly when $S$ is a maximum-weight IS. ] -```rust -// Minimal example: IS -> QUBO -> extract solution -let is = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); -let result = ReduceTo::::reduce_to(&is); -let qubo = result.target_problem(); - -let solver = BruteForce::new(); -let solutions = solver.find_best(qubo); -let is_solution = result.extract_solution(&solutions[0]); -assert!(is.solution_size(&is_solution).is_valid); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_qubo.rs")[`reduction_is_to_qubo.rs`]. - #let is_qubo = load-example("is_to_qubo") #let is_qubo_r = load-results("is_to_qubo") #block( @@ -444,20 +371,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples Summing over all edges, each vertex $i$ appears in $"deg"(i)$ terms. The QUBO coefficients are: diagonal $Q_(i i) = w_i - P dot "deg"(i)$ (objective plus linear penalty), off-diagonal $Q_(i j) = P$ for edges. The constant $P |E|$ does not affect the minimizer. 
] -```rust -// Minimal example: VC -> QUBO -> extract solution -let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); -let result = ReduceTo::::reduce_to(&vc); -let qubo = result.target_problem(); - -let solver = BruteForce::new(); -let solutions = solver.find_best(qubo); -let vc_solution = result.extract_solution(&solutions[0]); -assert!(vc.solution_size(&vc_solution).is_valid); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_vc_to_qubo.rs")[`reduction_vc_to_qubo.rs`]. - #theorem[ *(KColoring $arrow.r$ QUBO)* Given $G = (V, E)$ with $k$ colors, construct upper-triangular $Q in RR^(n k times n k)$ using one-hot encoding $x_(v,c) in {0,1}$ ($n k$ variables indexed by $v dot k + c$). [_Problems:_ @def:coloring, @def:qubo.] ] @@ -477,20 +390,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ For each vertex $v$, find $c$ with $x_(v,c) = 1$. ] -```rust -// Minimal example: KColoring -> QUBO -> extract solution -let kc = KColoring::<3, SimpleGraph, i32>::new(3, vec![(0, 1), (1, 2), (0, 2)]); -let result = ReduceTo::::reduce_to(&kc); -let qubo = result.target_problem(); - -let solver = BruteForce::new(); -let solutions = solver.find_best(qubo); -let kc_solution = result.extract_solution(&solutions[0]); -assert_eq!(solutions.len(), 6); // 3! valid 3-colorings of K3 -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_coloring_to_qubo.rs")[`reduction_coloring_to_qubo.rs`]. - #theorem[ *(SetPacking $arrow.r$ QUBO)* Equivalent to IS on the intersection graph: $Q_(i i) = -w_i$ and $Q_(i j) = P$ for overlapping sets $i, j$ ($i < j$), where $P = 1 + sum_i w_i$. [_Problems:_ @def:set-packing, @def:qubo.] ] @@ -499,20 +398,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples Two sets conflict iff they share an element. 
The intersection graph has sets as vertices and edges between conflicting pairs. Applying the penalty method (@sec:penalty-method) yields the same QUBO as IS on this graph: diagonal rewards selection, off-diagonal penalizes overlap. Correctness follows from the IS→QUBO proof. ] -```rust -// Minimal example: SetPacking -> QUBO -> extract solution -let sp = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3, 4]]); -let result = ReduceTo::::reduce_to(&sp); -let qubo = result.target_problem(); - -let solver = BruteForce::new(); -let solutions = solver.find_best(qubo); -let sp_solution = result.extract_solution(&solutions[0]); -assert!(sp.solution_size(&sp_solution).is_valid); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_setpacking_to_qubo.rs")[`reduction_setpacking_to_qubo.rs`]. - #theorem[ *(2-SAT $arrow.r$ QUBO)* Given a Max-2-SAT instance with $m$ clauses over $n$ variables, construct upper-triangular $Q in RR^(n times n)$ where each clause $(ell_i or ell_j)$ contributes a penalty gadget encoding its unique falsifying assignment. [_Problems:_ @def:k-sat, @def:qubo.] ] @@ -534,24 +419,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples Summing over all clauses, $f(bold(x)) = sum_j "penalty"_j (bold(x))$ counts falsified clauses. Minimizers of $f$ maximize satisfied clauses. 
] -```rust -// Minimal example: 2-SAT -> QUBO -> extract solution -let ksat = KSatisfiability::<2, i32>::new(3, vec![ - CNFClause::new(vec![1, 2]), // x1 OR x2 - CNFClause::new(vec![-1, 3]), // NOT x1 OR x3 - CNFClause::new(vec![2, -3]), // x2 OR NOT x3 -]); -let result = ReduceTo::::reduce_to(&ksat); -let qubo = result.target_problem(); - -let solver = BruteForce::new(); -let solutions = solver.find_best(qubo); -let sat_solution = result.extract_solution(&solutions[0]); -assert!(ksat.solution_size(&sat_solution).is_valid); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_ksatisfiability_to_qubo.rs")[`reduction_ksatisfiability_to_qubo.rs`]. - #theorem[ *(Binary ILP $arrow.r$ QUBO)* Given binary ILP: maximize $bold(c)^top bold(x)$ subject to $A bold(x) = bold(b)$, $bold(x) in {0,1}^n$, construct upper-triangular $Q = -"diag"(bold(c) + 2P bold(b)^top A) + P A^top A$ where $P = 1 + ||bold(c)||_1 + ||bold(b)||_1$. [_Problems:_ @def:ilp, @def:qubo.] ] @@ -570,27 +437,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ Discard slack variables: return $bold(x)' [0..n]$. ] -```rust -// Minimal example: binary ILP -> QUBO -> extract solution -let ilp = ILP::binary(3, - vec![ - LinearConstraint::le(vec![(0, 1.0), (1, 1.0)], 1.0), - LinearConstraint::le(vec![(1, 1.0), (2, 1.0)], 1.0), - ], - vec![(0, 1.0), (1, 2.0), (2, 3.0)], - ObjectiveSense::Maximize, -); -let result = ReduceTo::>::reduce_to(&ilp); -let qubo = result.target_problem(); - -let solver = BruteForce::new(); -let solutions = solver.find_best(qubo); -let ilp_solution = result.extract_solution(&solutions[0]); -assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_ilp_to_qubo.rs")[`reduction_ilp_to_qubo.rs`]. 
- == Non-Trivial Reductions #theorem[ @@ -609,8 +455,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ For $v_(j,i) in S$ with literal $x_k$: set $x_k = 1$; for $overline(x_k)$: set $x_k = 0$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_is.rs")[`reduction_sat_to_is.rs`]. - #let sat_is = load-example("sat_to_is") #let sat_is_r = load-results("sat_to_is") #let sat_is_sol = sat_is_r.solutions.at(0) @@ -631,8 +475,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ Set $x_i = 1$ iff $"color"("pos"_i) = "color"("TRUE")$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_coloring.rs")[`reduction_sat_to_coloring.rs`]. - #theorem[ *(SAT $arrow.r$ Dominating Set)* @garey1979 Given CNF $phi$ with $n$ variables and $m$ clauses, $phi$ is satisfiable iff the constructed graph has a dominating set of size $n$. [_Problems:_ @def:satisfiability, @def:dominating-set.] ] @@ -645,8 +487,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ Set $x_i = 1$ if $"pos"_i$ selected; $x_i = 0$ if $"neg"_i$ selected. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_dominatingset.rs")[`reduction_sat_to_dominatingset.rs`]. - #theorem[ *(SAT $arrow.l.r$ $k$-SAT)* @cook1971 @garey1979 Any SAT formula converts to $k$-SAT ($k >= 3$) preserving satisfiability. [_Problems:_ @def:satisfiability, @def:k-sat.] ] @@ -660,8 +500,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Correctness._ Original clause true $arrow.l.r$ auxiliary chain can propagate truth through new clauses. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_ksat.rs")[`reduction_sat_to_ksat.rs`]. 
- #theorem[ *(CircuitSAT $arrow.r$ Spin Glass)* @whitfield2012 @lucas2014 Each gate maps to a gadget whose ground states encode valid I/O. [_Problems:_ @def:circuit-sat, @def:spin-glass.] ] @@ -686,8 +524,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples caption: [Ising gadgets for logic gates. Ground states match truth tables.] ) -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_circuit_to_spinglass.rs")[`reduction_circuit_to_spinglass.rs`]. - #theorem[ *(Factoring $arrow.r$ Circuit-SAT)* An array multiplier with output constrained to $N$ is satisfiable iff $N$ factors within bit bounds. _(Folklore; no canonical reference.)_ [_Problems:_ @def:factoring, @def:circuit-sat.] ] @@ -704,8 +540,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ $p = sum_i p_i 2^(i-1)$, $q = sum_j q_j 2^(j-1)$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_factoring_to_circuit.rs")[`reduction_factoring_to_circuit.rs`]. - #theorem[ *(Spin Glass $arrow.l.r$ Max-Cut)* @barahona1982 @lucas2014 Ground states of Ising models correspond to maximum cuts. [_Problems:_ @def:spin-glass, @def:max-cut.] ] @@ -718,20 +552,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ Without ancilla: identity. With ancilla: if $sigma_a = 1$, flip all spins before removing ancilla. 
] -```rust -// Minimal example: SpinGlass -> MaxCut -> extract solution -let sg = SpinGlass::new(3, vec![((0, 1), 1), ((1, 2), 1), ((0, 2), 1)], vec![0, 0, 0]); -let result = ReduceTo::>::reduce_to(&sg); -let maxcut = result.target_problem(); - -let solver = BruteForce::new(); -let maxcut_solutions = solver.find_best(maxcut); -let sg_solution = result.extract_solution(&maxcut_solutions[0]); -assert_eq!(sg_solution.len(), 3); -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_spinglass_to_maxcut.rs")[`reduction_spinglass_to_maxcut.rs`]. - #theorem[ *(Coloring $arrow.r$ ILP)* The $k$-coloring problem reduces to binary ILP with $|V| dot k$ variables and $|V| + |E| dot k$ constraints. [_Problems:_ @def:coloring, @def:ilp.] ] @@ -750,8 +570,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ For each vertex $v$, find $c$ with $x_(v,c) = 1$; assign color $c$ to $v$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_coloring_to_ilp.rs")[`reduction_coloring_to_ilp.rs`]. - #theorem[ *(Factoring $arrow.r$ ILP)* Integer factorization reduces to binary ILP using McCormick linearization with $O(m n)$ variables and constraints. [_Problems:_ @def:factoring, @def:ilp.] ] @@ -775,32 +593,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Solution extraction._ Read $p = sum_i p_i 2^i$ and $q = sum_j q_j 2^j$ from the binary variables. ] -_Example: Factoring 15._ The following Rust code demonstrates the closed-loop reduction (requires `ilp` feature: `cargo add problemreductions --features ilp`): - -```rust -use problemreductions::prelude::*; - -// 1. Create factoring instance: find p (4-bit) × q (4-bit) = 15 -let problem = Factoring::new(4, 4, 15); - -// 2. Reduce to ILP -let reduction = ReduceTo::::reduce_to(&problem); -let ilp = reduction.target_problem(); - -// 3. 
Solve ILP -let solver = ILPSolver::new(); -let ilp_solution = solver.solve(ilp).unwrap(); - -// 4. Extract factoring solution -let extracted = reduction.extract_solution(&ilp_solution); - -// 5. Verify: reads factors and confirms p × q = 15 -let (p, q) = problem.read_factors(&extracted); -assert_eq!(p * q, 15); // e.g., (3, 5) or (5, 3) -``` - -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_factoring_to_ilp.rs")[`reduction_factoring_to_ilp.rs`]. - == ILP Formulations The following reductions to Integer Linear Programming are straightforward formulations where problem constraints map directly to linear inequalities. @@ -813,8 +605,6 @@ The following reductions to Integer Linear Programming are straightforward formu _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in E$. Objective: maximize $sum_v w_v x_v$. _Solution extraction:_ $S = {v : x_v = 1}$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_ilp.rs")[`reduction_is_to_ilp.rs`]. - #theorem[ *(VC $arrow.r$ ILP)* The minimum-weight VC problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. [_Problems:_ @def:vertex-cover, @def:ilp.] ] @@ -823,8 +613,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v >= 1$ for each $(u, v) in E$. Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $C = {v : x_v = 1}$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_vc_to_ilp.rs")[`reduction_vc_to_ilp.rs`]. - #theorem[ *(Matching $arrow.r$ ILP)* The maximum-weight matching reduces to binary ILP with $|E|$ variables and $|V|$ constraints. [_Problems:_ @def:matching, @def:ilp.] 
] @@ -833,8 +621,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Construction._ Variables: $x_e in {0, 1}$ for each $e in E$. Constraints: $sum_(e in.rev v) x_e <= 1$ for each $v in V$. Objective: maximize $sum_e w_e x_e$. _Solution extraction:_ $M = {e : x_e = 1}$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_matching_to_ilp.rs")[`reduction_matching_to_ilp.rs`]. - #theorem[ *(SetPacking $arrow.r$ ILP)* Set packing reduces to binary ILP with $|cal(S)|$ variables and at most $binom(|cal(S)|, 2)$ constraints. [_Problems:_ @def:set-packing, @def:ilp.] ] @@ -843,8 +629,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $x_i + x_j <= 1$ for each overlapping pair $S_i, S_j in cal(S)$ with $S_i inter S_j != emptyset$. Objective: maximize $sum_i w_i x_i$. _Solution extraction:_ $cal(P) = {S_i : x_i = 1}$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_setpacking_to_ilp.rs")[`reduction_setpacking_to_ilp.rs`]. - #theorem[ *(SetCovering $arrow.r$ ILP)* Set covering reduces to binary ILP with $|cal(S)|$ variables and $|U|$ constraints. [_Problems:_ @def:set-covering, @def:ilp.] ] @@ -853,8 +637,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $sum_(S_i in.rev u) x_i >= 1$ for each $u in U$. Objective: minimize $sum_i w_i x_i$. _Solution extraction:_ $cal(C) = {S_i : x_i = 1}$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_setcovering_to_ilp.rs")[`reduction_setcovering_to_ilp.rs`]. - #theorem[ *(DominatingSet $arrow.r$ ILP)* Dominating set reduces to binary ILP with $|V|$ variables and $|V|$ constraints. [_Problems:_ @def:dominating-set, @def:ilp.] 
] @@ -863,8 +645,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_v + sum_(u in N(v)) x_u >= 1$ for each $v in V$ (each vertex dominated). Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $D = {v : x_v = 1}$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_dominatingset_to_ilp.rs")[`reduction_dominatingset_to_ilp.rs`]. - #theorem[ *(Clique $arrow.r$ ILP)* Maximum clique reduces to binary ILP with $|V|$ variables and $O(|overline(E)|)$ constraints. [_Problems:_ @def:clique, @def:ilp.] ] @@ -873,8 +653,6 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in.not E$ (non-edges). Objective: maximize $sum_v x_v$. Equivalently, IS on the complement graph. _Solution extraction:_ $K = {v : x_v = 1}$. ] -See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_clique_to_ilp.rs")[`reduction_clique_to_ilp.rs`]. 
- == Unit Disk Mapping #theorem[ From 2fc7d914d8f35d777abf0be084b61b92e8675d93 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 19:17:50 +0800 Subject: [PATCH 02/14] update --- docs/paper/reductions.typ | 198 +++++++++++++++++++++++--------------- 1 file changed, 121 insertions(+), 77 deletions(-) diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index d682246bb..fd201d961 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -23,16 +23,112 @@ #let problem-schemas = json("problem_schemas.json") -// Render a problem's JSON schema as a field table +// Problem name abbreviations for theorem labels +#let name-abbrev = ( + "IndependentSet": "is", + "VertexCovering": "vc", + "MaxCut": "maxcut", + "KColoring": "coloring", + "DominatingSet": "dominatingset", + "Matching": "matching", + "Clique": "clique", + "SetPacking": "setpacking", + "SetCovering": "setcovering", + "SpinGlass": "spinglass", + "QUBO": "qubo", + "ILP": "ilp", + "Satisfiability": "sat", + "KSatisfiability": "ksat", + "CircuitSAT": "circuit", + "Factoring": "factoring", + "GridGraph": "gridgraph", +) + +// Special case mappings where JSON direction differs from theorem label +#let label-overrides = ( + "SetPacking->IndependentSet": "thm:is-to-setpacking", + "VertexCovering->IndependentSet": "thm:is-to-vc", +) + +// Generate theorem label from source/target names (canonical direction) +#let reduction-label(source, target) = { + // Check for override first + let key = source + "->" + target + if key in label-overrides { + return label(label-overrides.at(key)) + } + let src = name-abbrev.at(source, default: lower(source)) + let tgt = name-abbrev.at(target, default: lower(target)) + label("thm:" + src + "-to-" + tgt) +} + +// Extract reductions for a problem from graph-data (returns (name, label) pairs) +// For bidirectional edges, uses the canonical (stored) direction for the label +#let get-reductions-to(problem-name) = { + // Direct edges: source = 
problem-name + let direct = graph-data.edges + .filter(e => e.source.name == problem-name) + .map(e => (name: e.target.name, lbl: reduction-label(e.source.name, e.target.name))) + // Reverse of bidirectional edges: target = problem-name, bidirectional = true + let reverse = graph-data.edges + .filter(e => e.target.name == problem-name and e.bidirectional) + .map(e => (name: e.source.name, lbl: reduction-label(e.source.name, e.target.name))) + (direct + reverse).dedup(key: e => e.name) +} + +#let get-reductions-from(problem-name) = { + // Direct edges: target = problem-name + let direct = graph-data.edges + .filter(e => e.target.name == problem-name) + .map(e => (name: e.source.name, lbl: reduction-label(e.source.name, e.target.name))) + // Reverse of bidirectional edges: source = problem-name, bidirectional = true + let reverse = graph-data.edges + .filter(e => e.source.name == problem-name and e.bidirectional) + .map(e => (name: e.target.name, lbl: reduction-label(e.source.name, e.target.name))) + (direct + reverse).dedup(key: e => e.name) +} + +// Render a single reduction with link +#let render-reduction-link(r) = { + link(r.lbl)[#r.name] +} + +// Render the "Reduces to/from" lines for a problem +#let render-reductions(problem-name) = { + let reduces-to = get-reductions-to(problem-name) + let reduces-from = get-reductions-from(problem-name) + if reduces-to.len() > 0 or reduces-from.len() > 0 { + block(above: 0.5em)[ + #if reduces-to.len() > 0 [ + - _Reduces to:_ #reduces-to.map(render-reduction-link).join(", "). \ + ] + #if reduces-from.len() > 0 [ + - _Reduces from:_ #reduces-from.map(render-reduction-link).join(", "). 
+ ] + ] + } +} + +// Render a problem's JSON schema as a field table (subtle styling) #let render-schema(name) = { let schema = problem-schemas.find(s => s.name == name) if schema == none { return } + set text(size: 9pt) table( - columns: (auto, auto, 1fr), - inset: 4pt, - align: left, - table.header([*Field*], [*Type*], [*Description*]), - ..schema.fields.map(f => (raw(f.name), raw(f.type_name), f.description)).flatten() + columns: (auto, 1fr), + inset: (x: 6pt, y: 3pt), + align: (left, left), + stroke: none, + table.hline(stroke: 0.3pt + luma(200)), + table.header( + text(fill: luma(100))[Field], + text(fill: luma(100))[Description], + ), + table.hline(stroke: 0.3pt + luma(200)), + ..schema.fields.map(f => ( + text(fill: luma(60), raw(f.name)), + text(fill: luma(60), f.description) + )).flatten() ) } @@ -117,60 +213,49 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Independent Set (IS)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ maximizing $sum_(v in S) w(v)$ such that no two vertices in $S$ are adjacent: $forall u, v in S: (u, v) in.not E$. - _Reduces to:_ VC (@thm:is-to-vc), SetPacking (@thm:is-to-setpacking), QUBO (@thm:is-to-qubo), ILP (@thm:is-to-ilp), GridGraph IS (@thm:is-to-gridgraph). \ - _Reduces from:_ VC (@thm:is-to-vc), SAT (@thm:sat-to-is). - + #render-reductions("IndependentSet") #render-schema("IndependentSet") ] #definition("Vertex Cover (VC)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ such that every edge has at least one endpoint in $S$: $forall (u, v) in E: u in S or v in S$. - _Reduces to:_ IS (@thm:is-to-vc), SetCovering (@thm:vc-to-setcovering), QUBO (@thm:vc-to-qubo), ILP (@thm:vc-to-ilp). \ - _Reduces from:_ IS (@thm:is-to-vc). 
- + #render-reductions("VertexCovering") #render-schema("VertexCovering") ] #definition("Max-Cut")[ Given $G = (V, E)$ with weights $w: E -> RR$, find partition $(S, overline(S))$ maximizing $sum_((u,v) in E: u in S, v in overline(S)) w(u, v)$. - _Reduces to:_ SpinGlass (@thm:spinglass-maxcut). \ - _Reduces from:_ SpinGlass (@thm:spinglass-maxcut). - + #render-reductions("MaxCut") #render-schema("MaxCut") ] #definition("Graph Coloring")[ Given $G = (V, E)$ and $k$ colors, find $c: V -> {1, ..., k}$ minimizing $|{(u, v) in E : c(u) = c(v)}|$. - _Reduces to:_ ILP (@thm:coloring-to-ilp), QUBO (@thm:coloring-to-qubo). \ - _Reduces from:_ SAT (@thm:sat-to-coloring). - + #render-reductions("KColoring") #render-schema("KColoring") ] #definition("Dominating Set")[ Given $G = (V, E)$ with weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ s.t. $forall v in V: v in S or exists u in S: (u, v) in E$. - _Reduces to:_ ILP (@thm:dominatingset-to-ilp). \ - _Reduces from:_ SAT (@thm:sat-to-dominatingset). - + #render-reductions("DominatingSet") #render-schema("DominatingSet") ] #definition("Matching")[ Given $G = (V, E)$ with weights $w: E -> RR$, find $M subset.eq E$ maximizing $sum_(e in M) w(e)$ s.t. $forall e_1, e_2 in M: e_1 inter e_2 = emptyset$. - _Reduces to:_ SetPacking (@thm:matching-to-setpacking), ILP (@thm:matching-to-ilp). - + #render-reductions("Matching") #render-schema("Matching") ] #definition("Clique")[ Given a graph $G = (V, E)$ and an integer $k$, the *Clique* problem asks whether there exists a subset $K subset.eq V$ of size at least $k$ such that every pair of distinct vertices in $K$ is adjacent, i.e., $(u, v) in E$ for all distinct $u, v in K$. - _Reduces to:_ ILP (@thm:clique-to-ilp). 
+ #render-reductions("Clique") ] #definition("Unit Disk Graph (Grid Graph)")[ @@ -182,18 +267,14 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Set Packing")[ Given universe $U$, collection $cal(S) = {S_1, ..., S_m}$ with $S_i subset.eq U$, weights $w: cal(S) -> RR$, find $cal(P) subset.eq cal(S)$ maximizing $sum_(S in cal(P)) w(S)$ s.t. $forall S_i, S_j in cal(P): S_i inter S_j = emptyset$. - _Reduces to:_ QUBO (@thm:setpacking-to-qubo), ILP (@thm:setpacking-to-ilp). \ - _Reduces from:_ IS (@thm:is-to-setpacking), Matching (@thm:matching-to-setpacking). - + #render-reductions("SetPacking") #render-schema("SetPacking") ] #definition("Set Covering")[ Given universe $U$, collection $cal(S)$ with weights $w: cal(S) -> RR$, find $cal(C) subset.eq cal(S)$ minimizing $sum_(S in cal(C)) w(S)$ s.t. $union.big_(S in cal(C)) S = U$. - _Reduces to:_ ILP (@thm:setcovering-to-ilp). \ - _Reduces from:_ VC (@thm:vc-to-setcovering). - + #render-reductions("SetCovering") #render-schema("SetCovering") ] @@ -202,27 +283,21 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Spin Glass (Ising Model)")[ Given $n$ spin variables $s_i in {-1, +1}$, pairwise couplings $J_(i j) in RR$, and external fields $h_i in RR$, minimize the Hamiltonian (energy function): $H(bold(s)) = -sum_((i,j)) J_(i j) s_i s_j - sum_i h_i s_i$. - _Reduces to:_ MaxCut (@thm:spinglass-maxcut), QUBO (@thm:spinglass-qubo). \ - _Reduces from:_ CircuitSAT (@thm:circuit-to-spinglass), MaxCut (@thm:spinglass-maxcut), QUBO (@thm:spinglass-qubo). - + #render-reductions("SpinGlass") #render-schema("SpinGlass") ] #definition("QUBO")[ Given $n$ binary variables $x_i in {0, 1}$, upper-triangular matrix $Q in RR^(n times n)$, minimize $f(bold(x)) = sum_(i=1)^n Q_(i i) x_i + sum_(i < j) Q_(i j) x_i x_j$ (using $x_i^2 = x_i$ for binary variables). - _Reduces to:_ SpinGlass (@thm:spinglass-qubo). 
\ - _Reduces from:_ IS (@thm:is-to-qubo), VC (@thm:vc-to-qubo), KColoring (@thm:coloring-to-qubo), SetPacking (@thm:setpacking-to-qubo), $k$-SAT (@thm:ksat-to-qubo), ILP (@thm:ilp-to-qubo), SpinGlass (@thm:spinglass-qubo). - + #render-reductions("QUBO") #render-schema("QUBO") ] #definition("Integer Linear Programming (ILP)")[ Given $n$ integer variables $bold(x) in ZZ^n$, constraint matrix $A in RR^(m times n)$, bounds $bold(b) in RR^m$, and objective $bold(c) in RR^n$, find $bold(x)$ minimizing $bold(c)^top bold(x)$ subject to $A bold(x) <= bold(b)$ and variable bounds. - _Reduces to:_ QUBO (@thm:ilp-to-qubo). \ - _Reduces from:_ KColoring (@thm:coloring-to-ilp), Factoring (@thm:factoring-to-ilp), IS (@thm:is-to-ilp), VC (@thm:vc-to-ilp), Matching (@thm:matching-to-ilp), SetPacking (@thm:setpacking-to-ilp), SetCovering (@thm:setcovering-to-ilp), DominatingSet (@thm:dominatingset-to-ilp), Clique (@thm:clique-to-ilp). - + #render-reductions("ILP") #render-schema("ILP") ] @@ -231,35 +306,28 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("SAT")[ Given a CNF formula $phi = and.big_(j=1)^m C_j$ with $m$ clauses over $n$ Boolean variables, where each clause $C_j = or.big_i ell_(j i)$ is a disjunction of literals, find an assignment $bold(x) in {0, 1}^n$ such that $phi(bold(x)) = 1$ (all clauses satisfied). - _Reduces to:_ IS (@thm:sat-to-is), KColoring (@thm:sat-to-coloring), DominatingSet (@thm:sat-to-dominatingset), $k$-SAT (@thm:sat-ksat). \ - _Reduces from:_ $k$-SAT (@thm:sat-ksat). - + #render-reductions("Satisfiability") #render-schema("Satisfiability") ] #definition([$k$-SAT])[ SAT with exactly $k$ literals per clause. - _Reduces to:_ SAT (@thm:sat-ksat), QUBO (@thm:ksat-to-qubo). \ - _Reduces from:_ SAT (@thm:sat-ksat). 
- + #render-reductions("KSatisfiability") #render-schema("KSatisfiability") ] #definition("Circuit-SAT")[ Given a Boolean circuit $C$ composed of logic gates (AND, OR, NOT, XOR) with $n$ input variables, find an input assignment $bold(x) in {0,1}^n$ such that $C(bold(x)) = 1$. - _Reduces to:_ SpinGlass (@thm:circuit-to-spinglass). \ - _Reduces from:_ Factoring (@thm:factoring-to-circuit). - + #render-reductions("CircuitSAT") #render-schema("CircuitSAT") ] #definition("Factoring")[ Given a composite integer $N$ and bit sizes $m, n$, find integers $p in [2, 2^m - 1]$ and $q in [2, 2^n - 1]$ such that $p times q = N$. Here $p$ has $m$ bits and $q$ has $n$ bits. - _Reduces to:_ CircuitSAT (@thm:factoring-to-circuit), ILP (@thm:factoring-to-ilp). - + #render-reductions("Factoring") #render-schema("Factoring") ] @@ -310,7 +378,7 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #theorem[ *(Spin Glass $arrow.l.r$ QUBO)* The substitution $s_i = 2x_i - 1$ yields $H_"SG"(bold(s)) = H_"QUBO"(bold(x)) + "const"$. [_Problems:_ @def:spin-glass, @def:qubo.] -] +] #proof[ Expanding $-sum_(i,j) J_(i j) (2x_i - 1)(2x_j - 1) - sum_i h_i (2x_i - 1)$ gives $Q_(i j) = -4J_(i j)$, $Q_(i i) = 2sum_j J_(i j) - 2h_i$. _Variable mapping:_ Spin $s_i in {-1, +1}$ maps to binary $x_i in {0, 1}$ via $s_i = 2x_i - 1$. Solution extraction: for QUBO solution $bold(x)$, return spins $s_i = 2x_i - 1$. The reverse maps $x_i = (s_i + 1)/2$. @@ -489,7 +557,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m #theorem[ *(SAT $arrow.l.r$ $k$-SAT)* @cook1971 @garey1979 Any SAT formula converts to $k$-SAT ($k >= 3$) preserving satisfiability. [_Problems:_ @def:satisfiability, @def:k-sat.] -] +] #proof[ _Small clauses ($|C| < k$):_ Pad $(ell_1 or ... or ell_r)$ with auxiliary $y$: $(ell_1 or ... or ell_r or y or overline(y) or ...)$ to length $k$. 
@@ -542,7 +610,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m #theorem[ *(Spin Glass $arrow.l.r$ Max-Cut)* @barahona1982 @lucas2014 Ground states of Ising models correspond to maximum cuts. [_Problems:_ @def:spin-glass, @def:max-cut.] -] +] #proof[ _MaxCut $arrow.r$ SpinGlass:_ Set $J_(i j) = w_(i j)$, $h_i = 0$. Maximizing cut equals minimizing $-sum J_(i j) s_i s_j$ since $s_i s_j = -1$ when $s_i != s_j$. @@ -838,30 +906,6 @@ The following table shows concrete variable overhead for example instances, gene (name: n, data: d) }) -#figure( - table( - columns: (auto, auto, auto, auto, auto), - inset: 5pt, - align: (left, left, right, right, right), - table.header([*Reduction*], [*Instance*], [*Src Vars*], [*Tgt Vars*], [*Ratio*]), - ..examples.map(ex => { - let d = ex.data - let sv = instance-vars(d.source.instance) - let tv = instance-vars(d.target.instance) - let ratio = if sv > 0 { calc.round(tv / sv, digits: 1) } else { 0 } - let label = ex.name.replace("_to_", " $arrow.r$ ").replace("_", " ") - ( - [#label], - [#d.source.problem $arrow.r$ #d.target.problem], - [#sv], - [#tv], - [#(ratio)x], - ) - }).flatten() - ), - caption: [Concrete variable overhead for all example instances. Generated by `make examples`.] 
-) - = Summary  #let gray = rgb("#e8e8e8")  From 3a81dc65cc518dbca436cacd4648392fc96dec4a Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 19:35:54 +0800 Subject: [PATCH 03/14] refactor: add reduction-rule function for unified theorem rendering MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit  Add `reduction-rule(source, target, ...)` Typst function that: - Auto-generates theorem labels (`thm:<source>-to-<target>`) - Auto-generates bold header (Source → Target) with display names - Auto-generates [Problems: Definition X, Definition Y] links - Renders proof and optional example with custom extra content - Supports bidirectional (↔), display name overrides  Converts all 28 reduction theorems to use the new function, eliminating repeated boilerplate (manual labels, problem links, header formatting).  Co-Authored-By: Claude Opus 4.6 ---  docs/paper/reductions.typ | 399 +++++++++++++++++++++-----------------  1 file changed, 218 insertions(+), 181 deletions(-)  diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index fd201d961..398cd5e1b 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -50,6 +50,49 @@    "VertexCovering->IndependentSet": "thm:is-to-vc",  )  +// Problem display names for theorem headers +#let display-name = ( +  "IndependentSet": "IS", +  "VertexCovering": "VC", +  "MaxCut": "Max-Cut", +  "KColoring": "Coloring", +  "DominatingSet": "Dominating Set", +  "Matching": "Matching", +  "Clique": "Clique", +  "SetPacking": "Set Packing", +  "SetCovering": "Set Covering", +  "SpinGlass": "Spin Glass", +  "QUBO": "QUBO", +  "ILP": "ILP", +  "Satisfiability": "SAT", +  "KSatisfiability": [$k$-SAT], +  "CircuitSAT": "CircuitSAT", +  "Factoring": "Factoring", +  "GridGraph": "GridGraph IS", +) + +// Problem name to definition label mapping +#let def-label-map = ( +  "IndependentSet": "def:independent-set", +  "VertexCovering": "def:vertex-cover", +  "MaxCut": "def:max-cut", +  "KColoring": "def:coloring", +  "DominatingSet": 
"def:dominating-set", + "Matching": "def:matching", + "Clique": "def:clique", + "SetPacking": "def:set-packing", + "SetCovering": "def:set-covering", + "SpinGlass": "def:spin-glass", + "QUBO": "def:qubo", + "ILP": "def:ilp", + "Satisfiability": "def:satisfiability", + "KSatisfiability": "def:k-sat", + "CircuitSAT": "def:circuit-sat", + "Factoring": "def:factoring", + "GridGraph": "def:independent-set", +) + + // Generate theorem label from source/target names (canonical direction) #let reduction-label(source, target) = { // Check for override first @@ -177,6 +220,45 @@ base_level: 1, ) +// Unified function for reduction rules: theorem + proof + optional example +#let reduction-rule( + source, target, + bidirectional: false, + source-display: none, + target-display: none, + example: none, + example-caption: none, + extra: none, + theorem-body, proof-body, +) = { + let arrow = if bidirectional { sym.arrow.l.r } else { sym.arrow.r } + let src-disp = if source-display != none { source-display } + else { display-name.at(source) } + let tgt-disp = if target-display != none { target-display } + else { display-name.at(target) } + let src-def = def-label-map.at(source) + let tgt-def = def-label-map.at(target) + let problems = if src-def == tgt-def { + [_Problem:_ #ref(label(src-def)).] + } else { + [_Problems:_ #ref(label(src-def)), #ref(label(tgt-def)).] 
+ } + let src-abbr = name-abbrev.at(source, default: lower(source)) + let tgt-abbr = name-abbrev.at(target, default: lower(target)) + let thm-lbl = label("thm:" + src-abbr + "-to-" + tgt-abbr) + + [#theorem[ + *(#src-disp #arrow #tgt-disp)* #theorem-body [#problems] + ] #thm-lbl] + + proof[#proof-body] + + if example != none { + let data = load-example(example) + reduction-example(data, caption: example-caption)[#extra] + } +} + #align(center)[ #text(size: 16pt, weight: "bold")[Problem Reductions: Models and Transformations] #v(0.5em) @@ -335,69 +417,79 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| == Trivial Reductions -#theorem[ - *(IS $arrow.l.r$ VC)* $S subset.eq V$ is independent iff $V backslash S$ is a vertex cover, with $|"IS"| + |"VC"| = |V|$. [_Problems:_ @def:independent-set, @def:vertex-cover.] -] - -#proof[ - ($arrow.r.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint lies in $S$, so $V backslash S$ covers all edges. ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. _Variable mapping:_ Given IS instance $(G, w)$, create VC instance $(G, w)$ with identical graph and weights. Solution extraction: for VC solution $C$, return $S = V backslash C$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. 
-] - #let is_vc = load-example("is_to_vc") #let is_vc_r = load-results("is_to_vc") #let is_vc_sol = is_vc_r.solutions.at(0) -#reduction-example(is_vc, caption: [Path graph $P_4$: IS $arrow.l.r$ VC])[ - Source IS: $S = {#is_vc_sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.source_config.filter(x => x == 1).len()) #h(1em) - Target VC: $C = {#is_vc_sol.target_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.target_config.filter(x => x == 1).len()) \ - $|"IS"| + |"VC"| = #instance-vars(is_vc.source.instance) = |V|$ #sym.checkmark +#reduction-rule("IndependentSet", "VertexCovering", + bidirectional: true, + example: "is_to_vc", + example-caption: [Path graph $P_4$: IS $arrow.l.r$ VC], + extra: [ + Source IS: $S = {#is_vc_sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.source_config.filter(x => x == 1).len()) #h(1em) + Target VC: $C = {#is_vc_sol.target_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.target_config.filter(x => x == 1).len()) \ + $|"IS"| + |"VC"| = #instance-vars(is_vc.source.instance) = |V|$ #sym.checkmark + ], +)[ + $S subset.eq V$ is independent iff $V backslash S$ is a vertex cover, with $|"IS"| + |"VC"| = |V|$. +][ + ($arrow.r.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint lies in $S$, so $V backslash S$ covers all edges. ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. _Variable mapping:_ Given IS instance $(G, w)$, create VC instance $(G, w)$ with identical graph and weights. Solution extraction: for VC solution $C$, return $S = V backslash C$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. 
] -#theorem[ - *(IS $arrow.r$ Set Packing)* Construct $U = E$, $S_v = {e in E : v in e}$, $w(S_v) = w(v)$. Then $I$ is independent iff ${S_v : v in I}$ is a packing. [_Problems:_ @def:independent-set, @def:set-packing.] -] - -#proof[ +#reduction-rule("IndependentSet", "SetPacking")[ + Construct $U = E$, $S_v = {e in E : v in e}$, $w(S_v) = w(v)$. Then $I$ is independent iff ${S_v : v in I}$ is a packing. +][ Independence implies disjoint incident edge sets; conversely, disjoint edge sets imply no shared edges. _Variable mapping:_ Universe $U = E$ (edges), sets $S_v = {e in E : v in e}$ (edges incident to vertex $v$), weights $w(S_v) = w(v)$. Solution extraction: for packing ${S_v : v in P}$, return IS $= P$ (the vertices whose sets were packed). ] -#theorem[ - *(VC $arrow.r$ Set Covering)* Construct $U = {0, ..., |E|-1}$, $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Then $C$ is a cover iff ${S_v : v in C}$ covers $U$. [_Problems:_ @def:vertex-cover, @def:set-covering.] -] - -#proof[ +#reduction-rule("VertexCovering", "SetCovering")[ + Construct $U = {0, ..., |E|-1}$, $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Then $C$ is a cover iff ${S_v : v in C}$ covers $U$. +][ Each vertex's edge set becomes a subset; the cover condition (every edge covered) maps to the covering condition (every universe element in some selected set). _Variable mapping:_ Universe $U = {0, ..., |E|-1}$ (edge indices), $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Solution extraction: for covering ${S_v : v in C}$, return VC $= C$. ] -#theorem[ - *(Matching $arrow.r$ Set Packing)* Construct $U = V$, $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Then $M$ is a matching iff ${S_e : e in M}$ is a packing. [_Problems:_ @def:matching, @def:set-packing.] -] - -#proof[ +#reduction-rule("Matching", "SetPacking")[ + Construct $U = V$, $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Then $M$ is a matching iff ${S_e : e in M}$ is a packing. 
+][ Each edge becomes a set of its endpoints; disjoint edges have disjoint endpoint sets. _Variable mapping:_ Universe $U = V$ (vertices), $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Solution extraction: for packing ${S_e : e in P}$, return matching $= P$ (the edges whose endpoint sets were packed). ] -#theorem[ - *(Spin Glass $arrow.l.r$ QUBO)* The substitution $s_i = 2x_i - 1$ yields $H_"SG"(bold(s)) = H_"QUBO"(bold(x)) + "const"$. [_Problems:_ @def:spin-glass, @def:qubo.] -] - -#proof[ +#reduction-rule("SpinGlass", "QUBO", + bidirectional: true, + example: "spinglass_to_qubo", + example-caption: [2-spin system with coupling $J_(01) = -1$, fields $h = (0.5, -0.5)$], +)[ + The substitution $s_i = 2x_i - 1$ yields $H_"SG"(bold(s)) = H_"QUBO"(bold(x)) + "const"$. +][ Expanding $-sum_(i,j) J_(i j) (2x_i - 1)(2x_j - 1) - sum_i h_i (2x_i - 1)$ gives $Q_(i j) = -4J_(i j)$, $Q_(i i) = 2sum_j J_(i j) - 2h_i$. _Variable mapping:_ Spin $s_i in {-1, +1}$ maps to binary $x_i in {0, 1}$ via $s_i = 2x_i - 1$. Solution extraction: for QUBO solution $bold(x)$, return spins $s_i = 2x_i - 1$. The reverse maps $x_i = (s_i + 1)/2$. ] -#let sg_qubo = load-example("spinglass_to_qubo") -#reduction-example(sg_qubo, caption: [2-spin system with coupling $J_(01) = -1$, fields $h = (0.5, -0.5)$])[] - == Penalty-Method QUBO Reductions The _penalty method_ @glover2019 @lucas2014 converts a constrained optimization problem into an unconstrained QUBO by adding quadratic penalty terms. Given an objective $"obj"(bold(x))$ to minimize and constraints $g_k (bold(x)) = 0$, construct: $ f(bold(x)) = "obj"(bold(x)) + P sum_k g_k (bold(x))^2 $ where $P$ is a penalty weight large enough that any constraint violation costs more than the entire objective range. Since $g_k (bold(x))^2 >= 0$ with equality iff $g_k (bold(x)) = 0$, minimizers of $f$ are feasible and optimal for the original problem. 
Because binary variables satisfy $x_i^2 = x_i$, the resulting $f$ is a quadratic in $bold(x)$, i.e.\ a QUBO. -#theorem[ - *(IS $arrow.r$ QUBO)* Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q in RR^(n times n)$ with $Q_(i i) = -w_i$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$. Then minimizing $f(bold(x)) = sum_i Q_(i i) x_i + sum_(i - -#proof[ +#let is_qubo = load-example("is_to_qubo") +#let is_qubo_r = load-results("is_to_qubo") +#reduction-rule("IndependentSet", "QUBO", + example: "is_to_qubo", + example-caption: [IS on path $P_4$ to QUBO], + extra: [ + *Source edges:* $= {#is_qubo.source.instance.edges.map(e => $(#e.at(0), #e.at(1))$).join(", ")}$ \ + *QUBO matrix* ($Q in RR^(#is_qubo.target.instance.num_vars times #is_qubo.target.instance.num_vars)$): + $ Q = #math.mat(..is_qubo.target.instance.matrix.map(row => row.map(v => { + let r = calc.round(v, digits: 0) + [#r] + }))) $ + *Optimal IS* (size #is_qubo_r.solutions.at(0).source_config.filter(x => x == 1).len()): + #is_qubo_r.solutions.map(sol => { + let verts = sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)) + $\{#verts.join(", ")\}$ + }).join(", ") + ], +)[ + Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q in RR^(n times n)$ with $Q_(i i) = -w_i$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$. Then minimizing $f(bold(x)) = sum_i Q_(i i) x_i + sum_(i sum_i w_i >= -sum_i Q_(i i) x_i$ exceeds the maximum objective gain, so $bold(x)$ is not a minimizer. Among independent sets ($x_i x_j = 0$ for all edges), $f(bold(x)) = -sum_(i in S) w_i$, minimized exactly when $S$ is a maximum-weight IS. 
] -#let is_qubo = load-example("is_to_qubo") -#let is_qubo_r = load-results("is_to_qubo") -#block( - width: 100%, - inset: (x: 1em, y: 0.8em), - fill: rgb("#f0f7ff"), - stroke: (left: 2pt + rgb("#4a86e8")), -)[ - #text(weight: "bold")[Concrete Example: IS on path $P_4$ to QUBO] - #parbreak() - *Source:* #is_qubo.source.problem with #is_qubo.source.instance.num_vertices vertices, edges $= {#is_qubo.source.instance.edges.map(e => $(#e.at(0), #e.at(1))$).join(", ")}$ \ - *QUBO matrix* ($Q in RR^(#is_qubo.target.instance.num_vars times #is_qubo.target.instance.num_vars)$): - $ Q = #math.mat(..is_qubo.target.instance.matrix.map(row => row.map(v => { - let r = calc.round(v, digits: 0) - [#r] - }))) $ - *Optimal IS* (size #is_qubo_r.solutions.at(0).source_config.filter(x => x == 1).len()): - #is_qubo_r.solutions.map(sol => { - let verts = sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)) - $\{#verts.join(", ")\}$ - }).join(", ") -] - -#theorem[ - *(VC $arrow.r$ QUBO)* Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q$ with $Q_(i i) = w_i - P dot "deg"(i)$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$ and $"deg"(i)$ is the degree of vertex $i$. [_Problems:_ @def:vertex-cover, @def:qubo.] -] - -#proof[ +#reduction-rule("VertexCovering", "QUBO")[ + Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q$ with $Q_(i i) = w_i - P dot "deg"(i)$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$ and $"deg"(i)$ is the degree of vertex $i$. +][ _Construction._ The VC objective is: minimize $sum_i w_i x_i$ subject to $x_i + x_j >= 1$ for $(i,j) in E$. Applying the penalty method (@sec:penalty-method), the constraint $x_i + x_j >= 1$ is violated iff $x_i = x_j = 0$, with penalty $(1 - x_i)(1 - x_j)$: $ f(bold(x)) = sum_i w_i x_i + P sum_((i,j) in E) (1 - x_i)(1 - x_j) $ Expanding: $(1 - x_i)(1 - x_j) = 1 - x_i - x_j + x_i x_j$. 
Summing over all edges, each vertex $i$ appears in $"deg"(i)$ terms. The QUBO coefficients are: diagonal $Q_(i i) = w_i - P dot "deg"(i)$ (objective plus linear penalty), off-diagonal $Q_(i j) = P$ for edges. The constant $P |E|$ does not affect the minimizer. ] -#theorem[ - *(KColoring $arrow.r$ QUBO)* Given $G = (V, E)$ with $k$ colors, construct upper-triangular $Q in RR^(n k times n k)$ using one-hot encoding $x_(v,c) in {0,1}$ ($n k$ variables indexed by $v dot k + c$). [_Problems:_ @def:coloring, @def:qubo.] -] - -#proof[ +#reduction-rule("KColoring", "QUBO")[ + Given $G = (V, E)$ with $k$ colors, construct upper-triangular $Q in RR^(n k times n k)$ using one-hot encoding $x_(v,c) in {0,1}$ ($n k$ variables indexed by $v dot k + c$). +][ _Construction._ Applying the penalty method (@sec:penalty-method), the QUBO objective combines a one-hot constraint penalty and an edge conflict penalty: $ f(bold(x)) = P_1 sum_(v in V) (1 - sum_(c=1)^k x_(v,c))^2 + P_2 sum_((u,v) in E) sum_(c=1)^k x_(u,c) x_(v,c) $ @@ -458,19 +523,17 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ For each vertex $v$, find $c$ with $x_(v,c) = 1$. ] -#theorem[ - *(SetPacking $arrow.r$ QUBO)* Equivalent to IS on the intersection graph: $Q_(i i) = -w_i$ and $Q_(i j) = P$ for overlapping sets $i, j$ ($i < j$), where $P = 1 + sum_i w_i$. [_Problems:_ @def:set-packing, @def:qubo.] -] - -#proof[ +#reduction-rule("SetPacking", "QUBO")[ + Equivalent to IS on the intersection graph: $Q_(i i) = -w_i$ and $Q_(i j) = P$ for overlapping sets $i, j$ ($i < j$), where $P = 1 + sum_i w_i$. +][ Two sets conflict iff they share an element. The intersection graph has sets as vertices and edges between conflicting pairs. Applying the penalty method (@sec:penalty-method) yields the same QUBO as IS on this graph: diagonal rewards selection, off-diagonal penalizes overlap. Correctness follows from the IS→QUBO proof. 
] -#theorem[ - *(2-SAT $arrow.r$ QUBO)* Given a Max-2-SAT instance with $m$ clauses over $n$ variables, construct upper-triangular $Q in RR^(n times n)$ where each clause $(ell_i or ell_j)$ contributes a penalty gadget encoding its unique falsifying assignment. [_Problems:_ @def:k-sat, @def:qubo.] -] - -#proof[ +#reduction-rule("KSatisfiability", "QUBO", + source-display: "2-SAT", +)[ + Given a Max-2-SAT instance with $m$ clauses over $n$ variables, construct upper-triangular $Q in RR^(n times n)$ where each clause $(ell_i or ell_j)$ contributes a penalty gadget encoding its unique falsifying assignment. +][ _Construction._ Applying the penalty method (@sec:penalty-method), each 2-literal clause has exactly one falsifying assignment (both literals false). The penalty for that assignment is a quadratic function of $x_i, x_j$: #table( @@ -487,11 +550,11 @@ where $P$ is a penalty weight large enough that any constraint violation costs m Summing over all clauses, $f(bold(x)) = sum_j "penalty"_j (bold(x))$ counts falsified clauses. Minimizers of $f$ maximize satisfied clauses. ] -#theorem[ - *(Binary ILP $arrow.r$ QUBO)* Given binary ILP: maximize $bold(c)^top bold(x)$ subject to $A bold(x) = bold(b)$, $bold(x) in {0,1}^n$, construct upper-triangular $Q = -"diag"(bold(c) + 2P bold(b)^top A) + P A^top A$ where $P = 1 + ||bold(c)||_1 + ||bold(b)||_1$. [_Problems:_ @def:ilp, @def:qubo.] -] - -#proof[ +#reduction-rule("ILP", "QUBO", + source-display: "Binary ILP", +)[ + Given binary ILP: maximize $bold(c)^top bold(x)$ subject to $A bold(x) = bold(b)$, $bold(x) in {0,1}^n$, construct upper-triangular $Q = -"diag"(bold(c) + 2P bold(b)^top A) + P A^top A$ where $P = 1 + ||bold(c)||_1 + ||bold(b)||_1$. +][ _Step 1: Normalize constraints._ Convert inequalities to equalities using slack variables: $bold(a)_k^top bold(x) <= b_k$ becomes $bold(a)_k^top bold(x) + sum_(s=0)^(S_k - 1) 2^s y_(k,s) = b_k$ where $S_k = ceil(log_2 (b_k + 1))$ slack bits. 
For $>=$ constraints, the slack has a negative sign. The extended system is $A' bold(x)' = bold(b)$ with $bold(x)' = (bold(x), bold(y)) in {0,1}^(n')$. For minimization, negate $bold(c)$ to convert to maximization. _Step 2: QUBO construction._ Applying the penalty method (@sec:penalty-method), combine objective and penalty: @@ -507,11 +570,19 @@ where $P$ is a penalty weight large enough that any constraint violation costs m == Non-Trivial Reductions -#theorem[ - *(SAT $arrow.r$ IS)* @karp1972 Given CNF $phi$ with $m$ clauses, construct graph $G$ such that $phi$ is satisfiable iff $G$ has an IS of size $m$. [_Problems:_ @def:satisfiability, @def:independent-set.] -] - -#proof[ +#let sat_is = load-example("sat_to_is") +#let sat_is_r = load-results("sat_to_is") +#let sat_is_sol = sat_is_r.solutions.at(0) +#reduction-rule("Satisfiability", "IndependentSet", + example: "sat_to_is", + example-caption: [$phi = (x_1 or x_2) and (not x_1 or x_3) and (x_2 or not x_3)$], + extra: [ + SAT assignment: $x_1=#sat_is_sol.source_config.at(0), x_2=#sat_is_sol.source_config.at(1), x_3=#sat_is_sol.source_config.at(2)$ #h(1em) + IS graph: #sat_is.target.instance.num_vertices vertices, #sat_is.target.instance.num_edges edges (one vertex per literal occurrence) + ], +)[ + @karp1972 Given CNF $phi$ with $m$ clauses, construct graph $G$ such that $phi$ is satisfiable iff $G$ has an IS of size $m$. +][ _Construction._ For $phi = and.big_(j=1)^m C_j$ with $C_j = (ell_(j,1) or ... or ell_(j,k_j))$: _Vertices:_ For each literal $ell_(j,i)$ in clause $C_j$, create $v_(j,i)$. Total: $|V| = sum_j k_j$. @@ -523,19 +594,11 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ For $v_(j,i) in S$ with literal $x_k$: set $x_k = 1$; for $overline(x_k)$: set $x_k = 0$. 
] -#let sat_is = load-example("sat_to_is") -#let sat_is_r = load-results("sat_to_is") -#let sat_is_sol = sat_is_r.solutions.at(0) -#reduction-example(sat_is, caption: [$phi = (x_1 or x_2) and (not x_1 or x_3) and (x_2 or not x_3)$])[ - SAT assignment: $x_1=#sat_is_sol.source_config.at(0), x_2=#sat_is_sol.source_config.at(1), x_3=#sat_is_sol.source_config.at(2)$ #h(1em) - IS graph: #sat_is.target.instance.num_vertices vertices, #sat_is.target.instance.num_edges edges (one vertex per literal occurrence) -] - -#theorem[ - *(SAT $arrow.r$ 3-Coloring)* @garey1979 Given CNF $phi$, construct graph $G$ such that $phi$ is satisfiable iff $G$ is 3-colorable. [_Problems:_ @def:satisfiability, @def:coloring.] -] - -#proof[ +#reduction-rule("Satisfiability", "KColoring", + target-display: "3-Coloring", +)[ + @garey1979 Given CNF $phi$, construct graph $G$ such that $phi$ is satisfiable iff $G$ is 3-colorable. +][ _Construction._ (1) Base triangle: TRUE, FALSE, AUX vertices with all pairs connected. (2) Variable gadget for $x_i$: vertices $"pos"_i$, $"neg"_i$ connected to each other and to AUX. (3) Clause gadget: for $(ell_1 or ... or ell_k)$, apply OR-gadgets iteratively producing output $o$, then connect $o$ to FALSE and AUX. _OR-gadget$(a, b) arrow.bar o$:_ Five vertices encoding $o = a or b$: if both $a, b$ have FALSE color, $o$ cannot have TRUE color. @@ -543,11 +606,9 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ Set $x_i = 1$ iff $"color"("pos"_i) = "color"("TRUE")$. ] -#theorem[ - *(SAT $arrow.r$ Dominating Set)* @garey1979 Given CNF $phi$ with $n$ variables and $m$ clauses, $phi$ is satisfiable iff the constructed graph has a dominating set of size $n$. [_Problems:_ @def:satisfiability, @def:dominating-set.] 
-] - -#proof[ +#reduction-rule("Satisfiability", "DominatingSet")[ + @garey1979 Given CNF $phi$ with $n$ variables and $m$ clauses, $phi$ is satisfiable iff the constructed graph has a dominating set of size $n$. +][ _Construction._ (1) Variable triangle for $x_i$: vertices $"pos"_i = 3i$, $"neg"_i = 3i+1$, $"dum"_i = 3i+2$ forming a triangle. (2) Clause vertex $c_j = 3n+j$ connected to $"pos"_i$ if $x_i in C_j$, to $"neg"_i$ if $overline(x_i) in C_j$. _Correctness._ Each triangle requires at least one vertex in any dominating set. Size-$n$ set must take exactly one per triangle, which dominates clause vertices iff corresponding literals satisfy all clauses. @@ -555,11 +616,11 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ Set $x_i = 1$ if $"pos"_i$ selected; $x_i = 0$ if $"neg"_i$ selected. ] -#theorem[ - *(SAT $arrow.l.r$ $k$-SAT)* @cook1971 @garey1979 Any SAT formula converts to $k$-SAT ($k >= 3$) preserving satisfiability. [_Problems:_ @def:satisfiability, @def:k-sat.] -] - -#proof[ +#reduction-rule("Satisfiability", "KSatisfiability", + bidirectional: true, +)[ + @cook1971 @garey1979 Any SAT formula converts to $k$-SAT ($k >= 3$) preserving satisfiability. +][ _Small clauses ($|C| < k$):_ Pad $(ell_1 or ... or ell_r)$ with auxiliary $y$: $(ell_1 or ... or ell_r or y or overline(y) or ...)$ to length $k$. _Large clauses ($|C| > k$):_ Split $(ell_1 or ... or ell_r)$ with auxiliaries $y_1, ..., y_(r-k)$: @@ -568,11 +629,9 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Correctness._ Original clause true $arrow.l.r$ auxiliary chain can propagate truth through new clauses. ] -#theorem[ - *(CircuitSAT $arrow.r$ Spin Glass)* @whitfield2012 @lucas2014 Each gate maps to a gadget whose ground states encode valid I/O. [_Problems:_ @def:circuit-sat, @def:spin-glass.] 
-] - -#proof[ +#reduction-rule("CircuitSAT", "SpinGlass")[ + @whitfield2012 @lucas2014 Each gate maps to a gadget whose ground states encode valid I/O. +][ _Spin mapping:_ $sigma in {0,1} arrow.bar s = 2sigma - 1 in {-1, +1}$. _Gate gadgets_ (inputs 0,1; output 2; auxiliary 3 for XOR) are shown in @tab:gadgets. Allocate spins per variable, instantiate gadgets, sum Hamiltonians. Ground states correspond to satisfying assignments. @@ -592,11 +651,9 @@ where $P$ is a penalty weight large enough that any constraint violation costs m caption: [Ising gadgets for logic gates. Ground states match truth tables.] ) -#theorem[ - *(Factoring $arrow.r$ Circuit-SAT)* An array multiplier with output constrained to $N$ is satisfiable iff $N$ factors within bit bounds. _(Folklore; no canonical reference.)_ [_Problems:_ @def:factoring, @def:circuit-sat.] -] - -#proof[ +#reduction-rule("Factoring", "CircuitSAT")[ + An array multiplier with output constrained to $N$ is satisfiable iff $N$ factors within bit bounds. _(Folklore; no canonical reference.)_ +][ _Construction._ Build $m times n$ array multiplier for $p times q$: _Full adder $(i,j)$:_ $s_(i,j) + 2c_(i,j) = (p_i and q_j) + s_"prev" + c_"prev"$ via: @@ -608,11 +665,11 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ $p = sum_i p_i 2^(i-1)$, $q = sum_j q_j 2^(j-1)$. ] -#theorem[ - *(Spin Glass $arrow.l.r$ Max-Cut)* @barahona1982 @lucas2014 Ground states of Ising models correspond to maximum cuts. [_Problems:_ @def:spin-glass, @def:max-cut.] -] - -#proof[ +#reduction-rule("SpinGlass", "MaxCut", + bidirectional: true, +)[ + @barahona1982 @lucas2014 Ground states of Ising models correspond to maximum cuts. +][ _MaxCut $arrow.r$ SpinGlass:_ Set $J_(i j) = w_(i j)$, $h_i = 0$. Maximizing cut equals minimizing $-sum J_(i j) s_i s_j$ since $s_i s_j = -1$ when $s_i != s_j$. _SpinGlass $arrow.r$ MaxCut:_ If $h_i = 0$: direct mapping $w_(i j) = J_(i j)$. 
Otherwise, add ancilla $a$ with $w_(i,a) = h_i$. @@ -620,11 +677,9 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ Without ancilla: identity. With ancilla: if $sigma_a = 1$, flip all spins before removing ancilla. ] -#theorem[ - *(Coloring $arrow.r$ ILP)* The $k$-coloring problem reduces to binary ILP with $|V| dot k$ variables and $|V| + |E| dot k$ constraints. [_Problems:_ @def:coloring, @def:ilp.] -] - -#proof[ +#reduction-rule("KColoring", "ILP")[ + The $k$-coloring problem reduces to binary ILP with $|V| dot k$ variables and $|V| + |E| dot k$ constraints. +][ _Construction._ For graph $G = (V, E)$ with $k$ colors: _Variables:_ Binary $x_(v,c) in {0, 1}$ for each vertex $v in V$ and color $c in {1, ..., k}$. Interpretation: $x_(v,c) = 1$ iff vertex $v$ has color $c$. @@ -638,11 +693,9 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ For each vertex $v$, find $c$ with $x_(v,c) = 1$; assign color $c$ to $v$. ] -#theorem[ - *(Factoring $arrow.r$ ILP)* Integer factorization reduces to binary ILP using McCormick linearization with $O(m n)$ variables and constraints. [_Problems:_ @def:factoring, @def:ilp.] -] - -#proof[ +#reduction-rule("Factoring", "ILP")[ + Integer factorization reduces to binary ILP using McCormick linearization with $O(m n)$ variables and constraints. +][ _Construction._ For target $N$ with $m$-bit factor $p$ and $n$-bit factor $q$: _Variables:_ Binary $p_i, q_j in {0,1}$ for factor bits; binary $z_(i j) in {0,1}$ for products $p_i dot q_j$; integer $c_k >= 0$ for carries at each bit position. @@ -665,69 +718,53 @@ where $P$ is a penalty weight large enough that any constraint violation costs m The following reductions to Integer Linear Programming are straightforward formulations where problem constraints map directly to linear inequalities. 
-#theorem[ - *(IS $arrow.r$ ILP)* The maximum-weight IS problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. [_Problems:_ @def:independent-set, @def:ilp.] -] - -#proof[ +#reduction-rule("IndependentSet", "ILP")[ + The maximum-weight IS problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. +][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in E$. Objective: maximize $sum_v w_v x_v$. _Solution extraction:_ $S = {v : x_v = 1}$. ] -#theorem[ - *(VC $arrow.r$ ILP)* The minimum-weight VC problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. [_Problems:_ @def:vertex-cover, @def:ilp.] -] - -#proof[ +#reduction-rule("VertexCovering", "ILP")[ + The minimum-weight VC problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. +][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v >= 1$ for each $(u, v) in E$. Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $C = {v : x_v = 1}$. ] -#theorem[ - *(Matching $arrow.r$ ILP)* The maximum-weight matching reduces to binary ILP with $|E|$ variables and $|V|$ constraints. [_Problems:_ @def:matching, @def:ilp.] -] - -#proof[ +#reduction-rule("Matching", "ILP")[ + The maximum-weight matching reduces to binary ILP with $|E|$ variables and $|V|$ constraints. +][ _Construction._ Variables: $x_e in {0, 1}$ for each $e in E$. Constraints: $sum_(e in.rev v) x_e <= 1$ for each $v in V$. Objective: maximize $sum_e w_e x_e$. _Solution extraction:_ $M = {e : x_e = 1}$. ] -#theorem[ - *(SetPacking $arrow.r$ ILP)* Set packing reduces to binary ILP with $|cal(S)|$ variables and at most $binom(|cal(S)|, 2)$ constraints. [_Problems:_ @def:set-packing, @def:ilp.] -] - -#proof[ +#reduction-rule("SetPacking", "ILP")[ + Set packing reduces to binary ILP with $|cal(S)|$ variables and at most $binom(|cal(S)|, 2)$ constraints. 
+][ _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $x_i + x_j <= 1$ for each overlapping pair $S_i, S_j in cal(S)$ with $S_i inter S_j != emptyset$. Objective: maximize $sum_i w_i x_i$. _Solution extraction:_ $cal(P) = {S_i : x_i = 1}$. ] -#theorem[ - *(SetCovering $arrow.r$ ILP)* Set covering reduces to binary ILP with $|cal(S)|$ variables and $|U|$ constraints. [_Problems:_ @def:set-covering, @def:ilp.] -] - -#proof[ +#reduction-rule("SetCovering", "ILP")[ + Set covering reduces to binary ILP with $|cal(S)|$ variables and $|U|$ constraints. +][ _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $sum_(S_i in.rev u) x_i >= 1$ for each $u in U$. Objective: minimize $sum_i w_i x_i$. _Solution extraction:_ $cal(C) = {S_i : x_i = 1}$. ] -#theorem[ - *(DominatingSet $arrow.r$ ILP)* Dominating set reduces to binary ILP with $|V|$ variables and $|V|$ constraints. [_Problems:_ @def:dominating-set, @def:ilp.] -] - -#proof[ +#reduction-rule("DominatingSet", "ILP")[ + Dominating set reduces to binary ILP with $|V|$ variables and $|V|$ constraints. +][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_v + sum_(u in N(v)) x_u >= 1$ for each $v in V$ (each vertex dominated). Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $D = {v : x_v = 1}$. ] -#theorem[ - *(Clique $arrow.r$ ILP)* Maximum clique reduces to binary ILP with $|V|$ variables and $O(|overline(E)|)$ constraints. [_Problems:_ @def:clique, @def:ilp.] -] - -#proof[ +#reduction-rule("Clique", "ILP")[ + Maximum clique reduces to binary ILP with $|V|$ variables and $O(|overline(E)|)$ constraints. +][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in.not E$ (non-edges). Objective: maximize $sum_v x_v$. Equivalently, IS on the complement graph. _Solution extraction:_ $K = {v : x_v = 1}$. 
] == Unit Disk Mapping -#theorem[ - *(IS $arrow.r$ GridGraph IS)* @nguyen2023 Any MIS problem on a general graph $G$ can be reduced to MIS on a unit disk graph (King's subgraph) with at most quadratic overhead in the number of vertices. [_Problem:_ @def:independent-set.] -] - -#proof[ +#reduction-rule("IndependentSet", "GridGraph")[ + @nguyen2023 Any MIS problem on a general graph $G$ can be reduced to MIS on a unit disk graph (King's subgraph) with at most quadratic overhead in the number of vertices. +][ _Construction (Copy-Line Method)._ Given $G = (V, E)$ with $n = |V|$: 1. _Vertex ordering:_ Compute a path decomposition of $G$ to obtain vertex order $(v_1, ..., v_n)$. The pathwidth determines the grid height. From 208fa5b04141f240763e2fc25f8d367b444514f8 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 20:14:51 +0800 Subject: [PATCH 04/14] fix: correct overhead formulas and add Display for polynomials MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fix Monomial/Polynomial Display to use `*` between terms - Add num_literals to Satisfiability::problem_size() - Fix 6 incorrect overhead polynomials: SAT→DS, SAT→IS, SAT→Coloring, SAT→kSAT, Factoring→Circuit, Coloring→QUBO - Add poly!(a * b) macro variant for variable products - Export overhead data in reduction_graph.json - Simplify coloring_ilp.rs to use poly! 
macro Co-Authored-By: Claude Opus 4.6 --- docs/paper/reduction_graph.json | 208 ++++++++++++++++++++++++++++--- docs/paper/reductions.typ | 116 +++++++++++------ src/models/satisfiability/sat.rs | 6 + src/polynomial.rs | 87 +++++++++++++ src/rules/coloring_ilp.rs | 21 +--- src/rules/coloring_qubo.rs | 2 +- src/rules/factoring_circuit.rs | 2 +- src/rules/graph.rs | 31 ++++- src/rules/sat_coloring.rs | 3 +- src/rules/sat_dominatingset.rs | 4 +- src/rules/sat_independentset.rs | 4 +- src/rules/sat_ksat.rs | 4 +- 12 files changed, 401 insertions(+), 87 deletions(-) diff --git a/docs/paper/reduction_graph.json b/docs/paper/reduction_graph.json index 1e7fd7f1d..4199ca597 100644 --- a/docs/paper/reduction_graph.json +++ b/docs/paper/reduction_graph.json @@ -315,7 +315,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_spins", + "formula": "num_assignments" + }, + { + "field": "num_interactions", + "formula": "num_assignments" + } + ] }, { "source": { @@ -332,7 +342,13 @@ "weight": "i32" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_gates", + "formula": "num_bits_first * num_bits_second" + } + ] }, { "source": { @@ -349,7 +365,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "2 * num_bits_first + 2 * num_bits_second + num_bits_first * num_bits_second" + }, + { + "field": "num_constraints", + "formula": "3 * num_bits_first * num_bits_second + num_bits_first + num_bits_second + 1" + } + ] }, { "source": { @@ -366,7 +392,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vars" + } + ] }, { "source": { @@ -383,7 +415,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices" + } + ] }, { "source": 
{ @@ -401,7 +439,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices * num_colors" + }, + { + "field": "num_constraints", + "formula": "num_vertices + num_edges * num_colors" + } + ] }, { "source": { @@ -418,7 +466,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices * num_colors" + } + ] }, { "source": { @@ -435,7 +489,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vars" + } + ] }, { "source": { @@ -452,7 +512,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_sets", + "formula": "num_edges" + }, + { + "field": "num_elements", + "formula": "num_vertices" + } + ] }, { "source": { @@ -469,7 +539,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vertices", + "formula": "3 * num_vars + num_clauses" + }, + { + "field": "num_edges", + "formula": "3 * num_vars + num_literals" + } + ] }, { "source": { @@ -486,7 +566,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vertices", + "formula": "num_literals" + }, + { + "field": "num_edges", + "formula": "num_literals^2" + } + ] }, { "source": { @@ -503,7 +593,17 @@ "weight": "i32" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vertices", + "formula": "2 * num_vars + 5 * num_literals - 5 * num_clauses + 3" + }, + { + "field": "num_colors", + "formula": "3" + } + ] }, { "source": { @@ -520,7 +620,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_clauses", + "formula": "num_clauses + num_literals" + }, + { + "field": 
"num_vars", + "formula": "num_vars + num_literals" + } + ] }, { "source": { @@ -537,7 +647,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vertices", + "formula": "num_sets" + }, + { + "field": "num_edges", + "formula": "num_sets" + } + ] }, { "source": { @@ -554,7 +674,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_sets" + } + ] }, { "source": { @@ -571,7 +697,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vertices", + "formula": "num_spins" + }, + { + "field": "num_edges", + "formula": "num_interactions" + } + ] }, { "source": { @@ -588,7 +724,13 @@ "weight": "f64" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vars", + "formula": "num_spins" + } + ] }, { "source": { @@ -605,7 +747,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vertices", + "formula": "num_vertices" + }, + { + "field": "num_edges", + "formula": "num_edges" + } + ] }, { "source": { @@ -622,7 +774,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_sets", + "formula": "num_vertices" + }, + { + "field": "num_elements", + "formula": "num_edges" + } + ] }, { "source": { @@ -639,7 +801,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices" + } + ] } ] } \ No newline at end of file diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index 398cd5e1b..bd09c4a8b 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -44,12 +44,6 @@ "GridGraph": "gridgraph", ) -// Special case mappings where JSON direction differs from theorem label -#let label-overrides = ( - 
"SetPacking->IndependentSet": "thm:is-to-setpacking", - "VertexCovering->IndependentSet": "thm:is-to-vc", -) - // Problem display names for theorem headers #let display-name = ( "IndependentSet": "IS", @@ -93,6 +87,12 @@ ) +// Special case mappings where JSON direction differs from theorem label +#let label-overrides = ( + "SetPacking->IndependentSet": "thm:is-to-setpacking", + "VertexCovering->IndependentSet": "thm:is-to-vc", +) + // Generate theorem label from source/target names (canonical direction) #let reduction-label(source, target) = { // Check for override first @@ -945,7 +945,79 @@ The following table shows concrete variable overhead for example instances, gene = Summary -#let gray = rgb("#e8e8e8") +// Metadata for each reduction: (overhead, references, is_trivial) +#let reduction-meta = ( + // Trivial reductions (gray) + "Matching->SetPacking": ([$O(|E|)$], [—], true), + "VertexCovering->SetCovering": ([$O(|V| + |E|)$], [—], true), + "SpinGlass->QUBO": ([$O(n^2)$], [—], true), + // QUBO reductions + "IndependentSet->QUBO": ([$O(n)$], [@lucas2014 @glover2019], false), + "VertexCovering->QUBO": ([$O(n)$], [@lucas2014 @glover2019], false), + "KColoring->QUBO": ([$O(n dot k)$], [@lucas2014 @glover2019], false), + "SetPacking->QUBO": ([$O(n)$], [@glover2019], false), + "KSatisfiability->QUBO": ([$O(n)$], [@glover2019], false), + "ILP->QUBO": ([$O(n)$], [@lucas2014 @glover2019], false), + // SAT reductions + "Satisfiability->IndependentSet": ([$O(sum_j |C_j|^2)$], [@karp1972], false), + "Satisfiability->KColoring": ([$O(n + sum_j |C_j|)$], [@garey1979], false), + "Satisfiability->DominatingSet": ([$O(3n + m)$], [@garey1979], false), + "Satisfiability->KSatisfiability": ([$O(sum_j |C_j|)$], [@cook1971 @garey1979], false), + // Circuit/Physics reductions + "CircuitSAT->SpinGlass": ([$O(|"gates"|)$], [@whitfield2012 @lucas2014], false), + "Factoring->CircuitSAT": ([$O(m n)$], [Folklore], false), + "SpinGlass->MaxCut": ([$O(n + |J|)$], [@barahona1982 @lucas2014], 
false), + // ILP reductions (trivial) + "KColoring->ILP": ([$O(|V| dot k + |E| dot k)$], [—], true), + "Factoring->ILP": ([$O(m n)$], [—], true), + "IndependentSet->ILP": ([$O(|V| + |E|)$], [—], true), + "VertexCovering->ILP": ([$O(|V| + |E|)$], [—], true), + "Matching->ILP": ([$O(|E| + |V|)$], [—], true), + "SetPacking->ILP": ([$O(|cal(S)|^2)$], [—], true), + "SetCovering->ILP": ([$O(|cal(S)| + |U|)$], [—], true), + "DominatingSet->ILP": ([$O(|V| + |E|)$], [—], true), + "Clique->ILP": ([$O(|V| + |overline(E)|)$], [—], true), + // Grid graph + "IndependentSet->GridGraph": ([$O(n^2)$], [@nguyen2023], false), +) + +// Get unique edges from graph-data +#let unique-edges = { + let seen = () + let result = () + for e in graph-data.edges { + let key = e.source.name + "->" + e.target.name + if key not in seen { + seen.push(key) + result.push(( + source: e.source.name, + target: e.target.name, + bidir: e.bidirectional, + key: key, + )) + } + } + result +} + +// Generate table row for an edge +#let make-row(e) = { + let src-disp = display-name.at(e.source, default: e.source) + let tgt-disp = display-name.at(e.target, default: e.target) + let arrow = if e.bidir { $arrow.l.r$ } else { $arrow.r$ } + let meta = reduction-meta.at(e.key, default: ([?], [?], false)) + let (overhead, refs, is-trivial) = meta + let gray = rgb("#e8e8e8") + if is-trivial { + ( + table.cell(fill: gray)[#src-disp #arrow #tgt-disp], + table.cell(fill: gray)[#overhead], + table.cell(fill: gray)[#refs], + ) + } else { + ([#src-disp #arrow #tgt-disp], [#overhead], [#refs]) + } +} #figure( table( @@ -953,35 +1025,9 @@ The following table shows concrete variable overhead for example instances, gene inset: 5pt, align: left, table.header([*Reduction*], [*Overhead*], [*Reference*]), - table.cell(fill: gray)[IS $arrow.l.r$ VC], table.cell(fill: gray)[$O(|V|)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[IS $arrow.r$ SetPacking], table.cell(fill: gray)[$O(|V| + |E|)$], table.cell(fill: gray)[—], - 
table.cell(fill: gray)[Matching $arrow.r$ SetPacking], table.cell(fill: gray)[$O(|E|)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[VC $arrow.r$ SetCovering], table.cell(fill: gray)[$O(|V| + |E|)$], table.cell(fill: gray)[—], - [IS $arrow.r$ QUBO], [$O(n)$], [@lucas2014 @glover2019], - [VC $arrow.r$ QUBO], [$O(n)$], [@lucas2014 @glover2019], - [KColoring $arrow.r$ QUBO], [$O(n dot k)$], [@lucas2014 @glover2019], - [SetPacking $arrow.r$ QUBO], [$O(n)$], [@glover2019], - [2-SAT $arrow.r$ QUBO], [$O(n)$], [@glover2019], - [Binary ILP $arrow.r$ QUBO], [$O(n)$], [@lucas2014 @glover2019], - [SAT $arrow.r$ IS], [$O(sum_j |C_j|^2)$], [@karp1972], - [SAT $arrow.r$ 3-Coloring], [$O(n + sum_j |C_j|)$], [@garey1979], - [SAT $arrow.r$ DominatingSet], [$O(3n + m)$], [@garey1979], - [SAT $arrow.l.r$ $k$-SAT], [$O(sum_j |C_j|)$], [@cook1971 @garey1979], - [CircuitSAT $arrow.r$ SpinGlass], [$O(|"gates"|)$], [@whitfield2012 @lucas2014], - [Factoring $arrow.r$ CircuitSAT], [$O(m n)$], [Folklore], - [SpinGlass $arrow.l.r$ MaxCut], [$O(n + |J|)$], [@barahona1982 @lucas2014], - table.cell(fill: gray)[Coloring $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| dot k + |E| dot k)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[Factoring $arrow.r$ ILP], table.cell(fill: gray)[$O(m n)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[IS $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |E|)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[VC $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |E|)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[Matching $arrow.r$ ILP], table.cell(fill: gray)[$O(|E| + |V|)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[SetPacking $arrow.r$ ILP], table.cell(fill: gray)[$O(|cal(S)|^2)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[SetCovering $arrow.r$ ILP], table.cell(fill: gray)[$O(|cal(S)| + |U|)$], table.cell(fill: gray)[—], - table.cell(fill: gray)[DominatingSet $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |E|)$], 
table.cell(fill: gray)[—], - table.cell(fill: gray)[Clique $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |overline(E)|)$], table.cell(fill: gray)[—], - [IS $arrow.r$ GridGraph IS], [$O(n^2)$], [@nguyen2023], + ..unique-edges.map(make-row).flatten() ), - caption: [Summary of reductions. Gray rows indicate trivial (complement/isomorphism) reductions.] + caption: [Summary of #unique-edges.len() reductions. Gray rows indicate trivial (complement/isomorphism) reductions.] ) #bibliography("references.bib", style: "ieee") diff --git a/src/models/satisfiability/sat.rs b/src/models/satisfiability/sat.rs index faeacb33c..cd54d52e7 100644 --- a/src/models/satisfiability/sat.rs +++ b/src/models/satisfiability/sat.rs @@ -158,6 +158,11 @@ impl Satisfiability { self.clauses.len() } + /// Get the total number of literal occurrences across all clauses. + pub fn num_literals(&self) -> usize { + self.clauses.iter().map(|c| c.len()).sum() + } + /// Get the clauses. pub fn clauses(&self) -> &[CNFClause] { &self.clauses @@ -217,6 +222,7 @@ where ProblemSize::new(vec![ ("num_vars", self.num_vars), ("num_clauses", self.clauses.len()), + ("num_literals", self.num_literals()), ]) } diff --git a/src/polynomial.rs b/src/polynomial.rs index a41d2833f..60436b2d7 100644 --- a/src/polynomial.rs +++ b/src/polynomial.rs @@ -1,6 +1,7 @@ //! Polynomial representation for reduction overhead. use crate::types::ProblemSize; +use std::fmt; use std::ops::Add; /// A monomial: coefficient × Π(variable^exponent) @@ -79,6 +80,16 @@ impl Polynomial { } } + /// Create a polynomial with a single monomial that is a product of two variables. 
+ pub fn var_product(a: &'static str, b: &'static str) -> Self { + Self { + terms: vec![Monomial { + coefficient: 1.0, + variables: vec![(a, 1), (b, 1)], + }], + } + } + pub fn scale(mut self, c: f64) -> Self { for term in &mut self.terms { term.coefficient *= c; @@ -91,6 +102,74 @@ impl Polynomial { } } +impl fmt::Display for Monomial { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let coeff_i = self.coefficient.round() as i64; + let is_int = (self.coefficient - coeff_i as f64).abs() < 1e-10; + if self.variables.is_empty() { + if is_int { + write!(f, "{coeff_i}") + } else { + write!(f, "{}", self.coefficient) + } + } else { + let has_coeff = if is_int { + match coeff_i { + 1 => false, + -1 => { + write!(f, "-")?; + false + } + _ => { + write!(f, "{coeff_i}")?; + true + } + } + } else { + write!(f, "{}", self.coefficient)?; + true + }; + for (i, (name, exp)) in self.variables.iter().enumerate() { + if has_coeff || i > 0 { + write!(f, " * ")?; + } + write!(f, "{name}")?; + if *exp > 1 { + write!(f, "^{exp}")?; + } + } + Ok(()) + } + } +} + +impl fmt::Display for Polynomial { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.terms.is_empty() { + write!(f, "0") + } else { + for (i, term) in self.terms.iter().enumerate() { + if i > 0 { + if term.coefficient < 0.0 { + write!(f, " - ")?; + let negated = Monomial { + coefficient: -term.coefficient, + variables: term.variables.clone(), + }; + write!(f, "{negated}")?; + } else { + write!(f, " + ")?; + write!(f, "{term}")?; + } + } else { + write!(f, "{term}")?; + } + } + Ok(()) + } + } +} + impl Add for Polynomial { type Output = Self; @@ -123,6 +202,14 @@ macro_rules! 
poly { ($c:literal * $name:ident ^ $exp:literal) => { $crate::polynomial::Polynomial::var_pow(stringify!($name), $exp).scale($c as f64) }; + // Product of two variables: poly!(a * b) + ($a:ident * $b:ident) => { + $crate::polynomial::Polynomial::var_product(stringify!($a), stringify!($b)) + }; + // Scaled product of two variables: poly!(3 * a * b) + ($c:literal * $a:ident * $b:ident) => { + $crate::polynomial::Polynomial::var_product(stringify!($a), stringify!($b)).scale($c as f64) + }; } #[cfg(test)] diff --git a/src/rules/coloring_ilp.rs b/src/rules/coloring_ilp.rs index 96c5047a5..ef5e9a228 100644 --- a/src/rules/coloring_ilp.rs +++ b/src/rules/coloring_ilp.rs @@ -9,7 +9,7 @@ use crate::models::graph::KColoring; use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; -use crate::polynomial::{Monomial, Polynomial}; +use crate::poly; use crate::rules::registry::{ReductionEntry, ReductionOverhead}; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::topology::{Graph, SimpleGraph}; @@ -24,23 +24,8 @@ inventory::submit! 
{ source_variant: &[("k", "N"), ("graph", "SimpleGraph"), ("weight", "i32")], target_variant: &[("graph", ""), ("weight", "Unweighted")], overhead_fn: || ReductionOverhead::new(vec![ - // num_vars = num_vertices * num_colors - ("num_vars", Polynomial { - terms: vec![Monomial { - coefficient: 1.0, - variables: vec![("num_vertices", 1), ("num_colors", 1)], - }] - }), - // num_constraints = num_vertices + num_edges * num_colors - ("num_constraints", Polynomial { - terms: vec![ - Monomial::var("num_vertices"), - Monomial { - coefficient: 1.0, - variables: vec![("num_edges", 1), ("num_colors", 1)], - }, - ] - }), + ("num_vars", poly!(num_vertices * num_colors)), + ("num_constraints", poly!(num_vertices) + poly!(num_edges * num_colors)), ]), } } diff --git a/src/rules/coloring_qubo.rs b/src/rules/coloring_qubo.rs index 6e7f0952d..3475a0ae2 100644 --- a/src/rules/coloring_qubo.rs +++ b/src/rules/coloring_qubo.rs @@ -56,7 +56,7 @@ impl ReductionResult for ReductionKColoringToQUBO { #[reduction( source_graph = "SimpleGraph", - overhead = { ReductionOverhead::new(vec![("num_vars", poly!(num_vertices))]) } + overhead = { ReductionOverhead::new(vec![("num_vars", poly!(num_vertices * num_colors))]) } )] impl ReduceTo> for KColoring { type Result = ReductionKColoringToQUBO; diff --git a/src/rules/factoring_circuit.rs b/src/rules/factoring_circuit.rs index 4d71cb58b..d663e3dc3 100644 --- a/src/rules/factoring_circuit.rs +++ b/src/rules/factoring_circuit.rs @@ -191,7 +191,7 @@ fn build_multiplier_cell( #[reduction(overhead = { ReductionOverhead::new(vec![ - ("num_gates", poly!(num_bits_first^2)), + ("num_gates", poly!(num_bits_first * num_bits_second)), ]) })] impl ReduceTo> for Factoring { diff --git a/src/rules/graph.rs b/src/rules/graph.rs index 61ae73e75..cbb15f525 100644 --- a/src/rules/graph.rs +++ b/src/rules/graph.rs @@ -52,6 +52,15 @@ pub struct VariantRef { pub variant: std::collections::BTreeMap, } +/// A single output field in the reduction overhead. 
+#[derive(Debug, Clone, Serialize)] +pub struct OverheadFieldJson { + /// Output field name (e.g., "num_vars"). + pub field: String, + /// Formula as a human-readable string (e.g., "num_vertices"). + pub formula: String, +} + /// An edge in the reduction graph JSON. #[derive(Debug, Clone, Serialize)] pub struct EdgeJson { @@ -61,6 +70,8 @@ pub struct EdgeJson { pub target: VariantRef, /// Whether the reverse reduction also exists. pub bidirectional: bool, + /// Reduction overhead: output size as polynomials of input size. + pub overhead: Vec, } /// A path through the reduction graph. @@ -611,27 +622,37 @@ impl ReductionGraph { nodes.sort_by(|a, b| (&a.name, &a.variant).cmp(&(&b.name, &b.variant))); // Collect edges, checking for bidirectionality - let mut edge_set: HashMap<(VariantRef, VariantRef), bool> = HashMap::new(); + let mut edge_set: HashMap<(VariantRef, VariantRef), (bool, ReductionOverhead)> = + HashMap::new(); for entry in inventory::iter:: { let src_ref = Self::make_variant_ref(entry.source_name, entry.source_variant); let dst_ref = Self::make_variant_ref(entry.target_name, entry.target_variant); + let overhead = entry.overhead(); let reverse_key = (dst_ref.clone(), src_ref.clone()); - if edge_set.contains_key(&reverse_key) { - edge_set.insert(reverse_key, true); + if let Some(existing) = edge_set.get_mut(&reverse_key) { + existing.0 = true; } else { - edge_set.insert((src_ref, dst_ref), false); + edge_set.insert((src_ref, dst_ref), (false, overhead)); } } // Build edges let mut edges: Vec = edge_set .into_iter() - .map(|((src, dst), bidirectional)| EdgeJson { + .map(|((src, dst), (bidirectional, overhead))| EdgeJson { source: src, target: dst, bidirectional, + overhead: overhead + .output_size + .iter() + .map(|(field, poly)| OverheadFieldJson { + field: field.to_string(), + formula: poly.to_string(), + }) + .collect(), }) .collect(); edges.sort_by(|a, b| { diff --git a/src/rules/sat_coloring.rs b/src/rules/sat_coloring.rs index c4023259d..9865f0189 
100644 --- a/src/rules/sat_coloring.rs +++ b/src/rules/sat_coloring.rs @@ -319,7 +319,8 @@ impl ReductionSATToColoring { target_graph = "SimpleGraph", overhead = { ReductionOverhead::new(vec![ - ("num_vertices", poly!(3 * num_vars)), + // 2*num_vars + 3 (base) + 5*(num_literals - num_clauses) (OR gadgets) + ("num_vertices", poly!(2 * num_vars) + poly!(5 * num_literals) + poly!(num_clauses).scale(-5.0) + poly!(3)), ("num_colors", poly!(3)), ]) } diff --git a/src/rules/sat_dominatingset.rs b/src/rules/sat_dominatingset.rs index 4f3dc7e6f..d8d5445d1 100644 --- a/src/rules/sat_dominatingset.rs +++ b/src/rules/sat_dominatingset.rs @@ -134,8 +134,8 @@ impl ReductionSATToDS { target_graph = "SimpleGraph", overhead = { ReductionOverhead::new(vec![ - ("num_vertices", poly!(num_vars)), - ("num_edges", poly!(num_clauses)), + ("num_vertices", poly!(3 * num_vars) + poly!(num_clauses)), + ("num_edges", poly!(3 * num_vars) + poly!(num_literals)), ]) } )] diff --git a/src/rules/sat_independentset.rs b/src/rules/sat_independentset.rs index f71e0266c..30dee06c6 100644 --- a/src/rules/sat_independentset.rs +++ b/src/rules/sat_independentset.rs @@ -130,8 +130,8 @@ impl ReductionSATToIS { target_graph = "SimpleGraph", overhead = { ReductionOverhead::new(vec![ - ("num_vertices", poly!(7 * num_clauses)), - ("num_edges", poly!(21 * num_clauses)), + ("num_vertices", poly!(num_literals)), + ("num_edges", poly!(num_literals ^ 2)), ]) } )] diff --git a/src/rules/sat_ksat.rs b/src/rules/sat_ksat.rs index f8b2227b6..bca1cc025 100644 --- a/src/rules/sat_ksat.rs +++ b/src/rules/sat_ksat.rs @@ -235,8 +235,8 @@ inventory::submit! 
{ source_variant: &[("graph", ""), ("weight", "Unweighted")], target_variant: &[("graph", ""), ("weight", "Unweighted")], overhead_fn: || ReductionOverhead::new(vec![ - ("num_clauses", poly!(num_clauses)), - ("num_vars", poly!(num_vars)), + ("num_clauses", poly!(num_clauses) + poly!(num_literals)), + ("num_vars", poly!(num_vars) + poly!(num_literals)), ]), } } From 07688bac5b59ab3b581a4ab037303d8a500b2745 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 20:21:15 +0800 Subject: [PATCH 05/14] docs: show reduction overhead in theorem statements Add auto-generated overhead line to each reduction theorem by looking up the edge data from reduction_graph.json. Format: field = formula pairs. Co-Authored-By: Claude Opus 4.6 --- docs/paper/reductions.typ | 107 +++++++------------------------------- 1 file changed, 20 insertions(+), 87 deletions(-) diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index bd09c4a8b..b90c6fc01 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -220,6 +220,23 @@ base_level: 1, ) +// Look up overhead for a reduction edge from graph-data +#let get-overhead(source, target) = { + // Try forward direction + let edge = graph-data.edges.find(e => e.source.name == source and e.target.name == target) + // Try reverse (for bidirectional) + if edge == none { + edge = graph-data.edges.find(e => e.source.name == target and e.target.name == source) + } + if edge != none and edge.overhead.len() > 0 { edge.overhead } else { none } +} + +// Format overhead fields as inline text +#let format-overhead(overhead) = { + let parts = overhead.map(o => raw(o.field + " = " + o.formula)) + [_Overhead:_ #parts.join(", ").] +} + // Unified function for reduction rules: theorem + proof + optional example #let reduction-rule( source, target, @@ -243,12 +260,14 @@ } else { [_Problems:_ #ref(label(src-def)), #ref(label(tgt-def)).] 
} + let overhead = get-overhead(source, target) let src-abbr = name-abbrev.at(source, default: lower(source)) let tgt-abbr = name-abbrev.at(target, default: lower(target)) let thm-lbl = label("thm:" + src-abbr + "-to-" + tgt-abbr) [#theorem[ *(#src-disp #arrow #tgt-disp)* #theorem-body [#problems] + #if overhead != none { linebreak(); format-overhead(overhead) } ] #thm-lbl] proof[#proof-body] @@ -943,91 +962,5 @@ The following table shows concrete variable overhead for example instances, gene (name: n, data: d) }) -= Summary - -// Metadata for each reduction: (overhead, references, is_trivial) -#let reduction-meta = ( - // Trivial reductions (gray) - "Matching->SetPacking": ([$O(|E|)$], [—], true), - "VertexCovering->SetCovering": ([$O(|V| + |E|)$], [—], true), - "SpinGlass->QUBO": ([$O(n^2)$], [—], true), - // QUBO reductions - "IndependentSet->QUBO": ([$O(n)$], [@lucas2014 @glover2019], false), - "VertexCovering->QUBO": ([$O(n)$], [@lucas2014 @glover2019], false), - "KColoring->QUBO": ([$O(n dot k)$], [@lucas2014 @glover2019], false), - "SetPacking->QUBO": ([$O(n)$], [@glover2019], false), - "KSatisfiability->QUBO": ([$O(n)$], [@glover2019], false), - "ILP->QUBO": ([$O(n)$], [@lucas2014 @glover2019], false), - // SAT reductions - "Satisfiability->IndependentSet": ([$O(sum_j |C_j|^2)$], [@karp1972], false), - "Satisfiability->KColoring": ([$O(n + sum_j |C_j|)$], [@garey1979], false), - "Satisfiability->DominatingSet": ([$O(3n + m)$], [@garey1979], false), - "Satisfiability->KSatisfiability": ([$O(sum_j |C_j|)$], [@cook1971 @garey1979], false), - // Circuit/Physics reductions - "CircuitSAT->SpinGlass": ([$O(|"gates"|)$], [@whitfield2012 @lucas2014], false), - "Factoring->CircuitSAT": ([$O(m n)$], [Folklore], false), - "SpinGlass->MaxCut": ([$O(n + |J|)$], [@barahona1982 @lucas2014], false), - // ILP reductions (trivial) - "KColoring->ILP": ([$O(|V| dot k + |E| dot k)$], [—], true), - "Factoring->ILP": ([$O(m n)$], [—], true), - "IndependentSet->ILP": ([$O(|V| + 
|E|)$], [—], true), - "VertexCovering->ILP": ([$O(|V| + |E|)$], [—], true), - "Matching->ILP": ([$O(|E| + |V|)$], [—], true), - "SetPacking->ILP": ([$O(|cal(S)|^2)$], [—], true), - "SetCovering->ILP": ([$O(|cal(S)| + |U|)$], [—], true), - "DominatingSet->ILP": ([$O(|V| + |E|)$], [—], true), - "Clique->ILP": ([$O(|V| + |overline(E)|)$], [—], true), - // Grid graph - "IndependentSet->GridGraph": ([$O(n^2)$], [@nguyen2023], false), -) - -// Get unique edges from graph-data -#let unique-edges = { - let seen = () - let result = () - for e in graph-data.edges { - let key = e.source.name + "->" + e.target.name - if key not in seen { - seen.push(key) - result.push(( - source: e.source.name, - target: e.target.name, - bidir: e.bidirectional, - key: key, - )) - } - } - result -} - -// Generate table row for an edge -#let make-row(e) = { - let src-disp = display-name.at(e.source, default: e.source) - let tgt-disp = display-name.at(e.target, default: e.target) - let arrow = if e.bidir { $arrow.l.r$ } else { $arrow.r$ } - let meta = reduction-meta.at(e.key, default: ([?], [?], false)) - let (overhead, refs, is-trivial) = meta - let gray = rgb("#e8e8e8") - if is-trivial { - ( - table.cell(fill: gray)[#src-disp #arrow #tgt-disp], - table.cell(fill: gray)[#overhead], - table.cell(fill: gray)[#refs], - ) - } else { - ([#src-disp #arrow #tgt-disp], [#overhead], [#refs]) - } -} - -#figure( - table( - columns: (auto, auto, auto), - inset: 5pt, - align: left, - table.header([*Reduction*], [*Overhead*], [*Reference*]), - ..unique-edges.map(make-row).flatten() - ), - caption: [Summary of #unique-edges.len() reductions. Gray rows indicate trivial (complement/isomorphism) reductions.] 
-) - +#pagebreak() #bibliography("references.bib", style: "ieee") From e97dc8e8e28c6750c0fde59d2fc9f41beb400910 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 20:50:33 +0800 Subject: [PATCH 06/14] fix: add edge click and hover handlers to mdBook reduction graph MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add edge hover tooltip showing source→target and overhead formulas - Add edge click handler to highlight edge and show overhead info - Add cursor:pointer to edges for visual feedback - Sync reduction_graph.json with overhead data Co-Authored-By: Claude Opus 4.6 --- docs/src/introduction.md | 43 ++++- docs/src/reductions/reduction_graph.json | 208 ++++++++++++++++++++--- 2 files changed, 228 insertions(+), 23 deletions(-) diff --git a/docs/src/introduction.md b/docs/src/introduction.md index c7bc7e61a..f20bb7f97 100644 --- a/docs/src/introduction.md +++ b/docs/src/introduction.md @@ -57,7 +57,7 @@ For theoretical background and correctness proofs, see the [PDF manual](https:// var rev = e.target.name + '->' + e.source.name; if (edgeMap[rev]) { edgeMap[rev].bidirectional = true; } else if (!edgeMap[fwd]) { - edgeMap[fwd] = { source: e.source.name, target: e.target.name, bidirectional: e.bidirectional || false }; + edgeMap[fwd] = { source: e.source.name, target: e.target.name, bidirectional: e.bidirectional || false, overhead: e.overhead || [] }; } }); @@ -67,7 +67,7 @@ For theoretical background and correctness proofs, see the [PDF manual](https:// }); Object.keys(edgeMap).forEach(function(k) { var e = edgeMap[k]; - elements.push({ data: { id: k, source: e.source, target: e.target, bidirectional: e.bidirectional } }); + elements.push({ data: { id: k, source: e.source, target: e.target, bidirectional: e.bidirectional, overhead: e.overhead } }); }); var cy = cytoscape({ @@ -88,7 +88,7 @@ For theoretical background and correctness proofs, see the [PDF manual](https:// 'width': 2, 'line-color': '#999', 
'target-arrow-color': '#999', 'target-arrow-shape': 'triangle', 'source-arrow-color': '#999', 'source-arrow-shape': function(ele) { return ele.data('bidirectional') ? 'triangle' : 'none'; }, - 'curve-style': 'bezier', 'arrow-scale': 0.8 + 'curve-style': 'bezier', 'arrow-scale': 0.8, 'cursor': 'pointer' }}, { selector: '.highlighted', style: { 'background-color': '#ff6b6b', 'border-color': '#cc0000', 'border-width': 3, 'z-index': 10 @@ -124,6 +124,26 @@ For theoretical background and correctness proofs, see the [PDF manual](https:// }); cy.on('mouseout', 'node', function() { tooltip.style.display = 'none'; }); + // Edge tooltip + cy.on('mouseover', 'edge', function(evt) { + var d = evt.target.data(); + var arrow = d.bidirectional ? ' \u2194 ' : ' \u2192 '; + var html = '' + d.source + arrow + d.target + ''; + if (d.overhead && d.overhead.length > 0) { + html += '
' + d.overhead.map(function(o) { return '' + o.field + ' = ' + o.formula + ''; }).join('
'); + } + html += '
Click to highlight'; + tooltip.innerHTML = html; + tooltip.style.display = 'block'; + }); + cy.on('mousemove', 'edge', function(evt) { + var pos = evt.renderedPosition || evt.position; + var rect = document.getElementById('cy').getBoundingClientRect(); + tooltip.style.left = (rect.left + window.scrollX + pos.x + 15) + 'px'; + tooltip.style.top = (rect.top + window.scrollY + pos.y - 10) + 'px'; + }); + cy.on('mouseout', 'edge', function() { tooltip.style.display = 'none'; }); + // Double-click to navigate to rustdoc API page cy.on('dbltap', 'node', function(evt) { var d = evt.target.data(); @@ -160,6 +180,23 @@ For theoretical background and correctness proofs, see the [PDF manual](https:// } }); + cy.on('tap', 'edge', function(evt) { + var edge = evt.target; + var d = edge.data(); + cy.elements().removeClass('highlighted selected-node'); + edge.addClass('highlighted'); + edge.source().addClass('highlighted'); + edge.target().addClass('highlighted'); + var arrow = d.bidirectional ? ' \u2194 ' : ' \u2192 '; + var text = d.source + arrow + d.target; + if (d.overhead && d.overhead.length > 0) { + text += ' | ' + d.overhead.map(function(o) { return o.field + ' = ' + o.formula; }).join(', '); + } + instructions.textContent = text; + clearBtn.style.display = 'inline'; + selectedNode = null; + }); + cy.on('tap', function(evt) { if (evt.target === cy) { clearPath(); } }); window.clearPath = function() { diff --git a/docs/src/reductions/reduction_graph.json b/docs/src/reductions/reduction_graph.json index 1e7fd7f1d..4199ca597 100644 --- a/docs/src/reductions/reduction_graph.json +++ b/docs/src/reductions/reduction_graph.json @@ -315,7 +315,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_spins", + "formula": "num_assignments" + }, + { + "field": "num_interactions", + "formula": "num_assignments" + } + ] }, { "source": { @@ -332,7 +342,13 @@ "weight": "i32" } }, - "bidirectional": false + "bidirectional": 
false, + "overhead": [ + { + "field": "num_gates", + "formula": "num_bits_first * num_bits_second" + } + ] }, { "source": { @@ -349,7 +365,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "2 * num_bits_first + 2 * num_bits_second + num_bits_first * num_bits_second" + }, + { + "field": "num_constraints", + "formula": "3 * num_bits_first * num_bits_second + num_bits_first + num_bits_second + 1" + } + ] }, { "source": { @@ -366,7 +392,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vars" + } + ] }, { "source": { @@ -383,7 +415,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices" + } + ] }, { "source": { @@ -401,7 +439,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices * num_colors" + }, + { + "field": "num_constraints", + "formula": "num_vertices + num_edges * num_colors" + } + ] }, { "source": { @@ -418,7 +466,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices * num_colors" + } + ] }, { "source": { @@ -435,7 +489,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vars" + } + ] }, { "source": { @@ -452,7 +512,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_sets", + "formula": "num_edges" + }, + { + "field": "num_elements", + "formula": "num_vertices" + } + ] }, { "source": { @@ -469,7 +539,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vertices", + "formula": "3 * 
num_vars + num_clauses" + }, + { + "field": "num_edges", + "formula": "3 * num_vars + num_literals" + } + ] }, { "source": { @@ -486,7 +566,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vertices", + "formula": "num_literals" + }, + { + "field": "num_edges", + "formula": "num_literals^2" + } + ] }, { "source": { @@ -503,7 +593,17 @@ "weight": "i32" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vertices", + "formula": "2 * num_vars + 5 * num_literals - 5 * num_clauses + 3" + }, + { + "field": "num_colors", + "formula": "3" + } + ] }, { "source": { @@ -520,7 +620,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_clauses", + "formula": "num_clauses + num_literals" + }, + { + "field": "num_vars", + "formula": "num_vars + num_literals" + } + ] }, { "source": { @@ -537,7 +647,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vertices", + "formula": "num_sets" + }, + { + "field": "num_edges", + "formula": "num_sets" + } + ] }, { "source": { @@ -554,7 +674,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_sets" + } + ] }, { "source": { @@ -571,7 +697,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vertices", + "formula": "num_spins" + }, + { + "field": "num_edges", + "formula": "num_interactions" + } + ] }, { "source": { @@ -588,7 +724,13 @@ "weight": "f64" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vars", + "formula": "num_spins" + } + ] }, { "source": { @@ -605,7 +747,17 @@ "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": true, + "overhead": [ + { + "field": "num_vertices", + "formula": 
"num_vertices" + }, + { + "field": "num_edges", + "formula": "num_edges" + } + ] }, { "source": { @@ -622,7 +774,17 @@ "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_sets", + "formula": "num_vertices" + }, + { + "field": "num_elements", + "formula": "num_edges" + } + ] }, { "source": { @@ -639,7 +801,13 @@ "weight": "f64" } }, - "bidirectional": false + "bidirectional": false, + "overhead": [ + { + "field": "num_vars", + "formula": "num_vertices" + } + ] } ] } \ No newline at end of file From 6ec698a11e2325b7986f658c5ad0012f4ed5df7c Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 21:14:19 +0800 Subject: [PATCH 07/14] save --- docs/paper/reductions.typ | 72 +++++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 34 deletions(-) diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index b90c6fc01..1ded300ca 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -142,11 +142,12 @@ let reduces-from = get-reductions-from(problem-name) if reduces-to.len() > 0 or reduces-from.len() > 0 { block(above: 0.5em)[ + #set text(size: 9pt) #if reduces-to.len() > 0 [ - - _Reduces to:_ #reduces-to.map(render-reduction-link).join(", "). \ + - Reduces to: #reduces-to.map(render-reduction-link).join(", "). \ ] #if reduces-from.len() > 0 [ - - _Reduces from:_ #reduces-from.map(render-reduction-link).join(", "). + - Reduces from: #reduces-from.map(render-reduction-link).join(", "). 
] ] } @@ -156,23 +157,26 @@ #let render-schema(name) = { let schema = problem-schemas.find(s => s.name == name) if schema == none { return } - set text(size: 9pt) - table( - columns: (auto, 1fr), - inset: (x: 6pt, y: 3pt), - align: (left, left), - stroke: none, - table.hline(stroke: 0.3pt + luma(200)), - table.header( - text(fill: luma(100))[Field], - text(fill: luma(100))[Description], - ), - table.hline(stroke: 0.3pt + luma(200)), - ..schema.fields.map(f => ( - text(fill: luma(60), raw(f.name)), - text(fill: luma(60), f.description) - )).flatten() - ) + block( + stroke: (left: 2pt + luma(180)), + inset: (left: 8pt), + )[ + #set text(size: 9pt) + #table( + columns: (auto, 1fr), + inset: (x: 2pt, y: 3pt), + align: (left, left), + stroke: none, + table.header( + text(fill: luma(30), raw(name)), + ), + table.hline(stroke: 0.3pt + luma(200)), + ..schema.fields.map(f => ( + text(fill: luma(60), raw(f.name)), + text(fill: luma(60), raw(f.description)) + )).flatten() + ) + ] } // Extract primary variable count from an instance dict. @@ -314,43 +318,43 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Independent Set (IS)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ maximizing $sum_(v in S) w(v)$ such that no two vertices in $S$ are adjacent: $forall u, v in S: (u, v) in.not E$. - #render-reductions("IndependentSet") #render-schema("IndependentSet") + #render-reductions("IndependentSet") ] #definition("Vertex Cover (VC)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ such that every edge has at least one endpoint in $S$: $forall (u, v) in E: u in S or v in S$. - #render-reductions("VertexCovering") #render-schema("VertexCovering") + #render-reductions("VertexCovering") ] #definition("Max-Cut")[ Given $G = (V, E)$ with weights $w: E -> RR$, find partition $(S, overline(S))$ maximizing $sum_((u,v) in E: u in S, v in overline(S)) w(u, v)$. 
- #render-reductions("MaxCut") #render-schema("MaxCut") + #render-reductions("MaxCut") ] #definition("Graph Coloring")[ Given $G = (V, E)$ and $k$ colors, find $c: V -> {1, ..., k}$ minimizing $|{(u, v) in E : c(u) = c(v)}|$. - #render-reductions("KColoring") #render-schema("KColoring") + #render-reductions("KColoring") ] #definition("Dominating Set")[ Given $G = (V, E)$ with weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ s.t. $forall v in V: v in S or exists u in S: (u, v) in E$. - #render-reductions("DominatingSet") #render-schema("DominatingSet") + #render-reductions("DominatingSet") ] #definition("Matching")[ Given $G = (V, E)$ with weights $w: E -> RR$, find $M subset.eq E$ maximizing $sum_(e in M) w(e)$ s.t. $forall e_1, e_2 in M: e_1 inter e_2 = emptyset$. - #render-reductions("Matching") #render-schema("Matching") + #render-reductions("Matching") ] #definition("Clique")[ @@ -368,15 +372,15 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Set Packing")[ Given universe $U$, collection $cal(S) = {S_1, ..., S_m}$ with $S_i subset.eq U$, weights $w: cal(S) -> RR$, find $cal(P) subset.eq cal(S)$ maximizing $sum_(S in cal(P)) w(S)$ s.t. $forall S_i, S_j in cal(P): S_i inter S_j = emptyset$. - #render-reductions("SetPacking") #render-schema("SetPacking") + #render-reductions("SetPacking") ] #definition("Set Covering")[ Given universe $U$, collection $cal(S)$ with weights $w: cal(S) -> RR$, find $cal(C) subset.eq cal(S)$ minimizing $sum_(S in cal(C)) w(S)$ s.t. $union.big_(S in cal(C)) S = U$. 
- #render-reductions("SetCovering") #render-schema("SetCovering") + #render-reductions("SetCovering") ] == Optimization Problems @@ -384,22 +388,22 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Spin Glass (Ising Model)")[ Given $n$ spin variables $s_i in {-1, +1}$, pairwise couplings $J_(i j) in RR$, and external fields $h_i in RR$, minimize the Hamiltonian (energy function): $H(bold(s)) = -sum_((i,j)) J_(i j) s_i s_j - sum_i h_i s_i$. - #render-reductions("SpinGlass") #render-schema("SpinGlass") + #render-reductions("SpinGlass") ] #definition("QUBO")[ Given $n$ binary variables $x_i in {0, 1}$, upper-triangular matrix $Q in RR^(n times n)$, minimize $f(bold(x)) = sum_(i=1)^n Q_(i i) x_i + sum_(i < j) Q_(i j) x_i x_j$ (using $x_i^2 = x_i$ for binary variables). - #render-reductions("QUBO") #render-schema("QUBO") + #render-reductions("QUBO") ] #definition("Integer Linear Programming (ILP)")[ Given $n$ integer variables $bold(x) in ZZ^n$, constraint matrix $A in RR^(m times n)$, bounds $bold(b) in RR^m$, and objective $bold(c) in RR^n$, find $bold(x)$ minimizing $bold(c)^top bold(x)$ subject to $A bold(x) <= bold(b)$ and variable bounds. - #render-reductions("ILP") #render-schema("ILP") + #render-reductions("ILP") ] == Satisfiability Problems @@ -407,29 +411,29 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("SAT")[ Given a CNF formula $phi = and.big_(j=1)^m C_j$ with $m$ clauses over $n$ Boolean variables, where each clause $C_j = or.big_i ell_(j i)$ is a disjunction of literals, find an assignment $bold(x) in {0, 1}^n$ such that $phi(bold(x)) = 1$ (all clauses satisfied). - #render-reductions("Satisfiability") #render-schema("Satisfiability") + #render-reductions("Satisfiability") ] #definition([$k$-SAT])[ SAT with exactly $k$ literals per clause. 
- #render-reductions("KSatisfiability") #render-schema("KSatisfiability") + #render-reductions("KSatisfiability") ] #definition("Circuit-SAT")[ Given a Boolean circuit $C$ composed of logic gates (AND, OR, NOT, XOR) with $n$ input variables, find an input assignment $bold(x) in {0,1}^n$ such that $C(bold(x)) = 1$. - #render-reductions("CircuitSAT") #render-schema("CircuitSAT") + #render-reductions("CircuitSAT") ] #definition("Factoring")[ Given a composite integer $N$ and bit sizes $m, n$, find integers $p in [2, 2^m - 1]$ and $q in [2, 2^n - 1]$ such that $p times q = N$. Here $p$ has $m$ bits and $q$ has $n$ bits. - #render-reductions("Factoring") #render-schema("Factoring") + #render-reductions("Factoring") ] = Reductions From 0875e7c3910e994c1fe3c16c2c963bfd26bfb098 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 22:36:28 +0800 Subject: [PATCH 08/14] refactor: rename 7 problem types with Maximum/Minimum prefix, improve paper and perf MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Problem type renames for clarity: - Clique → MaximumClique - IndependentSet → MaximumIndependentSet - Matching → MaximumMatching - VertexCovering → MinimumVertexCover - DominatingSet → MinimumDominatingSet - SetPacking → MaximumSetPacking - SetCovering → MinimumSetCovering Renamed all source files, test files, rule files, example files, and test data across 118 files. Updated all references in code, docs, benchmarks, and the Typst paper. 
Typst paper improvements: - Removed name-abbrev and def-label-map dictionaries; use full names - Added problem-def() wrapper for programmatic definition labels - Added completeness warnings for missing models/rules vs JSON graph - Made reduction-rule use context+query for resilient label references - Swapped MVC↔MIS and MSP↔MIS theorem directions to match JSON edges - Deleted redundant examples/qubo_reductions.rs (already split) Performance: - Added Auto variant to PathDecompositionMethod (now default) - Auto uses exact branch-and-bound for ≤30 vertices, greedy for larger - Fixes slow unit disk mapping tests on larger graphs Co-Authored-By: Claude Opus 4.6 --- benches/solver_benchmarks.rs | 26 +- docs/paper/problem_schemas.json | 226 ++++---- docs/paper/reduction_graph.json | 372 ++++++------ docs/paper/reductions.typ | 533 +++++++++--------- docs/src/getting-started.md | 22 +- docs/src/introduction.md | 4 +- docs/src/io.md | 8 +- docs/src/reductions/reduction_graph.json | 372 ++++++------ examples/qubo_reductions.rs | 216 ------- ...p.rs => reduction_maximumclique_to_ilp.rs} | 22 +- ...reduction_maximumindependentset_to_ilp.rs} | 14 +- ...mumindependentset_to_maximumsetpacking.rs} | 32 +- ...umindependentset_to_minimumvertexcover.rs} | 24 +- ...eduction_maximumindependentset_to_qubo.rs} | 20 +- ...rs => reduction_maximummatching_to_ilp.rs} | 22 +- ...n_maximummatching_to_maximumsetpacking.rs} | 34 +- ... => reduction_maximumsetpacking_to_ilp.rs} | 20 +- ...=> reduction_maximumsetpacking_to_qubo.rs} | 18 +- ... reduction_minimumdominatingset_to_ilp.rs} | 20 +- ...=> reduction_minimumsetcovering_to_ilp.rs} | 20 +- ...=> reduction_minimumvertexcover_to_ilp.rs} | 14 +- ...umvertexcover_to_maximumindependentset.rs} | 24 +- ...nimumvertexcover_to_minimumsetcovering.rs} | 32 +- ...> reduction_minimumvertexcover_to_qubo.rs} | 20 +- ...reduction_sat_to_maximumindependentset.rs} | 16 +- ... 
reduction_sat_to_minimumdominatingset.rs} | 18 +- scripts/generate_qubo_tests.py | 6 +- src/export.rs | 2 +- src/io.rs | 8 +- src/lib.rs | 22 +- .../graph/{clique.rs => maximum_clique.rs} | 34 +- ...dent_set.rs => maximum_independent_set.rs} | 20 +- .../{matching.rs => maximum_matching.rs} | 34 +- ...ating_set.rs => minimum_dominating_set.rs} | 20 +- ...ex_covering.rs => minimum_vertex_cover.rs} | 20 +- src/models/graph/mod.rs | 30 +- src/models/mod.rs | 8 +- ...{set_packing.rs => maximum_set_packing.rs} | 18 +- ...et_covering.rs => minimum_set_covering.rs} | 18 +- src/models/set/mod.rs | 16 +- src/registry/category.rs | 22 +- src/registry/info.rs | 2 +- src/registry/schema.rs | 2 +- src/rules/circuit_spinglass.rs | 8 +- src/rules/graph.rs | 35 +- .../{clique_ilp.rs => maximumclique_ilp.rs} | 16 +- ...et_ilp.rs => maximumindependentset_ilp.rs} | 14 +- ...aximumindependentset_maximumsetpacking.rs} | 36 +- ..._qubo.rs => maximumindependentset_qubo.rs} | 12 +- ...matching_ilp.rs => maximummatching_ilp.rs} | 16 +- ...s => maximummatching_maximumsetpacking.rs} | 24 +- ...acking_ilp.rs => maximumsetpacking_ilp.rs} | 14 +- ...king_qubo.rs => maximumsetpacking_qubo.rs} | 14 +- ...set_ilp.rs => minimumdominatingset_ilp.rs} | 14 +- ...ering_ilp.rs => minimumsetcovering_ilp.rs} | 14 +- ...ering_ilp.rs => minimumvertexcover_ilp.rs} | 14 +- ...nimumvertexcover_maximumindependentset.rs} | 30 +- ... 
minimumvertexcover_minimumsetcovering.rs} | 20 +- ...ing_qubo.rs => minimumvertexcover_qubo.rs} | 12 +- src/rules/mod.rs | 64 +-- src/rules/registry.rs | 4 +- src/rules/sat_coloring.rs | 2 +- ...entset.rs => sat_maximumindependentset.rs} | 22 +- ...tingset.rs => sat_minimumdominatingset.rs} | 24 +- src/rules/unitdiskmapping/alpha_tensor.rs | 2 +- src/rules/unitdiskmapping/ksg/mapping.rs | 9 +- .../unitdiskmapping/pathdecomposition.rs | 18 +- .../unitdiskmapping/triangular/gadgets.rs | 2 +- .../unitdiskmapping/triangular/mapping.rs | 6 +- src/rules/unitdiskmapping/triangular/mod.rs | 2 +- src/solvers/ilp/solver.rs | 2 +- src/testing/macros.rs | 16 +- src/testing/mod.rs | 16 +- src/topology/mod.rs | 4 +- src/traits.rs | 2 +- src/types.rs | 2 +- src/unit_tests/export.rs | 8 +- src/unit_tests/graph_models.rs | 80 +-- src/unit_tests/io.rs | 16 +- .../graph/{clique.rs => maximum_clique.rs} | 50 +- ...dent_set.rs => maximum_independent_set.rs} | 48 +- .../{matching.rs => maximum_matching.rs} | 42 +- ...ating_set.rs => minimum_dominating_set.rs} | 48 +- ...ex_covering.rs => minimum_vertex_cover.rs} | 46 +- ...{set_packing.rs => maximum_set_packing.rs} | 46 +- ...et_covering.rs => minimum_set_covering.rs} | 36 +- src/unit_tests/property.rs | 20 +- src/unit_tests/reduction_graph.rs | 42 +- src/unit_tests/registry/category.rs | 6 +- src/unit_tests/registry/schema.rs | 12 +- src/unit_tests/rules/graph.rs | 120 ++-- .../{clique_ilp.rs => maximumclique_ilp.rs} | 40 +- ...et_ilp.rs => maximumindependentset_ilp.rs} | 22 +- ...aximumindependentset_maximumsetpacking.rs} | 34 +- ..._qubo.rs => maximumindependentset_qubo.rs} | 8 +- ...matching_ilp.rs => maximummatching_ilp.rs} | 24 +- ...s => maximummatching_maximumsetpacking.rs} | 52 +- ...acking_ilp.rs => maximumsetpacking_ilp.rs} | 22 +- ...king_qubo.rs => maximumsetpacking_qubo.rs} | 8 +- ...set_ilp.rs => minimumdominatingset_ilp.rs} | 22 +- ...ering_ilp.rs => minimumsetcovering_ilp.rs} | 22 +- ...ering_ilp.rs => 
minimumvertexcover_ilp.rs} | 26 +- ...nimumvertexcover_maximumindependentset.rs} | 22 +- ... minimumvertexcover_minimumsetcovering.rs} | 40 +- ...ing_qubo.rs => minimumvertexcover_qubo.rs} | 8 +- src/unit_tests/rules/registry.rs | 2 +- ...entset.rs => sat_maximumindependentset.rs} | 26 +- ...tingset.rs => sat_minimumdominatingset.rs} | 30 +- src/unit_tests/testing/macros.rs | 8 +- src/unit_tests/trait_consistency.rs | 28 +- .../unitdiskmapping_algorithms/common.rs | 10 +- src/unit_tests/variant.rs | 30 +- ...son => maximumindependentset_to_qubo.json} | 0 ...bo.json => maximumsetpacking_to_qubo.json} | 0 ...o.json => minimumvertexcover_to_qubo.json} | 0 tests/suites/examples.rs | 34 +- tests/suites/integration.rs | 34 +- tests/suites/reductions.rs | 84 +-- 118 files changed, 1985 insertions(+), 2192 deletions(-) delete mode 100644 examples/qubo_reductions.rs rename examples/{reduction_clique_to_ilp.rs => reduction_maximumclique_to_ilp.rs} (76%) rename examples/{reduction_is_to_ilp.rs => reduction_maximumindependentset_to_ilp.rs} (83%) rename examples/{reduction_is_to_setpacking.rs => reduction_maximumindependentset_to_maximumsetpacking.rs} (73%) rename examples/{reduction_is_to_vc.rs => reduction_maximumindependentset_to_minimumvertexcover.rs} (69%) rename examples/{reduction_is_to_qubo.rs => reduction_maximumindependentset_to_qubo.rs} (80%) rename examples/{reduction_matching_to_ilp.rs => reduction_maximummatching_to_ilp.rs} (76%) rename examples/{reduction_matching_to_setpacking.rs => reduction_maximummatching_to_maximumsetpacking.rs} (69%) rename examples/{reduction_setpacking_to_ilp.rs => reduction_maximumsetpacking_to_ilp.rs} (79%) rename examples/{reduction_setpacking_to_qubo.rs => reduction_maximumsetpacking_to_qubo.rs} (85%) rename examples/{reduction_dominatingset_to_ilp.rs => reduction_minimumdominatingset_to_ilp.rs} (77%) rename examples/{reduction_setcovering_to_ilp.rs => reduction_minimumsetcovering_to_ilp.rs} (79%) rename 
examples/{reduction_vc_to_ilp.rs => reduction_minimumvertexcover_to_ilp.rs} (82%) rename examples/{reduction_vc_to_is.rs => reduction_minimumvertexcover_to_maximumindependentset.rs} (70%) rename examples/{reduction_vc_to_setcovering.rs => reduction_minimumvertexcover_to_minimumsetcovering.rs} (73%) rename examples/{reduction_vc_to_qubo.rs => reduction_minimumvertexcover_to_qubo.rs} (82%) rename examples/{reduction_sat_to_is.rs => reduction_sat_to_maximumindependentset.rs} (86%) rename examples/{reduction_sat_to_dominatingset.rs => reduction_sat_to_minimumdominatingset.rs} (86%) rename src/models/graph/{clique.rs => maximum_clique.rs} (89%) rename src/models/graph/{independent_set.rs => maximum_independent_set.rs} (92%) rename src/models/graph/{matching.rs => maximum_matching.rs} (89%) rename src/models/graph/{dominating_set.rs => minimum_dominating_set.rs} (93%) rename src/models/graph/{vertex_covering.rs => minimum_vertex_cover.rs} (92%) rename src/models/set/{set_packing.rs => maximum_set_packing.rs} (93%) rename src/models/set/{set_covering.rs => minimum_set_covering.rs} (93%) rename src/rules/{clique_ilp.rs => maximumclique_ilp.rs} (86%) rename src/rules/{independentset_ilp.rs => maximumindependentset_ilp.rs} (85%) rename src/rules/{independentset_setpacking.rs => maximumindependentset_maximumsetpacking.rs} (74%) rename src/rules/{independentset_qubo.rs => maximumindependentset_qubo.rs} (84%) rename src/rules/{matching_ilp.rs => maximummatching_ilp.rs} (86%) rename src/rules/{matching_setpacking.rs => maximummatching_maximumsetpacking.rs} (72%) rename src/rules/{setpacking_ilp.rs => maximumsetpacking_ilp.rs} (87%) rename src/rules/{setpacking_qubo.rs => maximumsetpacking_qubo.rs} (84%) rename src/rules/{dominatingset_ilp.rs => minimumdominatingset_ilp.rs} (87%) rename src/rules/{setcovering_ilp.rs => minimumsetcovering_ilp.rs} (88%) rename src/rules/{vertexcovering_ilp.rs => minimumvertexcover_ilp.rs} (86%) rename src/rules/{vertexcovering_independentset.rs => 
minimumvertexcover_maximumindependentset.rs} (75%) rename src/rules/{vertexcovering_setcovering.rs => minimumvertexcover_minimumsetcovering.rs} (79%) rename src/rules/{vertexcovering_qubo.rs => minimumvertexcover_qubo.rs} (87%) rename src/rules/{sat_independentset.rs => sat_maximumindependentset.rs} (89%) rename src/rules/{sat_dominatingset.rs => sat_minimumdominatingset.rs} (90%) rename src/unit_tests/models/graph/{clique.rs => maximum_clique.rs} (74%) rename src/unit_tests/models/graph/{independent_set.rs => maximum_independent_set.rs} (72%) rename src/unit_tests/models/graph/{matching.rs => maximum_matching.rs} (74%) rename src/unit_tests/models/graph/{dominating_set.rs => minimum_dominating_set.rs} (71%) rename src/unit_tests/models/graph/{vertex_covering.rs => minimum_vertex_cover.rs} (74%) rename src/unit_tests/models/set/{set_packing.rs => maximum_set_packing.rs} (72%) rename src/unit_tests/models/set/{set_covering.rs => minimum_set_covering.rs} (73%) rename src/unit_tests/rules/{clique_ilp.rs => maximumclique_ilp.rs} (85%) rename src/unit_tests/rules/{independentset_ilp.rs => maximumindependentset_ilp.rs} (87%) rename src/unit_tests/rules/{independentset_setpacking.rs => maximumindependentset_maximumsetpacking.rs} (69%) rename src/unit_tests/rules/{independentset_qubo.rs => maximumindependentset_qubo.rs} (84%) rename src/unit_tests/rules/{matching_ilp.rs => maximummatching_ilp.rs} (88%) rename src/unit_tests/rules/{matching_setpacking.rs => maximummatching_maximumsetpacking.rs} (69%) rename src/unit_tests/rules/{setpacking_ilp.rs => maximumsetpacking_ilp.rs} (87%) rename src/unit_tests/rules/{setpacking_qubo.rs => maximumsetpacking_qubo.rs} (85%) rename src/unit_tests/rules/{dominatingset_ilp.rs => minimumdominatingset_ilp.rs} (87%) rename src/unit_tests/rules/{setcovering_ilp.rs => minimumsetcovering_ilp.rs} (87%) rename src/unit_tests/rules/{vertexcovering_ilp.rs => minimumvertexcover_ilp.rs} (87%) rename 
src/unit_tests/rules/{vertexcovering_independentset.rs => minimumvertexcover_maximumindependentset.rs} (67%) rename src/unit_tests/rules/{vertexcovering_setcovering.rs => minimumvertexcover_minimumsetcovering.rs} (75%) rename src/unit_tests/rules/{vertexcovering_qubo.rs => minimumvertexcover_qubo.rs} (84%) rename src/unit_tests/rules/{sat_independentset.rs => sat_maximumindependentset.rs} (89%) rename src/unit_tests/rules/{sat_dominatingset.rs => sat_minimumdominatingset.rs} (88%) rename tests/data/qubo/{independentset_to_qubo.json => maximumindependentset_to_qubo.json} (100%) rename tests/data/qubo/{setpacking_to_qubo.json => maximumsetpacking_to_qubo.json} (100%) rename tests/data/qubo/{vertexcovering_to_qubo.json => minimumvertexcover_to_qubo.json} (100%) diff --git a/benches/solver_benchmarks.rs b/benches/solver_benchmarks.rs index bd2abf2b7..b0a6e7196 100644 --- a/benches/solver_benchmarks.rs +++ b/benches/solver_benchmarks.rs @@ -9,14 +9,14 @@ use problemreductions::models::set::*; use problemreductions::models::specialized::*; use problemreductions::prelude::*; -/// Benchmark IndependentSet on graphs of varying sizes. +/// Benchmark MaximumIndependentSet on graphs of varying sizes. fn bench_independent_set(c: &mut Criterion) { - let mut group = c.benchmark_group("IndependentSet"); + let mut group = c.benchmark_group("MaximumIndependentSet"); for n in [4, 6, 8, 10].iter() { // Create a path graph with n vertices let edges: Vec<(usize, usize)> = (0..*n - 1).map(|i| (i, i + 1)).collect(); - let problem = IndependentSet::::new(*n, edges); + let problem = MaximumIndependentSet::::new(*n, edges); let solver = BruteForce::new(); group.bench_with_input(BenchmarkId::new("path", n), n, |b, _| { @@ -27,13 +27,13 @@ fn bench_independent_set(c: &mut Criterion) { group.finish(); } -/// Benchmark VertexCovering on graphs of varying sizes. +/// Benchmark MinimumVertexCover on graphs of varying sizes. 
fn bench_vertex_covering(c: &mut Criterion) { - let mut group = c.benchmark_group("VertexCovering"); + let mut group = c.benchmark_group("MinimumVertexCover"); for n in [4, 6, 8, 10].iter() { let edges: Vec<(usize, usize)> = (0..*n - 1).map(|i| (i, i + 1)).collect(); - let problem = VertexCovering::::new(*n, edges); + let problem = MinimumVertexCover::::new(*n, edges); let solver = BruteForce::new(); group.bench_with_input(BenchmarkId::new("path", n), n, |b, _| { @@ -109,16 +109,16 @@ fn bench_spin_glass(c: &mut Criterion) { group.finish(); } -/// Benchmark SetCovering on varying sizes. +/// Benchmark MinimumSetCovering on varying sizes. fn bench_set_covering(c: &mut Criterion) { - let mut group = c.benchmark_group("SetCovering"); + let mut group = c.benchmark_group("MinimumSetCovering"); for num_sets in [4, 6, 8, 10].iter() { // Create overlapping sets let sets: Vec> = (0..*num_sets) .map(|i| vec![i, (i + 1) % *num_sets, (i + 2) % *num_sets]) .collect(); - let problem = SetCovering::::new(*num_sets, sets); + let problem = MinimumSetCovering::::new(*num_sets, sets); let solver = BruteForce::new(); group.bench_with_input( @@ -154,7 +154,7 @@ fn bench_matching(c: &mut Criterion) { for n in [4, 6, 8, 10].iter() { let edges: Vec<(usize, usize, i32)> = (0..*n - 1).map(|i| (i, i + 1, 1)).collect(); - let problem = Matching::new(*n, edges); + let problem = MaximumMatching::new(*n, edges); let solver = BruteForce::new(); group.bench_with_input(BenchmarkId::new("path", n), n, |b, _| { @@ -192,9 +192,9 @@ fn bench_comparison(c: &mut Criterion) { let solver = BruteForce::new(); - // IndependentSet with 8 vertices - let is_problem = IndependentSet::::new(8, vec![(0, 1), (2, 3), (4, 5), (6, 7)]); - group.bench_function("IndependentSet", |b| { + // MaximumIndependentSet with 8 vertices + let is_problem = MaximumIndependentSet::::new(8, vec![(0, 1), (2, 3), (4, 5), (6, 7)]); + group.bench_function("MaximumIndependentSet", |b| { b.iter(|| solver.find_best(black_box(&is_problem))) 
}); diff --git a/docs/paper/problem_schemas.json b/docs/paper/problem_schemas.json index 065c8bfa3..1bb7e1a5d 100644 --- a/docs/paper/problem_schemas.json +++ b/docs/paper/problem_schemas.json @@ -75,40 +75,6 @@ } ] }, - { - "name": "Clique", - "category": "graph", - "description": "Find maximum weight clique in a graph", - "fields": [ - { - "name": "graph", - "type_name": "G", - "description": "The underlying graph G=(V,E)" - }, - { - "name": "weights", - "type_name": "Vec", - "description": "Vertex weights w: V -> R" - } - ] - }, - { - "name": "DominatingSet", - "category": "graph", - "description": "Find minimum weight dominating set in a graph", - "fields": [ - { - "name": "graph", - "type_name": "G", - "description": "The underlying graph G=(V,E)" - }, - { - "name": "weights", - "type_name": "Vec", - "description": "Vertex weights w: V -> R" - } - ] - }, { "name": "Factoring", "category": "specialized", @@ -164,58 +130,109 @@ ] }, { - "name": "IndependentSet", + "name": "KColoring", "category": "graph", - "description": "Find maximum weight independent set in a graph", + "description": "Find valid k-coloring of a graph", "fields": [ { "name": "graph", "type_name": "G", "description": "The underlying graph G=(V,E)" + } + ] + }, + { + "name": "KSatisfiability", + "category": "satisfiability", + "description": "SAT with exactly k literals per clause", + "fields": [ + { + "name": "num_vars", + "type_name": "usize", + "description": "Number of Boolean variables" + }, + { + "name": "clauses", + "type_name": "Vec", + "description": "Clauses each with exactly K literals" }, { "name": "weights", "type_name": "Vec", - "description": "Vertex weights w: V -> R" + "description": "Clause weights for MAX-K-SAT" } ] }, { - "name": "KColoring", + "name": "MaxCut", "category": "graph", - "description": "Find valid k-coloring of a graph", + "description": "Find maximum weight cut in a graph", + "fields": [ + { + "name": "graph", + "type_name": "G", + "description": "The graph 
with edge weights" + }, + { + "name": "edge_weights", + "type_name": "Vec", + "description": "Edge weights w: E -> R" + } + ] + }, + { + "name": "MaximalIS", + "category": "graph", + "description": "Find maximum weight maximal independent set", "fields": [ { "name": "graph", "type_name": "G", "description": "The underlying graph G=(V,E)" + }, + { + "name": "weights", + "type_name": "Vec", + "description": "Vertex weights w: V -> R" } ] }, { - "name": "KSatisfiability", - "category": "satisfiability", - "description": "SAT with exactly k literals per clause", + "name": "MaximumClique", + "category": "graph", + "description": "Find maximum weight clique in a graph", "fields": [ { - "name": "num_vars", - "type_name": "usize", - "description": "Number of Boolean variables" + "name": "graph", + "type_name": "G", + "description": "The underlying graph G=(V,E)" }, { - "name": "clauses", - "type_name": "Vec", - "description": "Clauses each with exactly K literals" + "name": "weights", + "type_name": "Vec", + "description": "Vertex weights w: V -> R" + } + ] + }, + { + "name": "MaximumIndependentSet", + "category": "graph", + "description": "Find maximum weight independent set in a graph", + "fields": [ + { + "name": "graph", + "type_name": "G", + "description": "The underlying graph G=(V,E)" }, { "name": "weights", "type_name": "Vec", - "description": "Clause weights for MAX-K-SAT" + "description": "Vertex weights w: V -> R" } ] }, { - "name": "Matching", + "name": "MaximumMatching", "category": "graph", "description": "Find maximum weight matching in a graph", "fields": [ @@ -232,26 +249,65 @@ ] }, { - "name": "MaxCut", + "name": "MaximumSetPacking", + "category": "set", + "description": "Find maximum weight collection of disjoint sets", + "fields": [ + { + "name": "sets", + "type_name": "Vec>", + "description": "Collection of sets over a universe" + }, + { + "name": "weights", + "type_name": "Vec", + "description": "Weight for each set" + } + ] + }, + { + "name": 
"MinimumDominatingSet", "category": "graph", - "description": "Find maximum weight cut in a graph", + "description": "Find minimum weight dominating set in a graph", "fields": [ { "name": "graph", "type_name": "G", - "description": "The graph with edge weights" + "description": "The underlying graph G=(V,E)" }, { - "name": "edge_weights", + "name": "weights", "type_name": "Vec", - "description": "Edge weights w: E -> R" + "description": "Vertex weights w: V -> R" } ] }, { - "name": "MaximalIS", + "name": "MinimumSetCovering", + "category": "set", + "description": "Find minimum weight collection covering the universe", + "fields": [ + { + "name": "universe_size", + "type_name": "usize", + "description": "Size of the universe U" + }, + { + "name": "sets", + "type_name": "Vec>", + "description": "Collection of subsets of U" + }, + { + "name": "weights", + "type_name": "Vec", + "description": "Weight for each set" + } + ] + }, + { + "name": "MinimumVertexCover", "category": "graph", - "description": "Find maximum weight maximal independent set", + "description": "Find minimum weight vertex cover in a graph", "fields": [ { "name": "graph", @@ -331,45 +387,6 @@ } ] }, - { - "name": "SetCovering", - "category": "set", - "description": "Find minimum weight collection covering the universe", - "fields": [ - { - "name": "universe_size", - "type_name": "usize", - "description": "Size of the universe U" - }, - { - "name": "sets", - "type_name": "Vec>", - "description": "Collection of subsets of U" - }, - { - "name": "weights", - "type_name": "Vec", - "description": "Weight for each set" - } - ] - }, - { - "name": "SetPacking", - "category": "set", - "description": "Find maximum weight collection of disjoint sets", - "fields": [ - { - "name": "sets", - "type_name": "Vec>", - "description": "Collection of sets over a universe" - }, - { - "name": "weights", - "type_name": "Vec", - "description": "Weight for each set" - } - ] - }, { "name": "SpinGlass", "category": "optimization", 
@@ -391,22 +408,5 @@ "description": "On-site fields h_i" } ] - }, - { - "name": "VertexCovering", - "category": "graph", - "description": "Find minimum weight vertex cover in a graph", - "fields": [ - { - "name": "graph", - "type_name": "G", - "description": "The underlying graph G=(V,E)" - }, - { - "name": "weights", - "type_name": "Vec", - "description": "Vertex weights w: V -> R" - } - ] } ] \ No newline at end of file diff --git a/docs/paper/reduction_graph.json b/docs/paper/reduction_graph.json index 4199ca597..6d05579bd 100644 --- a/docs/paper/reduction_graph.json +++ b/docs/paper/reduction_graph.json @@ -24,21 +24,6 @@ "category": "satisfiability", "doc_path": "models/specialized/struct.CircuitSAT.html" }, - { - "name": "DominatingSet", - "variant": {}, - "category": "graph", - "doc_path": "models/graph/struct.DominatingSet.html" - }, - { - "name": "DominatingSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.DominatingSet.html" - }, { "name": "Factoring", "variant": {}, @@ -69,30 +54,6 @@ "category": "optimization", "doc_path": "models/optimization/struct.ILP.html" }, - { - "name": "IndependentSet", - "variant": {}, - "category": "graph", - "doc_path": "models/graph/struct.IndependentSet.html" - }, - { - "name": "IndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.IndependentSet.html" - }, - { - "name": "IndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - }, - "category": "graph", - "doc_path": "models/graph/struct.IndependentSet.html" - }, { "name": "KColoring", "variant": {}, @@ -152,151 +113,190 @@ "doc_path": "models/satisfiability/struct.KSatisfiability.html" }, { - "name": "Matching", + "name": "MaxCut", "variant": {}, "category": "graph", - "doc_path": "models/graph/struct.Matching.html" + "doc_path": "models/graph/struct.MaxCut.html" }, { - "name": 
"Matching", + "name": "MaxCut", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, "category": "graph", - "doc_path": "models/graph/struct.Matching.html" + "doc_path": "models/graph/struct.MaxCut.html" }, { - "name": "MaxCut", + "name": "MaximumIndependentSet", "variant": {}, "category": "graph", - "doc_path": "models/graph/struct.MaxCut.html" + "doc_path": "models/graph/struct.MaximumIndependentSet.html" }, { - "name": "MaxCut", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, "category": "graph", - "doc_path": "models/graph/struct.MaxCut.html" + "doc_path": "models/graph/struct.MaximumIndependentSet.html" }, { - "name": "QUBO", - "variant": {}, - "category": "optimization", - "doc_path": "models/optimization/struct.QUBO.html" - }, - { - "name": "QUBO", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "i32" }, - "category": "optimization", - "doc_path": "models/optimization/struct.QUBO.html" + "category": "graph", + "doc_path": "models/graph/struct.MaximumIndependentSet.html" }, { - "name": "Satisfiability", + "name": "MaximumMatching", "variant": {}, - "category": "satisfiability", - "doc_path": "models/satisfiability/struct.Satisfiability.html" + "category": "graph", + "doc_path": "models/graph/struct.MaximumMatching.html" }, { - "name": "Satisfiability", + "name": "MaximumMatching", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, - "category": "satisfiability", - "doc_path": "models/satisfiability/struct.Satisfiability.html" + "category": "graph", + "doc_path": "models/graph/struct.MaximumMatching.html" }, { - "name": "SetCovering", + "name": "MaximumSetPacking", "variant": {}, "category": "set", - "doc_path": "models/set/struct.SetCovering.html" + "doc_path": "models/set/struct.MaximumSetPacking.html" }, { - "name": "SetCovering", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, 
"category": "set", - "doc_path": "models/set/struct.SetCovering.html" + "doc_path": "models/set/struct.MaximumSetPacking.html" }, { - "name": "SetPacking", - "variant": {}, + "name": "MaximumSetPacking", + "variant": { + "graph": "SimpleGraph", + "weight": "i32" + }, "category": "set", - "doc_path": "models/set/struct.SetPacking.html" + "doc_path": "models/set/struct.MaximumSetPacking.html" + }, + { + "name": "MinimumDominatingSet", + "variant": {}, + "category": "graph", + "doc_path": "models/graph/struct.MinimumDominatingSet.html" }, { - "name": "SetPacking", + "name": "MinimumDominatingSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, + "category": "graph", + "doc_path": "models/graph/struct.MinimumDominatingSet.html" + }, + { + "name": "MinimumSetCovering", + "variant": {}, "category": "set", - "doc_path": "models/set/struct.SetPacking.html" + "doc_path": "models/set/struct.MinimumSetCovering.html" }, { - "name": "SetPacking", + "name": "MinimumSetCovering", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" }, "category": "set", - "doc_path": "models/set/struct.SetPacking.html" + "doc_path": "models/set/struct.MinimumSetCovering.html" }, { - "name": "SpinGlass", + "name": "MinimumVertexCover", "variant": {}, - "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" + "category": "graph", + "doc_path": "models/graph/struct.MinimumVertexCover.html" }, { - "name": "SpinGlass", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, + "category": "graph", + "doc_path": "models/graph/struct.MinimumVertexCover.html" + }, + { + "name": "MinimumVertexCover", + "variant": { + "graph": "SimpleGraph", + "weight": "i32" + }, + "category": "graph", + "doc_path": "models/graph/struct.MinimumVertexCover.html" + }, + { + "name": "QUBO", + "variant": {}, "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" + "doc_path": 
"models/optimization/struct.QUBO.html" }, { - "name": "SpinGlass", + "name": "QUBO", "variant": { "graph": "SimpleGraph", "weight": "f64" }, "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" + "doc_path": "models/optimization/struct.QUBO.html" }, { - "name": "VertexCovering", + "name": "Satisfiability", "variant": {}, - "category": "graph", - "doc_path": "models/graph/struct.VertexCovering.html" + "category": "satisfiability", + "doc_path": "models/satisfiability/struct.Satisfiability.html" }, { - "name": "VertexCovering", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, - "category": "graph", - "doc_path": "models/graph/struct.VertexCovering.html" + "category": "satisfiability", + "doc_path": "models/satisfiability/struct.Satisfiability.html" }, { - "name": "VertexCovering", + "name": "SpinGlass", + "variant": {}, + "category": "optimization", + "doc_path": "models/optimization/struct.SpinGlass.html" + }, + { + "name": "SpinGlass", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" }, - "category": "graph", - "doc_path": "models/graph/struct.VertexCovering.html" + "category": "optimization", + "doc_path": "models/optimization/struct.SpinGlass.html" + }, + { + "name": "SpinGlass", + "variant": { + "graph": "SimpleGraph", + "weight": "f64" + }, + "category": "optimization", + "doc_path": "models/optimization/struct.SpinGlass.html" } ], "edges": [ @@ -402,24 +402,29 @@ }, { "source": { - "name": "IndependentSet", + "name": "KColoring", "variant": { "graph": "SimpleGraph", + "k": "N", "weight": "i32" } }, "target": { - "name": "QUBO", + "name": "ILP", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "Unweighted" } }, "bidirectional": false, "overhead": [ { "field": "num_vars", - "formula": "num_vertices" + "formula": "num_vertices * num_colors" + }, + { + "field": "num_constraints", + "formula": "num_vertices + num_edges * num_colors" } ] }, @@ 
-428,15 +433,14 @@ "name": "KColoring", "variant": { "graph": "SimpleGraph", - "k": "N", - "weight": "i32" + "weight": "Unweighted" } }, "target": { - "name": "ILP", + "name": "QUBO", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, "bidirectional": false, @@ -444,19 +448,15 @@ { "field": "num_vars", "formula": "num_vertices * num_colors" - }, - { - "field": "num_constraints", - "formula": "num_vertices + num_edges * num_colors" } ] }, { "source": { - "name": "KColoring", + "name": "KSatisfiability", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" } }, "target": { @@ -470,13 +470,13 @@ "overhead": [ { "field": "num_vars", - "formula": "num_vertices * num_colors" + "formula": "num_vars" } ] }, { "source": { - "name": "KSatisfiability", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", "weight": "i32" @@ -493,20 +493,20 @@ "overhead": [ { "field": "num_vars", - "formula": "num_vars" + "formula": "num_vertices" } ] }, { "source": { - "name": "Matching", + "name": "MaximumMatching", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "SetPacking", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" @@ -526,172 +526,168 @@ }, { "source": { - "name": "Satisfiability", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "DominatingSet", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { "field": "num_vertices", - "formula": "3 * num_vars + num_clauses" + "formula": "num_sets" }, { "field": "num_edges", - "formula": "3 * num_vars + num_literals" + "formula": "num_sets" } ] }, { "source": { - "name": "Satisfiability", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" } }, 
"target": { - "name": "IndependentSet", + "name": "QUBO", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, "bidirectional": false, "overhead": [ { - "field": "num_vertices", - "formula": "num_literals" - }, - { - "field": "num_edges", - "formula": "num_literals^2" + "field": "num_vars", + "formula": "num_sets" } ] }, { "source": { - "name": "Satisfiability", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "KColoring", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { "field": "num_vertices", - "formula": "2 * num_vars + 5 * num_literals - 5 * num_clauses + 3" + "formula": "num_vertices" }, { - "field": "num_colors", - "formula": "3" + "field": "num_edges", + "formula": "num_edges" } ] }, { "source": { - "name": "Satisfiability", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "KSatisfiability", + "name": "MinimumSetCovering", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": true, + "bidirectional": false, "overhead": [ { - "field": "num_clauses", - "formula": "num_clauses + num_literals" + "field": "num_sets", + "formula": "num_vertices" }, { - "field": "num_vars", - "formula": "num_vars + num_literals" + "field": "num_elements", + "formula": "num_edges" } ] }, { "source": { - "name": "SetPacking", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" } }, "target": { - "name": "IndependentSet", + "name": "QUBO", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, - "bidirectional": true, + "bidirectional": false, "overhead": [ { - "field": "num_vertices", - "formula": "num_sets" - }, - { - "field": "num_edges", - "formula": 
"num_sets" + "field": "num_vars", + "formula": "num_vertices" } ] }, { "source": { - "name": "SetPacking", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" } }, "target": { - "name": "QUBO", + "name": "KColoring", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "i32" } }, "bidirectional": false, "overhead": [ { - "field": "num_vars", - "formula": "num_sets" + "field": "num_vertices", + "formula": "2 * num_vars + 5 * num_literals - 5 * num_clauses + 3" + }, + { + "field": "num_colors", + "formula": "3" } ] }, { "source": { - "name": "SpinGlass", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "MaxCut", + "name": "KSatisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" @@ -700,98 +696,102 @@ "bidirectional": true, "overhead": [ { - "field": "num_vertices", - "formula": "num_spins" + "field": "num_clauses", + "formula": "num_clauses + num_literals" }, { - "field": "num_edges", - "formula": "num_interactions" + "field": "num_vars", + "formula": "num_vars + num_literals" } ] }, { "source": { - "name": "SpinGlass", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "Unweighted" } }, "target": { - "name": "QUBO", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "Unweighted" } }, - "bidirectional": true, + "bidirectional": false, "overhead": [ { - "field": "num_vars", - "formula": "num_spins" + "field": "num_vertices", + "formula": "num_literals" + }, + { + "field": "num_edges", + "formula": "num_literals^2" } ] }, { "source": { - "name": "VertexCovering", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "IndependentSet", + "name": "MinimumDominatingSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": true, + 
"bidirectional": false, "overhead": [ { "field": "num_vertices", - "formula": "num_vertices" + "formula": "3 * num_vars + num_clauses" }, { "field": "num_edges", - "formula": "num_edges" + "formula": "3 * num_vars + num_literals" } ] }, { "source": { - "name": "VertexCovering", + "name": "SpinGlass", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "SetCovering", + "name": "MaxCut", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { - "field": "num_sets", - "formula": "num_vertices" + "field": "num_vertices", + "formula": "num_spins" }, { - "field": "num_elements", - "formula": "num_edges" + "field": "num_edges", + "formula": "num_interactions" } ] }, { "source": { - "name": "VertexCovering", + "name": "SpinGlass", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "f64" } }, "target": { @@ -801,11 +801,11 @@ "weight": "f64" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { "field": "num_vars", - "formula": "num_vertices" + "formula": "num_spins" } ] } diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index 1ded300ca..13387f837 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -23,38 +23,17 @@ #let problem-schemas = json("problem_schemas.json") -// Problem name abbreviations for theorem labels -#let name-abbrev = ( - "IndependentSet": "is", - "VertexCovering": "vc", - "MaxCut": "maxcut", - "KColoring": "coloring", - "DominatingSet": "dominatingset", - "Matching": "matching", - "Clique": "clique", - "SetPacking": "setpacking", - "SetCovering": "setcovering", - "SpinGlass": "spinglass", - "QUBO": "qubo", - "ILP": "ilp", - "Satisfiability": "sat", - "KSatisfiability": "ksat", - "CircuitSAT": "circuit", - "Factoring": "factoring", - "GridGraph": "gridgraph", -) - // Problem display names for theorem headers #let display-name = ( - "IndependentSet": "IS", - "VertexCovering": 
"VC", + "MaximumIndependentSet": "MIS", + "MinimumVertexCover": "MVC", "MaxCut": "Max-Cut", "KColoring": "Coloring", - "DominatingSet": "Dominating Set", - "Matching": "Matching", - "Clique": "Clique", - "SetPacking": "Set Packing", - "SetCovering": "Set Covering", + "MinimumDominatingSet": "Min Dominating Set", + "MaximumMatching": "Max Matching", + "MaximumClique": "Max Clique", + "MaximumSetPacking": "Max Set Packing", + "MinimumSetCovering": "Min Set Covering", "SpinGlass": "Spin Glass", "QUBO": "QUBO", "ILP": "ILP", @@ -62,73 +41,33 @@ "KSatisfiability": [$k$-SAT], "CircuitSAT": "CircuitSAT", "Factoring": "Factoring", - "GridGraph": "GridGraph IS", + "GridGraph": "GridGraph MIS", ) -// Problem name to definition label mapping -#let def-label-map = ( - "IndependentSet": "def:independent-set", - "VertexCovering": "def:vertex-cover", - "MaxCut": "def:max-cut", - "KColoring": "def:coloring", - "DominatingSet": "def:dominating-set", - "Matching": "def:matching", - "Clique": "def:clique", - "SetPacking": "def:set-packing", - "SetCovering": "def:set-covering", - "SpinGlass": "def:spin-glass", - "QUBO": "def:qubo", - "ILP": "def:ilp", - "Satisfiability": "def:satisfiability", - "KSatisfiability": "def:k-sat", - "CircuitSAT": "def:circuit-sat", - "Factoring": "def:factoring", - "GridGraph": "def:independent-set", -) +// Definition label: "def:" — each definition block must have a matching label -// Special case mappings where JSON direction differs from theorem label -#let label-overrides = ( - "SetPacking->IndependentSet": "thm:is-to-setpacking", - "VertexCovering->IndependentSet": "thm:is-to-vc", -) - -// Generate theorem label from source/target names (canonical direction) +// Generate theorem label from source/target names (uses full names for consistency) #let reduction-label(source, target) = { - // Check for override first - let key = source + "->" + target - if key in label-overrides { - return label(label-overrides.at(key)) - } - let src = 
name-abbrev.at(source, default: lower(source)) - let tgt = name-abbrev.at(target, default: lower(target)) - label("thm:" + src + "-to-" + tgt) + label("thm:" + source + "-to-" + target) } +// State for tracking which reduction rules are described in the paper +#let covered-rules = state("covered-rules", ()) + // Extract reductions for a problem from graph-data (returns (name, label) pairs) -// For bidirectional edges, uses the canonical (stored) direction for the label #let get-reductions-to(problem-name) = { - // Direct edges: source = problem-name - let direct = graph-data.edges + graph-data.edges .filter(e => e.source.name == problem-name) .map(e => (name: e.target.name, lbl: reduction-label(e.source.name, e.target.name))) - // Reverse of bidirectional edges: target = problem-name, bidirectional = true - let reverse = graph-data.edges - .filter(e => e.target.name == problem-name and e.bidirectional) - .map(e => (name: e.source.name, lbl: reduction-label(e.source.name, e.target.name))) - (direct + reverse).dedup(key: e => e.name) + .dedup(key: e => e.name) } #let get-reductions-from(problem-name) = { - // Direct edges: target = problem-name - let direct = graph-data.edges + graph-data.edges .filter(e => e.target.name == problem-name) .map(e => (name: e.source.name, lbl: reduction-label(e.source.name, e.target.name))) - // Reverse of bidirectional edges: source = problem-name, bidirectional = true - let reverse = graph-data.edges - .filter(e => e.source.name == problem-name and e.bidirectional) - .map(e => (name: e.target.name, lbl: reduction-label(e.source.name, e.target.name))) - (direct + reverse).dedup(key: e => e.name) + .dedup(key: e => e.name) } // Render a single reduction with link @@ -179,22 +118,8 @@ ] } -// Extract primary variable count from an instance dict. 
-#let instance-vars(inst) = { - if "num_variables" in inst { inst.num_variables } - else if "num_vertices" in inst { inst.num_vertices } - else if "num_vars" in inst { inst.num_vars } - else if "num_sets" in inst { inst.num_sets } - else if "num_spins" in inst { inst.num_spins } - else if "num_gates" in inst { inst.num_gates } - else if "num_bits_first" in inst and "num_bits_second" in inst { inst.num_bits_first + inst.num_bits_second } - else { 0 } -} - // Render a concrete example box from JSON data (unified schema) #let reduction-example(data, caption: none, body) = { - let src-vars = instance-vars(data.source.instance) - let tgt-vars = instance-vars(data.target.instance) block( width: 100%, inset: (x: 1em, y: 0.8em), @@ -205,10 +130,9 @@ text(weight: "bold")[Concrete Example: #caption] parbreak() } - *Source:* #data.source.problem with #src-vars variables + *Source:* #data.source.problem #h(1em) - *Target:* #data.target.problem with #tgt-vars variables \ - *Overhead:* #if src-vars > 0 and tgt-vars > 0 [#calc.round(tgt-vars / src-vars, digits: 1)x variable growth] else [—] + *Target:* #data.target.problem #if body != none { parbreak(); body } ] } @@ -224,15 +148,38 @@ base_level: 1, ) -// Look up overhead for a reduction edge from graph-data -#let get-overhead(source, target) = { - // Try forward direction +// Problem definition wrapper: auto-adds schema, reductions list, and label +#let problem-def(name, title, body) = { + [#definition(title)[ + #body + #render-schema(name) + #render-reductions(name) + ]] +} + +// Find edge in graph-data by source/target names +#let find-edge(source, target) = { let edge = graph-data.edges.find(e => e.source.name == source and e.target.name == target) - // Try reverse (for bidirectional) if edge == none { edge = graph-data.edges.find(e => e.source.name == target and e.target.name == source) } - if edge != none and edge.overhead.len() > 0 { edge.overhead } else { none } + edge +} + +// Build display name from a graph-data node 
(name + variant) +#let variant-display(node) = { + let base = display-name.at(node.name) + if node.variant.len() == 0 { return base } + let parts = () + if "graph" in node.variant and node.variant.graph != "SimpleGraph" { + parts.push(node.variant.graph) + } + if "weight" in node.variant { + if node.variant.weight == "i32" { parts.push("weighted") } + else if node.variant.weight == "f64" { parts.push("real-weighted") } + } + if "k" in node.variant { parts.push[$k$-ary] } + if parts.len() > 0 { [#base (#parts.join(", "))] } else { base } } // Format overhead fields as inline text @@ -244,33 +191,33 @@ // Unified function for reduction rules: theorem + proof + optional example #let reduction-rule( source, target, - bidirectional: false, - source-display: none, - target-display: none, example: none, example-caption: none, extra: none, theorem-body, proof-body, ) = { - let arrow = if bidirectional { sym.arrow.l.r } else { sym.arrow.r } - let src-disp = if source-display != none { source-display } + let arrow = sym.arrow.r + let edge = find-edge(source, target) + let src-disp = if edge != none { variant-display(edge.source) } else { display-name.at(source) } - let tgt-disp = if target-display != none { target-display } + let tgt-disp = if edge != none { variant-display(edge.target) } else { display-name.at(target) } - let src-def = def-label-map.at(source) - let tgt-def = def-label-map.at(target) - let problems = if src-def == tgt-def { - [_Problem:_ #ref(label(src-def)).] - } else { - [_Problems:_ #ref(label(src-def)), #ref(label(tgt-def)).] 
- } - let overhead = get-overhead(source, target) - let src-abbr = name-abbrev.at(source, default: lower(source)) - let tgt-abbr = name-abbrev.at(target, default: lower(target)) - let thm-lbl = label("thm:" + src-abbr + "-to-" + tgt-abbr) + let src-lbl = label("def:" + source) + let tgt-lbl = label("def:" + target) + let overhead = if edge != none and edge.overhead.len() > 0 { edge.overhead } else { none } + let thm-lbl = label("thm:" + source + "-to-" + target) + + covered-rules.update(old => old + ((source, target),)) [#theorem[ - *(#src-disp #arrow #tgt-disp)* #theorem-body [#problems] + *(#src-disp #arrow #tgt-disp)* #theorem-body + #context { + let refs = () + if query(src-lbl).len() > 0 { refs.push(ref(src-lbl)) } + if source != target and query(tgt-lbl).len() > 0 { refs.push(ref(tgt-lbl)) } + if refs.len() == 1 { [_Problem:_ #refs.at(0).] } + else if refs.len() > 1 { [_Problems:_ #refs.join(", ").] } + } #if overhead != none { linebreak(); format-overhead(overhead) } ] #thm-lbl] @@ -307,177 +254,206 @@ A _reduction_ from problem $A$ to problem $B$, denoted $A arrow.long B$, is a po == Notation -We use the following notation throughout. An _undirected graph_ $G = (V, E)$ consists of a vertex set $V$ and edge set $E subset.eq binom(V, 2)$. For a set $S$, $overline(S)$ or $V backslash S$ denotes its complement. We write $|S|$ for cardinality. For Boolean variables, $overline(x)$ denotes negation ($not x$). A _literal_ is a variable $x$ or its negation $overline(x)$. A _clause_ is a disjunction of literals. A formula in _conjunctive normal form_ (CNF) is a conjunction of clauses. We abbreviate Independent Set as IS, Vertex Cover as VC, and use $n$ for problem size, $m$ for number of clauses, and $k_j = |C_j|$ for clause size. +We use the following notation throughout. An _undirected graph_ $G = (V, E)$ consists of a vertex set $V$ and edge set $E subset.eq binom(V, 2)$. For a set $S$, $overline(S)$ or $V backslash S$ denotes its complement. 
We write $|S|$ for cardinality. A _clique_ in $G$ is a subset $K subset.eq V$ where every pair of distinct vertices is adjacent: $(u, v) in E$ for all distinct $u, v in K$. A _unit disk graph_ is a graph where vertices are points on a 2D lattice and $(u, v) in E$ iff $d(u, v) <= r$ for some radius $r$; a _King's subgraph_ uses the 8-connectivity square grid with $r approx 1.5$. For Boolean variables, $overline(x)$ denotes negation ($not x$). A _literal_ is a variable $x$ or its negation $overline(x)$. A _clause_ is a disjunction of literals. A formula in _conjunctive normal form_ (CNF) is a conjunction of clauses. We abbreviate Independent Set as IS, Vertex Cover as VC, and use $n$ for problem size, $m$ for number of clauses, and $k_j = |C_j|$ for clause size. = Problem Definitions +Each problem definition follows this structure: + +#block( + inset: (x: 1em, y: 0.8em), + fill: rgb("#f8f8f8"), + stroke: (left: 2pt + rgb("#4a86e8")), +)[ + *Definition N (Problem Name).* Formal problem statement defining input, constraints, and objective. + + #block( + stroke: (left: 2pt + luma(180)), + inset: (left: 8pt), + )[ + #set text(size: 9pt) + #table( + columns: (auto, 1fr), + inset: (x: 6pt, y: 3pt), + align: (left, left), + stroke: none, + table.header(text(fill: luma(30), raw("ProblemName"))), + table.hline(stroke: 0.3pt + luma(200)), + text(fill: luma(60), raw("field_name")), text(fill: luma(60), raw("Field description from JSON schema")), + ) + ] + + #set text(size: 9pt, fill: luma(60)) + _Reduces to:_ ProblemA, ProblemB. \ + _Reduces from:_ ProblemC. +] + +The gray schema table shows the JSON field names used in the library's data structures. The reduction links at the bottom connect to the corresponding theorems in @sec:reductions. + + + == Graph Problems In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| = n$ vertices and $|E|$ edges. 
-#definition("Independent Set (IS)")[ +#problem-def("MaximumIndependentSet", "Independent Set (IS)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ maximizing $sum_(v in S) w(v)$ such that no two vertices in $S$ are adjacent: $forall u, v in S: (u, v) in.not E$. +] - #render-schema("IndependentSet") - #render-reductions("IndependentSet") -] - -#definition("Vertex Cover (VC)")[ +#problem-def("MinimumVertexCover", "Vertex Cover (VC)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ such that every edge has at least one endpoint in $S$: $forall (u, v) in E: u in S or v in S$. +] - #render-schema("VertexCovering") - #render-reductions("VertexCovering") -] - -#definition("Max-Cut")[ +#problem-def("MaxCut", "Max-Cut")[ Given $G = (V, E)$ with weights $w: E -> RR$, find partition $(S, overline(S))$ maximizing $sum_((u,v) in E: u in S, v in overline(S)) w(u, v)$. +] - #render-schema("MaxCut") - #render-reductions("MaxCut") -] - -#definition("Graph Coloring")[ +#problem-def("KColoring", "Graph Coloring")[ Given $G = (V, E)$ and $k$ colors, find $c: V -> {1, ..., k}$ minimizing $|{(u, v) in E : c(u) = c(v)}|$. +] - #render-schema("KColoring") - #render-reductions("KColoring") -] - -#definition("Dominating Set")[ +#problem-def("MinimumDominatingSet", "Dominating Set")[ Given $G = (V, E)$ with weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ s.t. $forall v in V: v in S or exists u in S: (u, v) in E$. +] - #render-schema("DominatingSet") - #render-reductions("DominatingSet") -] - -#definition("Matching")[ +#problem-def("MaximumMatching", "Matching")[ Given $G = (V, E)$ with weights $w: E -> RR$, find $M subset.eq E$ maximizing $sum_(e in M) w(e)$ s.t. $forall e_1, e_2 in M: e_1 inter e_2 = emptyset$. 
+] - #render-schema("Matching") - #render-reductions("Matching") -] - -#definition("Clique")[ - Given a graph $G = (V, E)$ and an integer $k$, the *Clique* problem asks whether there exists a subset $K subset.eq V$ of size at least $k$ such that every pair of distinct vertices in $K$ is adjacent, i.e., $(u, v) in E$ for all distinct $u, v in K$. - - #render-reductions("Clique") -] - -#definition("Unit Disk Graph (Grid Graph)")[ - A graph $G = (V, E)$ where vertices $V$ are points on a 2D lattice and $(u, v) in E$ iff the Euclidean distance $d(u, v) <= r$ for some radius $r$. A _King's subgraph_ uses the King's graph lattice (8-connectivity square grid) with $r approx 1.5$. +#problem-def("MaximumClique", "Maximum Clique")[ + Given $G = (V, E)$, find $K subset.eq V$ maximizing $|K|$ such that all pairs in $K$ are adjacent: $forall u, v in K: (u, v) in E$. Equivalent to MIS on the complement graph $overline(G)$. ] + == Set Problems -#definition("Set Packing")[ +#problem-def("MaximumSetPacking", "Set Packing")[ Given universe $U$, collection $cal(S) = {S_1, ..., S_m}$ with $S_i subset.eq U$, weights $w: cal(S) -> RR$, find $cal(P) subset.eq cal(S)$ maximizing $sum_(S in cal(P)) w(S)$ s.t. $forall S_i, S_j in cal(P): S_i inter S_j = emptyset$. +] - #render-schema("SetPacking") - #render-reductions("SetPacking") -] - -#definition("Set Covering")[ +#problem-def("MinimumSetCovering", "Set Covering")[ Given universe $U$, collection $cal(S)$ with weights $w: cal(S) -> RR$, find $cal(C) subset.eq cal(S)$ minimizing $sum_(S in cal(C)) w(S)$ s.t. $union.big_(S in cal(C)) S = U$. 
- - #render-schema("SetCovering") - #render-reductions("SetCovering") -] +] == Optimization Problems -#definition("Spin Glass (Ising Model)")[ +#problem-def("SpinGlass", "Spin Glass (Ising Model)")[ Given $n$ spin variables $s_i in {-1, +1}$, pairwise couplings $J_(i j) in RR$, and external fields $h_i in RR$, minimize the Hamiltonian (energy function): $H(bold(s)) = -sum_((i,j)) J_(i j) s_i s_j - sum_i h_i s_i$. +] - #render-schema("SpinGlass") - #render-reductions("SpinGlass") -] - -#definition("QUBO")[ +#problem-def("QUBO", "QUBO")[ Given $n$ binary variables $x_i in {0, 1}$, upper-triangular matrix $Q in RR^(n times n)$, minimize $f(bold(x)) = sum_(i=1)^n Q_(i i) x_i + sum_(i < j) Q_(i j) x_i x_j$ (using $x_i^2 = x_i$ for binary variables). +] - #render-schema("QUBO") - #render-reductions("QUBO") -] - -#definition("Integer Linear Programming (ILP)")[ +#problem-def("ILP", "Integer Linear Programming (ILP)")[ Given $n$ integer variables $bold(x) in ZZ^n$, constraint matrix $A in RR^(m times n)$, bounds $bold(b) in RR^m$, and objective $bold(c) in RR^n$, find $bold(x)$ minimizing $bold(c)^top bold(x)$ subject to $A bold(x) <= bold(b)$ and variable bounds. - - #render-schema("ILP") - #render-reductions("ILP") -] +] == Satisfiability Problems -#definition("SAT")[ +#problem-def("Satisfiability", "SAT")[ Given a CNF formula $phi = and.big_(j=1)^m C_j$ with $m$ clauses over $n$ Boolean variables, where each clause $C_j = or.big_i ell_(j i)$ is a disjunction of literals, find an assignment $bold(x) in {0, 1}^n$ such that $phi(bold(x)) = 1$ (all clauses satisfied). +] - #render-schema("Satisfiability") - #render-reductions("Satisfiability") -] - -#definition([$k$-SAT])[ +#problem-def("KSatisfiability", [$k$-SAT])[ SAT with exactly $k$ literals per clause. 
+] - #render-schema("KSatisfiability") - #render-reductions("KSatisfiability") -] - -#definition("Circuit-SAT")[ +#problem-def("CircuitSAT", "Circuit-SAT")[ Given a Boolean circuit $C$ composed of logic gates (AND, OR, NOT, XOR) with $n$ input variables, find an input assignment $bold(x) in {0,1}^n$ such that $C(bold(x)) = 1$. +] - #render-schema("CircuitSAT") - #render-reductions("CircuitSAT") -] - -#definition("Factoring")[ +#problem-def("Factoring", "Factoring")[ Given a composite integer $N$ and bit sizes $m, n$, find integers $p in [2, 2^m - 1]$ and $q in [2, 2^n - 1]$ such that $p times q = N$. Here $p$ has $m$ bits and $q$ has $n$ bits. +] - #render-schema("Factoring") - #render-reductions("Factoring") -] +// Completeness check: warn about problem types in JSON but missing from paper +#{ + let json-models = { + let names = graph-data.nodes.map(n => n.name) + let unique = () + for n in names { if n not in unique { unique.push(n) } } + unique + } + let defined = display-name.keys() + let missing = json-models.filter(n => n not in defined) + if missing.len() > 0 { + block(width: 100%, inset: (x: 1em, y: 0.5em), fill: rgb("#fff3cd"), stroke: (left: 3pt + rgb("#ffc107")))[ + #text(fill: rgb("#856404"), weight: "bold")[Warning: Missing problem definitions for:] + #text(fill: rgb("#856404"))[ #missing.join(", ")] + ] + } +} = Reductions +Each reduction theorem follows this structure: + +#block( + inset: (x: 1em, y: 0.8em), + fill: rgb("#f8f8f8"), +)[ + *Theorem N (Source $arrow.r$ Target).* Brief statement of the reduction's key insight. _Problem:_ Definition M. \ + _Overhead:_ $O("complexity")$ — describes how the target instance size grows relative to the source. +] + +#block( + inset: (x: 1em, y: 0.8em), +)[ + _Proof._ Detailed construction showing: (1) how to transform the source instance to target instance, (2) correctness argument, (3) variable mapping between problems, (4) solution extraction procedure. 
#h(1fr) $square$ +] + +#block( + width: 100%, + inset: (x: 1em, y: 0.8em), + fill: rgb("#f0f7ff"), + stroke: (left: 2pt + rgb("#4a86e8")), +)[ + *Concrete Example:* Description \ + *Source:* SourceProblem #h(1em) *Target:* TargetProblem \ + Additional details showing the transformation on a specific instance. +] + +The theorem links back to problem definitions in @sec:problems. Concrete examples (when present) demonstrate the reduction on small instances with verifiable solutions. + == Trivial Reductions -#let is_vc = load-example("is_to_vc") -#let is_vc_r = load-results("is_to_vc") -#let is_vc_sol = is_vc_r.solutions.at(0) -#reduction-rule("IndependentSet", "VertexCovering", - bidirectional: true, - example: "is_to_vc", - example-caption: [Path graph $P_4$: IS $arrow.l.r$ VC], +#let mvc_mis = load-example("mvc_to_mis") +#let mvc_mis_r = load-results("mvc_to_mis") +#let mvc_mis_sol = mvc_mis_r.solutions.at(0) +#reduction-rule("MinimumVertexCover", "MaximumIndependentSet", + example: "mvc_to_mis", + example-caption: [Path graph $P_4$: VC $arrow.l.r$ IS], extra: [ - Source IS: $S = {#is_vc_sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.source_config.filter(x => x == 1).len()) #h(1em) - Target VC: $C = {#is_vc_sol.target_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.target_config.filter(x => x == 1).len()) \ - $|"IS"| + |"VC"| = #instance-vars(is_vc.source.instance) = |V|$ #sym.checkmark + Source VC: $C = {#mvc_mis_sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #mvc_mis_sol.source_config.filter(x => x == 1).len()) #h(1em) + Target IS: $S = {#mvc_mis_sol.target_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #mvc_mis_sol.target_config.filter(x => x == 1).len()) \ + $|"VC"| + |"IS"| = #mvc_mis.source.instance.num_vertices = |V|$ #sym.checkmark ], )[ $S subset.eq 
V$ is independent iff $V backslash S$ is a vertex cover, with $|"IS"| + |"VC"| = |V|$. ][ - ($arrow.r.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint lies in $S$, so $V backslash S$ covers all edges. ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. _Variable mapping:_ Given IS instance $(G, w)$, create VC instance $(G, w)$ with identical graph and weights. Solution extraction: for VC solution $C$, return $S = V backslash C$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. + ($arrow.r.double$) If $C$ is a vertex cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. ($arrow.l.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint is in $S$, so $V backslash S$ covers all edges. _Variable mapping:_ Given VC instance $(G, w)$, create IS instance $(G, w)$ with identical graph and weights. Solution extraction: for IS solution $S$, return $C = V backslash S$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. ] -#reduction-rule("IndependentSet", "SetPacking")[ - Construct $U = E$, $S_v = {e in E : v in e}$, $w(S_v) = w(v)$. Then $I$ is independent iff ${S_v : v in I}$ is a packing. +#reduction-rule("MaximumSetPacking", "MaximumIndependentSet")[ + Construct intersection graph $G' = (V', E')$ where $V' = cal(S)$ and $(S_i, S_j) in E'$ iff $S_i inter S_j != emptyset$, with $w(v_i) = w(S_i)$. Max packing $equiv$ Max IS on $G'$. ][ - Independence implies disjoint incident edge sets; conversely, disjoint edge sets imply no shared edges. _Variable mapping:_ Universe $U = E$ (edges), sets $S_v = {e in E : v in e}$ (edges incident to vertex $v$), weights $w(S_v) = w(v)$. Solution extraction: for packing ${S_v : v in P}$, return IS $= P$ (the vertices whose sets were packed). + Overlapping sets become adjacent vertices; disjoint sets become non-adjacent. 
A packing (mutually disjoint) maps to an IS (mutually non-adjacent). _Variable mapping:_ Vertices $= {S_1, ..., S_m}$, edges $= {(S_i, S_j) : S_i inter S_j != emptyset}$, $w(v_i) = w(S_i)$. Solution extraction: for IS $I subset.eq V'$, return packing $cal(P) = {S_i : v_i in I}$. ] -#reduction-rule("VertexCovering", "SetCovering")[ +#reduction-rule("MinimumVertexCover", "MinimumSetCovering")[ Construct $U = {0, ..., |E|-1}$, $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Then $C$ is a cover iff ${S_v : v in C}$ covers $U$. ][ Each vertex's edge set becomes a subset; the cover condition (every edge covered) maps to the covering condition (every universe element in some selected set). _Variable mapping:_ Universe $U = {0, ..., |E|-1}$ (edge indices), $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Solution extraction: for covering ${S_v : v in C}$, return VC $= C$. ] -#reduction-rule("Matching", "SetPacking")[ +#reduction-rule("MaximumMatching", "MaximumSetPacking")[ Construct $U = V$, $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Then $M$ is a matching iff ${S_e : e in M}$ is a packing. ][ Each edge becomes a set of its endpoints; disjoint edges have disjoint endpoint sets. _Variable mapping:_ Universe $U = V$ (vertices), $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Solution extraction: for packing ${S_e : e in P}$, return matching $= P$ (the edges whose endpoint sets were packed). ] #reduction-rule("SpinGlass", "QUBO", - bidirectional: true, example: "spinglass_to_qubo", example-caption: [2-spin system with coupling $J_(01) = -1$, fields $h = (0.5, -0.5)$], )[ @@ -492,20 +468,20 @@ The _penalty method_ @glover2019 @lucas2014 converts a constrained optimization $ f(bold(x)) = "obj"(bold(x)) + P sum_k g_k (bold(x))^2 $ where $P$ is a penalty weight large enough that any constraint violation costs more than the entire objective range. 
Since $g_k (bold(x))^2 >= 0$ with equality iff $g_k (bold(x)) = 0$, minimizers of $f$ are feasible and optimal for the original problem. Because binary variables satisfy $x_i^2 = x_i$, the resulting $f$ is a quadratic in $bold(x)$, i.e.\ a QUBO. -#let is_qubo = load-example("is_to_qubo") -#let is_qubo_r = load-results("is_to_qubo") -#reduction-rule("IndependentSet", "QUBO", - example: "is_to_qubo", +#let mis_qubo = load-example("mis_to_qubo") +#let mis_qubo_r = load-results("mis_to_qubo") +#reduction-rule("MaximumIndependentSet", "QUBO", + example: "mis_to_qubo", example-caption: [IS on path $P_4$ to QUBO], extra: [ - *Source edges:* $= {#is_qubo.source.instance.edges.map(e => $(#e.at(0), #e.at(1))$).join(", ")}$ \ - *QUBO matrix* ($Q in RR^(#is_qubo.target.instance.num_vars times #is_qubo.target.instance.num_vars)$): - $ Q = #math.mat(..is_qubo.target.instance.matrix.map(row => row.map(v => { + *Source edges:* $= {#mis_qubo.source.instance.edges.map(e => $(#e.at(0), #e.at(1))$).join(", ")}$ \ + *QUBO matrix* ($Q in RR^(#mis_qubo.target.instance.num_vars times #mis_qubo.target.instance.num_vars)$): + $ Q = #math.mat(..mis_qubo.target.instance.matrix.map(row => row.map(v => { let r = calc.round(v, digits: 0) [#r] }))) $ - *Optimal IS* (size #is_qubo_r.solutions.at(0).source_config.filter(x => x == 1).len()): - #is_qubo_r.solutions.map(sol => { + *Optimal IS* (size #mis_qubo_r.solutions.at(0).source_config.filter(x => x == 1).len()): + #mis_qubo_r.solutions.map(sol => { let verts = sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)) $\{#verts.join(", ")\}$ }).join(", ") @@ -520,7 +496,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Correctness._ If $bold(x)$ has any adjacent pair $(x_i = 1, x_j = 1)$ with $(i,j) in E$, the penalty $P > sum_i w_i >= -sum_i Q_(i i) x_i$ exceeds the maximum objective gain, so $bold(x)$ is not a minimizer. 
Among independent sets ($x_i x_j = 0$ for all edges), $f(bold(x)) = -sum_(i in S) w_i$, minimized exactly when $S$ is a maximum-weight IS. ] -#reduction-rule("VertexCovering", "QUBO")[ +#reduction-rule("MinimumVertexCover", "QUBO")[ Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q$ with $Q_(i i) = w_i - P dot "deg"(i)$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$ and $"deg"(i)$ is the degree of vertex $i$. ][ _Construction._ The VC objective is: minimize $sum_i w_i x_i$ subject to $x_i + x_j >= 1$ for $(i,j) in E$. Applying the penalty method (@sec:penalty-method), the constraint $x_i + x_j >= 1$ is violated iff $x_i = x_j = 0$, with penalty $(1 - x_i)(1 - x_j)$: @@ -546,15 +522,13 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ For each vertex $v$, find $c$ with $x_(v,c) = 1$. ] -#reduction-rule("SetPacking", "QUBO")[ +#reduction-rule("MaximumSetPacking", "QUBO")[ Equivalent to IS on the intersection graph: $Q_(i i) = -w_i$ and $Q_(i j) = P$ for overlapping sets $i, j$ ($i < j$), where $P = 1 + sum_i w_i$. ][ Two sets conflict iff they share an element. The intersection graph has sets as vertices and edges between conflicting pairs. Applying the penalty method (@sec:penalty-method) yields the same QUBO as IS on this graph: diagonal rewards selection, off-diagonal penalizes overlap. Correctness follows from the IS→QUBO proof. ] -#reduction-rule("KSatisfiability", "QUBO", - source-display: "2-SAT", -)[ +#reduction-rule("KSatisfiability", "QUBO")[ Given a Max-2-SAT instance with $m$ clauses over $n$ variables, construct upper-triangular $Q in RR^(n times n)$ where each clause $(ell_i or ell_j)$ contributes a penalty gadget encoding its unique falsifying assignment. ][ _Construction._ Applying the penalty method (@sec:penalty-method), each 2-literal clause has exactly one falsifying assignment (both literals false). 
The penalty for that assignment is a quadratic function of $x_i, x_j$: @@ -573,9 +547,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m Summing over all clauses, $f(bold(x)) = sum_j "penalty"_j (bold(x))$ counts falsified clauses. Minimizers of $f$ maximize satisfied clauses. ] -#reduction-rule("ILP", "QUBO", - source-display: "Binary ILP", -)[ +#reduction-rule("ILP", "QUBO")[ Given binary ILP: maximize $bold(c)^top bold(x)$ subject to $A bold(x) = bold(b)$, $bold(x) in {0,1}^n$, construct upper-triangular $Q = -"diag"(bold(c) + 2P bold(b)^top A) + P A^top A$ where $P = 1 + ||bold(c)||_1 + ||bold(b)||_1$. ][ _Step 1: Normalize constraints._ Convert inequalities to equalities using slack variables: $bold(a)_k^top bold(x) <= b_k$ becomes $bold(a)_k^top bold(x) + sum_(s=0)^(S_k - 1) 2^s y_(k,s) = b_k$ where $S_k = ceil(log_2 (b_k + 1))$ slack bits. For $>=$ constraints, the slack has a negative sign. The extended system is $A' bold(x)' = bold(b)$ with $bold(x)' = (bold(x), bold(y)) in {0,1}^(n')$. For minimization, negate $bold(c)$ to convert to maximization. 
@@ -593,15 +565,15 @@ where $P$ is a penalty weight large enough that any constraint violation costs m == Non-Trivial Reductions -#let sat_is = load-example("sat_to_is") -#let sat_is_r = load-results("sat_to_is") -#let sat_is_sol = sat_is_r.solutions.at(0) -#reduction-rule("Satisfiability", "IndependentSet", - example: "sat_to_is", +#let sat_mis = load-example("sat_to_mis") +#let sat_mis_r = load-results("sat_to_mis") +#let sat_mis_sol = sat_mis_r.solutions.at(0) +#reduction-rule("Satisfiability", "MaximumIndependentSet", + example: "sat_to_mis", example-caption: [$phi = (x_1 or x_2) and (not x_1 or x_3) and (x_2 or not x_3)$], extra: [ - SAT assignment: $x_1=#sat_is_sol.source_config.at(0), x_2=#sat_is_sol.source_config.at(1), x_3=#sat_is_sol.source_config.at(2)$ #h(1em) - IS graph: #sat_is.target.instance.num_vertices vertices, #sat_is.target.instance.num_edges edges (one vertex per literal occurrence) + SAT assignment: $x_1=#sat_mis_sol.source_config.at(0), x_2=#sat_mis_sol.source_config.at(1), x_3=#sat_mis_sol.source_config.at(2)$ #h(1em) + IS graph: #sat_mis.target.instance.num_vertices vertices, #sat_mis.target.instance.num_edges edges (one vertex per literal occurrence) ], )[ @karp1972 Given CNF $phi$ with $m$ clauses, construct graph $G$ such that $phi$ is satisfiable iff $G$ has an IS of size $m$. @@ -617,9 +589,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ For $v_(j,i) in S$ with literal $x_k$: set $x_k = 1$; for $overline(x_k)$: set $x_k = 0$. ] -#reduction-rule("Satisfiability", "KColoring", - target-display: "3-Coloring", -)[ +#reduction-rule("Satisfiability", "KColoring")[ @garey1979 Given CNF $phi$, construct graph $G$ such that $phi$ is satisfiable iff $G$ is 3-colorable. ][ _Construction._ (1) Base triangle: TRUE, FALSE, AUX vertices with all pairs connected. (2) Variable gadget for $x_i$: vertices $"pos"_i$, $"neg"_i$ connected to each other and to AUX. 
(3) Clause gadget: for $(ell_1 or ... or ell_k)$, apply OR-gadgets iteratively producing output $o$, then connect $o$ to FALSE and AUX. @@ -629,7 +599,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ Set $x_i = 1$ iff $"color"("pos"_i) = "color"("TRUE")$. ] -#reduction-rule("Satisfiability", "DominatingSet")[ +#reduction-rule("Satisfiability", "MinimumDominatingSet")[ @garey1979 Given CNF $phi$ with $n$ variables and $m$ clauses, $phi$ is satisfiable iff the constructed graph has a dominating set of size $n$. ][ _Construction._ (1) Variable triangle for $x_i$: vertices $"pos"_i = 3i$, $"neg"_i = 3i+1$, $"dum"_i = 3i+2$ forming a triangle. (2) Clause vertex $c_j = 3n+j$ connected to $"pos"_i$ if $x_i in C_j$, to $"neg"_i$ if $overline(x_i) in C_j$. @@ -639,9 +609,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ Set $x_i = 1$ if $"pos"_i$ selected; $x_i = 0$ if $"neg"_i$ selected. ] -#reduction-rule("Satisfiability", "KSatisfiability", - bidirectional: true, -)[ +#reduction-rule("Satisfiability", "KSatisfiability")[ @cook1971 @garey1979 Any SAT formula converts to $k$-SAT ($k >= 3$) preserving satisfiability. ][ _Small clauses ($|C| < k$):_ Pad $(ell_1 or ... or ell_r)$ with auxiliary $y$: $(ell_1 or ... or ell_r or y or overline(y) or ...)$ to length $k$. @@ -688,9 +656,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m _Solution extraction._ $p = sum_i p_i 2^(i-1)$, $q = sum_j q_j 2^(j-1)$. ] -#reduction-rule("SpinGlass", "MaxCut", - bidirectional: true, -)[ +#reduction-rule("SpinGlass", "MaxCut")[ @barahona1982 @lucas2014 Ground states of Ising models correspond to maximum cuts. ][ _MaxCut $arrow.r$ SpinGlass:_ Set $J_(i j) = w_(i j)$, $h_i = 0$. Maximizing cut equals minimizing $-sum J_(i j) s_i s_j$ since $s_i s_j = -1$ when $s_i != s_j$. 
@@ -741,43 +707,43 @@ where $P$ is a penalty weight large enough that any constraint violation costs m The following reductions to Integer Linear Programming are straightforward formulations where problem constraints map directly to linear inequalities. -#reduction-rule("IndependentSet", "ILP")[ +#reduction-rule("MaximumIndependentSet", "ILP")[ The maximum-weight IS problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. ][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in E$. Objective: maximize $sum_v w_v x_v$. _Solution extraction:_ $S = {v : x_v = 1}$. ] -#reduction-rule("VertexCovering", "ILP")[ +#reduction-rule("MinimumVertexCover", "ILP")[ The minimum-weight VC problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. ][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v >= 1$ for each $(u, v) in E$. Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $C = {v : x_v = 1}$. ] -#reduction-rule("Matching", "ILP")[ +#reduction-rule("MaximumMatching", "ILP")[ The maximum-weight matching reduces to binary ILP with $|E|$ variables and $|V|$ constraints. ][ _Construction._ Variables: $x_e in {0, 1}$ for each $e in E$. Constraints: $sum_(e in.rev v) x_e <= 1$ for each $v in V$. Objective: maximize $sum_e w_e x_e$. _Solution extraction:_ $M = {e : x_e = 1}$. ] -#reduction-rule("SetPacking", "ILP")[ +#reduction-rule("MaximumSetPacking", "ILP")[ Set packing reduces to binary ILP with $|cal(S)|$ variables and at most $binom(|cal(S)|, 2)$ constraints. ][ _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $x_i + x_j <= 1$ for each overlapping pair $S_i, S_j in cal(S)$ with $S_i inter S_j != emptyset$. Objective: maximize $sum_i w_i x_i$. _Solution extraction:_ $cal(P) = {S_i : x_i = 1}$. 
] -#reduction-rule("SetCovering", "ILP")[ +#reduction-rule("MinimumSetCovering", "ILP")[ Set covering reduces to binary ILP with $|cal(S)|$ variables and $|U|$ constraints. ][ _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $sum_(S_i in.rev u) x_i >= 1$ for each $u in U$. Objective: minimize $sum_i w_i x_i$. _Solution extraction:_ $cal(C) = {S_i : x_i = 1}$. ] -#reduction-rule("DominatingSet", "ILP")[ +#reduction-rule("MinimumDominatingSet", "ILP")[ Dominating set reduces to binary ILP with $|V|$ variables and $|V|$ constraints. ][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_v + sum_(u in N(v)) x_u >= 1$ for each $v in V$ (each vertex dominated). Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $D = {v : x_v = 1}$. ] -#reduction-rule("Clique", "ILP")[ +#reduction-rule("MaximumClique", "ILP")[ Maximum clique reduces to binary ILP with $|V|$ variables and $O(|overline(E)|)$ constraints. ][ _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in.not E$ (non-edges). Objective: maximize $sum_v x_v$. Equivalently, IS on the complement graph. _Solution extraction:_ $K = {v : x_v = 1}$. @@ -785,7 +751,7 @@ The following reductions to Integer Linear Programming are straightforward formu == Unit Disk Mapping -#reduction-rule("IndependentSet", "GridGraph")[ +#reduction-rule("MaximumIndependentSet", "GridGraph")[ @nguyen2023 Any MIS problem on a general graph $G$ can be reduced to MIS on a unit disk graph (King's subgraph) with at most quadratic overhead in the number of vertices. ][ _Construction (Copy-Line Method)._ Given $G = (V, E)$ with $n = |V|$: @@ -942,23 +908,52 @@ The following reductions to Integer Linear Programming are straightforward formu See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/export_petersen_mapping.rs")[`export_petersen_mapping.rs`]. 
+// Completeness check: warn about reduction rules in JSON but missing from paper +#context { + let covered = covered-rules.get() + let json-edges = { + let edges = graph-data.edges.map(e => (e.source.name, e.target.name)) + let unique = () + for e in edges { + if unique.find(u => u.at(0) == e.at(0) and u.at(1) == e.at(1)) == none { + unique.push(e) + } + } + unique + } + let missing = json-edges.filter(e => { + covered.find(c => + (c.at(0) == e.at(0) and c.at(1) == e.at(1)) or + (c.at(0) == e.at(1) and c.at(1) == e.at(0)) + ) == none + }) + if missing.len() > 0 { + block(width: 100%, inset: (x: 1em, y: 0.5em), fill: rgb("#fff3cd"), stroke: (left: 3pt + rgb("#ffc107")))[ + #text(fill: rgb("#856404"), weight: "bold")[Warning: Missing reduction rules:] \ + #for m in missing [ + #text(fill: rgb("#856404"))[- #m.at(0) #sym.arrow.r #m.at(1)] \ + ] + ] + } +} + == Resource Estimation from Examples The following table shows concrete variable overhead for example instances, generated from the reduction examples (`make examples`). 
#let example-files = ( - "is_to_vc", "vc_to_is", "is_to_setpacking", "matching_to_setpacking", - "vc_to_setcovering", + "mis_to_mvc", "mvc_to_mis", "mis_to_msp", "mm_to_msp", + "mvc_to_msc", "maxcut_to_spinglass", "spinglass_to_maxcut", "spinglass_to_qubo", "qubo_to_spinglass", - "is_to_qubo", "vc_to_qubo", "coloring_to_qubo", - "setpacking_to_qubo", "ksatisfiability_to_qubo", "ilp_to_qubo", - "sat_to_is", "sat_to_coloring", "sat_to_dominatingset", "sat_to_ksat", + "mis_to_qubo", "mvc_to_qubo", "coloring_to_qubo", + "msp_to_qubo", "ksatisfiability_to_qubo", "ilp_to_qubo", + "sat_to_mis", "sat_to_coloring", "sat_to_mds", "sat_to_ksat", "circuit_to_spinglass", "factoring_to_circuit", - "is_to_ilp", "vc_to_ilp", "matching_to_ilp", + "mis_to_ilp", "mvc_to_ilp", "mm_to_ilp", "coloring_to_ilp", "factoring_to_ilp", - "setpacking_to_ilp", "setcovering_to_ilp", - "dominatingset_to_ilp", "clique_to_ilp", + "msp_to_ilp", "msc_to_ilp", + "mds_to_ilp", "mclique_to_ilp", ) #let examples = example-files.map(n => { diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index fd741e3bb..f8731b3b4 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -17,10 +17,10 @@ problemreductions = "0.1" use problemreductions::prelude::*; // Independent Set on a path graph -let is = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); +let is = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); // Vertex Cover on the same graph -let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); +let vc = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); // QUBO problem let qubo = QUBO::from_matrix(vec![ @@ -34,7 +34,7 @@ let qubo = QUBO::from_matrix(vec![ ```rust use problemreductions::prelude::*; -let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); +let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -50,10 
+50,10 @@ for sol in &solutions { use problemreductions::prelude::*; // Create an Independent Set problem -let is = IndependentSet::::new(4, vec![(0, 1), (1, 2)]); +let is = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2)]); // Reduce to Vertex Cover -let result = ReduceTo::>::reduce_to(&is); +let result = ReduceTo::>::reduce_to(&is); let vc = result.target_problem(); // Solve the reduced problem @@ -69,17 +69,17 @@ let is_solution = result.extract_solution(&vc_solutions[0]); ```rust use problemreductions::prelude::*; -let sp = SetPacking::::new(vec![ +let sp = MaximumSetPacking::::new(vec![ vec![0, 1], vec![1, 2], vec![2, 3], ]); -// SetPacking -> IndependentSet -> VertexCovering -let sp_to_is = ReduceTo::>::reduce_to(&sp); +// MaximumSetPacking -> MaximumIndependentSet -> MinimumVertexCover +let sp_to_is = ReduceTo::>::reduce_to(&sp); let is = sp_to_is.target_problem(); -let is_to_vc = ReduceTo::>::reduce_to(is); +let is_to_vc = ReduceTo::>::reduce_to(is); let vc = is_to_vc.target_problem(); // Solve and extract back through the chain @@ -94,8 +94,8 @@ let sp_solution = sp_to_is.extract_solution(&is_solution); The reduction system is compile-time verified. Invalid reductions won't compile: ```rust,compile_fail -// This won't compile - no reduction from QUBO to SetPacking -let result = ReduceTo::>::reduce_to(&qubo); +// This won't compile - no reduction from QUBO to MaximumSetPacking +let result = ReduceTo::>::reduce_to(&qubo); ``` ## Next Steps diff --git a/docs/src/introduction.md b/docs/src/introduction.md index f20bb7f97..f7499ad0c 100644 --- a/docs/src/introduction.md +++ b/docs/src/introduction.md @@ -214,7 +214,7 @@ For theoretical background and correctness proofs, see the [PDF manual](https:// ## Problem Variants -Problems are parameterized by graph type `G` and weight type `W`. The base variant uses `SimpleGraph` and `Unweighted` (e.g., `IndependentSet`). 
Graph variants specify a different topology (e.g., `IndependentSet/GridGraph`), and weighted variants use numeric weights (e.g., `IndependentSet/Weighted`). Variants appear as separate nodes in the reduction graph when they have distinct reductions. +Problems are parameterized by graph type `G` and weight type `W`. The base variant uses `SimpleGraph` and `Unweighted` (e.g., `MaximumIndependentSet`). Graph variants specify a different topology (e.g., `MaximumIndependentSet/GridGraph`), and weighted variants use numeric weights (e.g., `MaximumIndependentSet/Weighted`). Variants appear as separate nodes in the reduction graph when they have distinct reductions. The library supports four graph topologies: @@ -229,7 +229,7 @@ The library supports four graph topologies: use problemreductions::prelude::*; // Create an Independent Set problem on a triangle graph -let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); +let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); // Solve with brute force let solver = BruteForce::new(); diff --git a/docs/src/io.md b/docs/src/io.md index 4df3d4b77..e6bfc2de4 100644 --- a/docs/src/io.md +++ b/docs/src/io.md @@ -8,7 +8,7 @@ All problem types support JSON serialization. 
use problemreductions::io::{write_problem, FileFormat}; use problemreductions::prelude::*; -let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2)]); +let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2)]); write_problem(&problem, "problem.json", FileFormat::Json).unwrap(); ``` @@ -18,7 +18,7 @@ write_problem(&problem, "problem.json", FileFormat::Json).unwrap(); use problemreductions::io::{read_problem, FileFormat}; use problemreductions::prelude::*; -let problem: IndependentSet = read_problem("problem.json", FileFormat::Json).unwrap(); +let problem: MaximumIndependentSet = read_problem("problem.json", FileFormat::Json).unwrap(); ``` ## String Serialization @@ -27,13 +27,13 @@ let problem: IndependentSet = read_problem("problem.json", FileFormat::Json use problemreductions::io::{to_json, from_json}; use problemreductions::prelude::*; -let problem = IndependentSet::::new(3, vec![(0, 1)]); +let problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); // Serialize to string let json = to_json(&problem).unwrap(); // Deserialize from string -let restored: IndependentSet = from_json(&json).unwrap(); +let restored: MaximumIndependentSet = from_json(&json).unwrap(); ``` ## File Formats diff --git a/docs/src/reductions/reduction_graph.json b/docs/src/reductions/reduction_graph.json index 4199ca597..6d05579bd 100644 --- a/docs/src/reductions/reduction_graph.json +++ b/docs/src/reductions/reduction_graph.json @@ -24,21 +24,6 @@ "category": "satisfiability", "doc_path": "models/specialized/struct.CircuitSAT.html" }, - { - "name": "DominatingSet", - "variant": {}, - "category": "graph", - "doc_path": "models/graph/struct.DominatingSet.html" - }, - { - "name": "DominatingSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.DominatingSet.html" - }, { "name": "Factoring", "variant": {}, @@ -69,30 +54,6 @@ "category": "optimization", "doc_path": "models/optimization/struct.ILP.html" 
}, - { - "name": "IndependentSet", - "variant": {}, - "category": "graph", - "doc_path": "models/graph/struct.IndependentSet.html" - }, - { - "name": "IndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.IndependentSet.html" - }, - { - "name": "IndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - }, - "category": "graph", - "doc_path": "models/graph/struct.IndependentSet.html" - }, { "name": "KColoring", "variant": {}, @@ -152,151 +113,190 @@ "doc_path": "models/satisfiability/struct.KSatisfiability.html" }, { - "name": "Matching", + "name": "MaxCut", "variant": {}, "category": "graph", - "doc_path": "models/graph/struct.Matching.html" + "doc_path": "models/graph/struct.MaxCut.html" }, { - "name": "Matching", + "name": "MaxCut", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, "category": "graph", - "doc_path": "models/graph/struct.Matching.html" + "doc_path": "models/graph/struct.MaxCut.html" }, { - "name": "MaxCut", + "name": "MaximumIndependentSet", "variant": {}, "category": "graph", - "doc_path": "models/graph/struct.MaxCut.html" + "doc_path": "models/graph/struct.MaximumIndependentSet.html" }, { - "name": "MaxCut", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, "category": "graph", - "doc_path": "models/graph/struct.MaxCut.html" + "doc_path": "models/graph/struct.MaximumIndependentSet.html" }, { - "name": "QUBO", - "variant": {}, - "category": "optimization", - "doc_path": "models/optimization/struct.QUBO.html" - }, - { - "name": "QUBO", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "i32" }, - "category": "optimization", - "doc_path": "models/optimization/struct.QUBO.html" + "category": "graph", + "doc_path": "models/graph/struct.MaximumIndependentSet.html" }, { - "name": "Satisfiability", + "name": "MaximumMatching", 
"variant": {}, - "category": "satisfiability", - "doc_path": "models/satisfiability/struct.Satisfiability.html" + "category": "graph", + "doc_path": "models/graph/struct.MaximumMatching.html" }, { - "name": "Satisfiability", + "name": "MaximumMatching", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, - "category": "satisfiability", - "doc_path": "models/satisfiability/struct.Satisfiability.html" + "category": "graph", + "doc_path": "models/graph/struct.MaximumMatching.html" }, { - "name": "SetCovering", + "name": "MaximumSetPacking", "variant": {}, "category": "set", - "doc_path": "models/set/struct.SetCovering.html" + "doc_path": "models/set/struct.MaximumSetPacking.html" }, { - "name": "SetCovering", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, "category": "set", - "doc_path": "models/set/struct.SetCovering.html" + "doc_path": "models/set/struct.MaximumSetPacking.html" }, { - "name": "SetPacking", - "variant": {}, + "name": "MaximumSetPacking", + "variant": { + "graph": "SimpleGraph", + "weight": "i32" + }, "category": "set", - "doc_path": "models/set/struct.SetPacking.html" + "doc_path": "models/set/struct.MaximumSetPacking.html" + }, + { + "name": "MinimumDominatingSet", + "variant": {}, + "category": "graph", + "doc_path": "models/graph/struct.MinimumDominatingSet.html" }, { - "name": "SetPacking", + "name": "MinimumDominatingSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, + "category": "graph", + "doc_path": "models/graph/struct.MinimumDominatingSet.html" + }, + { + "name": "MinimumSetCovering", + "variant": {}, "category": "set", - "doc_path": "models/set/struct.SetPacking.html" + "doc_path": "models/set/struct.MinimumSetCovering.html" }, { - "name": "SetPacking", + "name": "MinimumSetCovering", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" }, "category": "set", - "doc_path": "models/set/struct.SetPacking.html" + "doc_path": 
"models/set/struct.MinimumSetCovering.html" }, { - "name": "SpinGlass", + "name": "MinimumVertexCover", "variant": {}, - "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" + "category": "graph", + "doc_path": "models/graph/struct.MinimumVertexCover.html" }, { - "name": "SpinGlass", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, + "category": "graph", + "doc_path": "models/graph/struct.MinimumVertexCover.html" + }, + { + "name": "MinimumVertexCover", + "variant": { + "graph": "SimpleGraph", + "weight": "i32" + }, + "category": "graph", + "doc_path": "models/graph/struct.MinimumVertexCover.html" + }, + { + "name": "QUBO", + "variant": {}, "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" + "doc_path": "models/optimization/struct.QUBO.html" }, { - "name": "SpinGlass", + "name": "QUBO", "variant": { "graph": "SimpleGraph", "weight": "f64" }, "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" + "doc_path": "models/optimization/struct.QUBO.html" }, { - "name": "VertexCovering", + "name": "Satisfiability", "variant": {}, - "category": "graph", - "doc_path": "models/graph/struct.VertexCovering.html" + "category": "satisfiability", + "doc_path": "models/satisfiability/struct.Satisfiability.html" }, { - "name": "VertexCovering", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, - "category": "graph", - "doc_path": "models/graph/struct.VertexCovering.html" + "category": "satisfiability", + "doc_path": "models/satisfiability/struct.Satisfiability.html" }, { - "name": "VertexCovering", + "name": "SpinGlass", + "variant": {}, + "category": "optimization", + "doc_path": "models/optimization/struct.SpinGlass.html" + }, + { + "name": "SpinGlass", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" }, - "category": "graph", - "doc_path": 
"models/graph/struct.VertexCovering.html" + "category": "optimization", + "doc_path": "models/optimization/struct.SpinGlass.html" + }, + { + "name": "SpinGlass", + "variant": { + "graph": "SimpleGraph", + "weight": "f64" + }, + "category": "optimization", + "doc_path": "models/optimization/struct.SpinGlass.html" } ], "edges": [ @@ -402,24 +402,29 @@ }, { "source": { - "name": "IndependentSet", + "name": "KColoring", "variant": { "graph": "SimpleGraph", + "k": "N", "weight": "i32" } }, "target": { - "name": "QUBO", + "name": "ILP", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "Unweighted" } }, "bidirectional": false, "overhead": [ { "field": "num_vars", - "formula": "num_vertices" + "formula": "num_vertices * num_colors" + }, + { + "field": "num_constraints", + "formula": "num_vertices + num_edges * num_colors" } ] }, @@ -428,15 +433,14 @@ "name": "KColoring", "variant": { "graph": "SimpleGraph", - "k": "N", - "weight": "i32" + "weight": "Unweighted" } }, "target": { - "name": "ILP", + "name": "QUBO", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, "bidirectional": false, @@ -444,19 +448,15 @@ { "field": "num_vars", "formula": "num_vertices * num_colors" - }, - { - "field": "num_constraints", - "formula": "num_vertices + num_edges * num_colors" } ] }, { "source": { - "name": "KColoring", + "name": "KSatisfiability", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" } }, "target": { @@ -470,13 +470,13 @@ "overhead": [ { "field": "num_vars", - "formula": "num_vertices * num_colors" + "formula": "num_vars" } ] }, { "source": { - "name": "KSatisfiability", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", "weight": "i32" @@ -493,20 +493,20 @@ "overhead": [ { "field": "num_vars", - "formula": "num_vars" + "formula": "num_vertices" } ] }, { "source": { - "name": "Matching", + "name": "MaximumMatching", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } 
}, "target": { - "name": "SetPacking", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" @@ -526,172 +526,168 @@ }, { "source": { - "name": "Satisfiability", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "DominatingSet", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { "field": "num_vertices", - "formula": "3 * num_vars + num_clauses" + "formula": "num_sets" }, { "field": "num_edges", - "formula": "3 * num_vars + num_literals" + "formula": "num_sets" } ] }, { "source": { - "name": "Satisfiability", + "name": "MaximumSetPacking", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" } }, "target": { - "name": "IndependentSet", + "name": "QUBO", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, "bidirectional": false, "overhead": [ { - "field": "num_vertices", - "formula": "num_literals" - }, - { - "field": "num_edges", - "formula": "num_literals^2" + "field": "num_vars", + "formula": "num_sets" } ] }, { "source": { - "name": "Satisfiability", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "KColoring", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { "field": "num_vertices", - "formula": "2 * num_vars + 5 * num_literals - 5 * num_clauses + 3" + "formula": "num_vertices" }, { - "field": "num_colors", - "formula": "3" + "field": "num_edges", + "formula": "num_edges" } ] }, { "source": { - "name": "Satisfiability", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "KSatisfiability", + "name": 
"MinimumSetCovering", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": true, + "bidirectional": false, "overhead": [ { - "field": "num_clauses", - "formula": "num_clauses + num_literals" + "field": "num_sets", + "formula": "num_vertices" }, { - "field": "num_vars", - "formula": "num_vars + num_literals" + "field": "num_elements", + "formula": "num_edges" } ] }, { "source": { - "name": "SetPacking", + "name": "MinimumVertexCover", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" } }, "target": { - "name": "IndependentSet", + "name": "QUBO", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, - "bidirectional": true, + "bidirectional": false, "overhead": [ { - "field": "num_vertices", - "formula": "num_sets" - }, - { - "field": "num_edges", - "formula": "num_sets" + "field": "num_vars", + "formula": "num_vertices" } ] }, { "source": { - "name": "SetPacking", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" } }, "target": { - "name": "QUBO", + "name": "KColoring", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "i32" } }, "bidirectional": false, "overhead": [ { - "field": "num_vars", - "formula": "num_sets" + "field": "num_vertices", + "formula": "2 * num_vars + 5 * num_literals - 5 * num_clauses + 3" + }, + { + "field": "num_colors", + "formula": "3" } ] }, { "source": { - "name": "SpinGlass", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "MaxCut", + "name": "KSatisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" @@ -700,98 +696,102 @@ "bidirectional": true, "overhead": [ { - "field": "num_vertices", - "formula": "num_spins" + "field": "num_clauses", + "formula": "num_clauses + num_literals" }, { - "field": "num_edges", - "formula": "num_interactions" + "field": "num_vars", + "formula": "num_vars + 
num_literals" } ] }, { "source": { - "name": "SpinGlass", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "Unweighted" } }, "target": { - "name": "QUBO", + "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "Unweighted" } }, - "bidirectional": true, + "bidirectional": false, "overhead": [ { - "field": "num_vars", - "formula": "num_spins" + "field": "num_vertices", + "formula": "num_literals" + }, + { + "field": "num_edges", + "formula": "num_literals^2" } ] }, { "source": { - "name": "VertexCovering", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "IndependentSet", + "name": "MinimumDominatingSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": true, + "bidirectional": false, "overhead": [ { "field": "num_vertices", - "formula": "num_vertices" + "formula": "3 * num_vars + num_clauses" }, { "field": "num_edges", - "formula": "num_edges" + "formula": "3 * num_vars + num_literals" } ] }, { "source": { - "name": "VertexCovering", + "name": "SpinGlass", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "SetCovering", + "name": "MaxCut", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { - "field": "num_sets", - "formula": "num_vertices" + "field": "num_vertices", + "formula": "num_spins" }, { - "field": "num_elements", - "formula": "num_edges" + "field": "num_edges", + "formula": "num_interactions" } ] }, { "source": { - "name": "VertexCovering", + "name": "SpinGlass", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "f64" } }, "target": { @@ -801,11 +801,11 @@ "weight": "f64" } }, - "bidirectional": false, + "bidirectional": true, "overhead": [ { "field": "num_vars", - "formula": "num_vertices" + "formula": "num_spins" } ] } diff --git 
a/examples/qubo_reductions.rs b/examples/qubo_reductions.rs deleted file mode 100644 index 6d0103e96..000000000 --- a/examples/qubo_reductions.rs +++ /dev/null @@ -1,216 +0,0 @@ -//! Demonstrates 6 problem-to-QUBO reductions with practical stories. -//! -//! Run with: `cargo run --example qubo_reductions --features ilp` - -use problemreductions::prelude::*; -use problemreductions::topology::SimpleGraph; - -fn main() { - println!("=== Problem-to-QUBO Reductions ===\n"); - - demo_independent_set(); - demo_vertex_covering(); - demo_coloring(); - demo_set_packing(); - demo_ksat(); - demo_ilp(); -} - -/// Wireless tower placement: find the largest set of non-interfering towers. -fn demo_independent_set() { - println!("--- 1. IndependentSet -> QUBO ---"); - println!("Story: Place wireless towers on a 4-site grid. Adjacent towers interfere."); - println!(" Find the maximum set of non-interfering towers.\n"); - - // Path graph: sites 0-1-2-3, adjacent sites interfere - let is = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); - let reduction = ReduceTo::::reduce_to(&is); - let qubo = reduction.target_problem(); - - let solver = BruteForce::new(); - let solutions = solver.find_best(qubo); - - println!(" QUBO variables: {}", qubo.num_variables()); - println!(" Optimal solutions:"); - for sol in &solutions { - let extracted = reduction.extract_solution(sol); - let sites: Vec = extracted - .iter() - .enumerate() - .filter(|(_, &x)| x == 1) - .map(|(i, _)| i) - .collect(); - println!(" Tower sites: {:?} (size {})", sites, sites.len()); - } - println!(); -} - -/// Security camera placement: cover all corridors with minimum cameras. -fn demo_vertex_covering() { - println!("--- 2. 
VertexCovering -> QUBO ---"); - println!("Story: Place security cameras at intersections to cover all corridors."); - println!(" Minimize the number of cameras needed.\n"); - - // Cycle C4: 4 intersections, 4 corridors - let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); - let reduction = ReduceTo::::reduce_to(&vc); - let qubo = reduction.target_problem(); - - let solver = BruteForce::new(); - let solutions = solver.find_best(qubo); - - println!(" QUBO variables: {}", qubo.num_variables()); - println!(" Optimal solutions:"); - for sol in &solutions { - let extracted = reduction.extract_solution(sol); - let cameras: Vec = extracted - .iter() - .enumerate() - .filter(|(_, &x)| x == 1) - .map(|(i, _)| i) - .collect(); - println!( - " Camera positions: {:?} ({} cameras)", - cameras, - cameras.len() - ); - } - println!(); -} - -/// Map coloring: color a triangle map with 3 colors so no neighbors share a color. -fn demo_coloring() { - println!("--- 3. KColoring -> QUBO ---"); - println!("Story: Color 3 countries on a map with 3 colors so no neighbors match.\n"); - - // Triangle K3: 3 countries, all share borders - let kc = KColoring::<3, SimpleGraph, i32>::new(3, vec![(0, 1), (1, 2), (0, 2)]); - let reduction = ReduceTo::::reduce_to(&kc); - let qubo = reduction.target_problem(); - - let solver = BruteForce::new(); - let solutions = solver.find_best(qubo); - - let colors = ["Red", "Green", "Blue"]; - println!( - " QUBO variables: {} (one-hot: 3 countries x 3 colors)", - qubo.num_variables() - ); - println!(" Valid colorings: {}", solutions.len()); - let extracted = reduction.extract_solution(&solutions[0]); - println!( - " Example: Country0={}, Country1={}, Country2={}", - colors[extracted[0]], colors[extracted[1]], colors[extracted[2]] - ); - println!(); -} - -/// Warehouse selection: pick maximum non-overlapping delivery zones. -fn demo_set_packing() { - println!("--- 4. 
SetPacking -> QUBO ---"); - println!("Story: Select delivery zones that don't overlap. Maximize coverage.\n"); - - // 3 zones covering different areas - let sp = SetPacking::::new(vec![ - vec![0, 1], // Zone A covers areas 0,1 - vec![1, 2], // Zone B covers areas 1,2 - vec![2, 3, 4], // Zone C covers areas 2,3,4 - ]); - let reduction = ReduceTo::::reduce_to(&sp); - let qubo = reduction.target_problem(); - - let solver = BruteForce::new(); - let solutions = solver.find_best(qubo); - - println!(" QUBO variables: {}", qubo.num_variables()); - println!(" Optimal solutions:"); - for sol in &solutions { - let extracted = reduction.extract_solution(sol); - let zones: Vec<&str> = extracted - .iter() - .enumerate() - .filter(|(_, &x)| x == 1) - .map(|(i, _)| ["Zone-A", "Zone-B", "Zone-C"][i]) - .collect(); - println!(" Selected: {:?}", zones); - } - println!(); -} - -/// Satisfiability: find a boolean assignment satisfying maximum clauses. -fn demo_ksat() { - println!("--- 5. KSatisfiability(K=2) -> QUBO ---"); - println!("Story: Configure 3 switches (on/off) to satisfy maximum rules.\n"); - - // 4 rules over 3 switches - let ksat = KSatisfiability::<2, i32>::new( - 3, - vec![ - CNFClause::new(vec![1, 2]), // switch1 OR switch2 - CNFClause::new(vec![-1, 3]), // NOT switch1 OR switch3 - CNFClause::new(vec![2, -3]), // switch2 OR NOT switch3 - CNFClause::new(vec![-2, -3]), // NOT switch2 OR NOT switch3 - ], - ); - let reduction = ReduceTo::::reduce_to(&ksat); - let qubo = reduction.target_problem(); - - let solver = BruteForce::new(); - let solutions = solver.find_best(qubo); - - println!(" QUBO variables: {}", qubo.num_variables()); - for sol in &solutions { - let extracted = reduction.extract_solution(sol); - let switches: Vec<&str> = extracted.iter().map(|&x| if x == 1 { "ON" } else { "OFF" }).collect(); - let satisfied = ksat.solution_size(&extracted).size; - println!( - " Switches: [{}] -> {}/{} rules satisfied", - switches.join(", "), - satisfied, - ksat.clauses().len() 
- ); - } - println!(); -} - -/// Resource allocation: maximize value under budget constraints. -fn demo_ilp() { - println!("--- 6. ILP (binary) -> QUBO ---"); - println!("Story: Select projects to maximize profit under resource constraints.\n"); - - // 3 projects: values 1, 2, 3 - // Constraint 1: projects 0 and 1 share a team (at most one) - // Constraint 2: projects 1 and 2 share equipment (at most one) - let ilp = ILP::binary( - 3, - vec![ - LinearConstraint::le(vec![(0, 1.0), (1, 1.0)], 1.0), - LinearConstraint::le(vec![(1, 1.0), (2, 1.0)], 1.0), - ], - vec![(0, 1.0), (1, 2.0), (2, 3.0)], - ObjectiveSense::Maximize, - ); - let reduction = ReduceTo::::reduce_to(&ilp); - let qubo = reduction.target_problem(); - - let solver = BruteForce::new(); - let solutions = solver.find_best(qubo); - - let names = ["Alpha", "Beta", "Gamma"]; - println!(" QUBO variables: {}", qubo.num_variables()); - for sol in &solutions { - let extracted = reduction.extract_solution(sol); - let selected: Vec<&str> = extracted - .iter() - .enumerate() - .filter(|(_, &x)| x == 1) - .map(|(i, _)| names[i]) - .collect(); - let value = ilp.solution_size(&extracted).size; - println!( - " Selected projects: {:?} (total value: {:.0})", - selected, value - ); - } - println!(); -} diff --git a/examples/reduction_clique_to_ilp.rs b/examples/reduction_maximumclique_to_ilp.rs similarity index 76% rename from examples/reduction_clique_to_ilp.rs rename to examples/reduction_maximumclique_to_ilp.rs index 9f78f0cac..b76e1c16b 100644 --- a/examples/reduction_clique_to_ilp.rs +++ b/examples/reduction_maximumclique_to_ilp.rs @@ -1,4 +1,4 @@ -//! # Clique to ILP Reduction +//! # MaximumClique to ILP Reduction //! //! ## Mathematical Formulation //! Variables: x_v in {0,1} for each vertex v. @@ -8,12 +8,12 @@ //! ## This Example //! - Instance: 4-vertex graph with a triangle subgraph on {0,1,2} plus vertex 3 //! connected only to vertex 2. Edges: 0-1, 0-2, 1-2, 2-3. -//! 
- Source Clique: max clique is {0,1,2} (size 3) +//! - Source MaximumClique: max clique is {0,1,2} (size 3) //! - Target ILP: 4 binary variables, 3 non-edge constraints //! (non-edges: (0,3), (1,3)) //! //! ## Output -//! Exports `docs/paper/examples/clique_to_ilp.json` for use in paper code blocks. +//! Exports `docs/paper/examples/mclique_to_ilp.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; @@ -21,8 +21,8 @@ use problemreductions::solvers::BruteForceFloat; use problemreductions::topology::SimpleGraph; fn main() { - // 1. Create Clique instance: 4 vertices, triangle {0,1,2} plus vertex 3 connected to 2 - let clique = Clique::::new(4, vec![(0, 1), (0, 2), (1, 2), (2, 3)]); + // 1. Create MaximumClique instance: 4 vertices, triangle {0,1,2} plus vertex 3 connected to 2 + let clique = MaximumClique::::new(4, vec![(0, 1), (0, 2), (1, 2), (2, 3)]); // 2. Reduce to ILP let reduction = ReduceTo::::reduce_to(&clique); @@ -30,7 +30,7 @@ fn main() { // 3. Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: Clique with {} variables", clique.num_variables()); + println!("Source: MaximumClique with {} variables", clique.num_variables()); println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); // 4. Solve target ILP @@ -44,7 +44,7 @@ fn main() { // 5. Extract source solution let clique_solution = reduction.extract_solution(ilp_solution); - println!("Source Clique solution: {:?}", clique_solution); + println!("Source MaximumClique solution: {:?}", clique_solution); // 6. 
Verify let size = clique.solution_size(&clique_solution); @@ -64,12 +64,12 @@ fn main() { }); } - let overhead = lookup_overhead_or_empty("Clique", "ILP"); + let overhead = lookup_overhead_or_empty("MaximumClique", "ILP"); let data = ReductionData { source: ProblemSide { - problem: Clique::::NAME.to_string(), - variant: variant_to_map(Clique::::variant()), + problem: MaximumClique::::NAME.to_string(), + variant: variant_to_map(MaximumClique::::variant()), instance: serde_json::json!({ "num_vertices": clique.num_vertices(), "num_edges": clique.num_edges(), @@ -88,5 +88,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("clique_to_ilp", &data, &results); + write_example("mclique_to_ilp", &data, &results); } diff --git a/examples/reduction_is_to_ilp.rs b/examples/reduction_maximumindependentset_to_ilp.rs similarity index 83% rename from examples/reduction_is_to_ilp.rs rename to examples/reduction_maximumindependentset_to_ilp.rs index e6f1cb09b..1b6a78fc8 100644 --- a/examples/reduction_is_to_ilp.rs +++ b/examples/reduction_maximumindependentset_to_ilp.rs @@ -11,7 +11,7 @@ //! - Target ILP: 4 binary variables, 3 constraints //! //! ## Output -//! Exports `docs/paper/examples/is_to_ilp.json` for use in paper code blocks. +//! Exports `docs/paper/examples/mis_to_ilp.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; @@ -21,7 +21,7 @@ use problemreductions::topology::SimpleGraph; fn main() { // 1. Create IS instance: path graph P4 let edges = vec![(0, 1), (1, 2), (2, 3)]; - let is = IndependentSet::::new(4, edges.clone()); + let is = MaximumIndependentSet::::new(4, edges.clone()); // 2. Reduce to ILP let reduction = ReduceTo::::reduce_to(&is); @@ -29,7 +29,7 @@ fn main() { // 3. 
Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: IndependentSet with {} variables", is.num_variables()); + println!("Source: MaximumIndependentSet with {} variables", is.num_variables()); println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); // 4. Solve target ILP (uses BruteForceFloat since ILP has f64 objective) @@ -63,12 +63,12 @@ }); } - let overhead = lookup_overhead_or_empty("IndependentSet", "ILP"); + let overhead = lookup_overhead_or_empty("MaximumIndependentSet", "ILP"); let data = ReductionData { source: ProblemSide { - problem: IndependentSet::::NAME.to_string(), - variant: variant_to_map(IndependentSet::::variant()), + problem: MaximumIndependentSet::::NAME.to_string(), + variant: variant_to_map(MaximumIndependentSet::::variant()), instance: serde_json::json!({ "num_vertices": is.num_vertices(), "num_edges": is.num_edges(), @@ -87,5 +87,5 @@ }; let results = ResultData { solutions }; - write_example("is_to_ilp", &data, &results); + write_example("mis_to_ilp", &data, &results); } diff --git a/examples/reduction_is_to_setpacking.rs b/examples/reduction_maximumindependentset_to_maximumsetpacking.rs similarity index 73% rename from examples/reduction_is_to_setpacking.rs rename to examples/reduction_maximumindependentset_to_maximumsetpacking.rs index 9b7588131..da09f4f23 100644 --- a/examples/reduction_is_to_setpacking.rs +++ b/examples/reduction_maximumindependentset_to_maximumsetpacking.rs @@ -8,10 +8,10 @@ //! ## This Example //! - Instance: Path graph P4 (4 vertices, 3 edges: (0,1), (1,2), (2,3)) //! - Source IS: max size 2 -//! - Target SetPacking: max packing 2 +//! - Target MaximumSetPacking: max packing 2 //! //! ## Output -//! Exports `docs/paper/examples/is_to_setpacking.json` and `is_to_setpacking.result.json`. +//! Exports `docs/paper/examples/mis_to_msp.json` and `mis_to_msp.result.json`.
See docs/paper/reductions.typ for the full reduction specification. @@ -24,17 +24,17 @@ fn main() { // Path graph P4: 0-1-2-3 let edges = vec![(0, 1), (1, 2), (2, 3)]; - let source = IndependentSet::::new(4, edges.clone()); + let source = MaximumIndependentSet::::new(4, edges.clone()); - println!("Source: IndependentSet on P4"); + println!("Source: MaximumIndependentSet on P4"); println!(" Vertices: 4"); println!(" Edges: {:?}", edges); - // Reduce to SetPacking - let reduction = ReduceTo::>::reduce_to(&source); + // Reduce to MaximumSetPacking + let reduction = ReduceTo::>::reduce_to(&source); let target = reduction.target_problem(); - println!("\nTarget: SetPacking"); + println!("\nTarget: MaximumSetPacking"); println!(" Sets: {} sets", target.num_sets()); for (i, set) in target.sets().iter().enumerate() { println!(" S_{} = {:?}", i, set); @@ -76,16 +76,16 @@ fn main() { let target_size = target.solution_size(target_sol); assert_eq!(source_size.size, 2, "IS on P4 has optimal size 2"); - assert_eq!(target_size.size, 2, "SetPacking should also have size 2"); + assert_eq!(target_size.size, 2, "MaximumSetPacking should also have size 2"); // Export JSON - let overhead = lookup_overhead("IndependentSet", "SetPacking") - .expect("IndependentSet -> SetPacking overhead not found"); + let overhead = lookup_overhead("MaximumIndependentSet", "MaximumSetPacking") + .expect("MaximumIndependentSet -> MaximumSetPacking overhead not found"); let data = ReductionData { source: ProblemSide { - problem: IndependentSet::::NAME.to_string(), - variant: variant_to_map(IndependentSet::::variant()), + problem: MaximumIndependentSet::::NAME.to_string(), + variant: variant_to_map(MaximumIndependentSet::::variant()), instance: serde_json::json!({ "num_vertices": source.num_vertices(), "num_edges": source.num_edges(), @@ -93,8 +93,8 @@ fn main() { }), }, target: ProblemSide { - problem: SetPacking::::NAME.to_string(), - variant: variant_to_map(SetPacking::::variant()), + problem: 
MaximumSetPacking::::NAME.to_string(), + variant: variant_to_map(MaximumSetPacking::::variant()), instance: serde_json::json!({ "num_sets": target.num_sets(), "sets": target.sets(), @@ -104,7 +104,7 @@ }; let results = ResultData { solutions }; - write_example("is_to_setpacking", &data, &results); + write_example("mis_to_msp", &data, &results); - println!("\nDone: IS(P4) optimal=2 maps to SetPacking optimal=2"); + println!("\nDone: IS(P4) optimal=2 maps to MaximumSetPacking optimal=2"); } diff --git a/examples/reduction_is_to_vc.rs b/examples/reduction_maximumindependentset_to_minimumvertexcover.rs similarity index 69% rename from examples/reduction_is_to_vc.rs rename to examples/reduction_maximumindependentset_to_minimumvertexcover.rs index 318f70497..f0204eef2 100644 --- a/examples/reduction_is_to_vc.rs +++ b/examples/reduction_maximumindependentset_to_minimumvertexcover.rs @@ -10,7 +10,7 @@ //! - Target VC: min size 2 //! //! ## Output -//! Exports `docs/paper/examples/is_to_vc.json` and `is_to_vc.result.json`. +//! Exports `docs/paper/examples/mis_to_mvc.json` and `mis_to_mvc.result.json`. //! //! See docs/paper/reductions.typ for the full reduction specification. @@ -21,16 +21,16 @@ use problemreductions::topology::SimpleGraph; fn main() { // 1. Create IS instance: path graph P4 let edges = vec![(0, 1), (1, 2), (2, 3)]; - let is = IndependentSet::::new(4, edges.clone()); + let is = MaximumIndependentSet::::new(4, edges.clone()); // 2. Reduce to VC - let reduction = ReduceTo::>::reduce_to(&is); + let reduction = ReduceTo::>::reduce_to(&is); let vc = reduction.target_problem(); // 3.
Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: IndependentSet with {} variables", is.num_variables()); - println!("Target: VertexCovering with {} variables", vc.num_variables()); + println!("Source: MaximumIndependentSet with {} variables", is.num_variables()); + println!("Target: MinimumVertexCover with {} variables", vc.num_variables()); // 4. Solve target let solver = BruteForce::new(); @@ -52,14 +52,14 @@ fn main() { println!("Reduction verified successfully"); // 6. Export JSON - let overhead = lookup_overhead("IndependentSet", "VertexCovering") - .expect("IndependentSet -> VertexCovering overhead not found"); + let overhead = lookup_overhead("MaximumIndependentSet", "MinimumVertexCover") + .expect("MaximumIndependentSet -> MinimumVertexCover overhead not found"); let vc_edges = vc.edges(); let data = ReductionData { source: ProblemSide { - problem: IndependentSet::::NAME.to_string(), - variant: variant_to_map(IndependentSet::::variant()), + problem: MaximumIndependentSet::::NAME.to_string(), + variant: variant_to_map(MaximumIndependentSet::::variant()), instance: serde_json::json!({ "num_vertices": is.num_vertices(), "num_edges": is.num_edges(), @@ -67,8 +67,8 @@ fn main() { }), }, target: ProblemSide { - problem: VertexCovering::::NAME.to_string(), - variant: variant_to_map(VertexCovering::::variant()), + problem: MinimumVertexCover::::NAME.to_string(), + variant: variant_to_map(MinimumVertexCover::::variant()), instance: serde_json::json!({ "num_vertices": vc.num_vertices(), "num_edges": vc.num_edges(), @@ -79,5 +79,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("is_to_vc", &data, &results); + write_example("mis_to_mvc", &data, &results); } diff --git a/examples/reduction_is_to_qubo.rs b/examples/reduction_maximumindependentset_to_qubo.rs similarity index 80% rename from examples/reduction_is_to_qubo.rs rename to examples/reduction_maximumindependentset_to_qubo.rs index 
0d51f0bec..2d6caeab7 100644 --- a/examples/reduction_is_to_qubo.rs +++ b/examples/reduction_maximumindependentset_to_qubo.rs @@ -12,13 +12,13 @@ //! //! ## This Example //! - Instance: Path graph P4 with 4 vertices and 3 edges (0-1-2-3) -//! - Source: IndependentSet with maximum size 2 (e.g., {0,2} or {1,3}) +//! - Source: MaximumIndependentSet with maximum size 2 (e.g., {0,2} or {1,3}) //! - QUBO variables: 4 (one per vertex) //! - Expected: Two optimal solutions of size 2: vertices {0,2} and {1,3} //! //! ## Outputs -//! - `docs/paper/examples/is_to_qubo.json` — reduction structure -//! - `docs/paper/examples/is_to_qubo.result.json` — solutions +//! - `docs/paper/examples/mis_to_qubo.json` — reduction structure +//! - `docs/paper/examples/mis_to_qubo.result.json` — solutions //! //! ## Usage //! ```bash @@ -34,13 +34,13 @@ fn main() { // Path graph P4: 0-1-2-3 let edges = vec![(0, 1), (1, 2), (2, 3)]; - let is = IndependentSet::::new(4, edges.clone()); + let is = MaximumIndependentSet::::new(4, edges.clone()); // Reduce to QUBO let reduction = ReduceTo::::reduce_to(&is); let qubo = reduction.target_problem(); - println!("Source: IndependentSet on path P4 (4 vertices, 3 edges)"); + println!("Source: MaximumIndependentSet on path P4 (4 vertices, 3 edges)"); println!("Target: QUBO with {} variables", qubo.num_variables()); println!("Q matrix:"); for row in qubo.matrix() { @@ -76,13 +76,13 @@ fn main() { println!("\nVerification passed: all solutions are valid"); // Export JSON - let overhead = lookup_overhead("IndependentSet", "QUBO") - .expect("IndependentSet -> QUBO overhead not found"); + let overhead = lookup_overhead("MaximumIndependentSet", "QUBO") + .expect("MaximumIndependentSet -> QUBO overhead not found"); let data = ReductionData { source: ProblemSide { - problem: IndependentSet::::NAME.to_string(), - variant: variant_to_map(IndependentSet::::variant()), + problem: MaximumIndependentSet::::NAME.to_string(), + variant: 
variant_to_map(MaximumIndependentSet::::variant()), instance: serde_json::json!({ "num_vertices": is.num_vertices(), "num_edges": is.num_edges(), @@ -101,5 +101,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("is_to_qubo", &data, &results); + write_example("mis_to_qubo", &data, &results); } diff --git a/examples/reduction_matching_to_ilp.rs b/examples/reduction_maximummatching_to_ilp.rs similarity index 76% rename from examples/reduction_matching_to_ilp.rs rename to examples/reduction_maximummatching_to_ilp.rs index d67e9aced..4a3755021 100644 --- a/examples/reduction_matching_to_ilp.rs +++ b/examples/reduction_maximummatching_to_ilp.rs @@ -1,4 +1,4 @@ -//! # Matching to ILP Reduction +//! # MaximumMatching to ILP Reduction //! //! ## Mathematical Formulation //! Variables: x_e in {0,1} for each edge e. @@ -7,11 +7,11 @@ //! //! ## This Example //! - Instance: Path graph P4 (4 vertices, 3 edges: 0-1, 1-2, 2-3) -//! - Source Matching: max matching size 2 (e.g., {0-1, 2-3}) +//! - Source MaximumMatching: max matching size 2 (e.g., {0-1, 2-3}) //! - Target ILP: 3 binary variables (one per edge), 4 vertex constraints //! //! ## Output -//! Exports `docs/paper/examples/matching_to_ilp.json` for use in paper code blocks. +//! Exports `docs/paper/examples/mm_to_ilp.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; @@ -19,9 +19,9 @@ use problemreductions::solvers::BruteForceFloat; use problemreductions::topology::SimpleGraph; fn main() { - // 1. Create Matching instance: path graph P4 with unit weights + // 1. Create MaximumMatching instance: path graph P4 with unit weights let edges = vec![(0, 1), (1, 2), (2, 3)]; - let matching = Matching::::unweighted(4, edges.clone()); + let matching = MaximumMatching::::unweighted(4, edges.clone()); // 2. Reduce to ILP let reduction = ReduceTo::::reduce_to(&matching); @@ -29,7 +29,7 @@ fn main() { // 3. 
Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: Matching with {} variables (edges)", matching.num_variables()); + println!("Source: MaximumMatching with {} variables (edges)", matching.num_variables()); println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); // 4. Solve target ILP @@ -43,7 +43,7 @@ fn main() { // 5. Extract source solution let matching_solution = reduction.extract_solution(ilp_solution); - println!("Source Matching solution: {:?}", matching_solution); + println!("Source MaximumMatching solution: {:?}", matching_solution); // 6. Verify let size = matching.solution_size(&matching_solution); @@ -63,12 +63,12 @@ fn main() { }); } - let overhead = lookup_overhead_or_empty("Matching", "ILP"); + let overhead = lookup_overhead_or_empty("MaximumMatching", "ILP"); let data = ReductionData { source: ProblemSide { - problem: Matching::::NAME.to_string(), - variant: variant_to_map(Matching::::variant()), + problem: MaximumMatching::::NAME.to_string(), + variant: variant_to_map(MaximumMatching::::variant()), instance: serde_json::json!({ "num_vertices": matching.num_vertices(), "num_edges": matching.num_edges(), @@ -87,5 +87,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("matching_to_ilp", &data, &results); + write_example("mm_to_ilp", &data, &results); } diff --git a/examples/reduction_matching_to_setpacking.rs b/examples/reduction_maximummatching_to_maximumsetpacking.rs similarity index 69% rename from examples/reduction_matching_to_setpacking.rs rename to examples/reduction_maximummatching_to_maximumsetpacking.rs index ee19de4a0..c7e7dc8ab 100644 --- a/examples/reduction_matching_to_setpacking.rs +++ b/examples/reduction_maximummatching_to_maximumsetpacking.rs @@ -1,4 +1,4 @@ -//! # Matching to Set Packing Reduction +//! # MaximumMatching to Set Packing Reduction //! //! ## Mathematical Equivalence //! Each edge e = (u,v) becomes a set S_e = {u, v}. 
Universe U = V. @@ -8,10 +8,10 @@ //! ## This Example //! - Instance: Path graph P4 (4 vertices, 3 edges) with unit weights //! - Source matching: max size 2 (e.g., edges {(0,1), (2,3)}) -//! - Target SetPacking: max packing 2 +//! - Target MaximumSetPacking: max packing 2 //! //! ## Output -//! Exports `docs/paper/examples/matching_to_setpacking.json` and `.result.json`. +//! Exports `docs/paper/examples/mm_to_msp.json` and `.result.json`. //! //! See docs/paper/reductions.typ for the full reduction specification. @@ -20,21 +20,21 @@ use problemreductions::prelude::*; use problemreductions::topology::SimpleGraph; fn main() { - println!("\n=== Matching -> Set Packing Reduction ===\n"); + println!("\n=== MaximumMatching -> Set Packing Reduction ===\n"); // Path graph P4: 0-1-2-3 with unit weights let edges = vec![(0, 1), (1, 2), (2, 3)]; - let source = Matching::::unweighted(4, edges.clone()); + let source = MaximumMatching::::unweighted(4, edges.clone()); - println!("Source: Matching on P4"); + println!("Source: MaximumMatching on P4"); println!(" Vertices: 4"); println!(" Edges: {:?}", edges); - // Reduce to SetPacking - let reduction = ReduceTo::>::reduce_to(&source); + // Reduce to MaximumSetPacking + let reduction = ReduceTo::>::reduce_to(&source); let target = reduction.target_problem(); - println!("\nTarget: SetPacking"); + println!("\nTarget: MaximumSetPacking"); println!(" Sets: {} sets", target.num_sets()); for (i, set) in target.sets().iter().enumerate() { println!(" S_{} = {:?}", i, set); @@ -66,13 +66,13 @@ fn main() { } // Export JSON - let overhead = lookup_overhead("Matching", "SetPacking") - .expect("Matching -> SetPacking overhead not found"); + let overhead = lookup_overhead("MaximumMatching", "MaximumSetPacking") + .expect("MaximumMatching -> MaximumSetPacking overhead not found"); let data = ReductionData { source: ProblemSide { - problem: Matching::::NAME.to_string(), - variant: variant_to_map(Matching::::variant()), + problem: 
MaximumMatching::::NAME.to_string(), + variant: variant_to_map(MaximumMatching::::variant()), instance: serde_json::json!({ "num_vertices": source.num_vertices(), "num_edges": source.num_edges(), @@ -80,8 +80,8 @@ fn main() { }), }, target: ProblemSide { - problem: SetPacking::::NAME.to_string(), - variant: variant_to_map(SetPacking::::variant()), + problem: MaximumSetPacking::::NAME.to_string(), + variant: variant_to_map(MaximumSetPacking::::variant()), instance: serde_json::json!({ "num_sets": target.num_sets(), "sets": target.sets(), @@ -91,7 +91,7 @@ fn main() { }; let results = ResultData { solutions }; - write_example("matching_to_setpacking", &data, &results); + write_example("mm_to_msp", &data, &results); - println!("\nDone: Matching(P4) optimal=2 maps to SetPacking optimal=2"); + println!("\nDone: MaximumMatching(P4) optimal=2 maps to MaximumSetPacking optimal=2"); } diff --git a/examples/reduction_setpacking_to_ilp.rs b/examples/reduction_maximumsetpacking_to_ilp.rs similarity index 79% rename from examples/reduction_setpacking_to_ilp.rs rename to examples/reduction_maximumsetpacking_to_ilp.rs index f445aee95..02a417a94 100644 --- a/examples/reduction_setpacking_to_ilp.rs +++ b/examples/reduction_maximumsetpacking_to_ilp.rs @@ -8,19 +8,19 @@ //! ## This Example //! - Instance: 3 sets: S0={0,1}, S1={1,2}, S2={2,3,4} //! Overlapping pairs: (S0,S1) share element 1, (S1,S2) share element 2 -//! - Source SetPacking: max packing size 2 (S0 and S2 are disjoint) +//! - Source MaximumSetPacking: max packing size 2 (S0 and S2 are disjoint) //! - Target ILP: 3 binary variables, 2 overlap constraints //! //! ## Output -//! Exports `docs/paper/examples/setpacking_to_ilp.json` for use in paper code blocks. +//! Exports `docs/paper/examples/msp_to_ilp.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::solvers::BruteForceFloat; fn main() { - // 1. 
Create SetPacking instance: 3 sets - let sp = SetPacking::::new(vec![ + // 1. Create MaximumSetPacking instance: 3 sets + let sp = MaximumSetPacking::::new(vec![ vec![0, 1], vec![1, 2], vec![2, 3, 4], @@ -32,7 +32,7 @@ fn main() { // 3. Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: SetPacking with {} variables", sp.num_variables()); + println!("Source: MaximumSetPacking with {} variables", sp.num_variables()); println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); // 4. Solve target ILP @@ -46,7 +46,7 @@ fn main() { // 5. Extract source solution let sp_solution = reduction.extract_solution(ilp_solution); - println!("Source SetPacking solution: {:?}", sp_solution); + println!("Source MaximumSetPacking solution: {:?}", sp_solution); // 6. Verify let size = sp.solution_size(&sp_solution); @@ -66,12 +66,12 @@ fn main() { }); } - let overhead = lookup_overhead_or_empty("SetPacking", "ILP"); + let overhead = lookup_overhead_or_empty("MaximumSetPacking", "ILP"); let data = ReductionData { source: ProblemSide { - problem: SetPacking::::NAME.to_string(), - variant: variant_to_map(SetPacking::::variant()), + problem: MaximumSetPacking::::NAME.to_string(), + variant: variant_to_map(MaximumSetPacking::::variant()), instance: serde_json::json!({ "num_sets": sp.num_sets(), "sets": sp.sets(), @@ -89,5 +89,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("setpacking_to_ilp", &data, &results); + write_example("msp_to_ilp", &data, &results); } diff --git a/examples/reduction_setpacking_to_qubo.rs b/examples/reduction_maximumsetpacking_to_qubo.rs similarity index 85% rename from examples/reduction_setpacking_to_qubo.rs rename to examples/reduction_maximumsetpacking_to_qubo.rs index 26279acdc..359320acf 100644 --- a/examples/reduction_setpacking_to_qubo.rs +++ b/examples/reduction_maximumsetpacking_to_qubo.rs @@ -21,8 +21,8 @@ //! do not overlap //! //! ## Outputs -//! 
- `docs/paper/examples/setpacking_to_qubo.json` — reduction structure -//! - `docs/paper/examples/setpacking_to_qubo.result.json` — solutions +//! - `docs/paper/examples/msp_to_qubo.json` — reduction structure +//! - `docs/paper/examples/msp_to_qubo.result.json` — solutions //! //! ## Usage //! ```bash @@ -42,13 +42,13 @@ fn main() { vec![2, 3, 4], // Set C ]; let set_names = ["Set-A".to_string(), "Set-B".to_string(), "Set-C".to_string()]; - let sp = SetPacking::::new(sets.clone()); + let sp = MaximumSetPacking::::new(sets.clone()); // Reduce to QUBO let reduction = ReduceTo::::reduce_to(&sp); let qubo = reduction.target_problem(); - println!("Source: SetPacking with 3 sets over universe {{0,1,2,3,4}}"); + println!("Source: MaximumSetPacking with 3 sets over universe {{0,1,2,3,4}}"); println!(" Set A = {{0, 1}}, Set B = {{1, 2}}, Set C = {{2, 3, 4}}"); println!("Target: QUBO with {} variables", qubo.num_variables()); println!("Q matrix:"); @@ -87,13 +87,13 @@ fn main() { println!("\nVerification passed: all solutions are valid set packings"); // Export JSON - let overhead = lookup_overhead("SetPacking", "QUBO") - .expect("SetPacking -> QUBO overhead not found"); + let overhead = lookup_overhead("MaximumSetPacking", "QUBO") + .expect("MaximumSetPacking -> QUBO overhead not found"); let data = ReductionData { source: ProblemSide { - problem: SetPacking::::NAME.to_string(), - variant: variant_to_map(SetPacking::::variant()), + problem: MaximumSetPacking::::NAME.to_string(), + variant: variant_to_map(MaximumSetPacking::::variant()), instance: serde_json::json!({ "num_sets": sp.num_sets(), "sets": sp.sets(), @@ -111,5 +111,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("setpacking_to_qubo", &data, &results); + write_example("msp_to_qubo", &data, &results); } diff --git a/examples/reduction_dominatingset_to_ilp.rs b/examples/reduction_minimumdominatingset_to_ilp.rs similarity index 77% rename from examples/reduction_dominatingset_to_ilp.rs 
rename to examples/reduction_minimumdominatingset_to_ilp.rs index 52fb88ba4..61a9dcc76 100644 --- a/examples/reduction_dominatingset_to_ilp.rs +++ b/examples/reduction_minimumdominatingset_to_ilp.rs @@ -7,11 +7,11 @@ //! //! ## This Example //! - Instance: Path graph P4 (4 vertices, 3 edges: 0-1-2-3) -//! - Source DominatingSet: min dominating set size 2 (e.g., {1,2}) +//! - Source MinimumDominatingSet: min dominating set size 2 (e.g., {1,2}) //! - Target ILP: 4 binary variables, 4 domination constraints //! //! ## Output -//! Exports `docs/paper/examples/dominatingset_to_ilp.json` for use in paper code blocks. +//! Exports `docs/paper/examples/mds_to_ilp.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; @@ -19,8 +19,8 @@ use problemreductions::solvers::BruteForceFloat; use problemreductions::topology::SimpleGraph; fn main() { - // 1. Create DominatingSet instance: path graph P4 - let ds = DominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + // 1. Create MinimumDominatingSet instance: path graph P4 + let ds = MinimumDominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); // 2. Reduce to ILP let reduction = ReduceTo::::reduce_to(&ds); @@ -28,7 +28,7 @@ fn main() { // 3. Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: DominatingSet with {} variables", ds.num_variables()); + println!("Source: MinimumDominatingSet with {} variables", ds.num_variables()); println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); // 4. Solve target ILP @@ -42,7 +42,7 @@ fn main() { // 5. Extract source solution let ds_solution = reduction.extract_solution(ilp_solution); - println!("Source DominatingSet solution: {:?}", ds_solution); + println!("Source MinimumDominatingSet solution: {:?}", ds_solution); // 6. 
Verify let size = ds.solution_size(&ds_solution); @@ -62,12 +62,12 @@ fn main() { }); } - let overhead = lookup_overhead_or_empty("DominatingSet", "ILP"); + let overhead = lookup_overhead_or_empty("MinimumDominatingSet", "ILP"); let data = ReductionData { source: ProblemSide { - problem: DominatingSet::::NAME.to_string(), - variant: variant_to_map(DominatingSet::::variant()), + problem: MinimumDominatingSet::::NAME.to_string(), + variant: variant_to_map(MinimumDominatingSet::::variant()), instance: serde_json::json!({ "num_vertices": ds.num_vertices(), "num_edges": ds.num_edges(), @@ -86,5 +86,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("dominatingset_to_ilp", &data, &results); + write_example("mds_to_ilp", &data, &results); } diff --git a/examples/reduction_setcovering_to_ilp.rs b/examples/reduction_minimumsetcovering_to_ilp.rs similarity index 79% rename from examples/reduction_setcovering_to_ilp.rs rename to examples/reduction_minimumsetcovering_to_ilp.rs index 305455e7c..3f57ee705 100644 --- a/examples/reduction_setcovering_to_ilp.rs +++ b/examples/reduction_minimumsetcovering_to_ilp.rs @@ -7,19 +7,19 @@ //! //! ## This Example //! - Instance: Universe size 3, sets: S0={0,1}, S1={1,2}, S2={0,2} -//! - Source SetCovering: min cover size 2 (any two sets cover all elements) +//! - Source MinimumSetCovering: min cover size 2 (any two sets cover all elements) //! - Target ILP: 3 binary variables, 3 element-coverage constraints //! //! ## Output -//! Exports `docs/paper/examples/setcovering_to_ilp.json` for use in paper code blocks. +//! Exports `docs/paper/examples/msc_to_ilp.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; use problemreductions::solvers::BruteForceFloat; fn main() { - // 1. Create SetCovering instance: universe {0,1,2}, 3 sets - let sc = SetCovering::::new( + // 1. 
Create MinimumSetCovering instance: universe {0,1,2}, 3 sets + let sc = MinimumSetCovering::::new( 3, vec![ vec![0, 1], @@ -34,7 +34,7 @@ fn main() { // 3. Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: SetCovering with {} variables", sc.num_variables()); + println!("Source: MinimumSetCovering with {} variables", sc.num_variables()); println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); // 4. Solve target ILP @@ -48,7 +48,7 @@ fn main() { // 5. Extract source solution let sc_solution = reduction.extract_solution(ilp_solution); - println!("Source SetCovering solution: {:?}", sc_solution); + println!("Source MinimumSetCovering solution: {:?}", sc_solution); // 6. Verify let size = sc.solution_size(&sc_solution); @@ -68,12 +68,12 @@ fn main() { }); } - let overhead = lookup_overhead_or_empty("SetCovering", "ILP"); + let overhead = lookup_overhead_or_empty("MinimumSetCovering", "ILP"); let data = ReductionData { source: ProblemSide { - problem: SetCovering::::NAME.to_string(), - variant: variant_to_map(SetCovering::::variant()), + problem: MinimumSetCovering::::NAME.to_string(), + variant: variant_to_map(MinimumSetCovering::::variant()), instance: serde_json::json!({ "num_sets": sc.num_sets(), "sets": sc.sets(), @@ -92,5 +92,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("setcovering_to_ilp", &data, &results); + write_example("msc_to_ilp", &data, &results); } diff --git a/examples/reduction_vc_to_ilp.rs b/examples/reduction_minimumvertexcover_to_ilp.rs similarity index 82% rename from examples/reduction_vc_to_ilp.rs rename to examples/reduction_minimumvertexcover_to_ilp.rs index cb3164990..b04e11633 100644 --- a/examples/reduction_vc_to_ilp.rs +++ b/examples/reduction_minimumvertexcover_to_ilp.rs @@ -11,7 +11,7 @@ //! - Target ILP: 4 binary variables, 4 constraints //! //! ## Output -//! 
Exports `docs/paper/examples/vc_to_ilp.json` for use in paper code blocks. +//! Exports `docs/paper/examples/mvc_to_ilp.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; @@ -20,7 +20,7 @@ use problemreductions::topology::SimpleGraph; fn main() { // 1. Create VC instance: cycle C4 - let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (3, 0)]); + let vc = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3), (3, 0)]); // 2. Reduce to ILP let reduction = ReduceTo::::reduce_to(&vc); @@ -28,7 +28,7 @@ fn main() { // 3. Print transformation println!("\n=== Problem Transformation ==="); - println!("Source: VertexCovering with {} variables", vc.num_variables()); + println!("Source: MinimumVertexCover with {} variables", vc.num_variables()); println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); // 4. Solve target ILP @@ -62,12 +62,12 @@ fn main() { }); } - let overhead = lookup_overhead_or_empty("VertexCovering", "ILP"); + let overhead = lookup_overhead_or_empty("MinimumVertexCover", "ILP"); let data = ReductionData { source: ProblemSide { - problem: VertexCovering::::NAME.to_string(), - variant: variant_to_map(VertexCovering::::variant()), + problem: MinimumVertexCover::::NAME.to_string(), + variant: variant_to_map(MinimumVertexCover::::variant()), instance: serde_json::json!({ "num_vertices": vc.num_vertices(), "num_edges": vc.num_edges(), @@ -86,5 +86,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("vc_to_ilp", &data, &results); + write_example("mvc_to_ilp", &data, &results); } diff --git a/examples/reduction_vc_to_is.rs b/examples/reduction_minimumvertexcover_to_maximumindependentset.rs similarity index 70% rename from examples/reduction_vc_to_is.rs rename to examples/reduction_minimumvertexcover_to_maximumindependentset.rs index 9624a0ccb..24d04810d 100644 --- a/examples/reduction_vc_to_is.rs +++ 
b/examples/reduction_minimumvertexcover_to_maximumindependentset.rs @@ -11,7 +11,7 @@ //! - Target IS: max size 2 //! //! ## Output -//! Exports `docs/paper/examples/vc_to_is.json` and `vc_to_is.result.json`. +//! Exports `docs/paper/examples/mvc_to_mis.json` and `mvc_to_mis.result.json`. //! //! See docs/paper/reductions.typ for the full reduction specification. @@ -20,14 +20,14 @@ use problemreductions::prelude::*; use problemreductions::topology::SimpleGraph; fn main() { - let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); + let vc = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); - let reduction = ReduceTo::>::reduce_to(&vc); + let reduction = ReduceTo::>::reduce_to(&vc); let is = reduction.target_problem(); println!("\n=== Problem Transformation ==="); - println!("Source: VertexCovering with {} variables", vc.num_variables()); - println!("Target: IndependentSet with {} variables", is.num_variables()); + println!("Source: MinimumVertexCover with {} variables", vc.num_variables()); + println!("Target: MaximumIndependentSet with {} variables", is.num_variables()); let solver = BruteForce::new(); let is_solutions = solver.find_best(is); @@ -57,13 +57,13 @@ fn main() { // Export JSON let vc_edges = vc.edges(); let is_edges = is.edges(); - let overhead = lookup_overhead("VertexCovering", "IndependentSet") - .expect("VertexCovering -> IndependentSet overhead not found"); + let overhead = lookup_overhead("MinimumVertexCover", "MaximumIndependentSet") + .expect("MinimumVertexCover -> MaximumIndependentSet overhead not found"); let data = ReductionData { source: ProblemSide { - problem: VertexCovering::::NAME.to_string(), - variant: variant_to_map(VertexCovering::::variant()), + problem: MinimumVertexCover::::NAME.to_string(), + variant: variant_to_map(MinimumVertexCover::::variant()), instance: serde_json::json!({ "num_vertices": vc.num_vertices(), "num_edges": vc.num_edges(), @@ -71,8 +71,8 @@ fn main() { }), }, target:
ProblemSide { - problem: IndependentSet::::NAME.to_string(), - variant: variant_to_map(IndependentSet::::variant()), + problem: MaximumIndependentSet::::NAME.to_string(), + variant: variant_to_map(MaximumIndependentSet::::variant()), instance: serde_json::json!({ "num_vertices": is.num_vertices(), "num_edges": is.num_edges(), @@ -83,5 +83,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("vc_to_is", &data, &results); + write_example("mvc_to_mis", &data, &results); } diff --git a/examples/reduction_vc_to_setcovering.rs b/examples/reduction_minimumvertexcover_to_minimumsetcovering.rs similarity index 73% rename from examples/reduction_vc_to_setcovering.rs rename to examples/reduction_minimumvertexcover_to_minimumsetcovering.rs index 09812fe17..7a17b6666 100644 --- a/examples/reduction_vc_to_setcovering.rs +++ b/examples/reduction_minimumvertexcover_to_minimumsetcovering.rs @@ -8,10 +8,10 @@ //! ## This Example //! - Instance: Triangle K3 (3 vertices, 3 edges) //! - Source VC: min size 2 -//! - Target SetCovering: min cover 2 +//! - Target MinimumSetCovering: min cover 2 //! //! ## Output -//! Exports `docs/paper/examples/vc_to_setcovering.json` for use in paper code blocks. +//! Exports `docs/paper/examples/mvc_to_msc.json` for use in paper code blocks. //! //! See docs/paper/reductions.typ for the full reduction specification. 
@@ -24,17 +24,17 @@ fn main() { // Triangle K3: 3 vertices, 3 edges let edges = vec![(0, 1), (1, 2), (0, 2)]; - let source = VertexCovering::::new(3, edges.clone()); + let source = MinimumVertexCover::::new(3, edges.clone()); - println!("Source: VertexCovering on K3"); + println!("Source: MinimumVertexCover on K3"); println!(" Vertices: 3"); println!(" Edges: {:?}", edges); - // Reduce to SetCovering - let reduction = ReduceTo::>::reduce_to(&source); + // Reduce to MinimumSetCovering + let reduction = ReduceTo::>::reduce_to(&source); let target = reduction.target_problem(); - println!("\nTarget: SetCovering"); + println!("\nTarget: MinimumSetCovering"); println!(" Universe size: {}", target.universe_size()); println!(" Sets: {} sets", target.num_sets()); for (i, set) in target.sets().iter().enumerate() { @@ -77,16 +77,16 @@ fn main() { let target_size = target.solution_size(target_sol); assert_eq!(source_size.size, 2, "VC on K3 has optimal size 2"); - assert_eq!(target_size.size, 2, "SetCovering should also have size 2"); + assert_eq!(target_size.size, 2, "MinimumSetCovering should also have size 2"); // Export JSON - let overhead = lookup_overhead("VertexCovering", "SetCovering") - .expect("VertexCovering -> SetCovering overhead not found"); + let overhead = lookup_overhead("MinimumVertexCover", "MinimumSetCovering") + .expect("MinimumVertexCover -> MinimumSetCovering overhead not found"); let data = ReductionData { source: ProblemSide { - problem: VertexCovering::::NAME.to_string(), - variant: variant_to_map(VertexCovering::::variant()), + problem: MinimumVertexCover::::NAME.to_string(), + variant: variant_to_map(MinimumVertexCover::::variant()), instance: serde_json::json!({ "num_vertices": source.num_vertices(), "num_edges": source.num_edges(), @@ -94,8 +94,8 @@ fn main() { }), }, target: ProblemSide { - problem: SetCovering::::NAME.to_string(), - variant: variant_to_map(SetCovering::::variant()), + problem: MinimumSetCovering::::NAME.to_string(), + variant: 
variant_to_map(MinimumSetCovering::::variant()), instance: serde_json::json!({ "num_sets": target.num_sets(), "sets": target.sets(), @@ -106,7 +106,7 @@ fn main() { }; let results = ResultData { solutions }; - write_example("vc_to_setcovering", &data, &results); + write_example("mvc_to_msc", &data, &results); - println!("\nDone: VC(K3) optimal=2 maps to SetCovering optimal=2"); + println!("\nDone: VC(K3) optimal=2 maps to MinimumSetCovering optimal=2"); } diff --git a/examples/reduction_vc_to_qubo.rs b/examples/reduction_minimumvertexcover_to_qubo.rs similarity index 82% rename from examples/reduction_vc_to_qubo.rs rename to examples/reduction_minimumvertexcover_to_qubo.rs index 38920c95b..ebcc9eb7d 100644 --- a/examples/reduction_vc_to_qubo.rs +++ b/examples/reduction_minimumvertexcover_to_qubo.rs @@ -12,13 +12,13 @@ //! //! ## This Example //! - Instance: Cycle graph C4 with 4 vertices and 4 edges (0-1-2-3-0) -//! - Source: VertexCovering with minimum size 2 +//! - Source: MinimumVertexCover with minimum size 2 //! - QUBO variables: 4 (one per vertex) //! - Expected: Optimal vertex covers of size 2 (e.g., {0,2} or {1,3}) //! //! ## Outputs -//! - `docs/paper/examples/vc_to_qubo.json` — reduction structure -//! - `docs/paper/examples/vc_to_qubo.result.json` — solutions +//! - `docs/paper/examples/mvc_to_qubo.json` — reduction structure +//! - `docs/paper/examples/mvc_to_qubo.result.json` — solutions //! //! ## Usage //! 
```bash @@ -34,13 +34,13 @@ fn main() { // Cycle C4: 0-1-2-3-0 let edges = vec![(0, 1), (1, 2), (2, 3), (0, 3)]; - let vc = VertexCovering::::new(4, edges.clone()); + let vc = MinimumVertexCover::::new(4, edges.clone()); // Reduce to QUBO let reduction = ReduceTo::::reduce_to(&vc); let qubo = reduction.target_problem(); - println!("Source: VertexCovering on cycle C4 (4 vertices, 4 edges)"); + println!("Source: MinimumVertexCover on cycle C4 (4 vertices, 4 edges)"); println!("Target: QUBO with {} variables", qubo.num_variables()); println!("Q matrix:"); for row in qubo.matrix() { @@ -86,13 +86,13 @@ fn main() { println!("\nVerification passed: all solutions are valid with size 2"); // Export JSON - let overhead = lookup_overhead("VertexCovering", "QUBO") - .expect("VertexCovering -> QUBO overhead not found"); + let overhead = lookup_overhead("MinimumVertexCover", "QUBO") + .expect("MinimumVertexCover -> QUBO overhead not found"); let data = ReductionData { source: ProblemSide { - problem: VertexCovering::::NAME.to_string(), - variant: variant_to_map(VertexCovering::::variant()), + problem: MinimumVertexCover::::NAME.to_string(), + variant: variant_to_map(MinimumVertexCover::::variant()), instance: serde_json::json!({ "num_vertices": vc.num_vertices(), "num_edges": vc.num_edges(), @@ -111,5 +111,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("vc_to_qubo", &data, &results); + write_example("mvc_to_qubo", &data, &results); } diff --git a/examples/reduction_sat_to_is.rs b/examples/reduction_sat_to_maximumindependentset.rs similarity index 86% rename from examples/reduction_sat_to_is.rs rename to examples/reduction_sat_to_maximumindependentset.rs index ca5b096e5..1fa1372eb 100644 --- a/examples/reduction_sat_to_is.rs +++ b/examples/reduction_sat_to_maximumindependentset.rs @@ -11,7 +11,7 @@ //! - Target IS: size 3 (one vertex per clause) //! //! ## Output -//! Exports `docs/paper/examples/sat_to_is.json` and `sat_to_is.result.json`. +//! 
Exports `docs/paper/examples/sat_to_mis.json` and `sat_to_mis.result.json`. use problemreductions::export::*; use problemreductions::prelude::*; @@ -35,13 +35,13 @@ fn main() { println!(" {} variables, {} clauses", sat.num_vars(), sat.num_clauses()); // 2. Reduce to Independent Set - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is = reduction.target_problem(); println!("\n=== Problem Transformation ==="); println!("Source: Satisfiability with {} variables", sat.num_variables()); println!( - "Target: IndependentSet with {} vertices, {} edges", + "Target: MaximumIndependentSet with {} vertices, {} edges", is.num_vertices(), is.num_edges() ); @@ -91,8 +91,8 @@ fn main() { println!("\nReduction verified successfully"); // 5. Export JSON - let overhead = lookup_overhead("Satisfiability", "IndependentSet") - .expect("Satisfiability -> IndependentSet overhead not found"); + let overhead = lookup_overhead("Satisfiability", "MaximumIndependentSet") + .expect("Satisfiability -> MaximumIndependentSet overhead not found"); let data = ReductionData { source: ProblemSide { @@ -104,8 +104,8 @@ fn main() { }), }, target: ProblemSide { - problem: IndependentSet::::NAME.to_string(), - variant: variant_to_map(IndependentSet::::variant()), + problem: MaximumIndependentSet::::NAME.to_string(), + variant: variant_to_map(MaximumIndependentSet::::variant()), instance: serde_json::json!({ "num_vertices": is.num_vertices(), "num_edges": is.num_edges(), @@ -115,5 +115,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("sat_to_is", &data, &results); + write_example("sat_to_mis", &data, &results); } diff --git a/examples/reduction_sat_to_dominatingset.rs b/examples/reduction_sat_to_minimumdominatingset.rs similarity index 86% rename from examples/reduction_sat_to_dominatingset.rs rename to examples/reduction_sat_to_minimumdominatingset.rs index 60b221540..2170a3d9d 100644 ---
a/examples/reduction_sat_to_dominatingset.rs +++ b/examples/reduction_sat_to_minimumdominatingset.rs @@ -11,8 +11,8 @@ //! - Target: Dominating set //! //! ## Output -//! Exports `docs/paper/examples/sat_to_dominatingset.json` and -//! `docs/paper/examples/sat_to_dominatingset.result.json` for use in paper code blocks. +//! Exports `docs/paper/examples/sat_to_mds.json` and +//! `docs/paper/examples/sat_to_mds.result.json` for use in paper code blocks. use problemreductions::export::*; use problemreductions::prelude::*; @@ -34,13 +34,13 @@ fn main() { println!(" {} variables, {} clauses", sat.num_vars(), sat.num_clauses()); // 2. Reduce to Dominating Set - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds = reduction.target_problem(); println!("\n=== Problem Transformation ==="); println!("Source: Satisfiability with {} variables", sat.num_variables()); println!( - "Target: DominatingSet with {} vertices, {} edges", + "Target: MinimumDominatingSet with {} vertices, {} edges", ds.num_vertices(), ds.num_edges() ); @@ -100,8 +100,8 @@ fn main() { } // 6. 
Export JSON - let overhead = lookup_overhead("Satisfiability", "DominatingSet") - .expect("Satisfiability -> DominatingSet overhead not found"); + let overhead = lookup_overhead("Satisfiability", "MinimumDominatingSet") + .expect("Satisfiability -> MinimumDominatingSet overhead not found"); let data = ReductionData { source: ProblemSide { @@ -113,8 +113,8 @@ fn main() { }), }, target: ProblemSide { - problem: DominatingSet::::NAME.to_string(), - variant: variant_to_map(DominatingSet::::variant()), + problem: MinimumDominatingSet::::NAME.to_string(), + variant: variant_to_map(MinimumDominatingSet::::variant()), instance: serde_json::json!({ "num_vertices": ds.num_vertices(), "num_edges": ds.num_edges(), @@ -124,5 +124,5 @@ fn main() { }; let results = ResultData { solutions }; - write_example("sat_to_dominatingset", &data, &results); + write_example("sat_to_mds", &data, &results); } diff --git a/scripts/generate_qubo_tests.py b/scripts/generate_qubo_tests.py index 6f5df8908..de1cb5a9e 100644 --- a/scripts/generate_qubo_tests.py +++ b/scripts/generate_qubo_tests.py @@ -56,7 +56,7 @@ def generate_vertex_covering(outdir: Path): qubo_result = brute_force_qubo(Q) - save_test("vertexcovering_to_qubo", { + save_test("minimumvertexcover_to_qubo", { "problem": "VertexCovering", "source": {"num_vertices": n_nodes, "edges": edges, "penalty": penalty}, "qubo_matrix": Q.tolist(), @@ -86,7 +86,7 @@ def generate_independent_set(outdir: Path): qubo_result = brute_force_qubo(Q) - save_test("independentset_to_qubo", { + save_test("maximumindependentset_to_qubo", { "problem": "IndependentSet", "source": {"num_vertices": n_nodes, "edges": edges, "penalty": penalty}, "qubo_matrix": Q.tolist(), @@ -143,7 +143,7 @@ def generate_set_packing(outdir: Path): qubo_result = brute_force_qubo(Q) - save_test("setpacking_to_qubo", { + save_test("maximumsetpacking_to_qubo", { "problem": "SetPacking", "source": { "sets": sets, diff --git a/src/export.rs b/src/export.rs index e5af4e91d..c246307bc 
100644 --- a/src/export.rs +++ b/src/export.rs @@ -17,7 +17,7 @@ use std::path::Path; /// One side (source or target) of a reduction. #[derive(Serialize, Clone, Debug)] pub struct ProblemSide { - /// Problem name matching `Problem::NAME` (e.g., `"IndependentSet"`). + /// Problem name matching `Problem::NAME` (e.g., `"MaximumIndependentSet"`). pub problem: String, /// Variant attributes (e.g., `{"graph": "SimpleGraph", "weight": "Unweighted"}`). pub variant: HashMap, diff --git a/src/io.rs b/src/io.rs index 144271301..78104cbea 100644 --- a/src/io.rs +++ b/src/io.rs @@ -41,10 +41,10 @@ impl FileFormat { /// /// ```no_run /// use problemreductions::io::{write_problem, FileFormat}; -/// use problemreductions::models::graph::IndependentSet; +/// use problemreductions::models::graph::MaximumIndependentSet; /// use problemreductions::topology::SimpleGraph; /// -/// let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); +/// let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); /// write_problem(&problem, "problem.json", FileFormat::Json).unwrap(); /// ``` pub fn write_problem>( @@ -75,10 +75,10 @@ pub fn write_problem>( /// /// ```no_run /// use problemreductions::io::{read_problem, FileFormat}; -/// use problemreductions::models::graph::IndependentSet; +/// use problemreductions::models::graph::MaximumIndependentSet; /// use problemreductions::topology::SimpleGraph; /// -/// let problem: IndependentSet = read_problem("problem.json", FileFormat::Json).unwrap(); +/// let problem: MaximumIndependentSet = read_problem("problem.json", FileFormat::Json).unwrap(); /// ``` pub fn read_problem>(path: P, format: FileFormat) -> Result { let file = File::open(path.as_ref()) diff --git a/src/lib.rs b/src/lib.rs index 3a264f7d0..30bb11af2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -17,11 +17,11 @@ //! //! ```rust //! use problemreductions::prelude::*; -//! use problemreductions::models::graph::IndependentSet; +//! 
use problemreductions::models::graph::MaximumIndependentSet; //! use problemreductions::topology::SimpleGraph; //! //! // Create an Independent Set problem on a triangle graph -//! let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); +//! let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); //! //! // Solve with brute force //! let solver = BruteForce::new(); @@ -40,21 +40,21 @@ //! - Factoring: Integer factorization //! //! ### Graph Problems -//! - IndependentSet: Maximum weight independent set +//! - MaximumIndependentSet: Maximum weight independent set //! - MaximalIS: Maximal independent set -//! - VertexCovering: Minimum weight vertex cover -//! - DominatingSet: Minimum dominating set +//! - MinimumVertexCover: Minimum weight vertex cover +//! - MinimumDominatingSet: Minimum dominating set //! - Coloring: K-vertex coloring //! //! ### Set Problems -//! - SetCovering: Minimum weight set cover -//! - SetPacking: Maximum weight set packing +//! - MinimumSetCovering: Minimum weight set cover +//! - MaximumSetPacking: Maximum weight set packing //! //! ### Optimization Problems //! - MaxCut: Maximum cut on weighted graphs //! - SpinGlass: Ising model Hamiltonian //! - QUBO: Quadratic unconstrained binary optimization -//! - Matching: Maximum weight matching +//! - MaximumMatching: Maximum weight matching //! //! ### Specialized Problems //! 
- Paintshop: Minimize color switches @@ -85,14 +85,14 @@ pub mod prelude { }; pub use crate::error::{ProblemError, Result}; pub use crate::models::graph::{ - Clique, DominatingSet, IndependentSet, KColoring, Matching, MaxCut, MaximalIS, - VertexCovering, + MaximumClique, MinimumDominatingSet, MaximumIndependentSet, KColoring, MaximumMatching, MaxCut, MaximalIS, + MinimumVertexCover, }; pub use crate::models::optimization::{ Comparison, LinearConstraint, ObjectiveSense, SpinGlass, VarBounds, ILP, QUBO, }; pub use crate::models::satisfiability::{CNFClause, KSatisfiability, Satisfiability}; - pub use crate::models::set::{SetCovering, SetPacking}; + pub use crate::models::set::{MinimumSetCovering, MaximumSetPacking}; pub use crate::models::specialized::{BicliqueCover, CircuitSAT, Factoring, PaintShop, BMF}; pub use crate::registry::{ ComplexityClass, GraphSubcategory, ProblemCategory, ProblemInfo, ProblemMetadata, diff --git a/src/models/graph/clique.rs b/src/models/graph/maximum_clique.rs similarity index 89% rename from src/models/graph/clique.rs rename to src/models/graph/maximum_clique.rs index 79f5bd1ff..a168f1a50 100644 --- a/src/models/graph/clique.rs +++ b/src/models/graph/maximum_clique.rs @@ -1,6 +1,6 @@ -//! Clique problem implementation. +//! MaximumClique problem implementation. //! -//! The Clique problem asks for a maximum weight subset of vertices +//! The MaximumClique problem asks for a maximum weight subset of vertices //! such that all vertices in the subset are pairwise adjacent. use crate::registry::{FieldInfo, ProblemSchemaEntry}; @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { - name: "Clique", + name: "MaximumClique", category: "graph", description: "Find maximum weight clique in a graph", fields: &[ @@ -22,7 +22,7 @@ inventory::submit! { } } -/// The Clique problem. +/// The MaximumClique problem. 
/// /// Given a graph G = (V, E) and weights w_v for each vertex, /// find a subset S ⊆ V such that: @@ -37,12 +37,12 @@ inventory::submit! { /// # Example /// /// ``` -/// use problemreductions::models::graph::Clique; +/// use problemreductions::models::graph::MaximumClique; /// use problemreductions::topology::SimpleGraph; /// use problemreductions::{Problem, Solver, BruteForce}; /// /// // Create a triangle graph (3 vertices, 3 edges - complete graph) -/// let problem = Clique::::new(3, vec![(0, 1), (1, 2), (0, 2)]); +/// let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2), (0, 2)]); /// /// // Solve with brute force /// let solver = BruteForce::new(); @@ -52,15 +52,15 @@ inventory::submit! { /// assert!(solutions.iter().all(|s| s.iter().sum::() == 3)); /// ``` #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct Clique { +pub struct MaximumClique { /// The underlying graph. graph: G, /// Weights for each vertex. weights: Vec, } -impl Clique { - /// Create a new Clique problem with unit weights. +impl MaximumClique { + /// Create a new MaximumClique problem with unit weights. /// /// # Arguments /// * `num_vertices` - Number of vertices in the graph @@ -74,7 +74,7 @@ impl Clique { Self { graph, weights } } - /// Create a new Clique problem with custom weights. + /// Create a new MaximumClique problem with custom weights. pub fn with_weights(num_vertices: usize, edges: Vec<(usize, usize)>, weights: Vec) -> Self { assert_eq!( weights.len(), @@ -86,8 +86,8 @@ impl Clique { } } -impl Clique { - /// Create a Clique problem from an existing graph with custom weights. +impl MaximumClique { + /// Create a MaximumClique problem from an existing graph with custom weights. pub fn from_graph(graph: G, weights: Vec) -> Self { assert_eq!( weights.len(), @@ -97,7 +97,7 @@ impl Clique { Self { graph, weights } } - /// Create a Clique problem from an existing graph with unit weights. + /// Create a MaximumClique problem from an existing graph with unit weights. 
pub fn from_graph_unit_weights(graph: G) -> Self where W: From, @@ -137,7 +137,7 @@ impl Clique { } } -impl Problem for Clique +impl Problem for MaximumClique where G: Graph, W: Clone @@ -148,7 +148,7 @@ where + std::ops::AddAssign + 'static, { - const NAME: &'static str = "Clique"; + const NAME: &'static str = "MaximumClique"; fn variant() -> Vec<(&'static str, &'static str)> { vec![ @@ -190,7 +190,7 @@ where } } -impl ConstraintSatisfactionProblem for Clique +impl ConstraintSatisfactionProblem for MaximumClique where G: Graph, W: Clone @@ -310,5 +310,5 @@ pub fn is_clique(num_vertices: usize, edges: &[(usize, usize)], selected: &[bool } #[cfg(test)] -#[path = "../../unit_tests/models/graph/clique.rs"] +#[path = "../../unit_tests/models/graph/maximum_clique.rs"] mod tests; diff --git a/src/models/graph/independent_set.rs b/src/models/graph/maximum_independent_set.rs similarity index 92% rename from src/models/graph/independent_set.rs rename to src/models/graph/maximum_independent_set.rs index 67f507079..051005554 100644 --- a/src/models/graph/independent_set.rs +++ b/src/models/graph/maximum_independent_set.rs @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { - name: "IndependentSet", + name: "MaximumIndependentSet", category: "graph", description: "Find maximum weight independent set in a graph", fields: &[ @@ -37,12 +37,12 @@ inventory::submit! 
{ /// # Example /// /// ``` -/// use problemreductions::models::graph::IndependentSet; +/// use problemreductions::models::graph::MaximumIndependentSet; /// use problemreductions::topology::SimpleGraph; /// use problemreductions::{Problem, Solver, BruteForce}; /// /// // Create a triangle graph (3 vertices, 3 edges) -/// let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); +/// let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); /// /// // Solve with brute force /// let solver = BruteForce::new(); @@ -52,14 +52,14 @@ inventory::submit! { /// assert!(solutions.iter().all(|s| s.iter().sum::() == 1)); /// ``` #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct IndependentSet { +pub struct MaximumIndependentSet { /// The underlying graph. graph: G, /// Weights for each vertex. weights: Vec, } -impl IndependentSet { +impl MaximumIndependentSet { /// Create a new Independent Set problem with unit weights. /// /// # Arguments @@ -86,7 +86,7 @@ impl IndependentSet { } } -impl IndependentSet { +impl MaximumIndependentSet { /// Create an Independent Set problem from an existing graph with custom weights. 
pub fn from_graph(graph: G, weights: Vec) -> Self { assert_eq!( @@ -137,7 +137,7 @@ impl IndependentSet { } } -impl Problem for IndependentSet +impl Problem for MaximumIndependentSet where G: Graph, W: Clone @@ -148,7 +148,7 @@ where + std::ops::AddAssign + 'static, { - const NAME: &'static str = "IndependentSet"; + const NAME: &'static str = "MaximumIndependentSet"; fn variant() -> Vec<(&'static str, &'static str)> { vec![ @@ -190,7 +190,7 @@ where } } -impl ConstraintSatisfactionProblem for IndependentSet +impl ConstraintSatisfactionProblem for MaximumIndependentSet where G: Graph, W: Clone @@ -278,5 +278,5 @@ pub fn is_independent_set( } #[cfg(test)] -#[path = "../../unit_tests/models/graph/independent_set.rs"] +#[path = "../../unit_tests/models/graph/maximum_independent_set.rs"] mod tests; diff --git a/src/models/graph/matching.rs b/src/models/graph/maximum_matching.rs similarity index 89% rename from src/models/graph/matching.rs rename to src/models/graph/maximum_matching.rs index e0092b5ac..f9fe318e0 100644 --- a/src/models/graph/matching.rs +++ b/src/models/graph/maximum_matching.rs @@ -1,6 +1,6 @@ -//! Matching problem implementation. +//! MaximumMatching problem implementation. //! -//! The Maximum Matching problem asks for a maximum weight set of edges +//! The MaximumMatching problem asks for a maximum weight set of edges //! such that no two edges share a vertex. use crate::registry::{FieldInfo, ProblemSchemaEntry}; @@ -13,7 +13,7 @@ use std::collections::HashMap; inventory::submit! { ProblemSchemaEntry { - name: "Matching", + name: "MaximumMatching", category: "graph", description: "Find maximum weight matching in a graph", fields: &[ @@ -23,7 +23,7 @@ inventory::submit! { } } -/// The Maximum Matching problem. +/// The MaximumMatching problem. /// /// Given a graph G = (V, E) with edge weights, find a maximum weight /// subset M ⊆ E such that no two edges in M share a vertex. @@ -36,12 +36,12 @@ inventory::submit!
{ /// # Example /// /// ``` -/// use problemreductions::models::graph::Matching; +/// use problemreductions::models::graph::MaximumMatching; /// use problemreductions::topology::SimpleGraph; /// use problemreductions::{Problem, Solver, BruteForce}; /// /// // Path graph 0-1-2 -/// let problem = Matching::::new(3, vec![(0, 1, 1), (1, 2, 1)]); +/// let problem = MaximumMatching::::new(3, vec![(0, 1, 1), (1, 2, 1)]); /// /// let solver = BruteForce::new(); /// let solutions = solver.find_best(&problem); @@ -52,15 +52,15 @@ inventory::submit! { /// } /// ``` #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct Matching { +pub struct MaximumMatching { /// The underlying graph. graph: G, /// Weights for each edge (in edge index order). edge_weights: Vec, } -impl Matching { - /// Create a new Matching problem. +impl MaximumMatching { + /// Create a new MaximumMatching problem. /// /// # Arguments /// * `num_vertices` - Number of vertices @@ -79,7 +79,7 @@ impl Matching { } } - /// Create a Matching problem with unit weights. + /// Create a MaximumMatching problem with unit weights. pub fn unweighted(num_vertices: usize, edges: Vec<(usize, usize)>) -> Self where W: From, @@ -91,8 +91,8 @@ impl Matching { } } -impl Matching { - /// Create a Matching problem from a graph with given edge weights. +impl MaximumMatching { + /// Create a MaximumMatching problem from a graph with given edge weights. /// /// # Arguments /// * `graph` - The graph @@ -109,7 +109,7 @@ impl Matching { } } - /// Create a Matching problem from a graph with unit weights. + /// Create a MaximumMatching problem from a graph with unit weights. 
pub fn from_graph_unit_weights(graph: G) -> Self where W: From, @@ -180,7 +180,7 @@ impl Matching { } } -impl Problem for Matching +impl Problem for MaximumMatching where G: Graph, W: Clone @@ -191,7 +191,7 @@ where + std::ops::AddAssign + 'static, { - const NAME: &'static str = "Matching"; + const NAME: &'static str = "MaximumMatching"; fn variant() -> Vec<(&'static str, &'static str)> { vec![("graph", G::NAME), ("weight", short_type_name::())] @@ -232,7 +232,7 @@ where } } -impl ConstraintSatisfactionProblem for Matching +impl ConstraintSatisfactionProblem for MaximumMatching where G: Graph, W: Clone @@ -322,5 +322,5 @@ pub fn is_matching(num_vertices: usize, edges: &[(usize, usize)], selected: &[bo } #[cfg(test)] -#[path = "../../unit_tests/models/graph/matching.rs"] +#[path = "../../unit_tests/models/graph/maximum_matching.rs"] mod tests; diff --git a/src/models/graph/dominating_set.rs b/src/models/graph/minimum_dominating_set.rs similarity index 93% rename from src/models/graph/dominating_set.rs rename to src/models/graph/minimum_dominating_set.rs index e745d6cba..4c187afe9 100644 --- a/src/models/graph/dominating_set.rs +++ b/src/models/graph/minimum_dominating_set.rs @@ -13,7 +13,7 @@ use std::collections::HashSet; inventory::submit! { ProblemSchemaEntry { - name: "DominatingSet", + name: "MinimumDominatingSet", category: "graph", description: "Find minimum weight dominating set in a graph", fields: &[ @@ -33,12 +33,12 @@ inventory::submit! 
{ /// # Example /// /// ``` -/// use problemreductions::models::graph::DominatingSet; +/// use problemreductions::models::graph::MinimumDominatingSet; /// use problemreductions::topology::SimpleGraph; /// use problemreductions::{Problem, Solver, BruteForce}; /// /// // Star graph: center dominates all -/// let problem = DominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); +/// let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); /// /// let solver = BruteForce::new(); /// let solutions = solver.find_best(&problem); @@ -47,14 +47,14 @@ inventory::submit! { /// assert!(solutions.contains(&vec![1, 0, 0, 0])); /// ``` #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DominatingSet { +pub struct MinimumDominatingSet { /// The underlying graph. graph: G, /// Weights for each vertex. weights: Vec, } -impl DominatingSet { +impl MinimumDominatingSet { /// Create a new Dominating Set problem with unit weights. pub fn new(num_vertices: usize, edges: Vec<(usize, usize)>) -> Self where @@ -73,7 +73,7 @@ impl DominatingSet { } } -impl DominatingSet { +impl MinimumDominatingSet { /// Create a Dominating Set problem from a graph with custom weights. 
pub fn from_graph(graph: G, weights: Vec) -> Self { assert_eq!(weights.len(), graph.num_vertices()); @@ -153,7 +153,7 @@ impl DominatingSet { } } -impl Problem for DominatingSet +impl Problem for MinimumDominatingSet where G: Graph, W: Clone @@ -164,7 +164,7 @@ where + std::ops::AddAssign + 'static, { - const NAME: &'static str = "DominatingSet"; + const NAME: &'static str = "MinimumDominatingSet"; fn variant() -> Vec<(&'static str, &'static str)> { vec![("graph", G::NAME), ("weight", short_type_name::())] @@ -203,7 +203,7 @@ where } } -impl ConstraintSatisfactionProblem for DominatingSet +impl ConstraintSatisfactionProblem for MinimumDominatingSet where G: Graph, W: Clone @@ -288,5 +288,5 @@ pub fn is_dominating_set(num_vertices: usize, edges: &[(usize, usize)], selected } #[cfg(test)] -#[path = "../../unit_tests/models/graph/dominating_set.rs"] +#[path = "../../unit_tests/models/graph/minimum_dominating_set.rs"] mod tests; diff --git a/src/models/graph/vertex_covering.rs b/src/models/graph/minimum_vertex_cover.rs similarity index 92% rename from src/models/graph/vertex_covering.rs rename to src/models/graph/minimum_vertex_cover.rs index e97dc3aba..d18f45acb 100644 --- a/src/models/graph/vertex_covering.rs +++ b/src/models/graph/minimum_vertex_cover.rs @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { - name: "VertexCovering", + name: "MinimumVertexCover", category: "graph", description: "Find minimum weight vertex cover in a graph", fields: &[ @@ -32,12 +32,12 @@ inventory::submit! 
{ /// # Example /// /// ``` -/// use problemreductions::models::graph::VertexCovering; +/// use problemreductions::models::graph::MinimumVertexCover; /// use problemreductions::topology::SimpleGraph; /// use problemreductions::{Problem, Solver, BruteForce}; /// /// // Create a path graph 0-1-2 -/// let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); +/// let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); /// /// // Solve with brute force /// let solver = BruteForce::new(); @@ -47,14 +47,14 @@ inventory::submit! { /// assert!(solutions.contains(&vec![0, 1, 0])); /// ``` #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct VertexCovering { +pub struct MinimumVertexCover { /// The underlying graph. graph: G, /// Weights for each vertex. weights: Vec, } -impl VertexCovering { +impl MinimumVertexCover { /// Create a new Vertex Covering problem with unit weights. pub fn new(num_vertices: usize, edges: Vec<(usize, usize)>) -> Self where @@ -73,7 +73,7 @@ impl VertexCovering { } } -impl VertexCovering { +impl MinimumVertexCover { /// Create a Vertex Covering problem from a graph with custom weights. 
pub fn from_graph(graph: G, weights: Vec) -> Self { assert_eq!(weights.len(), graph.num_vertices()); @@ -120,7 +120,7 @@ impl VertexCovering { } } -impl Problem for VertexCovering +impl Problem for MinimumVertexCover where G: Graph, W: Clone @@ -131,7 +131,7 @@ where + std::ops::AddAssign + 'static, { - const NAME: &'static str = "VertexCovering"; + const NAME: &'static str = "MinimumVertexCover"; fn variant() -> Vec<(&'static str, &'static str)> { vec![("graph", G::NAME), ("weight", short_type_name::())] @@ -170,7 +170,7 @@ where } } -impl ConstraintSatisfactionProblem for VertexCovering +impl ConstraintSatisfactionProblem for MinimumVertexCover where G: Graph, W: Clone @@ -255,5 +255,5 @@ pub fn is_vertex_cover(num_vertices: usize, edges: &[(usize, usize)], selected: } #[cfg(test)] -#[path = "../../unit_tests/models/graph/vertex_covering.rs"] +#[path = "../../unit_tests/models/graph/minimum_vertex_cover.rs"] mod tests; diff --git a/src/models/graph/mod.rs b/src/models/graph/mod.rs index aa03ee415..f65c7a7a2 100644 --- a/src/models/graph/mod.rs +++ b/src/models/graph/mod.rs @@ -1,29 +1,29 @@ //! Graph-based optimization problems. //! //! This module contains NP-hard problems defined on graphs: -//! - [`IndependentSet`]: Maximum weight independent set +//! - [`MaximumIndependentSet`]: Maximum weight independent set //! - [`MaximalIS`]: Maximal independent set -//! - [`VertexCovering`]: Minimum weight vertex cover -//! - [`DominatingSet`]: Minimum dominating set -//! - [`Clique`]: Maximum weight clique +//! - [`MinimumVertexCover`]: Minimum weight vertex cover +//! - [`MinimumDominatingSet`]: Minimum dominating set +//! - [`MaximumClique`]: Maximum weight clique //! - [`MaxCut`]: Maximum cut on weighted graphs //! - [`KColoring`]: K-vertex coloring -//! - [`Matching`]: Maximum weight matching +//! 
- [`MaximumMatching`]: Maximum weight matching -mod clique; -mod dominating_set; -mod independent_set; +mod maximum_clique; +mod minimum_dominating_set; +mod maximum_independent_set; mod kcoloring; -mod matching; +mod maximum_matching; mod max_cut; mod maximal_is; -mod vertex_covering; +mod minimum_vertex_cover; -pub use clique::{is_clique, Clique}; -pub use dominating_set::{is_dominating_set, DominatingSet}; -pub use independent_set::{is_independent_set, IndependentSet}; +pub use maximum_clique::{is_clique, MaximumClique}; +pub use minimum_dominating_set::{is_dominating_set, MinimumDominatingSet}; +pub use maximum_independent_set::{is_independent_set, MaximumIndependentSet}; pub use kcoloring::{is_valid_coloring, KColoring}; -pub use matching::{is_matching, Matching}; +pub use maximum_matching::{is_matching, MaximumMatching}; pub use max_cut::{cut_size, MaxCut}; pub use maximal_is::{is_maximal_independent_set, MaximalIS}; -pub use vertex_covering::{is_vertex_cover, VertexCovering}; +pub use minimum_vertex_cover::{is_vertex_cover, MinimumVertexCover}; diff --git a/src/models/mod.rs b/src/models/mod.rs index 8a4f9fb37..7cbcd06e6 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -5,8 +5,8 @@ //! # Problem Categories //! //! - **Satisfiability**: SAT, K-SAT, CircuitSAT, Factoring -//! - **Graph**: IndependentSet, MaximalIS, VertexCovering, DominatingSet, KColoring, Matching -//! - **Set**: SetCovering, SetPacking +//! - **Graph**: MaximumIndependentSet, MaximalIS, MinimumVertexCover, MinimumDominatingSet, KColoring, MaximumMatching +//! - **Set**: MinimumSetCovering, MaximumSetPacking //! - **Optimization**: MaxCut, SpinGlass, QUBO //! 
- **Specialized**: Paintshop, BicliqueCover, BMF @@ -18,9 +18,9 @@ pub mod specialized; // Re-export commonly used types pub use graph::{ - DominatingSet, IndependentSet, KColoring, Matching, MaxCut, MaximalIS, VertexCovering, + MinimumDominatingSet, MaximumIndependentSet, KColoring, MaximumMatching, MaxCut, MaximalIS, MinimumVertexCover, }; pub use optimization::{SpinGlass, QUBO}; pub use satisfiability::{CNFClause, Satisfiability}; -pub use set::{SetCovering, SetPacking}; +pub use set::{MinimumSetCovering, MaximumSetPacking}; pub use specialized::{BicliqueCover, CircuitSAT, Factoring, PaintShop, BMF}; diff --git a/src/models/set/set_packing.rs b/src/models/set/maximum_set_packing.rs similarity index 93% rename from src/models/set/set_packing.rs rename to src/models/set/maximum_set_packing.rs index 06a20d319..b4481c995 100644 --- a/src/models/set/set_packing.rs +++ b/src/models/set/maximum_set_packing.rs @@ -12,7 +12,7 @@ use std::collections::HashSet; inventory::submit! { ProblemSchemaEntry { - name: "SetPacking", + name: "MaximumSetPacking", category: "set", description: "Find maximum weight collection of disjoint sets", fields: &[ @@ -30,12 +30,12 @@ inventory::submit! { /// # Example /// /// ``` -/// use problemreductions::models::set::SetPacking; +/// use problemreductions::models::set::MaximumSetPacking; /// use problemreductions::{Problem, Solver, BruteForce}; /// /// // Sets: S0={0,1}, S1={1,2}, S2={2,3}, S3={3,4} /// // S0 and S1 overlap, S2 and S3 are disjoint from S0 -/// let problem = SetPacking::::new(vec![ +/// let problem = MaximumSetPacking::::new(vec![ /// vec![0, 1], /// vec![1, 2], /// vec![2, 3], @@ -51,14 +51,14 @@ inventory::submit! { /// } /// ``` #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SetPacking { +pub struct MaximumSetPacking { /// Collection of sets. sets: Vec>, /// Weights for each set. weights: Vec, } -impl SetPacking { +impl MaximumSetPacking { /// Create a new Set Packing problem with unit weights. 
pub fn new(sets: Vec>) -> Self where @@ -119,7 +119,7 @@ impl SetPacking { } } -impl Problem for SetPacking +impl Problem for MaximumSetPacking where W: Clone + Default @@ -129,7 +129,7 @@ where + std::ops::AddAssign + 'static, { - const NAME: &'static str = "SetPacking"; + const NAME: &'static str = "MaximumSetPacking"; fn variant() -> Vec<(&'static str, &'static str)> { vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] @@ -165,7 +165,7 @@ where } } -impl ConstraintSatisfactionProblem for SetPacking +impl ConstraintSatisfactionProblem for MaximumSetPacking where W: Clone + Default @@ -247,5 +247,5 @@ pub fn is_set_packing(sets: &[Vec], selected: &[bool]) -> bool { } #[cfg(test)] -#[path = "../../unit_tests/models/set/set_packing.rs"] +#[path = "../../unit_tests/models/set/maximum_set_packing.rs"] mod tests; diff --git a/src/models/set/set_covering.rs b/src/models/set/minimum_set_covering.rs similarity index 93% rename from src/models/set/set_covering.rs rename to src/models/set/minimum_set_covering.rs index 21c3fbeab..d370a19b7 100644 --- a/src/models/set/set_covering.rs +++ b/src/models/set/minimum_set_covering.rs @@ -12,7 +12,7 @@ use std::collections::HashSet; inventory::submit! { ProblemSchemaEntry { - name: "SetCovering", + name: "MinimumSetCovering", category: "set", description: "Find minimum weight collection covering the universe", fields: &[ @@ -32,12 +32,12 @@ inventory::submit! { /// # Example /// /// ``` -/// use problemreductions::models::set::SetCovering; +/// use problemreductions::models::set::MinimumSetCovering; /// use problemreductions::{Problem, Solver, BruteForce}; /// /// // Universe: {0, 1, 2, 3} /// // Sets: S0={0,1}, S1={1,2}, S2={2,3}, S3={0,3} -/// let problem = SetCovering::::new( +/// let problem = MinimumSetCovering::::new( /// 4, // universe size /// vec![ /// vec![0, 1], @@ -56,7 +56,7 @@ inventory::submit! 
{ /// } /// ``` #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SetCovering { +pub struct MinimumSetCovering { /// Size of the universe (elements are 0..universe_size). universe_size: usize, /// Collection of sets, each represented as a vector of elements. @@ -65,7 +65,7 @@ pub struct SetCovering { weights: Vec, } -impl SetCovering { +impl MinimumSetCovering { /// Create a new Set Covering problem with unit weights. pub fn new(universe_size: usize, sets: Vec>) -> Self where @@ -124,7 +124,7 @@ impl SetCovering { } } -impl Problem for SetCovering +impl Problem for MinimumSetCovering where W: Clone + Default @@ -134,7 +134,7 @@ where + std::ops::AddAssign + 'static, { - const NAME: &'static str = "SetCovering"; + const NAME: &'static str = "MinimumSetCovering"; fn variant() -> Vec<(&'static str, &'static str)> { vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] @@ -176,7 +176,7 @@ where } } -impl ConstraintSatisfactionProblem for SetCovering +impl ConstraintSatisfactionProblem for MinimumSetCovering where W: Clone + Default @@ -255,5 +255,5 @@ pub fn is_set_cover(universe_size: usize, sets: &[Vec], selected: &[bool] } #[cfg(test)] -#[path = "../../unit_tests/models/set/set_covering.rs"] +#[path = "../../unit_tests/models/set/minimum_set_covering.rs"] mod tests; diff --git a/src/models/set/mod.rs b/src/models/set/mod.rs index 471b8f9d3..7a149c555 100644 --- a/src/models/set/mod.rs +++ b/src/models/set/mod.rs @@ -1,15 +1,15 @@ //! Set-based optimization problems. //! //! This module contains NP-hard problems based on set operations: -//! - [`SetCovering`]: Minimum weight set cover -//! - [`SetPacking`]: Maximum weight set packing +//! - [`MinimumSetCovering`]: Minimum weight set cover +//! 
- [`MaximumSetPacking`]: Maximum weight set packing -mod set_covering; -mod set_packing; +mod minimum_set_covering; +mod maximum_set_packing; -pub use set_covering::SetCovering; -pub use set_packing::SetPacking; +pub use minimum_set_covering::MinimumSetCovering; +pub use maximum_set_packing::MaximumSetPacking; // Validation utilities -pub use set_covering::is_set_cover; -pub use set_packing::is_set_packing; +pub use minimum_set_covering::is_set_cover; +pub use maximum_set_packing::is_set_packing; diff --git a/src/registry/category.rs b/src/registry/category.rs index affea793e..eae543ea5 100644 --- a/src/registry/category.rs +++ b/src/registry/category.rs @@ -11,11 +11,11 @@ //! ├── Graph //! │ ├── Coloring (3-Coloring, Chromatic Number) //! │ ├── Covering (Vertex Cover, Dominating Set) -//! │ ├── Independent (Independent Set, Clique) +//! │ ├── Independent (Independent Set, Clique) //! │ ├── Paths (Hamiltonian Path, TSP) //! │ ├── Structure (Graph Partition) //! │ ├── Trees (Steiner Tree) -//! │ └── Matching (3D Matching) +//! │ └── Matching (3D Matching) //! ├── Satisfiability //! │ ├── Sat (SAT, 3-SAT, Max-SAT) //! │ ├── Circuit (Circuit SAT) @@ -24,7 +24,7 @@ //! │ ├── Covering (Set Cover, Exact Cover) //! │ ├── Packing (Bin Packing, Knapsack) //! │ ├── Partition (Partition, Subset Sum) -//! │ └── Matching (Hitting Set) +//! │ └── Matching (Hitting Set) //! ├── Optimization //! │ ├── Quadratic (QUBO, Max-Cut) //! │ ├── Linear (ILP) @@ -39,7 +39,7 @@ //! │ └── Connectivity (k-Connectivity) //! ├── String //! │ ├── Sequence (Shortest Superstring) -//! │ ├── Matching (String Matching) +//! │ ├── Matching (String Matching) //! │ └── Compression (Grammar Compression) //! └── Specialized //! 
├── Geometry (Protein Folding) @@ -142,8 +142,8 @@ pub enum GraphSubcategory { Structure, /// Tree problems (Steiner, spanning) Trees, - /// Matching problems - Matching, + /// Matching problems + Matching, } impl GraphSubcategory { @@ -156,7 +156,7 @@ GraphSubcategory::Paths => "paths", GraphSubcategory::Structure => "structure", GraphSubcategory::Trees => "trees", - GraphSubcategory::Matching => "matching", + GraphSubcategory::Matching => "matching", } } } @@ -193,7 +193,7 @@ pub enum SetSubcategory { /// Partition and subset sum Partition, /// Set splitting and hitting set - Matching, + Matching, } impl SetSubcategory { @@ -203,7 +203,7 @@ SetSubcategory::Covering => "covering", SetSubcategory::Packing => "packing", SetSubcategory::Partition => "partition", - SetSubcategory::Matching => "matching", + SetSubcategory::Matching => "matching", } } } @@ -280,7 +280,7 @@ pub enum StringSubcategory { /// Sequence problems (superstring, subsequence) Sequence, /// String matching - Matching, + Matching, /// Compression problems Compression, } @@ -290,7 +290,7 @@ impl StringSubcategory { pub fn name(&self) -> &'static str { match self { StringSubcategory::Sequence => "sequence", - StringSubcategory::Matching => "matching", + StringSubcategory::Matching => "matching", StringSubcategory::Compression => "compression", } } diff --git a/src/registry/info.rs b/src/registry/info.rs index 2cec1be19..4e03efc68 100644 --- a/src/registry/info.rs +++ b/src/registry/info.rs @@ -212,7 +212,7 @@ impl fmt::Display for ProblemInfo { pub struct FieldInfo { /// Field name as it appears in the Rust struct. pub name: &'static str, - /// Type name (e.g., "Vec", "UnGraph<(), ()>"). + /// Type name (e.g., `Vec`, `UnGraph<(), ()>`). pub type_name: &'static str, /// Human-readable description of what this field represents. 
pub description: &'static str, diff --git a/src/registry/schema.rs b/src/registry/schema.rs index b743260ef..79aed61de 100644 --- a/src/registry/schema.rs +++ b/src/registry/schema.rs @@ -5,7 +5,7 @@ use serde::Serialize; /// A registered problem schema entry for static inventory registration. pub struct ProblemSchemaEntry { - /// Problem name (e.g., "IndependentSet"). + /// Problem name (e.g., "MaximumIndependentSet"). pub name: &'static str, /// Category (e.g., "graph", "optimization"). pub category: &'static str, diff --git a/src/rules/circuit_spinglass.rs b/src/rules/circuit_spinglass.rs index ddc91cca4..29bdf401a 100644 --- a/src/rules/circuit_spinglass.rs +++ b/src/rules/circuit_spinglass.rs @@ -108,8 +108,8 @@ where /// 2-variable SpinGlass: input at index 0; output at index 1. /// Ground states: (0,1), (1,0) corresponding to valid NOT. /// -/// J = [1] for edge (0,1) -/// h = [0, 0] +/// J = \[1\] for edge (0,1) +/// h = \[0, 0\] pub fn not_gadget() -> LogicGadget where W: Clone + Default + From + Zero, @@ -150,7 +150,7 @@ where /// Create a SET0 gadget (constant false). /// /// 1-variable SpinGlass that prefers config 0 (spin -1 in Rust convention). -/// h = [1] (negated from Julia's [-1] to account for different spin convention) +/// h = \[1\] (negated from Julia's \[-1\] to account for different spin convention) pub fn set0_gadget() -> LogicGadget where W: Clone + Default + From, @@ -164,7 +164,7 @@ where /// Create a SET1 gadget (constant true). /// /// 1-variable SpinGlass that prefers config 1 (spin +1 in Rust convention). 
-/// h = [-1] (negated from Julia's [1] to account for different spin convention) +/// h = \[-1\] (negated from Julia's \[1\] to account for different spin convention) pub fn set1_gadget() -> LogicGadget where W: Clone + Default + From, diff --git a/src/rules/graph.rs b/src/rules/graph.rs index cbb15f525..59f6e99bf 100644 --- a/src/rules/graph.rs +++ b/src/rules/graph.rs @@ -33,13 +33,13 @@ pub struct ReductionGraphJson { /// A node in the reduction graph JSON. #[derive(Debug, Clone, Serialize)] pub struct NodeJson { - /// Base problem name (e.g., "IndependentSet"). + /// Base problem name (e.g., "MaximumIndependentSet"). pub name: String, /// Variant attributes as key-value pairs. pub variant: std::collections::BTreeMap, /// Category of the problem (e.g., "graph", "set", "optimization", "satisfiability", "specialized"). pub category: String, - /// Relative rustdoc path (e.g., "models/graph/independent_set"). + /// Relative rustdoc path (e.g., "models/graph/maximum_independent_set"). pub doc_path: String, } @@ -291,18 +291,18 @@ impl ReductionGraph { // Register problem types - multiple concrete types can share a base name register! 
{ // Graph problems - IndependentSet => "IndependentSet", - IndependentSet => "IndependentSet", - VertexCovering => "VertexCovering", - VertexCovering => "VertexCovering", + MaximumIndependentSet => "MaximumIndependentSet", + MaximumIndependentSet => "MaximumIndependentSet", + MinimumVertexCover => "MinimumVertexCover", + MinimumVertexCover => "MinimumVertexCover", MaxCut => "MaxCut", MaxCut => "MaxCut", - Matching => "Matching", - DominatingSet => "DominatingSet", + MaximumMatching => "MaximumMatching", + MinimumDominatingSet => "MinimumDominatingSet", KColoring<3, SimpleGraph, i32> => "KColoring", // Set problems - SetPacking => "SetPacking", - SetCovering => "SetCovering", + MaximumSetPacking => "MaximumSetPacking", + MinimumSetCovering => "MinimumSetCovering", // Optimization problems SpinGlass => "SpinGlass", SpinGlass => "SpinGlass", @@ -691,11 +691,11 @@ impl ReductionGraph { /// Maps name → actual Rust module location (which may differ from the visualization category). fn compute_doc_path(name: &str) -> String { let module = match name { - "IndependentSet" | "MaximalIS" | "VertexCovering" | "DominatingSet" | "KColoring" - | "Matching" | "MaxCut" | "Clique" => "graph", + "MaximumIndependentSet" | "MaximalIS" | "MinimumVertexCover" | "MinimumDominatingSet" | "KColoring" + | "MaximumMatching" | "MaxCut" | "MaximumClique" => "graph", "Satisfiability" | "KSatisfiability" => "satisfiability", "SpinGlass" | "QUBO" | "ILP" => "optimization", - "SetCovering" | "SetPacking" => "set", + "MinimumSetCovering" | "MaximumSetPacking" => "set", _ => "specialized", }; format!("models/{module}/struct.{name}.html") @@ -703,15 +703,16 @@ impl ReductionGraph { /// Categorize a type name into a problem category. 
fn categorize_type(name: &str) -> &'static str { - if name.contains("IndependentSet") + if name.contains("MaximumIndependentSet") || name.contains("VertexCover") || name.contains("MaxCut") || name.contains("Coloring") - || name.contains("DominatingSet") - || name.contains("Matching") + || name.contains("MinimumDominatingSet") + || name.contains("MaximumMatching") + || name.contains("MaximumClique") { "graph" - } else if name.contains("SetPacking") || name.contains("SetCover") { + } else if name.contains("MaximumSetPacking") || name.contains("SetCover") { "set" } else if name.contains("SpinGlass") || name.contains("QUBO") || name.contains("ILP") { "optimization" diff --git a/src/rules/clique_ilp.rs b/src/rules/maximumclique_ilp.rs similarity index 86% rename from src/rules/clique_ilp.rs rename to src/rules/maximumclique_ilp.rs index ad6505f76..84e0e5d83 100644 --- a/src/rules/clique_ilp.rs +++ b/src/rules/maximumclique_ilp.rs @@ -1,19 +1,19 @@ -//! Reduction from Clique to ILP (Integer Linear Programming). +//! Reduction from MaximumClique to ILP (Integer Linear Programming). //! -//! The Clique problem can be formulated as a binary ILP: +//! The MaximumClique problem can be formulated as a binary ILP: //! - Variables: One binary variable per vertex (0 = not selected, 1 = selected) //! - Constraints: x_u + x_v <= 1 for each NON-EDGE (u, v) - if two vertices are not adjacent, //! at most one can be in the clique //! - Objective: Maximize the sum of weights of selected vertices -use crate::models::graph::Clique; +use crate::models::graph::MaximumClique; use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::topology::SimpleGraph; use crate::traits::{ConstraintSatisfactionProblem, Problem}; use crate::types::ProblemSize; -/// Result of reducing Clique to ILP. +/// Result of reducing MaximumClique to ILP. 
/// /// This reduction creates a binary ILP where: /// - Each vertex corresponds to a binary variable @@ -26,14 +26,14 @@ pub struct ReductionCliqueToILP { } impl ReductionResult for ReductionCliqueToILP { - type Source = Clique; + type Source = MaximumClique; type Target = ILP; fn target_problem(&self) -> &ILP { &self.target } - /// Extract solution from ILP back to Clique. + /// Extract solution from ILP back to MaximumClique. /// /// Since the mapping is 1:1 (each vertex maps to one binary variable), /// the solution extraction is simply copying the configuration. @@ -50,7 +50,7 @@ impl ReductionResult for ReductionCliqueToILP { } } -impl ReduceTo for Clique { +impl ReduceTo for MaximumClique { type Result = ReductionCliqueToILP; fn reduce_to(&self) -> Self::Result { @@ -95,5 +95,5 @@ impl ReduceTo for Clique { } #[cfg(test)] -#[path = "../unit_tests/rules/clique_ilp.rs"] +#[path = "../unit_tests/rules/maximumclique_ilp.rs"] mod tests; diff --git a/src/rules/independentset_ilp.rs b/src/rules/maximumindependentset_ilp.rs similarity index 85% rename from src/rules/independentset_ilp.rs rename to src/rules/maximumindependentset_ilp.rs index 137f2094c..d286c870a 100644 --- a/src/rules/independentset_ilp.rs +++ b/src/rules/maximumindependentset_ilp.rs @@ -1,18 +1,18 @@ -//! Reduction from IndependentSet to ILP (Integer Linear Programming). +//! Reduction from MaximumIndependentSet to ILP (Integer Linear Programming). //! //! The Independent Set problem can be formulated as a binary ILP: //! - Variables: One binary variable per vertex (0 = not selected, 1 = selected) //! - Constraints: x_u + x_v <= 1 for each edge (u, v) - at most one endpoint can be selected //! 
- Objective: Maximize the sum of weights of selected vertices -use crate::models::graph::IndependentSet; +use crate::models::graph::MaximumIndependentSet; use crate::topology::SimpleGraph; use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::traits::Problem; use crate::types::ProblemSize; -/// Result of reducing IndependentSet to ILP. +/// Result of reducing MaximumIndependentSet to ILP. /// /// This reduction creates a binary ILP where: /// - Each vertex corresponds to a binary variable @@ -25,14 +25,14 @@ pub struct ReductionISToILP { } impl ReductionResult for ReductionISToILP { - type Source = IndependentSet; + type Source = MaximumIndependentSet; type Target = ILP; fn target_problem(&self) -> &ILP { &self.target } - /// Extract solution from ILP back to IndependentSet. + /// Extract solution from ILP back to MaximumIndependentSet. /// /// Since the mapping is 1:1 (each vertex maps to one binary variable), /// the solution extraction is simply copying the configuration. @@ -49,7 +49,7 @@ impl ReductionResult for ReductionISToILP { } } -impl ReduceTo for IndependentSet { +impl ReduceTo for MaximumIndependentSet { type Result = ReductionISToILP; fn reduce_to(&self) -> Self::Result { @@ -90,5 +90,5 @@ impl ReduceTo for IndependentSet { } #[cfg(test)] -#[path = "../unit_tests/rules/independentset_ilp.rs"] +#[path = "../unit_tests/rules/maximumindependentset_ilp.rs"] mod tests; diff --git a/src/rules/independentset_setpacking.rs b/src/rules/maximumindependentset_maximumsetpacking.rs similarity index 74% rename from src/rules/independentset_setpacking.rs rename to src/rules/maximumindependentset_maximumsetpacking.rs index 0544da77b..0d79d2947 100644 --- a/src/rules/independentset_setpacking.rs +++ b/src/rules/maximumindependentset_maximumsetpacking.rs @@ -1,11 +1,11 @@ -//! Reductions between IndependentSet and SetPacking problems. +//! 
Reductions between MaximumIndependentSet and MaximumSetPacking problems. //! -//! IS → SetPacking: Each vertex becomes a set containing its incident edge indices. -//! SetPacking → IS: Each set becomes a vertex; two vertices are adjacent if their sets overlap. +//! IS → MaximumSetPacking: Each vertex becomes a set containing its incident edge indices. +//! MaximumSetPacking → IS: Each set becomes a vertex; two vertices are adjacent if their sets overlap. -use crate::models::graph::IndependentSet; +use crate::models::graph::MaximumIndependentSet; use crate::topology::SimpleGraph; -use crate::models::set::SetPacking; +use crate::models::set::MaximumSetPacking; use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; @@ -16,10 +16,10 @@ use num_traits::{Num, Zero}; use std::collections::HashSet; use std::ops::AddAssign; -/// Result of reducing IndependentSet to SetPacking. +/// Result of reducing MaximumIndependentSet to MaximumSetPacking. #[derive(Debug, Clone)] pub struct ReductionISToSP { - target: SetPacking, + target: MaximumSetPacking, source_size: ProblemSize, } @@ -27,8 +27,8 @@ impl ReductionResult for ReductionISToSP where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { - type Source = IndependentSet; - type Target = SetPacking; + type Source = MaximumIndependentSet; + type Target = MaximumSetPacking; fn target_problem(&self) -> &Self::Target { &self.target @@ -57,7 +57,7 @@ where ]) } )] -impl ReduceTo> for IndependentSet +impl ReduceTo> for MaximumIndependentSet where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, { @@ -74,7 +74,7 @@ where sets[v].push(edge_idx); } - let target = SetPacking::with_weights(sets, self.weights_ref().clone()); + let target = MaximumSetPacking::with_weights(sets, self.weights_ref().clone()); ReductionISToSP { target, @@ -83,10 +83,10 @@ where } } -/// Result of reducing SetPacking to IndependentSet. 
+/// Result of reducing MaximumSetPacking to MaximumIndependentSet. #[derive(Debug, Clone)] pub struct ReductionSPToIS { - target: IndependentSet, + target: MaximumIndependentSet, source_size: ProblemSize, } @@ -94,8 +94,8 @@ impl ReductionResult for ReductionSPToIS where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { - type Source = SetPacking; - type Target = IndependentSet; + type Source = MaximumSetPacking; + type Target = MaximumIndependentSet; fn target_problem(&self) -> &Self::Target { &self.target @@ -124,7 +124,7 @@ where ]) } )] -impl ReduceTo> for SetPacking +impl ReduceTo> for MaximumSetPacking where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, { @@ -146,7 +146,7 @@ where } } - let target = IndependentSet::with_weights(n, edges, self.weights_ref().clone()); + let target = MaximumIndependentSet::with_weights(n, edges, self.weights_ref().clone()); ReductionSPToIS { target, @@ -156,5 +156,5 @@ where } #[cfg(test)] -#[path = "../unit_tests/rules/independentset_setpacking.rs"] +#[path = "../unit_tests/rules/maximumindependentset_maximumsetpacking.rs"] mod tests; diff --git a/src/rules/independentset_qubo.rs b/src/rules/maximumindependentset_qubo.rs similarity index 84% rename from src/rules/independentset_qubo.rs rename to src/rules/maximumindependentset_qubo.rs index ea6e3293d..1660e96ef 100644 --- a/src/rules/independentset_qubo.rs +++ b/src/rules/maximumindependentset_qubo.rs @@ -1,11 +1,11 @@ -//! Reduction from IndependentSet to QUBO. +//! Reduction from MaximumIndependentSet to QUBO. //! //! Maximize Σ w_i·x_i s.t. x_i·x_j = 0 for (i,j) ∈ E //! = Minimize -Σ w_i·x_i + P·Σ_{(i,j)∈E} x_i·x_j //! //! Q[i][i] = -w_i, Q[i][j] = P for edges. P = 1 + Σ w_i. 
-use crate::models::graph::IndependentSet; +use crate::models::graph::MaximumIndependentSet; use crate::models::optimization::QUBO; use crate::poly; use crate::reduction; @@ -15,7 +15,7 @@ use crate::topology::SimpleGraph; use crate::traits::Problem; use crate::types::ProblemSize; -/// Result of reducing IndependentSet to QUBO. +/// Result of reducing MaximumIndependentSet to QUBO. #[derive(Debug, Clone)] pub struct ReductionISToQUBO { target: QUBO, @@ -23,7 +23,7 @@ pub struct ReductionISToQUBO { } impl ReductionResult for ReductionISToQUBO { - type Source = IndependentSet; + type Source = MaximumIndependentSet; type Target = QUBO; fn target_problem(&self) -> &Self::Target { @@ -47,7 +47,7 @@ impl ReductionResult for ReductionISToQUBO { source_graph = "SimpleGraph", overhead = { ReductionOverhead::new(vec![("num_vars", poly!(num_vertices))]) } )] -impl ReduceTo> for IndependentSet { +impl ReduceTo> for MaximumIndependentSet { type Result = ReductionISToQUBO; fn reduce_to(&self) -> Self::Result { @@ -74,5 +74,5 @@ impl ReduceTo> for IndependentSet { } #[cfg(test)] -#[path = "../unit_tests/rules/independentset_qubo.rs"] +#[path = "../unit_tests/rules/maximumindependentset_qubo.rs"] mod tests; diff --git a/src/rules/matching_ilp.rs b/src/rules/maximummatching_ilp.rs similarity index 86% rename from src/rules/matching_ilp.rs rename to src/rules/maximummatching_ilp.rs index fdcab58b6..011892135 100644 --- a/src/rules/matching_ilp.rs +++ b/src/rules/maximummatching_ilp.rs @@ -1,19 +1,19 @@ -//! Reduction from Matching to ILP (Integer Linear Programming). +//! Reduction from MaximumMatching to ILP (Integer Linear Programming). //! -//! The Maximum Matching problem can be formulated as a binary ILP: +//! The Maximum Matching problem can be formulated as a binary ILP: //! - Variables: One binary variable per edge (0 = not selected, 1 = selected) //! - Constraints: For each vertex v, sum of incident edge variables <= 1 //! 
(at most one incident edge can be selected) //! - Objective: Maximize the sum of weights of selected edges -use crate::models::graph::Matching; +use crate::models::graph::MaximumMatching; use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::topology::SimpleGraph; use crate::traits::{ConstraintSatisfactionProblem, Problem}; use crate::types::ProblemSize; -/// Result of reducing Matching to ILP. +/// Result of reducing MaximumMatching to ILP. /// /// This reduction creates a binary ILP where: /// - Each edge corresponds to a binary variable @@ -26,14 +26,14 @@ pub struct ReductionMatchingToILP { } impl ReductionResult for ReductionMatchingToILP { - type Source = Matching; + type Source = MaximumMatching; type Target = ILP; fn target_problem(&self) -> &ILP { &self.target } - /// Extract solution from ILP back to Matching. + /// Extract solution from ILP back to MaximumMatching. /// /// Since the mapping is 1:1 (each edge maps to one binary variable), /// the solution extraction is simply copying the configuration. @@ -50,7 +50,7 @@ impl ReductionResult for ReductionMatchingToILP { } } -impl ReduceTo for Matching { +impl ReduceTo for MaximumMatching { type Result = ReductionMatchingToILP; fn reduce_to(&self) -> Self::Result { @@ -95,5 +95,5 @@ impl ReduceTo for Matching { } #[cfg(test)] -#[path = "../unit_tests/rules/matching_ilp.rs"] +#[path = "../unit_tests/rules/maximummatching_ilp.rs"] mod tests; diff --git a/src/rules/matching_setpacking.rs b/src/rules/maximummatching_maximumsetpacking.rs similarity index 72% rename from src/rules/matching_setpacking.rs rename to src/rules/maximummatching_maximumsetpacking.rs index 8d7cbd09a..99e16aae7 100644 --- a/src/rules/matching_setpacking.rs +++ b/src/rules/maximummatching_maximumsetpacking.rs @@ -1,10 +1,10 @@ -//! Reductions between Matching and SetPacking problems. +//! 
Reductions between MaximumMatching and MaximumSetPacking problems. //! -//! Matching -> SetPacking: Each edge becomes a set containing its two endpoint vertices. +//! MaximumMatching -> MaximumSetPacking: Each edge becomes a set containing its two endpoint vertices. //! For edge (u, v), create set = {u, v}. Weights are preserved from edges. -use crate::models::graph::Matching; -use crate::models::set::SetPacking; +use crate::models::graph::MaximumMatching; +use crate::models::set::MaximumSetPacking; use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; @@ -15,10 +15,10 @@ use crate::types::ProblemSize; use num_traits::{Num, Zero}; use std::ops::AddAssign; -/// Result of reducing Matching to SetPacking. +/// Result of reducing MaximumMatching to MaximumSetPacking. #[derive(Debug, Clone)] pub struct ReductionMatchingToSP { - target: SetPacking, + target: MaximumSetPacking, source_size: ProblemSize, _marker: std::marker::PhantomData, } @@ -28,14 +28,14 @@ where G: Graph, W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { - type Source = Matching; - type Target = SetPacking; + type Source = MaximumMatching; + type Target = MaximumSetPacking; fn target_problem(&self) -> &Self::Target { &self.target } - /// Solutions map directly: edge i in Matching = set i in SetPacking. + /// Solutions map directly: edge i in MaximumMatching = set i in MaximumSetPacking. 
fn extract_solution(&self, target_solution: &[usize]) -> Vec { target_solution.to_vec() } @@ -58,7 +58,7 @@ where ]) } )] -impl ReduceTo> for Matching +impl ReduceTo> for MaximumMatching where G: Graph, W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, @@ -74,7 +74,7 @@ where // Preserve weights from edges let weights = self.weights(); - let target = SetPacking::with_weights(sets, weights); + let target = MaximumSetPacking::with_weights(sets, weights); ReductionMatchingToSP { target, @@ -85,5 +85,5 @@ where } #[cfg(test)] -#[path = "../unit_tests/rules/matching_setpacking.rs"] +#[path = "../unit_tests/rules/maximummatching_maximumsetpacking.rs"] mod tests; diff --git a/src/rules/setpacking_ilp.rs b/src/rules/maximumsetpacking_ilp.rs similarity index 87% rename from src/rules/setpacking_ilp.rs rename to src/rules/maximumsetpacking_ilp.rs index 450ce615b..c6ddceddf 100644 --- a/src/rules/setpacking_ilp.rs +++ b/src/rules/maximumsetpacking_ilp.rs @@ -1,4 +1,4 @@ -//! Reduction from SetPacking to ILP (Integer Linear Programming). +//! Reduction from MaximumSetPacking to ILP (Integer Linear Programming). //! //! The Set Packing problem can be formulated as a binary ILP: //! - Variables: One binary variable per set (0 = not selected, 1 = selected) @@ -6,12 +6,12 @@ //! - Objective: Maximize the sum of weights of selected sets use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; -use crate::models::set::SetPacking; +use crate::models::set::MaximumSetPacking; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::traits::Problem; use crate::types::ProblemSize; -/// Result of reducing SetPacking to ILP. +/// Result of reducing MaximumSetPacking to ILP. 
/// /// This reduction creates a binary ILP where: /// - Each set corresponds to a binary variable @@ -24,14 +24,14 @@ pub struct ReductionSPToILP { } impl ReductionResult for ReductionSPToILP { - type Source = SetPacking; + type Source = MaximumSetPacking; type Target = ILP; fn target_problem(&self) -> &ILP { &self.target } - /// Extract solution from ILP back to SetPacking. + /// Extract solution from ILP back to MaximumSetPacking. /// /// Since the mapping is 1:1 (each set maps to one binary variable), /// the solution extraction is simply copying the configuration. @@ -48,7 +48,7 @@ impl ReductionResult for ReductionSPToILP { } } -impl ReduceTo for SetPacking { +impl ReduceTo for MaximumSetPacking { type Result = ReductionSPToILP; fn reduce_to(&self) -> Self::Result { @@ -89,5 +89,5 @@ impl ReduceTo for SetPacking { } #[cfg(test)] -#[path = "../unit_tests/rules/setpacking_ilp.rs"] +#[path = "../unit_tests/rules/maximumsetpacking_ilp.rs"] mod tests; diff --git a/src/rules/setpacking_qubo.rs b/src/rules/maximumsetpacking_qubo.rs similarity index 84% rename from src/rules/setpacking_qubo.rs rename to src/rules/maximumsetpacking_qubo.rs index e3924e5c6..e74b9d5c9 100644 --- a/src/rules/setpacking_qubo.rs +++ b/src/rules/maximumsetpacking_qubo.rs @@ -1,13 +1,13 @@ -//! Reduction from SetPacking to QUBO. +//! Reduction from MaximumSetPacking to QUBO. //! -//! Same structure as IndependentSet on the intersection graph: +//! Same structure as MaximumIndependentSet on the intersection graph: //! Maximize Σ w_i·x_i s.t. x_i·x_j = 0 for overlapping pairs (i,j). //! = Minimize -Σ w_i·x_i + P·Σ_{overlapping (i,j)} x_i·x_j //! //! Q[i][i] = -w_i, Q[i][j] = P for overlapping pairs. P = 1 + Σ w_i. 
use crate::models::optimization::QUBO; -use crate::models::set::SetPacking; +use crate::models::set::MaximumSetPacking; use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; @@ -17,7 +17,7 @@ use crate::types::{NumericWeight, ProblemSize}; use std::marker::PhantomData; -/// Result of reducing SetPacking to QUBO. +/// Result of reducing MaximumSetPacking to QUBO. #[derive(Debug, Clone)] pub struct ReductionSPToQUBO { target: QUBO, @@ -26,7 +26,7 @@ pub struct ReductionSPToQUBO { } impl> ReductionResult for ReductionSPToQUBO { - type Source = SetPacking; + type Source = MaximumSetPacking; type Target = QUBO; fn target_problem(&self) -> &Self::Target { @@ -50,7 +50,7 @@ impl> ReductionResult for ReductionSPToQUBO { source_weighted = true, overhead = { ReductionOverhead::new(vec![("num_vars", poly!(num_sets))]) } )] -impl> ReduceTo> for SetPacking { +impl> ReduceTo> for MaximumSetPacking { type Result = ReductionSPToQUBO; fn reduce_to(&self) -> Self::Result { @@ -82,5 +82,5 @@ impl> ReduceTo> for SetPacking { } #[cfg(test)] -#[path = "../unit_tests/rules/setpacking_qubo.rs"] +#[path = "../unit_tests/rules/maximumsetpacking_qubo.rs"] mod tests; diff --git a/src/rules/dominatingset_ilp.rs b/src/rules/minimumdominatingset_ilp.rs similarity index 87% rename from src/rules/dominatingset_ilp.rs rename to src/rules/minimumdominatingset_ilp.rs index 62acf5cb6..aa1417f72 100644 --- a/src/rules/dominatingset_ilp.rs +++ b/src/rules/minimumdominatingset_ilp.rs @@ -1,4 +1,4 @@ -//! Reduction from DominatingSet to ILP (Integer Linear Programming). +//! Reduction from MinimumDominatingSet to ILP (Integer Linear Programming). //! //! The Dominating Set problem can be formulated as a binary ILP: //! - Variables: One binary variable per vertex (0 = not selected, 1 = selected) @@ -6,14 +6,14 @@ //! (v or at least one of its neighbors must be selected) //! 
- Objective: Minimize the sum of weights of selected vertices -use crate::models::graph::DominatingSet; +use crate::models::graph::MinimumDominatingSet; use crate::topology::SimpleGraph; use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::traits::{ConstraintSatisfactionProblem, Problem}; use crate::types::ProblemSize; -/// Result of reducing DominatingSet to ILP. +/// Result of reducing MinimumDominatingSet to ILP. /// /// This reduction creates a binary ILP where: /// - Each vertex corresponds to a binary variable @@ -27,14 +27,14 @@ pub struct ReductionDSToILP { } impl ReductionResult for ReductionDSToILP { - type Source = DominatingSet; + type Source = MinimumDominatingSet; type Target = ILP; fn target_problem(&self) -> &ILP { &self.target } - /// Extract solution from ILP back to DominatingSet. + /// Extract solution from ILP back to MinimumDominatingSet. /// /// Since the mapping is 1:1 (each vertex maps to one binary variable), /// the solution extraction is simply copying the configuration. @@ -51,7 +51,7 @@ impl ReductionResult for ReductionDSToILP { } } -impl ReduceTo for DominatingSet { +impl ReduceTo for MinimumDominatingSet { type Result = ReductionDSToILP; fn reduce_to(&self) -> Self::Result { @@ -97,5 +97,5 @@ impl ReduceTo for DominatingSet { } #[cfg(test)] -#[path = "../unit_tests/rules/dominatingset_ilp.rs"] +#[path = "../unit_tests/rules/minimumdominatingset_ilp.rs"] mod tests; diff --git a/src/rules/setcovering_ilp.rs b/src/rules/minimumsetcovering_ilp.rs similarity index 88% rename from src/rules/setcovering_ilp.rs rename to src/rules/minimumsetcovering_ilp.rs index 1e76cbcff..78eecc8ff 100644 --- a/src/rules/setcovering_ilp.rs +++ b/src/rules/minimumsetcovering_ilp.rs @@ -1,4 +1,4 @@ -//! Reduction from SetCovering to ILP (Integer Linear Programming). +//! Reduction from MinimumSetCovering to ILP (Integer Linear Programming). //! //! 
The Set Covering problem can be formulated as a binary ILP: //! - Variables: One binary variable per set (0 = not selected, 1 = selected) @@ -6,12 +6,12 @@ //! - Objective: Minimize the sum of weights of selected sets use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; -use crate::models::set::SetCovering; +use crate::models::set::MinimumSetCovering; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::traits::{ConstraintSatisfactionProblem, Problem}; use crate::types::ProblemSize; -/// Result of reducing SetCovering to ILP. +/// Result of reducing MinimumSetCovering to ILP. /// /// This reduction creates a binary ILP where: /// - Each set corresponds to a binary variable @@ -24,14 +24,14 @@ pub struct ReductionSCToILP { } impl ReductionResult for ReductionSCToILP { - type Source = SetCovering; + type Source = MinimumSetCovering; type Target = ILP; fn target_problem(&self) -> &ILP { &self.target } - /// Extract solution from ILP back to SetCovering. + /// Extract solution from ILP back to MinimumSetCovering. /// /// Since the mapping is 1:1 (each set maps to one binary variable), /// the solution extraction is simply copying the configuration. @@ -48,7 +48,7 @@ impl ReductionResult for ReductionSCToILP { } } -impl ReduceTo for SetCovering { +impl ReduceTo for MinimumSetCovering { type Result = ReductionSCToILP; fn reduce_to(&self) -> Self::Result { @@ -98,5 +98,5 @@ impl ReduceTo for SetCovering { } #[cfg(test)] -#[path = "../unit_tests/rules/setcovering_ilp.rs"] +#[path = "../unit_tests/rules/minimumsetcovering_ilp.rs"] mod tests; diff --git a/src/rules/vertexcovering_ilp.rs b/src/rules/minimumvertexcover_ilp.rs similarity index 86% rename from src/rules/vertexcovering_ilp.rs rename to src/rules/minimumvertexcover_ilp.rs index 733e78945..79134273c 100644 --- a/src/rules/vertexcovering_ilp.rs +++ b/src/rules/minimumvertexcover_ilp.rs @@ -1,18 +1,18 @@ -//! 
Reduction from VertexCovering to ILP (Integer Linear Programming). +//! Reduction from MinimumVertexCover to ILP (Integer Linear Programming). //! //! The Vertex Cover problem can be formulated as a binary ILP: //! - Variables: One binary variable per vertex (0 = not selected, 1 = selected) //! - Constraints: x_u + x_v >= 1 for each edge (u, v) - at least one endpoint must be selected //! - Objective: Minimize the sum of weights of selected vertices -use crate::models::graph::VertexCovering; +use crate::models::graph::MinimumVertexCover; use crate::topology::SimpleGraph; use crate::models::optimization::{LinearConstraint, ObjectiveSense, VarBounds, ILP}; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::traits::Problem; use crate::types::ProblemSize; -/// Result of reducing VertexCovering to ILP. +/// Result of reducing MinimumVertexCover to ILP. /// /// This reduction creates a binary ILP where: /// - Each vertex corresponds to a binary variable @@ -25,14 +25,14 @@ pub struct ReductionVCToILP { } impl ReductionResult for ReductionVCToILP { - type Source = VertexCovering; + type Source = MinimumVertexCover; type Target = ILP; fn target_problem(&self) -> &ILP { &self.target } - /// Extract solution from ILP back to VertexCovering. + /// Extract solution from ILP back to MinimumVertexCover. /// /// Since the mapping is 1:1 (each vertex maps to one binary variable), /// the solution extraction is simply copying the configuration. 
@@ -49,7 +49,7 @@ impl ReductionResult for ReductionVCToILP { } } -impl ReduceTo for VertexCovering { +impl ReduceTo for MinimumVertexCover { type Result = ReductionVCToILP; fn reduce_to(&self) -> Self::Result { @@ -90,5 +90,5 @@ impl ReduceTo for VertexCovering { } #[cfg(test)] -#[path = "../unit_tests/rules/vertexcovering_ilp.rs"] +#[path = "../unit_tests/rules/minimumvertexcover_ilp.rs"] mod tests; diff --git a/src/rules/vertexcovering_independentset.rs b/src/rules/minimumvertexcover_maximumindependentset.rs similarity index 75% rename from src/rules/vertexcovering_independentset.rs rename to src/rules/minimumvertexcover_maximumindependentset.rs index 496aac78e..883d2bc98 100644 --- a/src/rules/vertexcovering_independentset.rs +++ b/src/rules/minimumvertexcover_maximumindependentset.rs @@ -1,8 +1,8 @@ -//! Reductions between IndependentSet and VertexCovering problems. +//! Reductions between MaximumIndependentSet and MinimumVertexCover problems. //! //! These problems are complements: a set S is an independent set iff V\S is a vertex cover. -use crate::models::graph::{IndependentSet, VertexCovering}; +use crate::models::graph::{MaximumIndependentSet, MinimumVertexCover}; use crate::topology::SimpleGraph; use crate::poly; use crate::reduction; @@ -13,10 +13,10 @@ use crate::types::ProblemSize; use num_traits::{Num, Zero}; use std::ops::AddAssign; -/// Result of reducing IndependentSet to VertexCovering. +/// Result of reducing MaximumIndependentSet to MinimumVertexCover. 
#[derive(Debug, Clone)] pub struct ReductionISToVC { - target: VertexCovering, + target: MinimumVertexCover, source_size: ProblemSize, } @@ -24,8 +24,8 @@ impl ReductionResult for ReductionISToVC where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { - type Source = IndependentSet; - type Target = VertexCovering; + type Source = MaximumIndependentSet; + type Target = MinimumVertexCover; fn target_problem(&self) -> &Self::Target { &self.target @@ -56,14 +56,14 @@ where ]) } )] -impl ReduceTo> for IndependentSet +impl ReduceTo> for MaximumIndependentSet where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, { type Result = ReductionISToVC; fn reduce_to(&self) -> Self::Result { - let target = VertexCovering::with_weights( + let target = MinimumVertexCover::with_weights( self.num_vertices(), self.edges(), self.weights_ref().clone(), @@ -75,10 +75,10 @@ where } } -/// Result of reducing VertexCovering to IndependentSet. +/// Result of reducing MinimumVertexCover to MaximumIndependentSet. 
#[derive(Debug, Clone)] pub struct ReductionVCToIS { - target: IndependentSet, + target: MaximumIndependentSet, source_size: ProblemSize, } @@ -86,8 +86,8 @@ impl ReductionResult for ReductionVCToIS where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { - type Source = VertexCovering; - type Target = IndependentSet; + type Source = MinimumVertexCover; + type Target = MaximumIndependentSet; fn target_problem(&self) -> &Self::Target { &self.target @@ -117,14 +117,14 @@ where ]) } )] -impl ReduceTo> for VertexCovering +impl ReduceTo> for MinimumVertexCover where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, { type Result = ReductionVCToIS; fn reduce_to(&self) -> Self::Result { - let target = IndependentSet::with_weights( + let target = MaximumIndependentSet::with_weights( self.num_vertices(), self.edges(), self.weights_ref().clone(), @@ -137,5 +137,5 @@ where } #[cfg(test)] -#[path = "../unit_tests/rules/vertexcovering_independentset.rs"] +#[path = "../unit_tests/rules/minimumvertexcover_maximumindependentset.rs"] mod tests; diff --git a/src/rules/vertexcovering_setcovering.rs b/src/rules/minimumvertexcover_minimumsetcovering.rs similarity index 79% rename from src/rules/vertexcovering_setcovering.rs rename to src/rules/minimumvertexcover_minimumsetcovering.rs index b581d54e1..b799e512a 100644 --- a/src/rules/vertexcovering_setcovering.rs +++ b/src/rules/minimumvertexcover_minimumsetcovering.rs @@ -1,11 +1,11 @@ -//! Reduction from VertexCovering to SetCovering. +//! Reduction from MinimumVertexCover to MinimumSetCovering. //! //! Each vertex becomes a set containing the edges it covers. //! The universe is the set of all edges (labeled 0 to num_edges-1). 
-use crate::models::graph::VertexCovering; +use crate::models::graph::MinimumVertexCover; use crate::topology::SimpleGraph; -use crate::models::set::SetCovering; +use crate::models::set::MinimumSetCovering; use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; @@ -15,10 +15,10 @@ use crate::types::ProblemSize; use num_traits::{Num, Zero}; use std::ops::AddAssign; -/// Result of reducing VertexCovering to SetCovering. +/// Result of reducing MinimumVertexCover to MinimumSetCovering. #[derive(Debug, Clone)] pub struct ReductionVCToSC { - target: SetCovering, + target: MinimumSetCovering, source_size: ProblemSize, } @@ -26,8 +26,8 @@ impl ReductionResult for ReductionVCToSC where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { - type Source = VertexCovering; - type Target = SetCovering; + type Source = MinimumVertexCover; + type Target = MinimumSetCovering; fn target_problem(&self) -> &Self::Target { &self.target @@ -57,7 +57,7 @@ where ]) } )] -impl ReduceTo> for VertexCovering +impl ReduceTo> for MinimumVertexCover where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, { @@ -81,7 +81,7 @@ where }) .collect(); - let target = SetCovering::with_weights(num_edges, sets, self.weights_ref().clone()); + let target = MinimumSetCovering::with_weights(num_edges, sets, self.weights_ref().clone()); ReductionVCToSC { target, @@ -91,5 +91,5 @@ where } #[cfg(test)] -#[path = "../unit_tests/rules/vertexcovering_setcovering.rs"] +#[path = "../unit_tests/rules/minimumvertexcover_minimumsetcovering.rs"] mod tests; diff --git a/src/rules/vertexcovering_qubo.rs b/src/rules/minimumvertexcover_qubo.rs similarity index 87% rename from src/rules/vertexcovering_qubo.rs rename to src/rules/minimumvertexcover_qubo.rs index 588246804..0cb9b1fb4 100644 --- a/src/rules/vertexcovering_qubo.rs +++ b/src/rules/minimumvertexcover_qubo.rs @@ -1,4 +1,4 @@ -//! Reduction from VertexCovering to QUBO. +//! 
Reduction from MinimumVertexCover to QUBO. //! //! Minimize Σ w_i·x_i s.t. x_i + x_j ≥ 1 for (i,j) ∈ E //! = Minimize Σ w_i·x_i + P·Σ_{(i,j)∈E} (1-x_i)(1-x_j) @@ -6,7 +6,7 @@ //! Expanding: Q[i][i] = w_i - P·deg(i), Q[i][j] = P for edges. //! P = 1 + Σ w_i. -use crate::models::graph::VertexCovering; +use crate::models::graph::MinimumVertexCover; use crate::models::optimization::QUBO; use crate::poly; use crate::reduction; @@ -16,7 +16,7 @@ use crate::topology::SimpleGraph; use crate::traits::Problem; use crate::types::ProblemSize; -/// Result of reducing VertexCovering to QUBO. +/// Result of reducing MinimumVertexCover to QUBO. #[derive(Debug, Clone)] pub struct ReductionVCToQUBO { target: QUBO, @@ -24,7 +24,7 @@ pub struct ReductionVCToQUBO { } impl ReductionResult for ReductionVCToQUBO { - type Source = VertexCovering; + type Source = MinimumVertexCover; type Target = QUBO; fn target_problem(&self) -> &Self::Target { @@ -48,7 +48,7 @@ impl ReductionResult for ReductionVCToQUBO { source_graph = "SimpleGraph", overhead = { ReductionOverhead::new(vec![("num_vars", poly!(num_vertices))]) } )] -impl ReduceTo> for VertexCovering { +impl ReduceTo> for MinimumVertexCover { type Result = ReductionVCToQUBO; fn reduce_to(&self) -> Self::Result { @@ -86,5 +86,5 @@ impl ReduceTo> for VertexCovering { } #[cfg(test)] -#[path = "../unit_tests/rules/vertexcovering_qubo.rs"] +#[path = "../unit_tests/rules/minimumvertexcover_qubo.rs"] mod tests; diff --git a/src/rules/mod.rs b/src/rules/mod.rs index 137a7c2e9..19662875d 100644 --- a/src/rules/mod.rs +++ b/src/rules/mod.rs @@ -12,44 +12,44 @@ mod circuit_spinglass; mod coloring_qubo; mod factoring_circuit; mod graph; -mod independentset_qubo; -mod independentset_setpacking; +mod maximumindependentset_qubo; +mod maximumindependentset_maximumsetpacking; mod ksatisfiability_qubo; -mod matching_setpacking; +mod maximummatching_maximumsetpacking; mod sat_coloring; -mod sat_dominatingset; -mod sat_independentset; +mod 
sat_minimumdominatingset; +mod sat_maximumindependentset; mod sat_ksat; -mod setpacking_qubo; +mod maximumsetpacking_qubo; mod spinglass_maxcut; mod spinglass_qubo; mod traits; -mod vertexcovering_independentset; -mod vertexcovering_qubo; -mod vertexcovering_setcovering; +mod minimumvertexcover_maximumindependentset; +mod minimumvertexcover_qubo; +mod minimumvertexcover_minimumsetcovering; pub mod unitdiskmapping; #[cfg(feature = "ilp")] -mod clique_ilp; +mod maximumclique_ilp; #[cfg(feature = "ilp")] mod ilp_qubo; #[cfg(feature = "ilp")] mod coloring_ilp; #[cfg(feature = "ilp")] -mod dominatingset_ilp; +mod minimumdominatingset_ilp; #[cfg(feature = "ilp")] mod factoring_ilp; #[cfg(feature = "ilp")] -mod independentset_ilp; +mod maximumindependentset_ilp; #[cfg(feature = "ilp")] -mod matching_ilp; +mod maximummatching_ilp; #[cfg(feature = "ilp")] -mod setcovering_ilp; +mod minimumsetcovering_ilp; #[cfg(feature = "ilp")] -mod setpacking_ilp; +mod maximumsetpacking_ilp; #[cfg(feature = "ilp")] -mod vertexcovering_ilp; +mod minimumvertexcover_ilp; pub use circuit_spinglass::{ and_gadget, not_gadget, or_gadget, set0_gadget, set1_gadget, xor_gadget, LogicGadget, @@ -60,39 +60,39 @@ pub use graph::{ EdgeJson, NodeJson, ReductionEdge, ReductionGraph, ReductionGraphJson, ReductionPath, }; pub use coloring_qubo::ReductionKColoringToQUBO; -pub use independentset_qubo::ReductionISToQUBO; -pub use independentset_setpacking::{ReductionISToSP, ReductionSPToIS}; +pub use maximumindependentset_qubo::ReductionISToQUBO; +pub use maximumindependentset_maximumsetpacking::{ReductionISToSP, ReductionSPToIS}; pub use ksatisfiability_qubo::ReductionKSatToQUBO; -pub use matching_setpacking::ReductionMatchingToSP; +pub use maximummatching_maximumsetpacking::ReductionMatchingToSP; pub use sat_coloring::ReductionSATToColoring; -pub use setpacking_qubo::ReductionSPToQUBO; -pub use sat_dominatingset::ReductionSATToDS; -pub use sat_independentset::{BoolVar, ReductionSATToIS}; +pub use 
maximumsetpacking_qubo::ReductionSPToQUBO; +pub use sat_minimumdominatingset::ReductionSATToDS; +pub use sat_maximumindependentset::{BoolVar, ReductionSATToIS}; pub use sat_ksat::{ReductionKSATToSAT, ReductionSATToKSAT}; pub use spinglass_maxcut::{ReductionMaxCutToSG, ReductionSGToMaxCut}; pub use spinglass_qubo::{ReductionQUBOToSG, ReductionSGToQUBO}; pub use traits::{ReduceTo, ReductionResult}; -pub use vertexcovering_independentset::{ReductionISToVC, ReductionVCToIS}; -pub use vertexcovering_qubo::ReductionVCToQUBO; -pub use vertexcovering_setcovering::ReductionVCToSC; +pub use minimumvertexcover_maximumindependentset::{ReductionISToVC, ReductionVCToIS}; +pub use minimumvertexcover_qubo::ReductionVCToQUBO; +pub use minimumvertexcover_minimumsetcovering::ReductionVCToSC; #[cfg(feature = "ilp")] -pub use clique_ilp::ReductionCliqueToILP; +pub use maximumclique_ilp::ReductionCliqueToILP; #[cfg(feature = "ilp")] pub use coloring_ilp::{ReductionColoringToILP, ReductionKColoringToILP}; #[cfg(feature = "ilp")] -pub use dominatingset_ilp::ReductionDSToILP; +pub use minimumdominatingset_ilp::ReductionDSToILP; #[cfg(feature = "ilp")] pub use ilp_qubo::ReductionILPToQUBO; #[cfg(feature = "ilp")] pub use factoring_ilp::ReductionFactoringToILP; #[cfg(feature = "ilp")] -pub use independentset_ilp::ReductionISToILP; +pub use maximumindependentset_ilp::ReductionISToILP; #[cfg(feature = "ilp")] -pub use matching_ilp::ReductionMatchingToILP; +pub use maximummatching_ilp::ReductionMatchingToILP; #[cfg(feature = "ilp")] -pub use setcovering_ilp::ReductionSCToILP; +pub use minimumsetcovering_ilp::ReductionSCToILP; #[cfg(feature = "ilp")] -pub use setpacking_ilp::ReductionSPToILP; +pub use maximumsetpacking_ilp::ReductionSPToILP; #[cfg(feature = "ilp")] -pub use vertexcovering_ilp::ReductionVCToILP; +pub use minimumvertexcover_ilp::ReductionVCToILP; diff --git a/src/rules/registry.rs b/src/rules/registry.rs index 8e10debcf..5f1bfcc2a 100644 --- a/src/rules/registry.rs +++ 
b/src/rules/registry.rs @@ -35,9 +35,9 @@ impl ReductionOverhead { /// A registered reduction entry for static inventory registration. /// Uses function pointer to lazily create the overhead (avoids static allocation issues). pub struct ReductionEntry { - /// Base name of source problem (e.g., "IndependentSet"). + /// Base name of source problem (e.g., "MaximumIndependentSet"). pub source_name: &'static str, - /// Base name of target problem (e.g., "VertexCovering"). + /// Base name of target problem (e.g., "MinimumVertexCover"). pub target_name: &'static str, /// Variant attributes for source problem as key-value pairs. /// Common keys: "graph" (graph type), "weight" (weight type). diff --git a/src/rules/sat_coloring.rs b/src/rules/sat_coloring.rs index 9865f0189..dd83b6a93 100644 --- a/src/rules/sat_coloring.rs +++ b/src/rules/sat_coloring.rs @@ -14,7 +14,7 @@ use crate::models::satisfiability::Satisfiability; use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; -use crate::rules::sat_independentset::BoolVar; +use crate::rules::sat_maximumindependentset::BoolVar; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::traits::Problem; use crate::types::ProblemSize; diff --git a/src/rules/sat_independentset.rs b/src/rules/sat_maximumindependentset.rs similarity index 89% rename from src/rules/sat_independentset.rs rename to src/rules/sat_maximumindependentset.rs index 30dee06c6..c9c7ef91a 100644 --- a/src/rules/sat_independentset.rs +++ b/src/rules/sat_maximumindependentset.rs @@ -1,4 +1,4 @@ -//! Reduction from Satisfiability (SAT) to IndependentSet. +//! Reduction from Satisfiability (SAT) to MaximumIndependentSet. //! //! The reduction creates one vertex for each literal occurrence in each clause. //! Edges are added: @@ -8,7 +8,7 @@ //! A satisfying assignment corresponds to an independent set of size = num_clauses, //! where we pick exactly one literal from each clause. 
-use crate::models::graph::IndependentSet; +use crate::models::graph::MaximumIndependentSet; use crate::topology::SimpleGraph; use crate::models::satisfiability::Satisfiability; use crate::poly; @@ -49,17 +49,17 @@ impl BoolVar { } } -/// Result of reducing Satisfiability to IndependentSet. +/// Result of reducing Satisfiability to MaximumIndependentSet. /// /// This struct contains: -/// - The target IndependentSet problem +/// - The target MaximumIndependentSet problem /// - A mapping from vertex indices to literals /// - The list of source variable indices /// - The number of clauses in the original SAT problem #[derive(Debug, Clone)] pub struct ReductionSATToIS { - /// The target IndependentSet problem. - target: IndependentSet, + /// The target MaximumIndependentSet problem. + target: MaximumIndependentSet, /// Mapping from vertex index to the literal it represents. literals: Vec, /// The number of variables in the source SAT problem. @@ -75,13 +75,13 @@ where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { type Source = Satisfiability; - type Target = IndependentSet; + type Target = MaximumIndependentSet; fn target_problem(&self) -> &Self::Target { &self.target } - /// Extract a SAT solution from an IndependentSet solution. + /// Extract a SAT solution from an MaximumIndependentSet solution. /// /// For each selected vertex (representing a literal), we set the corresponding /// variable to make that literal true. 
Variables not covered by any selected @@ -135,7 +135,7 @@ impl ReductionSATToIS { ]) } )] -impl ReduceTo> for Satisfiability +impl ReduceTo> for Satisfiability where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, { @@ -176,7 +176,7 @@ where } } - let target = IndependentSet::new(vertex_count, edges); + let target = MaximumIndependentSet::new(vertex_count, edges); ReductionSATToIS { target, @@ -189,5 +189,5 @@ where } #[cfg(test)] -#[path = "../unit_tests/rules/sat_independentset.rs"] +#[path = "../unit_tests/rules/sat_maximumindependentset.rs"] mod tests; diff --git a/src/rules/sat_dominatingset.rs b/src/rules/sat_minimumdominatingset.rs similarity index 90% rename from src/rules/sat_dominatingset.rs rename to src/rules/sat_minimumdominatingset.rs index d8d5445d1..a39b974b6 100644 --- a/src/rules/sat_dominatingset.rs +++ b/src/rules/sat_minimumdominatingset.rs @@ -1,4 +1,4 @@ -//! Reduction from Satisfiability (SAT) to DominatingSet. +//! Reduction from Satisfiability (SAT) to MinimumDominatingSet. //! //! The reduction follows this construction: //! 1. For each variable x_i, create a "variable gadget" with 3 vertices: @@ -14,29 +14,29 @@ //! - Selecting the negative literal vertex means the variable is false //! - Selecting the dummy vertex means the variable can be either (unused in any clause) -use crate::models::graph::DominatingSet; +use crate::models::graph::MinimumDominatingSet; use crate::models::satisfiability::Satisfiability; use crate::topology::SimpleGraph; use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; -use crate::rules::sat_independentset::BoolVar; +use crate::rules::sat_maximumindependentset::BoolVar; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::traits::Problem; use crate::types::ProblemSize; use num_traits::{Num, Zero}; use std::ops::AddAssign; -/// Result of reducing Satisfiability to DominatingSet. +/// Result of reducing Satisfiability to MinimumDominatingSet. 
/// /// This struct contains: -/// - The target DominatingSet problem +/// - The target MinimumDominatingSet problem /// - The number of literals (variables) in the source SAT problem /// - The number of clauses in the source SAT problem #[derive(Debug, Clone)] pub struct ReductionSATToDS { - /// The target DominatingSet problem. - target: DominatingSet, + /// The target MinimumDominatingSet problem. + target: MinimumDominatingSet, /// The number of variables in the source SAT problem. num_literals: usize, /// The number of clauses in the source SAT problem. @@ -50,13 +50,13 @@ where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, { type Source = Satisfiability; - type Target = DominatingSet; + type Target = MinimumDominatingSet; fn target_problem(&self) -> &Self::Target { &self.target } - /// Extract a SAT solution from a DominatingSet solution. + /// Extract a SAT solution from a MinimumDominatingSet solution. /// /// The dominating set solution encodes variable assignments: /// - For each variable x_i (0-indexed), vertices are at positions 3*i, 3*i+1, 3*i+2 @@ -139,7 +139,7 @@ impl ReductionSATToDS { ]) } )] -impl ReduceTo> for Satisfiability +impl ReduceTo> for Satisfiability where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + From + 'static, { @@ -187,7 +187,7 @@ where } } - let target = DominatingSet::new(num_vertices, edges); + let target = MinimumDominatingSet::new(num_vertices, edges); ReductionSATToDS { target, @@ -199,5 +199,5 @@ where } #[cfg(test)] -#[path = "../unit_tests/rules/sat_dominatingset.rs"] +#[path = "../unit_tests/rules/sat_minimumdominatingset.rs"] mod tests; diff --git a/src/rules/unitdiskmapping/alpha_tensor.rs b/src/rules/unitdiskmapping/alpha_tensor.rs index 81e7cc058..3f1810c4c 100644 --- a/src/rules/unitdiskmapping/alpha_tensor.rs +++ b/src/rules/unitdiskmapping/alpha_tensor.rs @@ -205,7 +205,7 @@ fn worse_than(bs_a: usize, bs_b: usize, val_a: i32, val_b: i32) -> bool { /// Check if two tensors differ 
by a constant. /// -/// Returns (is_equivalent, difference) where difference = t1[i] - t2[i] for valid entries. +/// Returns (is_equivalent, difference) where difference = `t1[i] - t2[i]` for valid entries. /// Invalid entries (i32::MIN) in both tensors are skipped. /// If one is valid and other is invalid, returns false. pub fn is_diff_by_const(t1: &[i32], t2: &[i32]) -> (bool, i32) { diff --git a/src/rules/unitdiskmapping/ksg/mapping.rs b/src/rules/unitdiskmapping/ksg/mapping.rs index 3ab4d6e87..4c05d386f 100644 --- a/src/rules/unitdiskmapping/ksg/mapping.rs +++ b/src/rules/unitdiskmapping/ksg/mapping.rs @@ -445,15 +445,14 @@ pub fn embed_graph( // Unweighted Mapping Functions // ============================================================================ -/// Map a graph to a KSG grid graph using optimal path decomposition (MinhThiTrick). +/// Map a graph to a KSG grid graph using automatic path decomposition. /// -/// This uses the branch-and-bound algorithm to find the optimal vertex ordering -/// that minimizes the grid size. +/// Uses exact branch-and-bound for small graphs (≤30 vertices) and greedy for larger. pub fn map_unweighted( num_vertices: usize, edges: &[(usize, usize)], ) -> MappingResult { - map_unweighted_with_method(num_vertices, edges, PathDecompositionMethod::MinhThiTrick) + map_unweighted_with_method(num_vertices, edges, PathDecompositionMethod::Auto) } /// Map a graph using a specific path decomposition method (unweighted). @@ -544,7 +543,7 @@ pub fn map_weighted( num_vertices: usize, edges: &[(usize, usize)], ) -> MappingResult { - map_weighted_with_method(num_vertices, edges, PathDecompositionMethod::MinhThiTrick) + map_weighted_with_method(num_vertices, edges, PathDecompositionMethod::Auto) } /// Map a graph using a specific path decomposition method (weighted). 
diff --git a/src/rules/unitdiskmapping/pathdecomposition.rs b/src/rules/unitdiskmapping/pathdecomposition.rs index 11afda721..16aa1c513 100644 --- a/src/rules/unitdiskmapping/pathdecomposition.rs +++ b/src/rules/unitdiskmapping/pathdecomposition.rs @@ -4,7 +4,8 @@ //! which are used to determine optimal vertex orderings for the copy-line embedding. //! The pathwidth of a graph determines the grid height needed for the embedding. //! -//! Two methods are provided: +//! Three methods are provided: +//! - `Auto` (default): Exact for ≤30 vertices, greedy for larger //! - `Greedy`: Fast heuristic with random restarts //! - `MinhThiTrick`: Branch-and-bound algorithm for optimal pathwidth //! @@ -402,8 +403,10 @@ pub enum PathDecompositionMethod { }, /// Branch and bound method for optimal pathwidth. /// Named in memory of Minh-Thi Nguyen, one of the main developers. - #[default] MinhThiTrick, + /// Automatically select method: exact for small graphs (≤30 vertices), greedy for larger. + #[default] + Auto, } impl PathDecompositionMethod { @@ -442,6 +445,16 @@ pub fn pathwidth( edges: &[(usize, usize)], method: PathDecompositionMethod, ) -> Layout { + let method = match method { + PathDecompositionMethod::Auto => { + if num_vertices > 30 { + PathDecompositionMethod::greedy() + } else { + PathDecompositionMethod::MinhThiTrick + } + } + other => other, + }; match method { PathDecompositionMethod::Greedy { nrepeat } => { let mut best: Option = None; @@ -454,6 +467,7 @@ pub fn pathwidth( best.unwrap_or_else(|| Layout::empty(num_vertices)) } PathDecompositionMethod::MinhThiTrick => branch_and_bound(num_vertices, edges), + PathDecompositionMethod::Auto => unreachable!(), } } diff --git a/src/rules/unitdiskmapping/triangular/gadgets.rs b/src/rules/unitdiskmapping/triangular/gadgets.rs index 456a8cb7d..ecc58945b 100644 --- a/src/rules/unitdiskmapping/triangular/gadgets.rs +++ b/src/rules/unitdiskmapping/triangular/gadgets.rs @@ -915,7 +915,7 @@ impl WeightedTriangularGadget for 
WeightedTriBranchFixB { } // ============================================================================ -// Pattern Matching and Application Functions +// Pattern Matching and Application Functions // ============================================================================ /// Check if a weighted triangular gadget pattern matches at position (i, j) in the grid. diff --git a/src/rules/unitdiskmapping/triangular/mapping.rs b/src/rules/unitdiskmapping/triangular/mapping.rs index a705ad8dc..3f4215992 100644 --- a/src/rules/unitdiskmapping/triangular/mapping.rs +++ b/src/rules/unitdiskmapping/triangular/mapping.rs @@ -52,8 +52,8 @@ fn crossat( /// Map a graph to a weighted triangular lattice grid graph using optimal path decomposition. /// -/// This is the main entry point for triangular lattice mapping. It uses the -/// MinhThiTrick path decomposition method by default. +/// This is the main entry point for triangular lattice mapping. It uses +/// automatic path decomposition (exact for ≤30 vertices, greedy for larger). /// /// # Arguments /// * `num_vertices` - Number of vertices in the original graph @@ -75,7 +75,7 @@ fn crossat( /// assert!(result.grid_graph.num_vertices() > 0); /// ``` pub fn map_weighted(num_vertices: usize, edges: &[(usize, usize)]) -> MappingResult { - map_weighted_with_method(num_vertices, edges, PathDecompositionMethod::MinhThiTrick) + map_weighted_with_method(num_vertices, edges, PathDecompositionMethod::Auto) } /// Map a graph to weighted triangular lattice using a specific path decomposition method. diff --git a/src/rules/unitdiskmapping/triangular/mod.rs b/src/rules/unitdiskmapping/triangular/mod.rs index db5d4efaa..b126f3968 100644 --- a/src/rules/unitdiskmapping/triangular/mod.rs +++ b/src/rules/unitdiskmapping/triangular/mod.rs @@ -1473,7 +1473,7 @@ fn try_apply_dangling_leg_left(grid: &mut MappingGrid, i: usize, j: usize) -> bo /// # Panics /// Panics if `num_vertices == 0`. 
pub fn map_graph_triangular(num_vertices: usize, edges: &[(usize, usize)]) -> MappingResult { - map_graph_triangular_with_method(num_vertices, edges, PathDecompositionMethod::MinhThiTrick) + map_graph_triangular_with_method(num_vertices, edges, PathDecompositionMethod::Auto) } /// Map a graph to triangular lattice using a specific path decomposition method. diff --git a/src/solvers/ilp/solver.rs b/src/solvers/ilp/solver.rs index 10769907d..1b388960b 100644 --- a/src/solvers/ilp/solver.rs +++ b/src/solvers/ilp/solver.rs @@ -148,7 +148,7 @@ impl ILPSolver { /// use problemreductions::topology::SimpleGraph; /// /// // Create a problem that reduces to ILP (e.g., Independent Set) - /// let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + /// let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); /// /// // Solve using ILP solver /// let solver = ILPSolver::new(); diff --git a/src/testing/macros.rs b/src/testing/macros.rs index 7a75266e0..eed58b569 100644 --- a/src/testing/macros.rs +++ b/src/testing/macros.rs @@ -14,12 +14,12 @@ /// ```text /// // Macro usage example - users customize for their tests /// use problemreductions::graph_problem_tests; -/// use problemreductions::models::graph::IndependentSet; +/// use problemreductions::models::graph::MaximumIndependentSet; /// use problemreductions::topology::SimpleGraph; /// /// graph_problem_tests! { -/// problem_type: IndependentSet, -/// constraint_type: IndependentSet, +/// problem_type: MaximumIndependentSet, +/// constraint_type: MaximumIndependentSet, /// test_cases: [ /// // (name, num_vertices, edges, valid_solution, expected_size, is_maximization) /// (triangle, 3, [(0, 1), (1, 2), (0, 2)], [1, 0, 0], 1, true), @@ -142,12 +142,12 @@ macro_rules! 
graph_problem_tests { /// ```text /// // Macro usage example - users customize for their tests /// use problemreductions::complement_test; -/// use problemreductions::prelude::{IndependentSet, VertexCovering}; +/// use problemreductions::prelude::{MaximumIndependentSet, MinimumVertexCover}; /// /// complement_test! { /// name: is_vc_complement, -/// problem_a: IndependentSet, -/// problem_b: VertexCovering, +/// problem_a: MaximumIndependentSet, +/// problem_b: MinimumVertexCover, /// test_graphs: [ /// (3, [(0, 1), (1, 2)]), /// (4, [(0, 1), (1, 2), (2, 3), (0, 3)]), @@ -213,10 +213,10 @@ macro_rules! complement_test { /// ```text /// // Macro usage example - users customize for their tests /// use problemreductions::quick_problem_test; -/// use problemreductions::prelude::IndependentSet; +/// use problemreductions::prelude::MaximumIndependentSet; /// /// quick_problem_test!( -/// IndependentSet, +/// MaximumIndependentSet, /// new(3, vec![(0, 1), (1, 2)]), /// solution: [1, 0, 1], /// expected_size: 2, diff --git a/src/testing/mod.rs b/src/testing/mod.rs index 43f8ea515..8bcbe5d67 100644 --- a/src/testing/mod.rs +++ b/src/testing/mod.rs @@ -13,12 +13,12 @@ //! ```text //! // Macro usage example - users customize for their tests //! use problemreductions::graph_problem_tests; -//! use problemreductions::models::graph::IndependentSet; +//! use problemreductions::models::graph::MaximumIndependentSet; //! use problemreductions::topology::SimpleGraph; //! //! graph_problem_tests! { -//! problem_type: IndependentSet, -//! constraint_type: IndependentSet, +//! problem_type: MaximumIndependentSet, +//! constraint_type: MaximumIndependentSet, //! test_cases: [ //! // (name, num_vertices, edges, valid_solution, expected_size, is_maximization) //! (triangle, 3, [(0, 1), (1, 2), (0, 2)], [1, 0, 0], 1, true), @@ -41,13 +41,13 @@ //! ```text //! // Macro usage example - users customize for their tests //! use problemreductions::complement_test; -//! 
use problemreductions::models::graph::{IndependentSet, VertexCovering}; +//! use problemreductions::models::graph::{MaximumIndependentSet, MinimumVertexCover}; //! use problemreductions::topology::SimpleGraph; //! //! complement_test! { //! name: test_is_vc_complement, -//! problem_a: IndependentSet, -//! problem_b: VertexCovering, +//! problem_a: MaximumIndependentSet, +//! problem_b: MinimumVertexCover, //! test_graphs: [ //! (3, [(0, 1), (1, 2)]), //! (4, [(0, 1), (1, 2), (2, 3)]), @@ -62,10 +62,10 @@ //! ```text //! // Macro usage example - users customize for their tests //! use problemreductions::quick_problem_test; -//! use problemreductions::prelude::IndependentSet; +//! use problemreductions::prelude::MaximumIndependentSet; //! //! quick_problem_test!( -//! IndependentSet, +//! MaximumIndependentSet, //! new(3, vec![(0, 1)]), //! solution: [0, 0, 1], //! expected_size: 1, diff --git a/src/topology/mod.rs b/src/topology/mod.rs index aa46efcd5..198e9ca0a 100644 --- a/src/topology/mod.rs +++ b/src/topology/mod.rs @@ -12,10 +12,10 @@ //! //! ``` //! use problemreductions::topology::{Graph, SimpleGraph, UnitDiskGraph}; -//! use problemreductions::models::graph::IndependentSet; +//! use problemreductions::models::graph::MaximumIndependentSet; //! //! // Problems work with any graph type - SimpleGraph by default -//! let simple_graph_problem: IndependentSet = IndependentSet::new(3, vec![(0, 1)]); +//! let simple_graph_problem: MaximumIndependentSet = MaximumIndependentSet::new(3, vec![(0, 1)]); //! assert_eq!(simple_graph_problem.num_vertices(), 3); //! //! // Different graph topologies enable different reduction algorithms diff --git a/src/traits.rs b/src/traits.rs index f27535405..7d6a8a3d5 100644 --- a/src/traits.rs +++ b/src/traits.rs @@ -9,7 +9,7 @@ use std::ops::AddAssign; /// This trait defines the interface for computational problems that can be /// solved by enumeration or reduction to other problems. 
pub trait Problem: Clone { - /// Base name of this problem type (e.g., "IndependentSet"). + /// Base name of this problem type (e.g., "MaximumIndependentSet"). const NAME: &'static str; /// Returns attributes describing this problem variant. diff --git a/src/types.rs b/src/types.rs index daeb003f2..f0df42f64 100644 --- a/src/types.rs +++ b/src/types.rs @@ -6,7 +6,7 @@ use std::fmt; /// Marker trait for numeric weight types. /// /// Weight subsumption uses Rust's `From` trait: -/// - `i32 → f64` is valid (From for f64 exists) +/// - `i32 → f64` is valid (`From` for f64 exists) /// - `f64 → i32` is invalid (no lossless conversion) pub trait NumericWeight: Clone + Default + PartialOrd + num_traits::Num + num_traits::Zero + std::ops::AddAssign + 'static diff --git a/src/unit_tests/export.rs b/src/unit_tests/export.rs index e74814e3c..1ab376bb8 100644 --- a/src/unit_tests/export.rs +++ b/src/unit_tests/export.rs @@ -92,7 +92,7 @@ fn test_variant_to_map_multiple() { #[test] fn test_lookup_overhead_known_reduction() { // IS -> VC is a known registered reduction - let result = lookup_overhead("IndependentSet", "VertexCovering"); + let result = lookup_overhead("MaximumIndependentSet", "MinimumVertexCover"); assert!(result.is_some()); } @@ -104,7 +104,7 @@ fn test_lookup_overhead_unknown_reduction() { #[test] fn test_lookup_overhead_or_empty_known() { - let overhead = lookup_overhead_or_empty("IndependentSet", "VertexCovering"); + let overhead = lookup_overhead_or_empty("MaximumIndependentSet", "MinimumVertexCover"); assert!(!overhead.output_size.is_empty()); } @@ -162,12 +162,12 @@ fn test_write_example_creates_files() { #[test] fn test_problem_side_serialization() { let side = ProblemSide { - problem: "IndependentSet".to_string(), + problem: "MaximumIndependentSet".to_string(), variant: variant_to_map(vec![("graph", "SimpleGraph"), ("weight", "i32")]), instance: serde_json::json!({"num_vertices": 4, "edges": [[0, 1], [1, 2]]}), }; let json = 
serde_json::to_value(&side).unwrap(); - assert_eq!(json["problem"], "IndependentSet"); + assert_eq!(json["problem"], "MaximumIndependentSet"); assert!(json["variant"]["graph"] == "SimpleGraph"); assert!(json["instance"]["num_vertices"] == 4); } diff --git a/src/unit_tests/graph_models.rs b/src/unit_tests/graph_models.rs index 7e11ef03e..9ccf5f058 100644 --- a/src/unit_tests/graph_models.rs +++ b/src/unit_tests/graph_models.rs @@ -4,8 +4,8 @@ //! and clearer separation of concerns. use crate::models::graph::{ - is_independent_set, is_valid_coloring, is_vertex_cover, IndependentSet, KColoring, - VertexCovering, + is_independent_set, is_valid_coloring, is_vertex_cover, MaximumIndependentSet, KColoring, + MinimumVertexCover, }; use crate::prelude::*; use crate::topology::SimpleGraph; @@ -14,12 +14,12 @@ use crate::topology::SimpleGraph; // Independent Set Tests // ============================================================================= -mod independent_set { +mod maximum_independent_set { use super::*; #[test] fn test_creation() { - let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); assert_eq!(problem.num_vertices(), 4); assert_eq!(problem.num_edges(), 3); assert_eq!(problem.num_variables(), 4); @@ -28,20 +28,20 @@ mod independent_set { #[test] fn test_with_weights() { - let problem = IndependentSet::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); + let problem = MaximumIndependentSet::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); assert_eq!(problem.weights(), vec![1, 2, 3]); assert!(problem.is_weighted()); } #[test] fn test_unweighted() { - let problem = IndependentSet::::new(3, vec![(0, 1)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); assert!(!problem.is_weighted()); } #[test] fn test_has_edge() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); 
assert!(problem.has_edge(0, 1)); assert!(problem.has_edge(1, 0)); // Undirected assert!(problem.has_edge(1, 2)); @@ -50,7 +50,7 @@ mod independent_set { #[test] fn test_solution_size_valid() { - let problem = IndependentSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); // Valid: select 0 and 2 (not adjacent) let sol = problem.solution_size(&[1, 0, 1, 0]); @@ -65,7 +65,7 @@ mod independent_set { #[test] fn test_solution_size_invalid() { - let problem = IndependentSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); // Invalid: 0 and 1 are adjacent let sol = problem.solution_size(&[1, 1, 0, 0]); @@ -79,7 +79,7 @@ mod independent_set { #[test] fn test_solution_size_empty() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); let sol = problem.solution_size(&[0, 0, 0]); assert!(sol.is_valid); assert_eq!(sol.size, 0); @@ -87,7 +87,7 @@ mod independent_set { #[test] fn test_weighted_solution() { - let problem = IndependentSet::with_weights(3, vec![(0, 1)], vec![10, 20, 30]); + let problem = MaximumIndependentSet::with_weights(3, vec![(0, 1)], vec![10, 20, 30]); // Select vertex 2 (weight 30) let sol = problem.solution_size(&[0, 0, 1]); @@ -102,14 +102,14 @@ mod independent_set { #[test] fn test_constraints() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); let constraints = problem.constraints(); assert_eq!(constraints.len(), 2); // One per edge } #[test] fn test_objectives() { - let problem = IndependentSet::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem = MaximumIndependentSet::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 3); // One per vertex } @@ -117,7 +117,7 @@ mod independent_set { #[test] fn 
test_brute_force_triangle() { // Triangle graph: maximum IS has size 1 - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -131,7 +131,7 @@ mod independent_set { #[test] fn test_brute_force_path() { // Path graph 0-1-2-3: maximum IS = {0,2} or {1,3} or {0,3} - let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -148,7 +148,7 @@ mod independent_set { #[test] fn test_brute_force_weighted() { // Graph with weights: vertex 1 has high weight but is connected to both 0 and 2 - let problem = IndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); + let problem = MaximumIndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -176,7 +176,7 @@ mod independent_set { #[test] fn test_problem_size() { - let problem = IndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3)]); let size = problem.problem_size(); assert_eq!(size.get("num_vertices"), Some(5)); assert_eq!(size.get("num_edges"), Some(3)); @@ -184,13 +184,13 @@ mod independent_set { #[test] fn test_energy_mode() { - let problem = IndependentSet::::new(3, vec![(0, 1)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); assert!(problem.energy_mode().is_maximization()); } #[test] fn test_edges() { - let problem = IndependentSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); let edges = problem.edges(); assert_eq!(edges.len(), 2); assert!(edges.contains(&(0, 1)) || edges.contains(&(1, 0))); @@ -199,14 +199,14 @@ mod 
independent_set { #[test] fn test_set_weights() { - let mut problem = IndependentSet::::new(3, vec![(0, 1)]); + let mut problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); problem.set_weights(vec![5, 10, 15]); assert_eq!(problem.weights(), vec![5, 10, 15]); } #[test] fn test_empty_graph() { - let problem = IndependentSet::::new(3, vec![]); + let problem = MaximumIndependentSet::::new(3, vec![]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -217,7 +217,7 @@ mod independent_set { #[test] fn test_is_satisfied() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.is_satisfied(&[1, 0, 1])); // Valid IS assert!(problem.is_satisfied(&[0, 1, 0])); // Valid IS @@ -230,12 +230,12 @@ mod independent_set { // Vertex Covering Tests // ============================================================================= -mod vertex_covering { +mod minimum_vertex_cover { use super::*; #[test] fn test_creation() { - let problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); assert_eq!(problem.num_vertices(), 4); assert_eq!(problem.num_edges(), 3); assert_eq!(problem.num_variables(), 4); @@ -244,14 +244,14 @@ mod vertex_covering { #[test] fn test_with_weights() { - let problem = VertexCovering::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); + let problem = MinimumVertexCover::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); assert_eq!(problem.weights(), vec![1, 2, 3]); assert!(problem.is_weighted()); } #[test] fn test_solution_size_valid() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); // Valid: select vertex 1 (covers both edges) let sol = problem.solution_size(&[0, 1, 0]); @@ -266,7 +266,7 @@ mod vertex_covering { #[test] fn test_solution_size_invalid() { - let problem = 
VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); // Invalid: no vertex selected let sol = problem.solution_size(&[0, 0, 0]); @@ -280,7 +280,7 @@ mod vertex_covering { #[test] fn test_brute_force_path() { // Path graph 0-1-2: minimum vertex cover is {1} - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -291,7 +291,7 @@ mod vertex_covering { #[test] fn test_brute_force_triangle() { // Triangle: minimum vertex cover has size 2 - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -306,7 +306,7 @@ mod vertex_covering { #[test] fn test_brute_force_weighted() { // Weighted: prefer selecting low-weight vertices - let problem = VertexCovering::with_weights(3, vec![(0, 1), (1, 2)], vec![100, 1, 100]); + let problem = MinimumVertexCover::with_weights(3, vec![(0, 1), (1, 2)], vec![100, 1, 100]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -333,20 +333,20 @@ mod vertex_covering { #[test] fn test_constraints() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); let constraints = problem.constraints(); assert_eq!(constraints.len(), 2); } #[test] fn test_energy_mode() { - let problem = VertexCovering::::new(3, vec![(0, 1)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1)]); assert!(problem.energy_mode().is_minimization()); } #[test] fn test_empty_graph() { - let problem = VertexCovering::::new(3, vec![]); + let problem = MinimumVertexCover::::new(3, vec![]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -357,7 +357,7 @@ mod 
vertex_covering { #[test] fn test_single_edge() { - let problem = VertexCovering::::new(2, vec![(0, 1)]); + let problem = MinimumVertexCover::::new(2, vec![(0, 1)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -367,7 +367,7 @@ mod vertex_covering { #[test] fn test_is_satisfied() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.is_satisfied(&[0, 1, 0])); // Valid cover assert!(problem.is_satisfied(&[1, 0, 1])); // Valid cover @@ -379,8 +379,8 @@ mod vertex_covering { fn test_complement_relationship() { // For a graph, if S is an independent set, then V\S is a vertex cover let edges = vec![(0, 1), (1, 2), (2, 3)]; - let is_problem = IndependentSet::::new(4, edges.clone()); - let vc_problem = VertexCovering::::new(4, edges); + let is_problem = MaximumIndependentSet::::new(4, edges.clone()); + let vc_problem = MinimumVertexCover::::new(4, edges); let solver = BruteForce::new(); @@ -394,14 +394,14 @@ mod vertex_covering { #[test] fn test_objectives() { - let problem = VertexCovering::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem = MinimumVertexCover::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 3); } #[test] fn test_set_weights() { - let mut problem = VertexCovering::::new(3, vec![(0, 1)]); + let mut problem = MinimumVertexCover::::new(3, vec![(0, 1)]); assert!(!problem.is_weighted()); // Initially uniform problem.set_weights(vec![1, 2, 3]); assert!(problem.is_weighted()); @@ -410,7 +410,7 @@ mod vertex_covering { #[test] fn test_is_weighted_empty() { - let problem = VertexCovering::::new(0, vec![]); + let problem = MinimumVertexCover::::new(0, vec![]); assert!(!problem.is_weighted()); } diff --git a/src/unit_tests/io.rs b/src/unit_tests/io.rs index 64b155beb..5253530b4 100644 --- a/src/unit_tests/io.rs +++ b/src/unit_tests/io.rs @@ -1,12 +1,12 @@ use 
super::*; -use crate::models::graph::IndependentSet; +use crate::models::graph::MaximumIndependentSet; use crate::topology::SimpleGraph; use std::fs; use std::time::{SystemTime, UNIX_EPOCH}; #[test] fn test_to_json() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); let json = to_json(&problem); assert!(json.is_ok()); let json = json.unwrap(); @@ -15,16 +15,16 @@ fn test_to_json() { #[test] fn test_from_json() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); let json = to_json(&problem).unwrap(); - let restored: IndependentSet = from_json(&json).unwrap(); + let restored: MaximumIndependentSet = from_json(&json).unwrap(); assert_eq!(restored.num_vertices(), 3); assert_eq!(restored.num_edges(), 2); } #[test] fn test_json_compact() { - let problem = IndependentSet::::new(3, vec![(0, 1)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); let compact = to_json_compact(&problem).unwrap(); let pretty = to_json(&problem).unwrap(); // Compact should be shorter @@ -33,7 +33,7 @@ fn test_json_compact() { #[test] fn test_file_roundtrip() { - let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let ts = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_nanos(); let path = std::env::temp_dir().join(format!("test_problem_{ts}.json")); let path = path.to_str().unwrap(); @@ -42,7 +42,7 @@ fn test_file_roundtrip() { write_problem(&problem, path, FileFormat::Json).unwrap(); // Read back - let restored: IndependentSet = read_problem(path, FileFormat::Json).unwrap(); + let restored: MaximumIndependentSet = read_problem(path, FileFormat::Json).unwrap(); assert_eq!(restored.num_vertices(), 4); assert_eq!(restored.num_edges(), 3); @@ -81,6 +81,6 @@ fn test_read_write_file() { #[test] fn 
test_invalid_json() { - let result: Result> = from_json("not valid json"); + let result: Result> = from_json("not valid json"); assert!(result.is_err()); } diff --git a/src/unit_tests/models/graph/clique.rs b/src/unit_tests/models/graph/maximum_clique.rs similarity index 74% rename from src/unit_tests/models/graph/clique.rs rename to src/unit_tests/models/graph/maximum_clique.rs index 1e2d8382f..eb8f98db2 100644 --- a/src/unit_tests/models/graph/clique.rs +++ b/src/unit_tests/models/graph/maximum_clique.rs @@ -3,7 +3,7 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_clique_creation() { - let problem = Clique::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumClique::::new(4, vec![(0, 1), (1, 2), (2, 3)]); assert_eq!(problem.num_vertices(), 4); assert_eq!(problem.num_edges(), 3); assert_eq!(problem.num_variables(), 4); @@ -13,20 +13,20 @@ fn test_clique_creation() { #[test] fn test_clique_with_weights() { let problem = - Clique::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); + MaximumClique::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); assert_eq!(problem.weights(), vec![1, 2, 3]); assert!(problem.is_weighted()); } #[test] fn test_clique_unweighted() { - let problem = Clique::::new(3, vec![(0, 1)]); + let problem = MaximumClique::::new(3, vec![(0, 1)]); assert!(!problem.is_weighted()); } #[test] fn test_has_edge() { - let problem = Clique::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.has_edge(0, 1)); assert!(problem.has_edge(1, 0)); // Undirected assert!(problem.has_edge(1, 2)); @@ -36,7 +36,7 @@ fn test_has_edge() { #[test] fn test_solution_size_valid() { // Complete graph K3 (triangle) - let problem = Clique::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2), (0, 2)]); // Valid: all three form a clique let sol = problem.solution_size(&[1, 1, 1]); @@ -52,7 +52,7 @@ fn test_solution_size_valid() { #[test] fn 
test_solution_size_invalid() { // Path graph: 0-1-2 (no edge between 0 and 2) - let problem = Clique::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2)]); // Invalid: 0 and 2 are not adjacent let sol = problem.solution_size(&[1, 0, 1]); @@ -66,7 +66,7 @@ fn test_solution_size_invalid() { #[test] fn test_solution_size_empty() { - let problem = Clique::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2)]); let sol = problem.solution_size(&[0, 0, 0]); assert!(sol.is_valid); // Empty set is a valid clique assert_eq!(sol.size, 0); @@ -75,7 +75,7 @@ fn test_solution_size_empty() { #[test] fn test_weighted_solution() { let problem = - Clique::::with_weights(3, vec![(0, 1), (1, 2), (0, 2)], vec![10, 20, 30]); + MaximumClique::::with_weights(3, vec![(0, 1), (1, 2), (0, 2)], vec![10, 20, 30]); // Select vertex 2 (weight 30) let sol = problem.solution_size(&[0, 0, 1]); @@ -91,7 +91,7 @@ fn test_weighted_solution() { #[test] fn test_constraints() { // Path graph: 0-1-2 (non-edge between 0 and 2) - let problem = Clique::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2)]); let constraints = problem.constraints(); assert_eq!(constraints.len(), 1); // One constraint for non-edge (0, 2) } @@ -99,7 +99,7 @@ fn test_constraints() { #[test] fn test_objectives() { let problem = - Clique::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + MaximumClique::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 3); // One per vertex } @@ -108,7 +108,7 @@ fn test_objectives() { fn test_brute_force_triangle() { // Triangle graph (K3): max clique is all 3 vertices let problem = - Clique::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + MaximumClique::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -119,7 +119,7 @@ fn test_brute_force_triangle() { 
#[test] fn test_brute_force_path() { // Path graph 0-1-2: max clique is any adjacent pair - let problem = Clique::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -137,7 +137,7 @@ fn test_brute_force_path() { fn test_brute_force_weighted() { // Path with weights: vertex 1 has high weight let problem = - Clique::::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); + MaximumClique::::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -162,7 +162,7 @@ fn test_is_clique_function() { #[test] fn test_problem_size() { - let problem = Clique::::new(5, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumClique::::new(5, vec![(0, 1), (1, 2), (2, 3)]); let size = problem.problem_size(); assert_eq!(size.get("num_vertices"), Some(5)); assert_eq!(size.get("num_edges"), Some(3)); @@ -170,20 +170,20 @@ fn test_problem_size() { #[test] fn test_energy_mode() { - let problem = Clique::::new(3, vec![(0, 1)]); + let problem = MaximumClique::::new(3, vec![(0, 1)]); assert!(problem.energy_mode().is_maximization()); } #[test] fn test_edges() { - let problem = Clique::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumClique::::new(4, vec![(0, 1), (2, 3)]); let edges = problem.edges(); assert_eq!(edges.len(), 2); } #[test] fn test_set_weights() { - let mut problem = Clique::::new(3, vec![(0, 1)]); + let mut problem = MaximumClique::::new(3, vec![(0, 1)]); problem.set_weights(vec![5, 10, 15]); assert_eq!(problem.weights(), vec![5, 10, 15]); } @@ -191,7 +191,7 @@ fn test_set_weights() { #[test] fn test_empty_graph() { // No edges means any single vertex is a max clique - let problem = Clique::::new(3, vec![]); + let problem = MaximumClique::::new(3, vec![]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -204,7 +204,7 @@ fn test_empty_graph() { 
#[test] fn test_is_satisfied() { - let problem = Clique::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumClique::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.is_satisfied(&[1, 1, 0])); // Valid clique assert!(problem.is_satisfied(&[0, 1, 1])); // Valid clique @@ -214,7 +214,7 @@ fn test_is_satisfied() { #[test] fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = Clique::::from_graph(graph.clone(), vec![1, 2, 3]); + let problem = MaximumClique::::from_graph(graph.clone(), vec![1, 2, 3]); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.weights(), vec![1, 2, 3]); } @@ -222,14 +222,14 @@ fn test_from_graph() { #[test] fn test_from_graph_unit_weights() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = Clique::::from_graph_unit_weights(graph); + let problem = MaximumClique::::from_graph_unit_weights(graph); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.weights(), vec![1, 1, 1]); } #[test] fn test_graph_accessor() { - let problem = Clique::::new(3, vec![(0, 1)]); + let problem = MaximumClique::::new(3, vec![(0, 1)]); let graph = problem.graph(); assert_eq!(graph.num_vertices(), 3); assert_eq!(graph.num_edges(), 1); @@ -237,7 +237,7 @@ fn test_graph_accessor() { #[test] fn test_variant() { - let variant = Clique::::variant(); + let variant = MaximumClique::::variant(); assert_eq!(variant.len(), 2); assert_eq!(variant[0], ("graph", "SimpleGraph")); assert_eq!(variant[1], ("weight", "i32")); @@ -246,7 +246,7 @@ fn test_variant() { #[test] fn test_weights_ref() { let problem = - Clique::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + MaximumClique::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); assert_eq!(problem.weights_ref(), &vec![5, 10, 15]); } @@ -259,7 +259,7 @@ fn test_is_clique_wrong_len() { #[test] fn test_complete_graph() { // K4 - complete graph with 4 vertices - let problem = Clique::::new( + let problem = MaximumClique::::new( 4, vec![(0, 1), (0, 2), (0, 3), 
(1, 2), (1, 3), (2, 3)], ); diff --git a/src/unit_tests/models/graph/independent_set.rs b/src/unit_tests/models/graph/maximum_independent_set.rs similarity index 72% rename from src/unit_tests/models/graph/independent_set.rs rename to src/unit_tests/models/graph/maximum_independent_set.rs index 9ee41a421..e75f5b95b 100644 --- a/src/unit_tests/models/graph/independent_set.rs +++ b/src/unit_tests/models/graph/maximum_independent_set.rs @@ -3,7 +3,7 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_independent_set_creation() { - let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); assert_eq!(problem.num_vertices(), 4); assert_eq!(problem.num_edges(), 3); assert_eq!(problem.num_variables(), 4); @@ -13,20 +13,20 @@ fn test_independent_set_creation() { #[test] fn test_independent_set_with_weights() { let problem = - IndependentSet::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); + MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); assert_eq!(problem.weights(), vec![1, 2, 3]); assert!(problem.is_weighted()); } #[test] fn test_independent_set_unweighted() { - let problem = IndependentSet::::new(3, vec![(0, 1)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); assert!(!problem.is_weighted()); } #[test] fn test_has_edge() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.has_edge(0, 1)); assert!(problem.has_edge(1, 0)); // Undirected assert!(problem.has_edge(1, 2)); @@ -35,7 +35,7 @@ fn test_has_edge() { #[test] fn test_solution_size_valid() { - let problem = IndependentSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); // Valid: select 0 and 2 (not adjacent) let sol = problem.solution_size(&[1, 0, 1, 0]); @@ -50,7 +50,7 @@ fn test_solution_size_valid() { #[test] fn 
test_solution_size_invalid() { - let problem = IndependentSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); // Invalid: 0 and 1 are adjacent let sol = problem.solution_size(&[1, 1, 0, 0]); @@ -64,7 +64,7 @@ fn test_solution_size_invalid() { #[test] fn test_solution_size_empty() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); let sol = problem.solution_size(&[0, 0, 0]); assert!(sol.is_valid); assert_eq!(sol.size, 0); @@ -73,7 +73,7 @@ fn test_solution_size_empty() { #[test] fn test_weighted_solution() { let problem = - IndependentSet::::with_weights(3, vec![(0, 1)], vec![10, 20, 30]); + MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![10, 20, 30]); // Select vertex 2 (weight 30) let sol = problem.solution_size(&[0, 0, 1]); @@ -88,7 +88,7 @@ fn test_weighted_solution() { #[test] fn test_constraints() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); let constraints = problem.constraints(); assert_eq!(constraints.len(), 2); // One per edge } @@ -96,7 +96,7 @@ fn test_constraints() { #[test] fn test_objectives() { let problem = - IndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 3); // One per vertex } @@ -105,7 +105,7 @@ fn test_objectives() { fn test_brute_force_triangle() { // Triangle graph: maximum IS has size 1 let problem = - IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -119,7 +119,7 @@ fn test_brute_force_triangle() { #[test] fn test_brute_force_path() { // Path graph 0-1-2-3: maximum IS = {0,2} or {1,3} or {0,3} 
- let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -137,7 +137,7 @@ fn test_brute_force_path() { fn test_brute_force_weighted() { // Graph with weights: vertex 1 has high weight but is connected to both 0 and 2 let problem = - IndependentSet::::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); + MaximumIndependentSet::::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -165,7 +165,7 @@ fn test_is_independent_set_function() { #[test] fn test_problem_size() { - let problem = IndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3)]); let size = problem.problem_size(); assert_eq!(size.get("num_vertices"), Some(5)); assert_eq!(size.get("num_edges"), Some(3)); @@ -173,13 +173,13 @@ fn test_problem_size() { #[test] fn test_energy_mode() { - let problem = IndependentSet::::new(3, vec![(0, 1)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); assert!(problem.energy_mode().is_maximization()); } #[test] fn test_edges() { - let problem = IndependentSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); let edges = problem.edges(); assert_eq!(edges.len(), 2); assert!(edges.contains(&(0, 1)) || edges.contains(&(1, 0))); @@ -188,14 +188,14 @@ fn test_edges() { #[test] fn test_set_weights() { - let mut problem = IndependentSet::::new(3, vec![(0, 1)]); + let mut problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); problem.set_weights(vec![5, 10, 15]); assert_eq!(problem.weights(), vec![5, 10, 15]); } #[test] fn test_empty_graph() { - let problem = IndependentSet::::new(3, vec![]); + let problem = MaximumIndependentSet::::new(3, vec![]); let solver = BruteForce::new(); let 
solutions = solver.find_best(&problem); @@ -206,7 +206,7 @@ fn test_empty_graph() { #[test] fn test_is_satisfied() { - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.is_satisfied(&[1, 0, 1])); // Valid IS assert!(problem.is_satisfied(&[0, 1, 0])); // Valid IS @@ -217,7 +217,7 @@ fn test_is_satisfied() { #[test] fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = IndependentSet::::from_graph(graph.clone(), vec![1, 2, 3]); + let problem = MaximumIndependentSet::::from_graph(graph.clone(), vec![1, 2, 3]); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.weights(), vec![1, 2, 3]); } @@ -225,14 +225,14 @@ fn test_from_graph() { #[test] fn test_from_graph_unit_weights() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = IndependentSet::::from_graph_unit_weights(graph); + let problem = MaximumIndependentSet::::from_graph_unit_weights(graph); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.weights(), vec![1, 1, 1]); } #[test] fn test_graph_accessor() { - let problem = IndependentSet::::new(3, vec![(0, 1)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); let graph = problem.graph(); assert_eq!(graph.num_vertices(), 3); assert_eq!(graph.num_edges(), 1); @@ -240,7 +240,7 @@ fn test_graph_accessor() { #[test] fn test_variant() { - let variant = IndependentSet::::variant(); + let variant = MaximumIndependentSet::::variant(); assert_eq!(variant.len(), 2); assert_eq!(variant[0], ("graph", "SimpleGraph")); assert_eq!(variant[1], ("weight", "i32")); @@ -249,6 +249,6 @@ fn test_variant() { #[test] fn test_weights_ref() { let problem = - IndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + MaximumIndependentSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); assert_eq!(problem.weights_ref(), &vec![5, 10, 15]); } diff --git a/src/unit_tests/models/graph/matching.rs 
b/src/unit_tests/models/graph/maximum_matching.rs similarity index 74% rename from src/unit_tests/models/graph/matching.rs rename to src/unit_tests/models/graph/maximum_matching.rs index 9a10b630b..608f3ce13 100644 --- a/src/unit_tests/models/graph/matching.rs +++ b/src/unit_tests/models/graph/maximum_matching.rs @@ -3,7 +3,7 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_matching_creation() { - let problem = Matching::::new(4, vec![(0, 1, 1), (1, 2, 2), (2, 3, 3)]); + let problem = MaximumMatching::::new(4, vec![(0, 1, 1), (1, 2, 2), (2, 3, 3)]); assert_eq!(problem.num_vertices(), 4); assert_eq!(problem.num_edges(), 3); assert_eq!(problem.num_variables(), 3); @@ -11,13 +11,13 @@ fn test_matching_creation() { #[test] fn test_matching_unweighted() { - let problem = Matching::::unweighted(3, vec![(0, 1), (1, 2)]); + let problem = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2)]); assert_eq!(problem.num_edges(), 2); } #[test] fn test_edge_endpoints() { - let problem = Matching::::new(3, vec![(0, 1, 1), (1, 2, 2)]); + let problem = MaximumMatching::::new(3, vec![(0, 1, 1), (1, 2, 2)]); assert_eq!(problem.edge_endpoints(0), Some((0, 1))); assert_eq!(problem.edge_endpoints(1), Some((1, 2))); assert_eq!(problem.edge_endpoints(2), None); @@ -25,7 +25,7 @@ fn test_edge_endpoints() { #[test] fn test_is_valid_matching() { - let problem = Matching::::new(4, vec![(0, 1, 1), (1, 2, 1), (2, 3, 1)]); + let problem = MaximumMatching::::new(4, vec![(0, 1, 1), (1, 2, 1), (2, 3, 1)]); // Valid: select edge 0 only assert!(problem.is_valid_matching(&[1, 0, 0])); @@ -39,7 +39,7 @@ fn test_is_valid_matching() { #[test] fn test_solution_size() { - let problem = Matching::::new(4, vec![(0, 1, 5), (1, 2, 10), (2, 3, 3)]); + let problem = MaximumMatching::::new(4, vec![(0, 1, 5), (1, 2, 10), (2, 3, 3)]); let sol = problem.solution_size(&[1, 0, 1]); assert!(sol.is_valid); @@ -53,7 +53,7 @@ fn test_solution_size() { #[test] fn test_brute_force_path() { // Path 0-1-2-3 with 
unit weights - let problem = Matching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumMatching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -66,7 +66,7 @@ fn test_brute_force_path() { #[test] fn test_brute_force_triangle() { - let problem = Matching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -80,7 +80,7 @@ fn test_brute_force_triangle() { #[test] fn test_brute_force_weighted() { // Prefer heavy edge even if it excludes more edges - let problem = Matching::::new(4, vec![(0, 1, 100), (0, 2, 1), (1, 3, 1)]); + let problem = MaximumMatching::::new(4, vec![(0, 1, 100), (0, 2, 1), (1, 3, 1)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -100,13 +100,13 @@ fn test_is_matching_function() { #[test] fn test_energy_mode() { - let problem = Matching::::unweighted(2, vec![(0, 1)]); + let problem = MaximumMatching::::unweighted(2, vec![(0, 1)]); assert!(problem.energy_mode().is_maximization()); } #[test] fn test_empty_graph() { - let problem = Matching::::unweighted(3, vec![]); + let problem = MaximumMatching::::unweighted(3, vec![]); let sol = problem.solution_size(&[]); assert!(sol.is_valid); assert_eq!(sol.size, 0); @@ -114,7 +114,7 @@ fn test_empty_graph() { #[test] fn test_constraints() { - let problem = Matching::::unweighted(3, vec![(0, 1), (1, 2)]); + let problem = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2)]); let constraints = problem.constraints(); // Vertex 1 has degree 2, so 1 constraint assert_eq!(constraints.len(), 1); @@ -122,7 +122,7 @@ fn test_constraints() { #[test] fn test_edges() { - let problem = Matching::::new(3, vec![(0, 1, 5), (1, 2, 10)]); + let problem = MaximumMatching::::new(3, vec![(0, 1, 5), (1, 2, 10)]); let edges = problem.edges(); 
assert_eq!(edges.len(), 2); } @@ -130,7 +130,7 @@ fn test_edges() { #[test] fn test_perfect_matching() { // K4: can have perfect matching (2 edges covering all 4 vertices) - let problem = Matching::::unweighted( + let problem = MaximumMatching::::unweighted( 4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)], ); @@ -156,7 +156,7 @@ fn test_perfect_matching() { #[test] fn test_is_satisfied() { - let problem = Matching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumMatching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); assert!(problem.is_satisfied(&[1, 0, 1])); // Valid matching assert!(problem.is_satisfied(&[0, 1, 0])); // Valid matching @@ -165,14 +165,14 @@ fn test_is_satisfied() { #[test] fn test_objectives() { - let problem = Matching::::new(3, vec![(0, 1, 5), (1, 2, 10)]); + let problem = MaximumMatching::::new(3, vec![(0, 1, 5), (1, 2, 10)]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 2); } #[test] fn test_set_weights() { - let mut problem = Matching::::unweighted(3, vec![(0, 1), (1, 2)]); + let mut problem = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2)]); assert!(!problem.is_weighted()); // Initially uniform problem.set_weights(vec![1, 2]); assert!(problem.is_weighted()); @@ -181,7 +181,7 @@ fn test_set_weights() { #[test] fn test_is_weighted_empty() { - let problem = Matching::::unweighted(2, vec![]); + let problem = MaximumMatching::::unweighted(2, vec![]); assert!(!problem.is_weighted()); } @@ -199,7 +199,7 @@ fn test_is_matching_out_of_bounds() { #[test] fn test_problem_size() { - let problem = Matching::::unweighted(5, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumMatching::::unweighted(5, vec![(0, 1), (1, 2), (2, 3)]); let size = problem.problem_size(); assert_eq!(size.get("num_vertices"), Some(5)); assert_eq!(size.get("num_edges"), Some(3)); @@ -208,7 +208,7 @@ fn test_problem_size() { #[test] fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let 
problem = Matching::::from_graph(graph, vec![5, 10]); + let problem = MaximumMatching::::from_graph(graph, vec![5, 10]); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.num_edges(), 2); assert_eq!(problem.weights(), vec![5, 10]); @@ -217,7 +217,7 @@ fn test_from_graph() { #[test] fn test_from_graph_unit_weights() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = Matching::::from_graph_unit_weights(graph); + let problem = MaximumMatching::::from_graph_unit_weights(graph); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.num_edges(), 2); assert_eq!(problem.weights(), vec![1, 1]); @@ -226,7 +226,7 @@ fn test_from_graph_unit_weights() { #[test] fn test_graph_accessor() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = Matching::::from_graph_unit_weights(graph); + let problem = MaximumMatching::::from_graph_unit_weights(graph); assert_eq!(problem.graph().num_vertices(), 3); assert_eq!(problem.graph().num_edges(), 2); } diff --git a/src/unit_tests/models/graph/dominating_set.rs b/src/unit_tests/models/graph/minimum_dominating_set.rs similarity index 71% rename from src/unit_tests/models/graph/dominating_set.rs rename to src/unit_tests/models/graph/minimum_dominating_set.rs index 1ee68c293..6215b1387 100644 --- a/src/unit_tests/models/graph/dominating_set.rs +++ b/src/unit_tests/models/graph/minimum_dominating_set.rs @@ -3,7 +3,7 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_dominating_set_creation() { - let problem = DominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); assert_eq!(problem.num_vertices(), 4); assert_eq!(problem.num_edges(), 3); } @@ -11,13 +11,13 @@ fn test_dominating_set_creation() { #[test] fn test_dominating_set_with_weights() { let problem = - DominatingSet::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); + MinimumDominatingSet::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); 
assert_eq!(problem.weights(), vec![1, 2, 3]); } #[test] fn test_neighbors() { - let problem = DominatingSet::::new(4, vec![(0, 1), (0, 2), (1, 2)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2), (1, 2)]); let nbrs = problem.neighbors(0); assert!(nbrs.contains(&1)); assert!(nbrs.contains(&2)); @@ -26,7 +26,7 @@ fn test_neighbors() { #[test] fn test_closed_neighborhood() { - let problem = DominatingSet::::new(4, vec![(0, 1), (0, 2)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2)]); let cn = problem.closed_neighborhood(0); assert!(cn.contains(&0)); assert!(cn.contains(&1)); @@ -37,7 +37,7 @@ fn test_closed_neighborhood() { #[test] fn test_solution_size_valid() { // Star graph: center dominates all - let problem = DominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); // Select center let sol = problem.solution_size(&[1, 0, 0, 0]); @@ -52,7 +52,7 @@ fn test_solution_size_valid() { #[test] fn test_solution_size_invalid() { - let problem = DominatingSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (2, 3)]); // Select none let sol = problem.solution_size(&[0, 0, 0, 0]); @@ -66,7 +66,7 @@ fn test_solution_size_invalid() { #[test] fn test_brute_force_star() { // Star graph: minimum dominating set is the center - let problem = DominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -80,7 +80,7 @@ fn test_brute_force_star() { fn test_brute_force_path() { // Path 0-1-2-3-4: need to dominate all 5 vertices let problem = - DominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + MinimumDominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -94,7 +94,7 @@ fn 
test_brute_force_path() { #[test] fn test_brute_force_weighted() { // Star with heavy center - let problem = DominatingSet::::with_weights( + let problem = MinimumDominatingSet::::with_weights( 4, vec![(0, 1), (0, 2), (0, 3)], vec![100, 1, 1, 1], @@ -123,21 +123,21 @@ fn test_is_dominating_set_function() { #[test] fn test_constraints() { - let problem = DominatingSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumDominatingSet::::new(3, vec![(0, 1), (1, 2)]); let constraints = problem.constraints(); assert_eq!(constraints.len(), 3); // One per vertex } #[test] fn test_energy_mode() { - let problem = DominatingSet::::new(2, vec![(0, 1)]); + let problem = MinimumDominatingSet::::new(2, vec![(0, 1)]); assert!(problem.energy_mode().is_minimization()); } #[test] fn test_isolated_vertex() { // Isolated vertex must be in dominating set - let problem = DominatingSet::::new(3, vec![(0, 1)]); + let problem = MinimumDominatingSet::::new(3, vec![(0, 1)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -150,7 +150,7 @@ fn test_isolated_vertex() { #[test] fn test_is_satisfied() { - let problem = DominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); assert!(problem.is_satisfied(&[1, 0, 0, 0])); // Center dominates all assert!(problem.is_satisfied(&[0, 1, 1, 1])); // Leaves dominate @@ -160,14 +160,14 @@ fn test_is_satisfied() { #[test] fn test_objectives() { let problem = - DominatingSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + MinimumDominatingSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 3); } #[test] fn test_set_weights() { - let mut problem = DominatingSet::::new(3, vec![(0, 1)]); + let mut problem = MinimumDominatingSet::::new(3, vec![(0, 1)]); assert!(!problem.is_weighted()); // Initially uniform problem.set_weights(vec![1, 2, 3]); assert!(problem.is_weighted()); @@ 
-176,7 +176,7 @@ fn test_set_weights() { #[test] fn test_is_weighted_empty() { - let problem = DominatingSet::::with_weights(0, vec![], vec![]); + let problem = MinimumDominatingSet::::with_weights(0, vec![], vec![]); assert!(!problem.is_weighted()); } @@ -187,7 +187,7 @@ fn test_is_dominating_set_wrong_len() { #[test] fn test_problem_size() { - let problem = DominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumDominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3)]); let size = problem.problem_size(); assert_eq!(size.get("num_vertices"), Some(5)); assert_eq!(size.get("num_edges"), Some(3)); @@ -196,18 +196,18 @@ fn test_problem_size() { #[test] fn test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = DominatingSet::::from_graph(graph.clone(), vec![1, 2, 3]); + let problem = MinimumDominatingSet::::from_graph(graph.clone(), vec![1, 2, 3]); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.weights(), vec![1, 2, 3]); - let problem2 = DominatingSet::::from_graph_unit_weights(graph); + let problem2 = MinimumDominatingSet::::from_graph_unit_weights(graph); assert_eq!(problem2.num_vertices(), 3); assert_eq!(problem2.weights(), vec![1, 1, 1]); } #[test] fn test_graph_accessor() { - let problem = DominatingSet::::new(3, vec![(0, 1)]); + let problem = MinimumDominatingSet::::new(3, vec![(0, 1)]); let graph = problem.graph(); assert_eq!(graph.num_vertices(), 3); assert_eq!(graph.num_edges(), 1); @@ -216,13 +216,13 @@ fn test_graph_accessor() { #[test] fn test_weights_ref() { let problem = - DominatingSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + MinimumDominatingSet::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); assert_eq!(problem.weights_ref(), &vec![5, 10, 15]); } #[test] fn test_variant() { - let variant = DominatingSet::::variant(); + let variant = MinimumDominatingSet::::variant(); assert_eq!(variant.len(), 2); assert_eq!(variant[0], ("graph", "SimpleGraph")); assert_eq!(variant[1], 
("weight", "i32")); @@ -230,14 +230,14 @@ fn test_variant() { #[test] fn test_edges() { - let problem = DominatingSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumDominatingSet::::new(3, vec![(0, 1), (1, 2)]); let edges = problem.edges(); assert_eq!(edges.len(), 2); } #[test] fn test_has_edge() { - let problem = DominatingSet::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumDominatingSet::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.has_edge(0, 1)); assert!(problem.has_edge(1, 0)); // Undirected assert!(problem.has_edge(1, 2)); diff --git a/src/unit_tests/models/graph/vertex_covering.rs b/src/unit_tests/models/graph/minimum_vertex_cover.rs similarity index 74% rename from src/unit_tests/models/graph/vertex_covering.rs rename to src/unit_tests/models/graph/minimum_vertex_cover.rs index b83c068e0..17e1a084f 100644 --- a/src/unit_tests/models/graph/vertex_covering.rs +++ b/src/unit_tests/models/graph/minimum_vertex_cover.rs @@ -3,7 +3,7 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_vertex_cover_creation() { - let problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); assert_eq!(problem.num_vertices(), 4); assert_eq!(problem.num_edges(), 3); assert_eq!(problem.num_variables(), 4); @@ -13,14 +13,14 @@ fn test_vertex_cover_creation() { #[test] fn test_vertex_cover_with_weights() { let problem = - VertexCovering::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); + MinimumVertexCover::::with_weights(3, vec![(0, 1)], vec![1, 2, 3]); assert_eq!(problem.weights(), vec![1, 2, 3]); assert!(problem.is_weighted()); } #[test] fn test_solution_size_valid() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); // Valid: select vertex 1 (covers both edges) let sol = problem.solution_size(&[0, 1, 0]); @@ -35,7 +35,7 @@ fn test_solution_size_valid() { #[test] fn 
test_solution_size_invalid() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); // Invalid: no vertex selected let sol = problem.solution_size(&[0, 0, 0]); @@ -49,7 +49,7 @@ fn test_solution_size_invalid() { #[test] fn test_brute_force_path() { // Path graph 0-1-2: minimum vertex cover is {1} - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -60,7 +60,7 @@ fn test_brute_force_path() { #[test] fn test_brute_force_triangle() { // Triangle: minimum vertex cover has size 2 - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -75,7 +75,7 @@ fn test_brute_force_triangle() { #[test] fn test_brute_force_weighted() { // Weighted: prefer selecting low-weight vertices - let problem = VertexCovering::::with_weights( + let problem = MinimumVertexCover::::with_weights( 3, vec![(0, 1), (1, 2)], vec![100, 1, 100], @@ -106,20 +106,20 @@ fn test_is_vertex_cover_function() { #[test] fn test_constraints() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); let constraints = problem.constraints(); assert_eq!(constraints.len(), 2); } #[test] fn test_energy_mode() { - let problem = VertexCovering::::new(3, vec![(0, 1)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1)]); assert!(problem.energy_mode().is_minimization()); } #[test] fn test_empty_graph() { - let problem = VertexCovering::::new(3, vec![]); + let problem = MinimumVertexCover::::new(3, vec![]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -130,7 +130,7 @@ fn test_empty_graph() { #[test] fn 
test_single_edge() { - let problem = VertexCovering::::new(2, vec![(0, 1)]); + let problem = MinimumVertexCover::::new(2, vec![(0, 1)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -140,7 +140,7 @@ fn test_single_edge() { #[test] fn test_is_satisfied() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.is_satisfied(&[0, 1, 0])); // Valid cover assert!(problem.is_satisfied(&[1, 0, 1])); // Valid cover @@ -151,11 +151,11 @@ fn test_is_satisfied() { #[test] fn test_complement_relationship() { // For a graph, if S is an independent set, then V\S is a vertex cover - use crate::models::graph::IndependentSet; + use crate::models::graph::MaximumIndependentSet; let edges = vec![(0, 1), (1, 2), (2, 3)]; - let is_problem = IndependentSet::::new(4, edges.clone()); - let vc_problem = VertexCovering::::new(4, edges); + let is_problem = MaximumIndependentSet::::new(4, edges.clone()); + let vc_problem = MinimumVertexCover::::new(4, edges); let solver = BruteForce::new(); @@ -170,14 +170,14 @@ fn test_complement_relationship() { #[test] fn test_objectives() { let problem = - VertexCovering::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + MinimumVertexCover::::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 3); } #[test] fn test_set_weights() { - let mut problem = VertexCovering::::new(3, vec![(0, 1)]); + let mut problem = MinimumVertexCover::::new(3, vec![(0, 1)]); assert!(!problem.is_weighted()); // Initially uniform problem.set_weights(vec![1, 2, 3]); assert!(problem.is_weighted()); @@ -186,7 +186,7 @@ fn test_set_weights() { #[test] fn test_is_weighted_empty() { - let problem = VertexCovering::::new(0, vec![]); + let problem = MinimumVertexCover::::new(0, vec![]); assert!(!problem.is_weighted()); } @@ -199,7 +199,7 @@ fn test_is_vertex_cover_wrong_len() { #[test] fn 
test_from_graph() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = VertexCovering::::from_graph_unit_weights(graph); + let problem = MinimumVertexCover::::from_graph_unit_weights(graph); assert_eq!(problem.num_vertices(), 3); assert_eq!(problem.num_edges(), 2); } @@ -207,14 +207,14 @@ fn test_from_graph() { #[test] fn test_from_graph_with_weights() { let graph = SimpleGraph::new(3, vec![(0, 1), (1, 2)]); - let problem = VertexCovering::::from_graph(graph, vec![1, 2, 3]); + let problem = MinimumVertexCover::::from_graph(graph, vec![1, 2, 3]); assert_eq!(problem.weights(), vec![1, 2, 3]); assert!(problem.is_weighted()); } #[test] fn test_graph_accessor() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); let graph = problem.graph(); assert_eq!(graph.num_vertices(), 3); assert_eq!(graph.num_edges(), 2); @@ -222,7 +222,7 @@ fn test_graph_accessor() { #[test] fn test_has_edge() { - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); assert!(problem.has_edge(0, 1)); assert!(problem.has_edge(1, 0)); // Undirected assert!(problem.has_edge(1, 2)); @@ -231,7 +231,7 @@ fn test_has_edge() { #[test] fn test_variant() { - let variant = VertexCovering::::variant(); + let variant = MinimumVertexCover::::variant(); assert_eq!(variant.len(), 2); assert_eq!(variant[0], ("graph", "SimpleGraph")); assert_eq!(variant[1], ("weight", "i32")); diff --git a/src/unit_tests/models/set/set_packing.rs b/src/unit_tests/models/set/maximum_set_packing.rs similarity index 72% rename from src/unit_tests/models/set/set_packing.rs rename to src/unit_tests/models/set/maximum_set_packing.rs index 416b8b17c..9eb70f06e 100644 --- a/src/unit_tests/models/set/set_packing.rs +++ b/src/unit_tests/models/set/maximum_set_packing.rs @@ -3,21 +3,21 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_set_packing_creation() 
{ - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); assert_eq!(problem.num_sets(), 3); assert_eq!(problem.num_variables(), 3); } #[test] fn test_set_packing_with_weights() { - let problem = SetPacking::with_weights(vec![vec![0, 1], vec![2, 3]], vec![5, 10]); + let problem = MaximumSetPacking::with_weights(vec![vec![0, 1], vec![2, 3]], vec![5, 10]); assert_eq!(problem.weights(), vec![5, 10]); assert!(problem.is_weighted()); } #[test] fn test_sets_overlap() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); assert!(problem.sets_overlap(0, 1)); // Share element 1 assert!(!problem.sets_overlap(0, 2)); // No overlap @@ -26,7 +26,7 @@ fn test_sets_overlap() { #[test] fn test_overlapping_pairs() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); let pairs = problem.overlapping_pairs(); assert_eq!(pairs.len(), 2); @@ -36,7 +36,7 @@ fn test_overlapping_pairs() { #[test] fn test_solution_size_valid() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4, 5]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4, 5]]); // All disjoint, can select all let sol = problem.solution_size(&[1, 1, 1]); @@ -51,7 +51,7 @@ fn test_solution_size_valid() { #[test] fn test_solution_size_invalid() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); // Sets 0 and 1 overlap let sol = problem.solution_size(&[1, 1, 0]); @@ -61,7 +61,7 @@ fn test_solution_size_invalid() { #[test] fn test_brute_force_chain() { // Chain: {0,1}, {1,2}, {2,3} - can select at most 2 non-adjacent sets - let problem = 
SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -75,7 +75,7 @@ fn test_brute_force_chain() { #[test] fn test_brute_force_weighted() { // Weighted: single heavy set vs multiple light sets - let problem = SetPacking::with_weights( + let problem = MaximumSetPacking::with_weights( vec![vec![0, 1, 2, 3], vec![0, 1], vec![2, 3]], vec![5, 3, 3], ); @@ -99,7 +99,7 @@ fn test_is_set_packing_function() { #[test] fn test_constraints() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); let constraints = problem.constraints(); // Only one overlapping pair assert_eq!(constraints.len(), 1); @@ -107,13 +107,13 @@ fn test_constraints() { #[test] fn test_energy_mode() { - let problem = SetPacking::::new(vec![vec![0, 1]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1]]); assert!(problem.energy_mode().is_maximization()); } #[test] fn test_disjoint_sets() { - let problem = SetPacking::::new(vec![vec![0], vec![1], vec![2], vec![3]]); + let problem = MaximumSetPacking::::new(vec![vec![0], vec![1], vec![2], vec![3]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -125,7 +125,7 @@ fn test_disjoint_sets() { #[test] fn test_all_overlapping() { // All sets share element 0 - let problem = SetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![0, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![0, 3]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -137,7 +137,7 @@ fn test_all_overlapping() { #[test] fn test_is_satisfied() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); 
assert!(problem.is_satisfied(&[1, 0, 1])); // Disjoint selection assert!(problem.is_satisfied(&[0, 1, 1])); // Disjoint selection @@ -146,7 +146,7 @@ fn test_is_satisfied() { #[test] fn test_empty_sets() { - let problem = SetPacking::::new(vec![]); + let problem = MaximumSetPacking::::new(vec![]); let sol = problem.solution_size(&[]); assert!(sol.is_valid); assert_eq!(sol.size, 0); @@ -154,7 +154,7 @@ fn test_empty_sets() { #[test] fn test_get_set() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![2, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![2, 3]]); assert_eq!(problem.get_set(0), Some(&vec![0, 1])); assert_eq!(problem.get_set(1), Some(&vec![2, 3])); assert_eq!(problem.get_set(2), None); @@ -162,16 +162,16 @@ fn test_get_set() { #[test] fn test_relationship_to_independent_set() { - // SetPacking on sets is equivalent to IndependentSet on the intersection graph - use crate::models::graph::IndependentSet; + // MaximumSetPacking on sets is equivalent to MaximumIndependentSet on the intersection graph + use crate::models::graph::MaximumIndependentSet; use crate::topology::SimpleGraph; let sets = vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![3, 4]]; - let sp_problem = SetPacking::::new(sets.clone()); + let sp_problem = MaximumSetPacking::::new(sets.clone()); // Build intersection graph let edges = sp_problem.overlapping_pairs(); - let is_problem = IndependentSet::::new(sets.len(), edges); + let is_problem = MaximumIndependentSet::::new(sets.len(), edges); let solver = BruteForce::new(); @@ -186,14 +186,14 @@ fn test_relationship_to_independent_set() { #[test] fn test_objectives() { - let problem = SetPacking::with_weights(vec![vec![0, 1], vec![1, 2]], vec![5, 10]); + let problem = MaximumSetPacking::with_weights(vec![vec![0, 1], vec![1, 2]], vec![5, 10]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 2); } #[test] fn test_set_weights() { - let mut problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2]]); + let 
mut problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2]]); assert!(!problem.is_weighted()); // Initially uniform problem.set_weights(vec![1, 2]); assert!(problem.is_weighted()); @@ -202,7 +202,7 @@ fn test_set_weights() { #[test] fn test_is_weighted_empty() { - let problem = SetPacking::::new(vec![]); + let problem = MaximumSetPacking::::new(vec![]); assert!(!problem.is_weighted()); } @@ -214,7 +214,7 @@ fn test_is_set_packing_wrong_len() { #[test] fn test_problem_size() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![3, 4]]); let size = problem.problem_size(); assert_eq!(size.get("num_sets"), Some(3)); } diff --git a/src/unit_tests/models/set/set_covering.rs b/src/unit_tests/models/set/minimum_set_covering.rs similarity index 73% rename from src/unit_tests/models/set/set_covering.rs rename to src/unit_tests/models/set/minimum_set_covering.rs index 031369368..05d325565 100644 --- a/src/unit_tests/models/set/set_covering.rs +++ b/src/unit_tests/models/set/minimum_set_covering.rs @@ -3,7 +3,7 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_set_covering_creation() { - let problem = SetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MinimumSetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); assert_eq!(problem.universe_size(), 4); assert_eq!(problem.num_sets(), 3); assert_eq!(problem.num_variables(), 3); @@ -11,14 +11,14 @@ fn test_set_covering_creation() { #[test] fn test_set_covering_with_weights() { - let problem = SetCovering::with_weights(3, vec![vec![0, 1], vec![1, 2]], vec![5, 10]); + let problem = MinimumSetCovering::with_weights(3, vec![vec![0, 1], vec![1, 2]], vec![5, 10]); assert_eq!(problem.weights(), vec![5, 10]); assert!(problem.is_weighted()); } #[test] fn test_covered_elements() { - let problem = SetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = 
MinimumSetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); let covered = problem.covered_elements(&[1, 0, 0]); assert!(covered.contains(&0)); @@ -34,7 +34,7 @@ fn test_covered_elements() { #[test] fn test_solution_size_valid() { - let problem = SetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MinimumSetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); // Select first and third sets: covers {0,1} ∪ {2,3} = {0,1,2,3} let sol = problem.solution_size(&[1, 0, 1]); @@ -49,7 +49,7 @@ fn test_solution_size_valid() { #[test] fn test_solution_size_invalid() { - let problem = SetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MinimumSetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3]]); // Select only first set: missing 2, 3 let sol = problem.solution_size(&[1, 0, 0]); @@ -64,7 +64,7 @@ fn test_solution_size_invalid() { fn test_brute_force_simple() { // Universe {0,1,2}, sets: {0,1}, {1,2}, {0,2} // Minimum cover: any 2 sets work - let problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2], vec![0, 2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2], vec![0, 2]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -78,7 +78,7 @@ fn test_brute_force_simple() { fn test_brute_force_weighted() { // Prefer lighter sets let problem = - SetCovering::with_weights(3, vec![vec![0, 1, 2], vec![0, 1], vec![2]], vec![10, 3, 3]); + MinimumSetCovering::with_weights(3, vec![vec![0, 1, 2], vec![0, 1], vec![2]], vec![10, 3, 3]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -99,7 +99,7 @@ fn test_is_set_cover_function() { #[test] fn test_get_set() { - let problem = SetCovering::::new(4, vec![vec![0, 1], vec![2, 3]]); + let problem = MinimumSetCovering::::new(4, vec![vec![0, 1], vec![2, 3]]); assert_eq!(problem.get_set(0), Some(&vec![0, 1])); assert_eq!(problem.get_set(1), Some(&vec![2, 3])); 
assert_eq!(problem.get_set(2), None); @@ -107,13 +107,13 @@ fn test_get_set() { #[test] fn test_energy_mode() { - let problem = SetCovering::::new(2, vec![vec![0, 1]]); + let problem = MinimumSetCovering::::new(2, vec![vec![0, 1]]); assert!(problem.energy_mode().is_minimization()); } #[test] fn test_constraints() { - let problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); let constraints = problem.constraints(); // One constraint per element assert_eq!(constraints.len(), 3); @@ -121,7 +121,7 @@ fn test_constraints() { #[test] fn test_single_set_covers_all() { - let problem = SetCovering::::new(3, vec![vec![0, 1, 2], vec![0], vec![1], vec![2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1, 2], vec![0], vec![1], vec![2]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -133,7 +133,7 @@ fn test_single_set_covers_all() { #[test] fn test_overlapping_sets() { // All sets overlap on element 1 - let problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2], vec![1]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2], vec![1]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -145,7 +145,7 @@ fn test_overlapping_sets() { #[test] fn test_is_satisfied() { - let problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); assert!(problem.is_satisfied(&[1, 1, 0])); // Note: 3 vars needed assert!(!problem.is_satisfied(&[1, 0])); @@ -153,7 +153,7 @@ fn test_is_satisfied() { #[test] fn test_empty_universe() { - let problem = SetCovering::::new(0, vec![]); + let problem = MinimumSetCovering::::new(0, vec![]); let sol = problem.solution_size(&[]); assert!(sol.is_valid); // Empty universe is trivially covered assert_eq!(sol.size, 0); @@ -161,14 +161,14 @@ fn test_empty_universe() { #[test] fn test_objectives() { - let 
problem = SetCovering::with_weights(3, vec![vec![0, 1], vec![1, 2]], vec![5, 10]); + let problem = MinimumSetCovering::with_weights(3, vec![vec![0, 1], vec![1, 2]], vec![5, 10]); let objectives = problem.objectives(); assert_eq!(objectives.len(), 2); } #[test] fn test_set_weights() { - let mut problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); + let mut problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); assert!(!problem.is_weighted()); // Initially uniform problem.set_weights(vec![1, 2]); assert!(problem.is_weighted()); @@ -177,7 +177,7 @@ fn test_set_weights() { #[test] fn test_is_weighted_empty() { - let problem = SetCovering::::new(0, vec![]); + let problem = MinimumSetCovering::::new(0, vec![]); assert!(!problem.is_weighted()); } @@ -189,7 +189,7 @@ fn test_is_set_cover_wrong_len() { #[test] fn test_problem_size() { - let problem = SetCovering::::new(5, vec![vec![0, 1], vec![1, 2], vec![3, 4]]); + let problem = MinimumSetCovering::::new(5, vec![vec![0, 1], vec![1, 2], vec![3, 4]]); let size = problem.problem_size(); assert_eq!(size.get("universe_size"), Some(5)); assert_eq!(size.get("num_sets"), Some(3)); diff --git a/src/unit_tests/property.rs b/src/unit_tests/property.rs index bf70713aa..62f7e8c30 100644 --- a/src/unit_tests/property.rs +++ b/src/unit_tests/property.rs @@ -3,7 +3,7 @@ //! These tests verify mathematical invariants and properties //! that should hold for all valid inputs. -use crate::models::graph::{IndependentSet, VertexCovering}; +use crate::models::graph::{MaximumIndependentSet, MinimumVertexCover}; use crate::prelude::*; use crate::topology::SimpleGraph; use proptest::prelude::*; @@ -39,8 +39,8 @@ proptest! { /// is a minimum vertex cover, and their sizes sum to n. 
#[test] fn independent_set_complement_is_vertex_cover((n, edges) in graph_strategy(8)) { - let is_problem = IndependentSet::::new(n, edges.clone()); - let vc_problem = VertexCovering::::new(n, edges); + let is_problem = MaximumIndependentSet::::new(n, edges.clone()); + let vc_problem = MinimumVertexCover::::new(n, edges); let solver = BruteForce::new(); let is_solutions = solver.find_best(&is_problem); @@ -56,7 +56,7 @@ proptest! { /// Property: Any subset of a valid independent set is also a valid independent set. #[test] fn valid_solution_stays_valid_under_subset((n, edges) in graph_strategy(6)) { - let problem = IndependentSet::::new(n, edges); + let problem = MaximumIndependentSet::::new(n, edges); let solver = BruteForce::new(); for sol in solver.find_best(&problem) { @@ -72,7 +72,7 @@ proptest! { /// Property: A vertex cover with additional vertices is still a valid cover. #[test] fn vertex_cover_superset_is_valid((n, edges) in graph_strategy(6)) { - let problem = VertexCovering::::new(n, edges); + let problem = MinimumVertexCover::::new(n, edges); let solver = BruteForce::new(); for sol in solver.find_best(&problem) { @@ -88,8 +88,8 @@ proptest! { /// Property: The complement of any valid independent set is a valid vertex cover. #[test] fn is_complement_is_vc((n, edges) in graph_strategy(7)) { - let is_problem = IndependentSet::::new(n, edges.clone()); - let vc_problem = VertexCovering::::new(n, edges); + let is_problem = MaximumIndependentSet::::new(n, edges.clone()); + let vc_problem = MinimumVertexCover::::new(n, edges); let solver = BruteForce::new(); // Get all valid independent sets (not just optimal) @@ -104,7 +104,7 @@ proptest! { /// Property: Empty selection is always a valid (but possibly non-optimal) independent set. 
#[test] fn empty_is_always_valid_is((n, edges) in graph_strategy(10)) { - let problem = IndependentSet::::new(n, edges); + let problem = MaximumIndependentSet::::new(n, edges); let empty = vec![0; n]; prop_assert!(problem.solution_size(&empty).is_valid); } @@ -113,7 +113,7 @@ proptest! { /// (when there is at least one vertex). #[test] fn full_is_always_valid_vc((n, edges) in graph_strategy(10)) { - let problem = VertexCovering::::new(n, edges); + let problem = MinimumVertexCover::::new(n, edges); let full = vec![1; n]; prop_assert!(problem.solution_size(&full).is_valid); } @@ -121,7 +121,7 @@ proptest! { /// Property: Solution size is non-negative for independent sets. #[test] fn is_size_non_negative((n, edges) in graph_strategy(8)) { - let problem = IndependentSet::::new(n, edges); + let problem = MaximumIndependentSet::::new(n, edges); let solver = BruteForce::new(); for sol in solver.find_best(&problem) { diff --git a/src/unit_tests/reduction_graph.rs b/src/unit_tests/reduction_graph.rs index 8f2b54fd0..b3ae33546 100644 --- a/src/unit_tests/reduction_graph.rs +++ b/src/unit_tests/reduction_graph.rs @@ -22,9 +22,9 @@ fn test_reduction_graph_discovers_registered_reductions() { ); // Specific reductions should exist - assert!(graph.has_direct_reduction_by_name("IndependentSet", "VertexCovering")); + assert!(graph.has_direct_reduction_by_name("MaximumIndependentSet", "MinimumVertexCover")); assert!(graph.has_direct_reduction_by_name("MaxCut", "SpinGlass")); - assert!(graph.has_direct_reduction_by_name("Satisfiability", "IndependentSet")); + assert!(graph.has_direct_reduction_by_name("Satisfiability", "MaximumIndependentSet")); } #[test] @@ -32,8 +32,8 @@ fn test_bidirectional_reductions() { let graph = ReductionGraph::new(); // IS <-> VC should both be registered - assert!(graph.has_direct_reduction_by_name("IndependentSet", "VertexCovering")); - assert!(graph.has_direct_reduction_by_name("VertexCovering", "IndependentSet")); + 
assert!(graph.has_direct_reduction_by_name("MaximumIndependentSet", "MinimumVertexCover")); + assert!(graph.has_direct_reduction_by_name("MinimumVertexCover", "MaximumIndependentSet")); // MaxCut <-> SpinGlass should both be registered assert!(graph.has_direct_reduction_by_name("MaxCut", "SpinGlass")); @@ -48,8 +48,8 @@ fn test_find_path_with_cost_function() { let input_size = ProblemSize::new(vec![("n", 100), ("m", 200)]); let path = graph.find_cheapest_path( - ("IndependentSet", "SimpleGraph"), - ("VertexCovering", "SimpleGraph"), + ("MaximumIndependentSet", "SimpleGraph"), + ("MinimumVertexCover", "SimpleGraph"), &input_size, &MinimizeSteps, ); @@ -57,8 +57,8 @@ fn test_find_path_with_cost_function() { assert!(path.is_some(), "Should find path from IS to VC"); let path = path.unwrap(); assert_eq!(path.len(), 1, "Should be a 1-step path"); - assert_eq!(path.source(), Some("IndependentSet")); - assert_eq!(path.target(), Some("VertexCovering")); + assert_eq!(path.source(), Some("MaximumIndependentSet")); + assert_eq!(path.target(), Some("MinimumVertexCover")); } #[test] @@ -86,15 +86,15 @@ fn test_problem_size_propagation() { let input_size = ProblemSize::new(vec![("num_vertices", 50), ("num_edges", 100)]); let path = graph.find_cheapest_path( - ("IndependentSet", "SimpleGraph"), - ("VertexCovering", "SimpleGraph"), + ("MaximumIndependentSet", "SimpleGraph"), + ("MinimumVertexCover", "SimpleGraph"), &input_size, &MinimizeSteps, ); assert!(path.is_some()); - let path2 = graph.find_shortest_path_by_name("IndependentSet", "SetPacking"); + let path2 = graph.find_shortest_path_by_name("MaximumIndependentSet", "MaximumSetPacking"); assert!(path2.is_some()); } @@ -147,9 +147,9 @@ fn test_json_export() { fn test_direct_reduction_exists() { let graph = ReductionGraph::new(); - assert!(graph.has_direct_reduction::, VertexCovering>()); - assert!(graph.has_direct_reduction::, IndependentSet>()); - assert!(graph.has_direct_reduction::, SetPacking>()); + 
assert!(graph.has_direct_reduction::, MinimumVertexCover>()); + assert!(graph.has_direct_reduction::, MaximumIndependentSet>()); + assert!(graph.has_direct_reduction::, MaximumSetPacking>()); assert!(graph.has_direct_reduction::, QUBO>()); assert!(graph.has_direct_reduction::, MaxCut>()); } @@ -158,7 +158,7 @@ fn test_direct_reduction_exists() { fn test_find_direct_path() { let graph = ReductionGraph::new(); - let paths = graph.find_paths::, VertexCovering>(); + let paths = graph.find_paths::, MinimumVertexCover>(); assert!(!paths.is_empty()); assert_eq!(paths[0].len(), 1); } @@ -167,11 +167,11 @@ fn test_find_direct_path() { fn test_find_indirect_path() { let graph = ReductionGraph::new(); - // SetPacking -> IndependentSet -> VertexCovering - let paths = graph.find_paths::, VertexCovering>(); + // MaximumSetPacking -> MaximumIndependentSet -> MinimumVertexCover + let paths = graph.find_paths::, MinimumVertexCover>(); assert!(!paths.is_empty()); - let shortest = graph.find_shortest_path::, VertexCovering>(); + let shortest = graph.find_shortest_path::, MinimumVertexCover>(); assert!(shortest.is_some()); assert_eq!(shortest.unwrap().len(), 2); } @@ -180,7 +180,7 @@ fn test_find_indirect_path() { fn test_no_path_exists() { let graph = ReductionGraph::new(); - let paths = graph.find_paths::, SetPacking>(); + let paths = graph.find_paths::, MaximumSetPacking>(); assert!(paths.is_empty()); } @@ -189,10 +189,10 @@ fn test_bidirectional_paths() { let graph = ReductionGraph::new(); assert!(!graph - .find_paths::, VertexCovering>() + .find_paths::, MinimumVertexCover>() .is_empty()); assert!(!graph - .find_paths::, IndependentSet>() + .find_paths::, MaximumIndependentSet>() .is_empty()); assert!(!graph.find_paths::, QUBO>().is_empty()); diff --git a/src/unit_tests/registry/category.rs b/src/unit_tests/registry/category.rs index 5e66f8615..ac78c2b05 100644 --- a/src/unit_tests/registry/category.rs +++ b/src/unit_tests/registry/category.rs @@ -23,7 +23,7 @@ fn 
test_all_subcategories() { assert_eq!(GraphSubcategory::Paths.name(), "paths"); assert_eq!(GraphSubcategory::Structure.name(), "structure"); assert_eq!(GraphSubcategory::Trees.name(), "trees"); - assert_eq!(GraphSubcategory::Matching.name(), "matching"); + assert_eq!(GraphSubcategory::MaximumMatching.name(), "matching"); // Satisfiability assert_eq!(SatisfiabilitySubcategory::Sat.name(), "sat"); @@ -34,7 +34,7 @@ fn test_all_subcategories() { assert_eq!(SetSubcategory::Covering.name(), "covering"); assert_eq!(SetSubcategory::Packing.name(), "packing"); assert_eq!(SetSubcategory::Partition.name(), "partition"); - assert_eq!(SetSubcategory::Matching.name(), "matching"); + assert_eq!(SetSubcategory::MaximumMatching.name(), "matching"); // Optimization assert_eq!(OptimizationSubcategory::Quadratic.name(), "quadratic"); @@ -53,7 +53,7 @@ fn test_all_subcategories() { // String assert_eq!(StringSubcategory::Sequence.name(), "sequence"); - assert_eq!(StringSubcategory::Matching.name(), "matching"); + assert_eq!(StringSubcategory::MaximumMatching.name(), "matching"); assert_eq!(StringSubcategory::Compression.name(), "compression"); // Specialized diff --git a/src/unit_tests/registry/schema.rs b/src/unit_tests/registry/schema.rs index 1af9a90b4..d23f12ef1 100644 --- a/src/unit_tests/registry/schema.rs +++ b/src/unit_tests/registry/schema.rs @@ -19,7 +19,7 @@ fn test_collect_schemas_sorted_by_name() { fn test_collect_schemas_known_problems() { let schemas = collect_schemas(); let names: Vec<&str> = schemas.iter().map(|s| s.name.as_str()).collect(); - for expected in &["IndependentSet", "VertexCovering", "QUBO", "SpinGlass", "Satisfiability", "KColoring"] { + for expected in &["MaximumIndependentSet", "MinimumVertexCover", "QUBO", "SpinGlass", "Satisfiability", "KColoring"] { assert!(names.contains(expected), "Missing schema for {}", expected); } } @@ -27,19 +27,19 @@ fn test_collect_schemas_known_problems() { #[test] fn test_schema_fields_populated() { let schemas = 
collect_schemas(); - let is_schema = schemas.iter().find(|s| s.name == "IndependentSet").unwrap(); + let is_schema = schemas.iter().find(|s| s.name == "MaximumIndependentSet").unwrap(); assert_eq!(is_schema.category, "graph"); - assert!(!is_schema.fields.is_empty(), "IndependentSet should have fields"); + assert!(!is_schema.fields.is_empty(), "MaximumIndependentSet should have fields"); let field_names: Vec<&str> = is_schema.fields.iter().map(|f| f.name.as_str()).collect(); - assert!(field_names.contains(&"graph"), "IndependentSet should have 'graph' field"); - assert!(field_names.contains(&"weights"), "IndependentSet should have 'weights' field"); + assert!(field_names.contains(&"graph"), "MaximumIndependentSet should have 'graph' field"); + assert!(field_names.contains(&"weights"), "MaximumIndependentSet should have 'weights' field"); } #[test] fn test_schema_json_serialization() { let schemas = collect_schemas(); let json = serde_json::to_string(&schemas).expect("Schemas should serialize to JSON"); - assert!(json.contains("IndependentSet")); + assert!(json.contains("MaximumIndependentSet")); assert!(json.contains("graph")); } diff --git a/src/unit_tests/rules/graph.rs b/src/unit_tests/rules/graph.rs index fd3ebe340..60eb5d0db 100644 --- a/src/unit_tests/rules/graph.rs +++ b/src/unit_tests/rules/graph.rs @@ -1,13 +1,13 @@ use super::*; -use crate::models::graph::{IndependentSet, VertexCovering}; -use crate::models::set::SetPacking; +use crate::models::graph::{MaximumIndependentSet, MinimumVertexCover}; +use crate::models::set::MaximumSetPacking; use crate::rules::cost::MinimizeSteps; use crate::topology::SimpleGraph; #[test] fn test_find_direct_path() { let graph = ReductionGraph::new(); - let paths = graph.find_paths::, VertexCovering>(); + let paths = graph.find_paths::, MinimumVertexCover>(); assert!(!paths.is_empty()); assert_eq!(paths[0].type_names.len(), 2); assert_eq!(paths[0].len(), 1); // One reduction step @@ -17,14 +17,14 @@ fn test_find_direct_path() 
{ fn test_find_indirect_path() { let graph = ReductionGraph::new(); // IS -> VC -> IS -> SP or IS -> SP directly - let paths = graph.find_paths::, SetPacking>(); + let paths = graph.find_paths::, MaximumSetPacking>(); assert!(!paths.is_empty()); } #[test] fn test_find_shortest_path() { let graph = ReductionGraph::new(); - let path = graph.find_shortest_path::, SetPacking>(); + let path = graph.find_shortest_path::, MaximumSetPacking>(); assert!(path.is_some()); let path = path.unwrap(); assert_eq!(path.len(), 1); // Direct path exists @@ -33,8 +33,8 @@ fn test_find_shortest_path() { #[test] fn test_has_direct_reduction() { let graph = ReductionGraph::new(); - assert!(graph.has_direct_reduction::, VertexCovering>()); - assert!(graph.has_direct_reduction::, IndependentSet>()); + assert!(graph.has_direct_reduction::, MinimumVertexCover>()); + assert!(graph.has_direct_reduction::, MaximumIndependentSet>()); } #[test] @@ -42,7 +42,7 @@ fn test_is_to_qubo_path() { let graph = ReductionGraph::new(); // IS -> QUBO should now have a direct path let path = - graph.find_shortest_path::, crate::models::optimization::QUBO>(); + graph.find_shortest_path::, crate::models::optimization::QUBO>(); assert!(path.is_some()); assert_eq!(path.unwrap().len(), 1); // Direct path } @@ -85,8 +85,8 @@ fn test_problem_types() { let graph = ReductionGraph::new(); let types = graph.problem_types(); assert!(types.len() >= 5); - assert!(types.iter().any(|t| t.contains("IndependentSet"))); - assert!(types.iter().any(|t| t.contains("VertexCovering"))); + assert!(types.iter().any(|t| t.contains("MaximumIndependentSet"))); + assert!(types.iter().any(|t| t.contains("MinimumVertexCover"))); } #[test] @@ -100,12 +100,12 @@ fn test_graph_statistics() { fn test_reduction_path_methods() { let graph = ReductionGraph::new(); let path = graph - .find_shortest_path::, VertexCovering>() + .find_shortest_path::, MinimumVertexCover>() .unwrap(); assert!(!path.is_empty()); - 
assert!(path.source().unwrap().contains("IndependentSet")); - assert!(path.target().unwrap().contains("VertexCovering")); + assert!(path.source().unwrap().contains("MaximumIndependentSet")); + assert!(path.target().unwrap().contains("MinimumVertexCover")); } #[test] @@ -113,11 +113,11 @@ fn test_bidirectional_paths() { let graph = ReductionGraph::new(); // Forward path - let forward = graph.find_paths::, VertexCovering>(); + let forward = graph.find_paths::, MinimumVertexCover>(); assert!(!forward.is_empty()); // Backward path - let backward = graph.find_paths::, IndependentSet>(); + let backward = graph.find_paths::, MaximumIndependentSet>(); assert!(!backward.is_empty()); } @@ -128,7 +128,7 @@ fn test_to_json() { // Check nodes assert!(json.nodes.len() >= 10); - assert!(json.nodes.iter().any(|n| n.name == "IndependentSet")); + assert!(json.nodes.iter().any(|n| n.name == "MaximumIndependentSet")); assert!(json.nodes.iter().any(|n| n.category == "graph")); assert!(json.nodes.iter().any(|n| n.category == "optimization")); @@ -137,9 +137,9 @@ fn test_to_json() { // Check that IS <-> VC is marked bidirectional let is_vc_edge = json.edges.iter().find(|e| { - (e.source.name.contains("IndependentSet") && e.target.name.contains("VertexCovering")) - || (e.source.name.contains("VertexCovering") - && e.target.name.contains("IndependentSet")) + (e.source.name.contains("MaximumIndependentSet") && e.target.name.contains("MinimumVertexCover")) + || (e.source.name.contains("MinimumVertexCover") + && e.target.name.contains("MaximumIndependentSet")) }); assert!(is_vc_edge.is_some()); assert!(is_vc_edge.unwrap().bidirectional); @@ -153,7 +153,7 @@ fn test_to_json_string() { // Should be valid JSON assert!(json_string.contains("\"nodes\"")); assert!(json_string.contains("\"edges\"")); - assert!(json_string.contains("IndependentSet")); + assert!(json_string.contains("MaximumIndependentSet")); assert!(json_string.contains("\"category\"")); 
assert!(json_string.contains("\"bidirectional\"")); } @@ -162,24 +162,24 @@ fn test_to_json_string() { fn test_categorize_type() { // Graph problems assert_eq!( - ReductionGraph::categorize_type("IndependentSet"), + ReductionGraph::categorize_type("MaximumIndependentSet"), "graph" ); assert_eq!( - ReductionGraph::categorize_type("VertexCovering"), + ReductionGraph::categorize_type("MinimumVertexCover"), "graph" ); assert_eq!(ReductionGraph::categorize_type("MaxCut"), "graph"); assert_eq!(ReductionGraph::categorize_type("KColoring"), "graph"); assert_eq!( - ReductionGraph::categorize_type("DominatingSet"), + ReductionGraph::categorize_type("MinimumDominatingSet"), "graph" ); - assert_eq!(ReductionGraph::categorize_type("Matching"), "graph"); + assert_eq!(ReductionGraph::categorize_type("MaximumMatching"), "graph"); // Set problems - assert_eq!(ReductionGraph::categorize_type("SetPacking"), "set"); - assert_eq!(ReductionGraph::categorize_type("SetCovering"), "set"); + assert_eq!(ReductionGraph::categorize_type("MaximumSetPacking"), "set"); + assert_eq!(ReductionGraph::categorize_type("MinimumSetCovering"), "set"); // Optimization assert_eq!( @@ -212,19 +212,19 @@ fn test_categorize_type() { #[test] fn test_sat_based_reductions() { use crate::models::graph::KColoring; - use crate::models::graph::DominatingSet; + use crate::models::graph::MinimumDominatingSet; use crate::models::satisfiability::Satisfiability; let graph = ReductionGraph::new(); // SAT -> IS - assert!(graph.has_direct_reduction::, IndependentSet>()); + assert!(graph.has_direct_reduction::, MaximumIndependentSet>()); // SAT -> KColoring assert!(graph.has_direct_reduction::, KColoring<3, SimpleGraph, i32>>()); - // SAT -> DominatingSet - assert!(graph.has_direct_reduction::, DominatingSet>()); + // SAT -> MinimumDominatingSet + assert!(graph.has_direct_reduction::, MinimumDominatingSet>()); } #[test] @@ -303,12 +303,12 @@ fn test_empty_path_source_target() { #[test] fn test_single_node_path() { let path = 
ReductionPath { - type_names: vec!["IndependentSet"], + type_names: vec!["MaximumIndependentSet"], }; assert!(!path.is_empty()); assert_eq!(path.len(), 0); // No reductions, just one type - assert_eq!(path.source(), Some("IndependentSet")); - assert_eq!(path.target(), Some("IndependentSet")); + assert_eq!(path.source(), Some("MaximumIndependentSet")); + assert_eq!(path.target(), Some("MaximumIndependentSet")); } #[test] @@ -335,7 +335,7 @@ fn test_to_json_file() { let content = fs::read_to_string(&file_path).unwrap(); assert!(content.contains("\"nodes\"")); assert!(content.contains("\"edges\"")); - assert!(content.contains("IndependentSet")); + assert!(content.contains("MaximumIndependentSet")); // Parse as generic JSON to verify validity let parsed: serde_json::Value = serde_json::from_str(&content).unwrap(); @@ -354,10 +354,10 @@ fn test_has_direct_reduction_unregistered_types() { let graph = ReductionGraph::new(); // Source type not registered - assert!(!graph.has_direct_reduction::>()); + assert!(!graph.has_direct_reduction::>()); // Target type not registered - assert!(!graph.has_direct_reduction::, UnregisteredType>()); + assert!(!graph.has_direct_reduction::, UnregisteredType>()); // Both types not registered assert!(!graph.has_direct_reduction::()); @@ -368,7 +368,7 @@ fn test_find_paths_unregistered_source() { struct UnregisteredType; let graph = ReductionGraph::new(); - let paths = graph.find_paths::>(); + let paths = graph.find_paths::>(); assert!(paths.is_empty()); } @@ -377,7 +377,7 @@ fn test_find_paths_unregistered_target() { struct UnregisteredType; let graph = ReductionGraph::new(); - let paths = graph.find_paths::, UnregisteredType>(); + let paths = graph.find_paths::, UnregisteredType>(); assert!(paths.is_empty()); } @@ -386,7 +386,7 @@ fn test_find_shortest_path_no_path() { struct UnregisteredType; let graph = ReductionGraph::new(); - let path = graph.find_shortest_path::>(); + let path = graph.find_shortest_path::>(); assert!(path.is_none()); } 
@@ -416,9 +416,9 @@ fn test_edge_bidirectionality_detection() { // Verify specific known bidirectional edges let is_vc_bidir = json.edges.iter().any(|e| { - (e.source.name.contains("IndependentSet") && e.target.name.contains("VertexCovering") - || e.source.name.contains("VertexCovering") - && e.target.name.contains("IndependentSet")) + (e.source.name.contains("MaximumIndependentSet") && e.target.name.contains("MinimumVertexCover") + || e.source.name.contains("MinimumVertexCover") + && e.target.name.contains("MaximumIndependentSet")) && e.bidirectional }); assert!(is_vc_bidir, "IS <-> VC should be bidirectional"); @@ -548,10 +548,10 @@ fn test_find_cheapest_path_minimize_steps() { let cost_fn = MinimizeSteps; let input_size = ProblemSize::new(vec![("n", 10), ("m", 20)]); - // Find path from IndependentSet to VertexCovering on SimpleGraph + // Find path from MaximumIndependentSet to MinimumVertexCover on SimpleGraph let path = graph.find_cheapest_path( - ("IndependentSet", "SimpleGraph"), - ("VertexCovering", "SimpleGraph"), + ("MaximumIndependentSet", "SimpleGraph"), + ("MinimumVertexCover", "SimpleGraph"), &input_size, &cost_fn, ); @@ -568,18 +568,18 @@ fn test_find_cheapest_path_multi_step() { let input_size = ProblemSize::new(vec![("num_vertices", 10), ("num_edges", 20)]); // Find multi-step path where all edges use compatible graph types - // IndependentSet (SimpleGraph) -> SetPacking (SimpleGraph) + // MaximumIndependentSet (SimpleGraph) -> MaximumSetPacking (SimpleGraph) // This tests the algorithm can find paths with consistent graph types let path = graph.find_cheapest_path( - ("IndependentSet", "SimpleGraph"), - ("SetPacking", "SimpleGraph"), + ("MaximumIndependentSet", "SimpleGraph"), + ("MaximumSetPacking", "SimpleGraph"), &input_size, &cost_fn, ); assert!(path.is_some()); let path = path.unwrap(); - assert_eq!(path.len(), 1); // Direct path: IndependentSet -> SetPacking + assert_eq!(path.len(), 1); // Direct path: MaximumIndependentSet -> 
MaximumSetPacking } #[test] @@ -588,9 +588,9 @@ fn test_find_cheapest_path_is_to_qubo() { let cost_fn = MinimizeSteps; let input_size = ProblemSize::new(vec![("n", 10)]); - // Direct path from IndependentSet to QUBO + // Direct path from MaximumIndependentSet to QUBO let path = graph.find_cheapest_path( - ("IndependentSet", "SimpleGraph"), + ("MaximumIndependentSet", "SimpleGraph"), ("QUBO", "SimpleGraph"), &input_size, &cost_fn, @@ -608,7 +608,7 @@ fn test_find_cheapest_path_unknown_source() { let path = graph.find_cheapest_path( ("UnknownProblem", "SimpleGraph"), - ("VertexCovering", "SimpleGraph"), + ("MinimumVertexCover", "SimpleGraph"), &input_size, &cost_fn, ); @@ -623,7 +623,7 @@ fn test_find_cheapest_path_unknown_target() { let input_size = ProblemSize::new(vec![("n", 10)]); let path = graph.find_cheapest_path( - ("IndependentSet", "SimpleGraph"), + ("MaximumIndependentSet", "SimpleGraph"), ("UnknownProblem", "SimpleGraph"), &input_size, &cost_fn, @@ -676,8 +676,8 @@ fn test_variant_to_map_empty() { #[test] fn test_make_variant_ref() { let variant: &[(&str, &str)] = &[("graph", "PlanarGraph"), ("weight", "f64")]; - let variant_ref = ReductionGraph::make_variant_ref("IndependentSet", variant); - assert_eq!(variant_ref.name, "IndependentSet"); + let variant_ref = ReductionGraph::make_variant_ref("MaximumIndependentSet", variant); + assert_eq!(variant_ref.name, "MaximumIndependentSet"); assert_eq!( variant_ref.variant.get("graph"), Some(&"PlanarGraph".to_string()) @@ -717,16 +717,16 @@ fn test_json_variant_content() { let json = graph.to_json(); // Find a node and verify its variant contains expected keys - let is_node = json.nodes.iter().find(|n| n.name == "IndependentSet"); - assert!(is_node.is_some(), "IndependentSet node should exist"); + let is_node = json.nodes.iter().find(|n| n.name == "MaximumIndependentSet"); + assert!(is_node.is_some(), "MaximumIndependentSet node should exist"); - // Find an edge involving IndependentSet (could be source or target) 
+ // Find an edge involving MaximumIndependentSet (could be source or target) let is_edge = json .edges .iter() - .find(|e| e.source.name == "IndependentSet" || e.target.name == "IndependentSet"); + .find(|e| e.source.name == "MaximumIndependentSet" || e.target.name == "MaximumIndependentSet"); assert!( is_edge.is_some(), - "Edge involving IndependentSet should exist" + "Edge involving MaximumIndependentSet should exist" ); } diff --git a/src/unit_tests/rules/clique_ilp.rs b/src/unit_tests/rules/maximumclique_ilp.rs similarity index 85% rename from src/unit_tests/rules/clique_ilp.rs rename to src/unit_tests/rules/maximumclique_ilp.rs index b79c28cf8..7db54d2ca 100644 --- a/src/unit_tests/rules/clique_ilp.rs +++ b/src/unit_tests/rules/maximumclique_ilp.rs @@ -3,7 +3,7 @@ use crate::solvers::ILPSolver; /// Check if a configuration represents a valid clique in the graph. /// A clique is valid if all selected vertices are pairwise adjacent. -fn is_valid_clique(problem: &Clique, config: &[usize]) -> bool { +fn is_valid_clique(problem: &MaximumClique, config: &[usize]) -> bool { let selected: Vec = config .iter() .enumerate() @@ -23,7 +23,7 @@ fn is_valid_clique(problem: &Clique, config: &[usize]) -> bool } /// Compute the clique size (sum of weights of selected vertices). -fn clique_size(problem: &Clique, config: &[usize]) -> i32 { +fn clique_size(problem: &MaximumClique, config: &[usize]) -> i32 { let weights = problem.weights(); config .iter() @@ -34,7 +34,7 @@ fn clique_size(problem: &Clique, config: &[usize]) -> i32 { } /// Find maximum clique size by brute force enumeration. 
-fn brute_force_max_clique(problem: &Clique) -> i32 { +fn brute_force_max_clique(problem: &MaximumClique) -> i32 { let n = problem.num_vertices(); let mut max_size = 0; for mask in 0..(1 << n) { @@ -53,7 +53,7 @@ fn brute_force_max_clique(problem: &Clique) -> i32 { fn test_reduction_creates_valid_ilp() { // Triangle graph: 3 vertices, 3 edges (complete graph K3) // All pairs are adjacent, so no constraints should be added - let problem: Clique = Clique::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem: MaximumClique = MaximumClique::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -75,7 +75,7 @@ fn test_reduction_creates_valid_ilp() { #[test] fn test_reduction_with_non_edges() { // Path graph 0-1-2: edges (0,1) and (1,2), non-edge (0,2) - let problem: Clique = Clique::new(3, vec![(0, 1), (1, 2)]); + let problem: MaximumClique = MaximumClique::new(3, vec![(0, 1), (1, 2)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -90,8 +90,8 @@ fn test_reduction_with_non_edges() { #[test] fn test_reduction_weighted() { - let problem: Clique = - Clique::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem: MaximumClique = + MaximumClique::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -108,7 +108,7 @@ fn test_reduction_weighted() { #[test] fn test_ilp_solution_equals_brute_force_triangle() { // Triangle graph (K3): max clique = 3 vertices - let problem: Clique = Clique::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem: MaximumClique = MaximumClique::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -136,7 +136,7 @@ fn test_ilp_solution_equals_brute_force_triangle() { #[test] fn 
test_ilp_solution_equals_brute_force_path() { // Path graph 0-1-2-3: max clique = 2 (any adjacent pair) - let problem: Clique = Clique::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem: MaximumClique = MaximumClique::new(4, vec![(0, 1), (1, 2), (2, 3)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -163,8 +163,8 @@ fn test_ilp_solution_equals_brute_force_weighted() { // Weights: [1, 100, 1] // Max clique by weight: {0, 1} (weight 101) or {1, 2} (weight 101), or just {1} (weight 100) // Since 0-1 and 1-2 are edges, both {0,1} and {1,2} are valid cliques - let problem: Clique = - Clique::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); + let problem: MaximumClique = + MaximumClique::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -185,7 +185,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { #[test] fn test_solution_extraction() { - let problem: Clique = Clique::new(4, vec![(0, 1), (2, 3)]); + let problem: MaximumClique = MaximumClique::new(4, vec![(0, 1), (2, 3)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); // Test that extraction works correctly (1:1 mapping) @@ -199,8 +199,8 @@ fn test_solution_extraction() { #[test] fn test_source_and_target_size() { - let problem: Clique = - Clique::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let problem: MaximumClique = + MaximumClique::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let source_size = reduction.source_size(); @@ -217,7 +217,7 @@ fn test_source_and_target_size() { #[test] fn test_empty_graph() { // Graph with no edges: max clique = 1 (any single vertex) - let problem: Clique = Clique::new(3, vec![]); + let problem: MaximumClique = MaximumClique::new(3, vec![]); let reduction: ReductionCliqueToILP = 
ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -238,8 +238,8 @@ fn test_empty_graph() { #[test] fn test_complete_graph() { // Complete graph K4: max clique = 4 (all vertices) - let problem: Clique = - Clique::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); + let problem: MaximumClique = + MaximumClique::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -261,8 +261,8 @@ fn test_complete_graph() { fn test_bipartite_graph() { // Bipartite graph: 0-2, 0-3, 1-2, 1-3 (two independent sets: {0,1} and {2,3}) // Max clique = 2 (any edge, e.g., {0, 2}) - let problem: Clique = - Clique::new(4, vec![(0, 2), (0, 3), (1, 2), (1, 3)]); + let problem: MaximumClique = + MaximumClique::new(4, vec![(0, 2), (0, 3), (1, 2), (1, 3)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -282,7 +282,7 @@ fn test_bipartite_graph() { fn test_star_graph() { // Star graph: center 0 connected to 1, 2, 3 // Max clique = 2 (center + any leaf) - let problem: Clique = Clique::new(4, vec![(0, 1), (0, 2), (0, 3)]); + let problem: MaximumClique = MaximumClique::new(4, vec![(0, 1), (0, 2), (0, 3)]); let reduction: ReductionCliqueToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); diff --git a/src/unit_tests/rules/independentset_ilp.rs b/src/unit_tests/rules/maximumindependentset_ilp.rs similarity index 87% rename from src/unit_tests/rules/independentset_ilp.rs rename to src/unit_tests/rules/maximumindependentset_ilp.rs index f92aa672e..f3fa09664 100644 --- a/src/unit_tests/rules/independentset_ilp.rs +++ b/src/unit_tests/rules/maximumindependentset_ilp.rs @@ -4,7 +4,7 @@ use crate::solvers::{BruteForce, ILPSolver, Solver}; #[test] fn test_reduction_creates_valid_ilp() { // Triangle graph: 3 vertices, 3 edges - let problem = IndependentSet::::new(3, 
vec![(0, 1), (1, 2), (0, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -31,7 +31,7 @@ fn test_reduction_creates_valid_ilp() { #[test] fn test_reduction_weighted() { - let problem = IndependentSet::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem = MaximumIndependentSet::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -48,7 +48,7 @@ fn test_reduction_weighted() { #[test] fn test_ilp_solution_equals_brute_force_triangle() { // Triangle graph: max IS = 1 vertex - let problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -76,7 +76,7 @@ fn test_ilp_solution_equals_brute_force_triangle() { #[test] fn test_ilp_solution_equals_brute_force_path() { // Path graph 0-1-2-3: max IS = 2 (e.g., {0, 2} or {1, 3} or {0, 3}) - let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -106,7 +106,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { // 0 -- 1 -- 2 // Weights: [1, 100, 1] // Max IS by weight: just vertex 1 (weight 100) beats 0+2 (weight 2) - let problem = IndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); + let problem = MaximumIndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![1, 100, 1]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -129,7 +129,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { #[test] fn 
test_solution_extraction() { - let problem = IndependentSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (2, 3)]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); // Test that extraction works correctly (1:1 mapping) @@ -144,7 +144,7 @@ fn test_solution_extraction() { #[test] fn test_source_and_target_size() { - let problem = IndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let problem = MaximumIndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let source_size = reduction.source_size(); @@ -160,7 +160,7 @@ fn test_source_and_target_size() { #[test] fn test_empty_graph() { // Graph with no edges: all vertices can be selected - let problem = IndependentSet::::new(3, vec![]); + let problem = MaximumIndependentSet::::new(3, vec![]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -182,7 +182,7 @@ fn test_empty_graph() { fn test_complete_graph() { // Complete graph K4: max IS = 1 let problem = - IndependentSet::::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); + MaximumIndependentSet::::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -200,7 +200,7 @@ fn test_complete_graph() { #[test] fn test_solve_reduced() { // Test the ILPSolver::solve_reduced method - let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let ilp_solver = ILPSolver::new(); let solution = ilp_solver @@ -216,7 +216,7 @@ fn test_solve_reduced() { fn test_bipartite_graph() { // Bipartite graph: 0-2, 0-3, 1-2, 1-3 (two independent sets: {0,1} and {2,3}) // With equal weights, max IS = 2 - let problem = IndependentSet::::new(4, vec![(0, 2), (0, 3), (1, 2), 
(1, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 2), (0, 3), (1, 2), (1, 3)]); let reduction: ReductionISToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); diff --git a/src/unit_tests/rules/independentset_setpacking.rs b/src/unit_tests/rules/maximumindependentset_maximumsetpacking.rs similarity index 69% rename from src/unit_tests/rules/independentset_setpacking.rs rename to src/unit_tests/rules/maximumindependentset_maximumsetpacking.rs index 14f827f7a..e6d763083 100644 --- a/src/unit_tests/rules/independentset_setpacking.rs +++ b/src/unit_tests/rules/maximumindependentset_maximumsetpacking.rs @@ -4,8 +4,8 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_is_to_setpacking() { // Triangle graph - let is_problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); - let reduction = ReduceTo::>::reduce_to(&is_problem); + let is_problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let reduction = ReduceTo::>::reduce_to(&is_problem); let sp_problem = reduction.target_problem(); let solver = BruteForce::new(); @@ -32,9 +32,9 @@ fn test_setpacking_to_is() { vec![2, 3], vec![1, 2], // overlaps with both ]; - let sp_problem = SetPacking::::new(sets); + let sp_problem = MaximumSetPacking::::new(sets); let reduction: ReductionSPToIS = - ReduceTo::>::reduce_to(&sp_problem); + ReduceTo::>::reduce_to(&sp_problem); let is_problem = reduction.target_problem(); let solver = BruteForce::new(); @@ -49,14 +49,14 @@ fn test_setpacking_to_is() { #[test] fn test_roundtrip_is_sp_is() { - let original = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let original = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let original_solutions = solver.find_best(&original); // IS -> SP -> IS - let reduction1 = ReduceTo::>::reduce_to(&original); + let reduction1 = ReduceTo::>::reduce_to(&original); let sp = reduction1.target_problem().clone(); - let 
reduction2: ReductionSPToIS = ReduceTo::>::reduce_to(&sp); + let reduction2: ReductionSPToIS = ReduceTo::>::reduce_to(&sp); let roundtrip = reduction2.target_problem(); let roundtrip_solutions = solver.find_best(roundtrip); @@ -69,8 +69,8 @@ fn test_roundtrip_is_sp_is() { #[test] fn test_weighted_reduction() { - let is_problem = IndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![10, 20, 30]); - let reduction = ReduceTo::>::reduce_to(&is_problem); + let is_problem = MaximumIndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![10, 20, 30]); + let reduction = ReduceTo::>::reduce_to(&is_problem); let sp_problem = reduction.target_problem(); // Weights should be preserved @@ -80,8 +80,8 @@ fn test_weighted_reduction() { #[test] fn test_empty_graph() { // No edges means all sets are empty (or we need to handle it) - let is_problem = IndependentSet::::new(3, vec![]); - let reduction = ReduceTo::>::reduce_to(&is_problem); + let is_problem = MaximumIndependentSet::::new(3, vec![]); + let reduction = ReduceTo::>::reduce_to(&is_problem); let sp_problem = reduction.target_problem(); // All sets should be empty (no edges to include) @@ -98,9 +98,9 @@ fn test_empty_graph() { fn test_disjoint_sets() { // Completely disjoint sets let sets = vec![vec![0], vec![1], vec![2]]; - let sp_problem = SetPacking::::new(sets); + let sp_problem = MaximumSetPacking::::new(sets); let reduction: ReductionSPToIS = - ReduceTo::>::reduce_to(&sp_problem); + ReduceTo::>::reduce_to(&sp_problem); let is_problem = reduction.target_problem(); // No edges in the intersection graph @@ -110,8 +110,8 @@ fn test_disjoint_sets() { #[test] fn test_reduction_sizes() { // Test source_size and target_size methods - let is_problem = IndependentSet::::new(4, vec![(0, 1), (1, 2)]); - let reduction = ReduceTo::>::reduce_to(&is_problem); + let is_problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2)]); + let reduction = ReduceTo::>::reduce_to(&is_problem); let source_size = reduction.source_size(); 
let target_size = reduction.target_size(); @@ -122,9 +122,9 @@ fn test_reduction_sizes() { // Test SP to IS sizes let sets = vec![vec![0, 1], vec![2, 3]]; - let sp_problem = SetPacking::::new(sets); + let sp_problem = MaximumSetPacking::::new(sets); let reduction2: ReductionSPToIS = - ReduceTo::>::reduce_to(&sp_problem); + ReduceTo::>::reduce_to(&sp_problem); let source_size2 = reduction2.source_size(); let target_size2 = reduction2.target_size(); diff --git a/src/unit_tests/rules/independentset_qubo.rs b/src/unit_tests/rules/maximumindependentset_qubo.rs similarity index 84% rename from src/unit_tests/rules/independentset_qubo.rs rename to src/unit_tests/rules/maximumindependentset_qubo.rs index f1ed1c116..186fb79c7 100644 --- a/src/unit_tests/rules/independentset_qubo.rs +++ b/src/unit_tests/rules/maximumindependentset_qubo.rs @@ -5,7 +5,7 @@ use crate::solvers::{BruteForce, Solver}; fn test_independentset_to_qubo_closed_loop() { // Path graph: 0-1-2-3 (4 vertices, 3 edges) // Maximum IS = {0, 2} or {1, 3} (size 2) - let is = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let is = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let reduction = ReduceTo::>::reduce_to(&is); let qubo = reduction.target_problem(); @@ -23,7 +23,7 @@ fn test_independentset_to_qubo_closed_loop() { fn test_independentset_to_qubo_triangle() { // Triangle: 0-1-2 (complete graph K3) // Maximum IS = any single vertex (size 1) - let is = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let is = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction = ReduceTo::>::reduce_to(&is); let qubo = reduction.target_problem(); @@ -40,7 +40,7 @@ fn test_independentset_to_qubo_triangle() { #[test] fn test_independentset_to_qubo_empty_graph() { // No edges: all vertices form the IS - let is = IndependentSet::::new(3, vec![]); + let is = MaximumIndependentSet::::new(3, vec![]); let reduction = ReduceTo::>::reduce_to(&is); let qubo = 
reduction.target_problem(); @@ -56,7 +56,7 @@ fn test_independentset_to_qubo_empty_graph() { #[test] fn test_independentset_to_qubo_sizes() { - let is = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let is = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let reduction = ReduceTo::>::reduce_to(&is); let source_size = reduction.source_size(); diff --git a/src/unit_tests/rules/matching_ilp.rs b/src/unit_tests/rules/maximummatching_ilp.rs similarity index 88% rename from src/unit_tests/rules/matching_ilp.rs rename to src/unit_tests/rules/maximummatching_ilp.rs index 6a36176f6..ee1162e0b 100644 --- a/src/unit_tests/rules/matching_ilp.rs +++ b/src/unit_tests/rules/maximummatching_ilp.rs @@ -4,7 +4,7 @@ use crate::solvers::{BruteForce, ILPSolver, Solver}; #[test] fn test_reduction_creates_valid_ilp() { // Triangle graph: 3 vertices, 3 edges - let problem = Matching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -31,7 +31,7 @@ fn test_reduction_creates_valid_ilp() { #[test] fn test_reduction_weighted() { - let problem = Matching::new(3, vec![(0, 1, 5), (1, 2, 10)]); + let problem = MaximumMatching::new(3, vec![(0, 1, 5), (1, 2, 10)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -47,7 +47,7 @@ fn test_reduction_weighted() { #[test] fn test_ilp_solution_equals_brute_force_triangle() { // Triangle graph: max matching = 1 edge - let problem = Matching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -75,7 +75,7 @@ fn test_ilp_solution_equals_brute_force_triangle() { #[test] fn 
test_ilp_solution_equals_brute_force_path() { // Path graph 0-1-2-3: max matching = 2 (edges {0-1, 2-3}) - let problem = Matching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumMatching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -105,7 +105,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { // 0 -- 1 -- 2 // Weights: [100, 1] // Max matching by weight: just edge 0-1 (weight 100) beats edge 1-2 (weight 1) - let problem = Matching::new(3, vec![(0, 1, 100), (1, 2, 1)]); + let problem = MaximumMatching::new(3, vec![(0, 1, 100), (1, 2, 1)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -128,7 +128,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { #[test] fn test_solution_extraction() { - let problem = Matching::::unweighted(4, vec![(0, 1), (2, 3)]); + let problem = MaximumMatching::::unweighted(4, vec![(0, 1), (2, 3)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); // Test that extraction works correctly (1:1 mapping) @@ -144,7 +144,7 @@ fn test_solution_extraction() { #[test] fn test_source_and_target_size() { let problem = - Matching::::unweighted(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + MaximumMatching::::unweighted(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let source_size = reduction.source_size(); @@ -162,7 +162,7 @@ fn test_source_and_target_size() { #[test] fn test_empty_graph() { // Graph with no edges: empty matching - let problem = Matching::::unweighted(3, vec![]); + let problem = MaximumMatching::::unweighted(3, vec![]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -177,7 +177,7 @@ fn test_empty_graph() { #[test] fn test_k4_perfect_matching() { // Complete graph K4: 
can have perfect matching (2 edges covering all 4 vertices) - let problem = Matching::::unweighted( + let problem = MaximumMatching::::unweighted( 4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)], ); @@ -205,7 +205,7 @@ fn test_k4_perfect_matching() { fn test_star_graph() { // Star graph with center vertex 0 connected to 1, 2, 3 // Max matching = 1 (only one edge can be selected) - let problem = Matching::::unweighted(4, vec![(0, 1), (0, 2), (0, 3)]); + let problem = MaximumMatching::::unweighted(4, vec![(0, 1), (0, 2), (0, 3)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -223,7 +223,7 @@ fn test_bipartite_graph() { // Bipartite graph: {0,1} and {2,3} with all cross edges // Max matching = 2 (one perfect matching) let problem = - Matching::::unweighted(4, vec![(0, 2), (0, 3), (1, 2), (1, 3)]); + MaximumMatching::::unweighted(4, vec![(0, 2), (0, 3), (1, 2), (1, 3)]); let reduction: ReductionMatchingToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -239,7 +239,7 @@ fn test_bipartite_graph() { #[test] fn test_solve_reduced() { // Test the ILPSolver::solve_reduced method - let problem = Matching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumMatching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); let ilp_solver = ILPSolver::new(); let solution = ilp_solver diff --git a/src/unit_tests/rules/matching_setpacking.rs b/src/unit_tests/rules/maximummatching_maximumsetpacking.rs similarity index 69% rename from src/unit_tests/rules/matching_setpacking.rs rename to src/unit_tests/rules/maximummatching_maximumsetpacking.rs index dbb15cbbd..af5f04c5c 100644 --- a/src/unit_tests/rules/matching_setpacking.rs +++ b/src/unit_tests/rules/maximummatching_maximumsetpacking.rs @@ -5,8 +5,8 @@ use crate::topology::SimpleGraph; #[test] fn test_matching_to_setpacking_structure() { // Path graph 0-1-2 - let matching = Matching::::unweighted(3, vec![(0, 1), 
(1, 2)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2)]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); // Should have 2 sets (one for each edge) @@ -21,20 +21,20 @@ fn test_matching_to_setpacking_structure() { #[test] fn test_matching_to_setpacking_path() { // Path 0-1-2-3 with unit weights - let matching = Matching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); let solver = BruteForce::new(); let sp_solutions = solver.find_best(sp); - // Extract back to Matching solutions + // Extract back to MaximumMatching solutions let _matching_solutions: Vec<_> = sp_solutions .iter() .map(|s| reduction.extract_solution(s)) .collect(); - // Verify against direct Matching solution + // Verify against direct MaximumMatching solution let direct_solutions = solver.find_best(&matching); // Solutions should have same objective value @@ -47,8 +47,8 @@ fn test_matching_to_setpacking_path() { #[test] fn test_matching_to_setpacking_triangle() { // Triangle graph - let matching = Matching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); let solver = BruteForce::new(); @@ -67,8 +67,8 @@ fn test_matching_to_setpacking_triangle() { fn test_matching_to_setpacking_weighted() { // Weighted edges: heavy edge should win over multiple light edges let matching = - Matching::::new(4, vec![(0, 1, 100), (0, 2, 1), (1, 3, 1)]); - let reduction = ReduceTo::>::reduce_to(&matching); + MaximumMatching::::new(4, vec![(0, 1, 100), (0, 2, 1), (1, 
3, 1)]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); // Weights should be preserved @@ -80,7 +80,7 @@ fn test_matching_to_setpacking_weighted() { // Edge 0-1 (weight 100) alone beats edges 0-2 + 1-3 (weight 2) assert!(sp_solutions.contains(&vec![1, 0, 0])); - // Verify through direct Matching solution + // Verify through direct MaximumMatching solution let direct_solutions = solver.find_best(&matching); assert_eq!(matching.solution_size(&sp_solutions[0]).size, 100); assert_eq!(matching.solution_size(&direct_solutions[0]).size, 100); @@ -88,26 +88,26 @@ fn test_matching_to_setpacking_weighted() { #[test] fn test_matching_to_setpacking_solution_extraction() { - let matching = Matching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); + let reduction = ReduceTo::>::reduce_to(&matching); // Test solution extraction is 1:1 let sp_solution = vec![1, 0, 1]; let matching_solution = reduction.extract_solution(&sp_solution); assert_eq!(matching_solution, vec![1, 0, 1]); - // Verify the extracted solution is valid for original Matching + // Verify the extracted solution is valid for original MaximumMatching assert!(matching.solution_size(&matching_solution).is_valid); } #[test] fn test_matching_to_setpacking_k4() { // Complete graph K4: can have perfect matching (2 edges covering all 4 vertices) - let matching = Matching::::unweighted( + let matching = MaximumMatching::::unweighted( 4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)], ); - let reduction = ReduceTo::>::reduce_to(&matching); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); let solver = BruteForce::new(); @@ -124,8 +124,8 @@ fn test_matching_to_setpacking_k4() { #[test] fn test_matching_to_setpacking_empty() { // Graph with no edges - let matching = Matching::::unweighted(3, vec![]); - let 
reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(3, vec![]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); assert_eq!(sp.num_sets(), 0); @@ -133,8 +133,8 @@ fn test_matching_to_setpacking_empty() { #[test] fn test_matching_to_setpacking_single_edge() { - let matching = Matching::::unweighted(2, vec![(0, 1)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(2, vec![(0, 1)]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); assert_eq!(sp.num_sets(), 1); @@ -150,8 +150,8 @@ fn test_matching_to_setpacking_single_edge() { #[test] fn test_matching_to_setpacking_disjoint_edges() { // Two disjoint edges: 0-1 and 2-3 - let matching = Matching::::unweighted(4, vec![(0, 1), (2, 3)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(4, vec![(0, 1), (2, 3)]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = reduction.target_problem(); let solver = BruteForce::new(); @@ -163,8 +163,8 @@ fn test_matching_to_setpacking_disjoint_edges() { #[test] fn test_reduction_sizes() { - let matching = Matching::::unweighted(5, vec![(0, 1), (1, 2), (2, 3)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(5, vec![(0, 1), (1, 2), (2, 3)]); + let reduction = ReduceTo::>::reduce_to(&matching); let source_size = reduction.source_size(); let target_size = reduction.target_size(); @@ -177,8 +177,8 @@ fn test_reduction_sizes() { #[test] fn test_matching_to_setpacking_star() { // Star graph: center vertex 0 connected to 1, 2, 3 - let matching = Matching::::unweighted(4, vec![(0, 1), (0, 2), (0, 3)]); - let reduction = ReduceTo::>::reduce_to(&matching); + let matching = MaximumMatching::::unweighted(4, vec![(0, 1), (0, 2), (0, 3)]); + let reduction = ReduceTo::>::reduce_to(&matching); let sp = 
reduction.target_problem(); let solver = BruteForce::new(); diff --git a/src/unit_tests/rules/setpacking_ilp.rs b/src/unit_tests/rules/maximumsetpacking_ilp.rs similarity index 87% rename from src/unit_tests/rules/setpacking_ilp.rs rename to src/unit_tests/rules/maximumsetpacking_ilp.rs index e15b2634a..c2e32aacb 100644 --- a/src/unit_tests/rules/setpacking_ilp.rs +++ b/src/unit_tests/rules/maximumsetpacking_ilp.rs @@ -4,7 +4,7 @@ use crate::solvers::{BruteForce, ILPSolver, Solver}; #[test] fn test_reduction_creates_valid_ilp() { // Three sets with two overlapping pairs - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -31,7 +31,7 @@ fn test_reduction_creates_valid_ilp() { #[test] fn test_reduction_weighted() { - let problem = SetPacking::with_weights(vec![vec![0, 1], vec![2, 3]], vec![5, 10]); + let problem = MaximumSetPacking::with_weights(vec![vec![0, 1], vec![2, 3]], vec![5, 10]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -47,7 +47,7 @@ fn test_reduction_weighted() { #[test] fn test_ilp_solution_equals_brute_force_chain() { // Chain: {0,1}, {1,2}, {2,3} - can select at most 2 non-adjacent sets - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -75,7 +75,7 @@ fn test_ilp_solution_equals_brute_force_chain() { #[test] fn test_ilp_solution_equals_brute_force_all_overlap() { // All sets share element 0: can only select one - let problem = SetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![0, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![0, 2], 
vec![0, 3]]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -101,7 +101,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { // Weighted problem: single heavy set vs multiple light sets // Set 0 covers all elements but has weight 5 // Sets 1 and 2 are disjoint and together have weight 6 - let problem = SetPacking::with_weights( + let problem = MaximumSetPacking::with_weights( vec![vec![0, 1, 2, 3], vec![0, 1], vec![2, 3]], vec![5, 3, 3], ); @@ -127,7 +127,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { #[test] fn test_solution_extraction() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4, 5], vec![6, 7]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4, 5], vec![6, 7]]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); // Test that extraction works correctly (1:1 mapping) @@ -142,7 +142,7 @@ fn test_solution_extraction() { #[test] fn test_source_and_target_size() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![3, 4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![3, 4]]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let source_size = reduction.source_size(); @@ -158,7 +158,7 @@ fn test_source_and_target_size() { #[test] fn test_disjoint_sets() { // All sets are disjoint: no overlapping pairs - let problem = SetPacking::::new(vec![vec![0], vec![1], vec![2], vec![3]]); + let problem = MaximumSetPacking::::new(vec![vec![0], vec![1], vec![2], vec![3]]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -178,7 +178,7 @@ fn test_disjoint_sets() { #[test] fn test_empty_sets() { - let problem = SetPacking::::new(vec![]); + let problem = MaximumSetPacking::::new(vec![]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); 
@@ -189,7 +189,7 @@ fn test_empty_sets() { #[test] fn test_solve_reduced() { // Test the ILPSolver::solve_reduced method - let problem = SetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![1, 2], vec![2, 3]]); let ilp_solver = ILPSolver::new(); let solution = ilp_solver @@ -205,7 +205,7 @@ fn test_solve_reduced() { fn test_all_sets_overlap_pairwise() { // All pairs overlap: can only select one set // Sets: {0,1}, {0,2}, {1,2} - each pair shares one element - let problem = SetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![1, 2]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![1, 2]]); let reduction: ReductionSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); diff --git a/src/unit_tests/rules/setpacking_qubo.rs b/src/unit_tests/rules/maximumsetpacking_qubo.rs similarity index 85% rename from src/unit_tests/rules/setpacking_qubo.rs rename to src/unit_tests/rules/maximumsetpacking_qubo.rs index e34d75bf6..71f482152 100644 --- a/src/unit_tests/rules/setpacking_qubo.rs +++ b/src/unit_tests/rules/maximumsetpacking_qubo.rs @@ -6,7 +6,7 @@ fn test_setpacking_to_qubo_closed_loop() { // 3 sets: {0,2}, {1,2}, {0,3} // Overlaps: (0,1) share element 2, (0,2) share element 0 // Max packing: sets 1 and 2 → {1,2} and {0,3} (no overlap) - let sp = SetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); let reduction = ReduceTo::>::reduce_to(&sp); let qubo = reduction.target_problem(); @@ -23,7 +23,7 @@ fn test_setpacking_to_qubo_closed_loop() { #[test] fn test_setpacking_to_qubo_disjoint() { // Disjoint sets: all can be packed - let sp = SetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4]]); let reduction = ReduceTo::>::reduce_to(&sp); let qubo = reduction.target_problem(); @@ -41,7 +41,7 @@ 
fn test_setpacking_to_qubo_disjoint() { #[test] fn test_setpacking_to_qubo_all_overlap() { // All sets overlap: only 1 can be selected - let sp = SetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![0, 3]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![0, 3]]); let reduction = ReduceTo::>::reduce_to(&sp); let qubo = reduction.target_problem(); @@ -57,7 +57,7 @@ fn test_setpacking_to_qubo_all_overlap() { #[test] fn test_setpacking_to_qubo_sizes() { - let sp = SetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); let reduction = ReduceTo::>::reduce_to(&sp); let source_size = reduction.source_size(); diff --git a/src/unit_tests/rules/dominatingset_ilp.rs b/src/unit_tests/rules/minimumdominatingset_ilp.rs similarity index 87% rename from src/unit_tests/rules/dominatingset_ilp.rs rename to src/unit_tests/rules/minimumdominatingset_ilp.rs index 0b9aad191..ff317b23e 100644 --- a/src/unit_tests/rules/dominatingset_ilp.rs +++ b/src/unit_tests/rules/minimumdominatingset_ilp.rs @@ -4,7 +4,7 @@ use crate::solvers::{BruteForce, ILPSolver, Solver}; #[test] fn test_reduction_creates_valid_ilp() { // Triangle graph: 3 vertices, 3 edges - let problem = DominatingSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MinimumDominatingSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -31,7 +31,7 @@ fn test_reduction_creates_valid_ilp() { #[test] fn test_reduction_weighted() { - let problem = DominatingSet::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem = MinimumDominatingSet::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -49,7 +49,7 @@ fn test_reduction_weighted() { fn test_ilp_solution_equals_brute_force_star() { // Star graph: center vertex 0 
connected to all others // Minimum dominating set is just the center (weight 1) - let problem = DominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -77,7 +77,7 @@ fn test_ilp_solution_equals_brute_force_star() { #[test] fn test_ilp_solution_equals_brute_force_path() { // Path graph 0-1-2-3-4: min DS = 2 (e.g., vertices 1 and 3) - let problem = DominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let problem = MinimumDominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -106,7 +106,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { // Star with heavy center: prefer selecting all leaves (total weight 3) // over center (weight 100) let problem = - DominatingSet::with_weights(4, vec![(0, 1), (0, 2), (0, 3)], vec![100, 1, 1, 1]); + MinimumDominatingSet::with_weights(4, vec![(0, 1), (0, 2), (0, 3)], vec![100, 1, 1, 1]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -129,7 +129,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { #[test] fn test_solution_extraction() { - let problem = DominatingSet::::new(4, vec![(0, 1), (2, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (2, 3)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); // Test that extraction works correctly (1:1 mapping) @@ -144,7 +144,7 @@ fn test_solution_extraction() { #[test] fn test_source_and_target_size() { - let problem = DominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let problem = MinimumDominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let source_size = reduction.source_size(); @@ 
-160,7 +160,7 @@ fn test_source_and_target_size() { #[test] fn test_isolated_vertices() { // Graph with isolated vertex 2: it must be in the dominating set - let problem = DominatingSet::::new(3, vec![(0, 1)]); + let problem = MinimumDominatingSet::::new(3, vec![(0, 1)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -179,7 +179,7 @@ fn test_isolated_vertices() { fn test_complete_graph() { // Complete graph K4: min DS = 1 (any vertex dominates all) let problem = - DominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); + MinimumDominatingSet::::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -195,7 +195,7 @@ fn test_complete_graph() { #[test] fn test_single_vertex() { // Single vertex with no edges: must be in dominating set - let problem = DominatingSet::::new(1, vec![]); + let problem = MinimumDominatingSet::::new(1, vec![]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -214,7 +214,7 @@ fn test_single_vertex() { fn test_cycle_graph() { // Cycle C5: 0-1-2-3-4-0 // Minimum dominating set size = 2 - let problem = DominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]); + let problem = MinimumDominatingSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]); let reduction: ReductionDSToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); diff --git a/src/unit_tests/rules/setcovering_ilp.rs b/src/unit_tests/rules/minimumsetcovering_ilp.rs similarity index 87% rename from src/unit_tests/rules/setcovering_ilp.rs rename to src/unit_tests/rules/minimumsetcovering_ilp.rs index 5d2ce3827..ea0fd158c 100644 --- a/src/unit_tests/rules/setcovering_ilp.rs +++ b/src/unit_tests/rules/minimumsetcovering_ilp.rs @@ -4,7 +4,7 @@ use crate::solvers::{BruteForce, ILPSolver, 
Solver}; #[test] fn test_reduction_creates_valid_ilp() { // Universe: {0, 1, 2}, Sets: S0={0,1}, S1={1,2} - let problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -30,7 +30,7 @@ fn test_reduction_creates_valid_ilp() { #[test] fn test_reduction_weighted() { - let problem = SetCovering::with_weights(3, vec![vec![0, 1], vec![1, 2]], vec![5, 10]); + let problem = MinimumSetCovering::with_weights(3, vec![vec![0, 1], vec![1, 2]], vec![5, 10]); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -47,7 +47,7 @@ fn test_reduction_weighted() { fn test_ilp_solution_equals_brute_force_simple() { // Universe: {0, 1, 2}, Sets: S0={0,1}, S1={1,2}, S2={0,2} // Minimum cover: any 2 sets work - let problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2], vec![0, 2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2], vec![0, 2]]); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -79,7 +79,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { // Weights: [10, 3, 3] // Optimal: select S1 and S2 (weight 6) instead of S0 (weight 10) let problem = - SetCovering::with_weights(3, vec![vec![0, 1, 2], vec![0, 1], vec![2]], vec![10, 3, 3]); + MinimumSetCovering::with_weights(3, vec![vec![0, 1, 2], vec![0, 1], vec![2]], vec![10, 3, 3]); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -102,7 +102,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { #[test] fn test_solution_extraction() { - let problem = SetCovering::::new(4, vec![vec![0, 1], vec![2, 3]]); + let problem = MinimumSetCovering::::new(4, vec![vec![0, 1], vec![2, 3]]); let reduction: ReductionSCToILP = 
ReduceTo::::reduce_to(&problem); // Test that extraction works correctly (1:1 mapping) @@ -118,7 +118,7 @@ fn test_solution_extraction() { #[test] fn test_source_and_target_size() { let problem = - SetCovering::::new(5, vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![3, 4]]); + MinimumSetCovering::::new(5, vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![3, 4]]); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); let source_size = reduction.source_size(); @@ -134,7 +134,7 @@ fn test_source_and_target_size() { #[test] fn test_single_set_covers_all() { // Single set covers entire universe - let problem = SetCovering::::new(3, vec![vec![0, 1, 2], vec![0], vec![1], vec![2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1, 2], vec![0], vec![1], vec![2]]); let ilp_solver = ILPSolver::new(); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); @@ -154,7 +154,7 @@ fn test_single_set_covers_all() { #[test] fn test_overlapping_sets() { // All sets overlap on element 1 - let problem = SetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0, 1], vec![1, 2]]); let ilp_solver = ILPSolver::new(); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); @@ -174,7 +174,7 @@ fn test_overlapping_sets() { #[test] fn test_empty_universe() { // Empty universe is trivially covered - let problem = SetCovering::::new(0, vec![]); + let problem = MinimumSetCovering::::new(0, vec![]); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -186,7 +186,7 @@ fn test_empty_universe() { fn test_solve_reduced() { // Test the ILPSolver::solve_reduced method let problem = - SetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![0, 3]]); + MinimumSetCovering::::new(4, vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![0, 3]]); let ilp_solver = ILPSolver::new(); let solution = ilp_solver @@ -205,7 +205,7 @@ fn test_constraint_structure() { 
// Element 0 is in S0, S1 -> constraint: x0 + x1 >= 1 // Element 1 is in S1, S2 -> constraint: x1 + x2 >= 1 // Element 2 is in S2 -> constraint: x2 >= 1 - let problem = SetCovering::::new(3, vec![vec![0], vec![0, 1], vec![1, 2]]); + let problem = MinimumSetCovering::::new(3, vec![vec![0], vec![0, 1], vec![1, 2]]); let reduction: ReductionSCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); diff --git a/src/unit_tests/rules/vertexcovering_ilp.rs b/src/unit_tests/rules/minimumvertexcover_ilp.rs similarity index 87% rename from src/unit_tests/rules/vertexcovering_ilp.rs rename to src/unit_tests/rules/minimumvertexcover_ilp.rs index 108cc042d..aee8b1ecd 100644 --- a/src/unit_tests/rules/vertexcovering_ilp.rs +++ b/src/unit_tests/rules/minimumvertexcover_ilp.rs @@ -4,7 +4,7 @@ use crate::solvers::{BruteForce, ILPSolver, Solver}; #[test] fn test_reduction_creates_valid_ilp() { // Triangle graph: 3 vertices, 3 edges - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -31,7 +31,7 @@ fn test_reduction_creates_valid_ilp() { #[test] fn test_reduction_weighted() { - let problem = VertexCovering::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); + let problem = MinimumVertexCover::with_weights(3, vec![(0, 1)], vec![5, 10, 15]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -48,7 +48,7 @@ fn test_reduction_weighted() { #[test] fn test_ilp_solution_equals_brute_force_triangle() { // Triangle graph: min VC = 2 vertices - let problem = VertexCovering::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -76,7 +76,7 @@ fn 
test_ilp_solution_equals_brute_force_triangle() { #[test] fn test_ilp_solution_equals_brute_force_path() { // Path graph 0-1-2-3: min VC = 2 (e.g., {1, 2} or {0, 2} or {1, 3}) - let problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -106,7 +106,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { // 0 -- 1 -- 2 // Weights: [100, 1, 100] // Min VC by weight: just vertex 1 (weight 1) beats 0+2 (weight 200) - let problem = VertexCovering::with_weights(3, vec![(0, 1), (1, 2)], vec![100, 1, 100]); + let problem = MinimumVertexCover::with_weights(3, vec![(0, 1), (1, 2)], vec![100, 1, 100]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -129,7 +129,7 @@ fn test_ilp_solution_equals_brute_force_weighted() { #[test] fn test_solution_extraction() { - let problem = VertexCovering::::new(4, vec![(0, 1), (2, 3)]); + let problem = MinimumVertexCover::::new(4, vec![(0, 1), (2, 3)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); // Test that extraction works correctly (1:1 mapping) @@ -144,7 +144,7 @@ fn test_solution_extraction() { #[test] fn test_source_and_target_size() { - let problem = VertexCovering::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let problem = MinimumVertexCover::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let source_size = reduction.source_size(); @@ -160,7 +160,7 @@ fn test_source_and_target_size() { #[test] fn test_empty_graph() { // Graph with no edges: empty cover is valid - let problem = VertexCovering::::new(3, vec![]); + let problem = MinimumVertexCover::::new(3, vec![]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -182,7 +182,7 @@ fn 
test_empty_graph() { fn test_complete_graph() { // Complete graph K4: min VC = 3 (all but one vertex) let problem = - VertexCovering::::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); + MinimumVertexCover::::new(4, vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -200,7 +200,7 @@ fn test_complete_graph() { #[test] fn test_solve_reduced() { // Test the ILPSolver::solve_reduced method - let problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let ilp_solver = ILPSolver::new(); let solution = ilp_solver @@ -216,7 +216,7 @@ fn test_solve_reduced() { fn test_bipartite_graph() { // Bipartite graph: 0-2, 0-3, 1-2, 1-3 (complete bipartite K_{2,2}) // Min VC = 2 (either side of the bipartition) - let problem = VertexCovering::::new(4, vec![(0, 2), (0, 3), (1, 2), (1, 3)]); + let problem = MinimumVertexCover::::new(4, vec![(0, 2), (0, 3), (1, 2), (1, 3)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -236,7 +236,7 @@ fn test_bipartite_graph() { #[test] fn test_single_edge() { // Single edge: min VC = 1 - let problem = VertexCovering::::new(2, vec![(0, 1)]); + let problem = MinimumVertexCover::::new(2, vec![(0, 1)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -258,7 +258,7 @@ fn test_single_edge() { fn test_star_graph() { // Star graph: center vertex 0 connected to all others // Min VC = 1 (just the center) - let problem = VertexCovering::::new(5, vec![(0, 1), (0, 2), (0, 3), (0, 4)]); + let problem = MinimumVertexCover::::new(5, vec![(0, 1), (0, 2), (0, 3), (0, 4)]); let reduction: ReductionVCToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); diff --git a/src/unit_tests/rules/vertexcovering_independentset.rs 
b/src/unit_tests/rules/minimumvertexcover_maximumindependentset.rs similarity index 67% rename from src/unit_tests/rules/vertexcovering_independentset.rs rename to src/unit_tests/rules/minimumvertexcover_maximumindependentset.rs index 8fafe8e5c..bbb5bf807 100644 --- a/src/unit_tests/rules/vertexcovering_independentset.rs +++ b/src/unit_tests/rules/minimumvertexcover_maximumindependentset.rs @@ -4,8 +4,8 @@ use crate::solvers::{BruteForce, Solver}; #[test] fn test_is_to_vc_reduction() { // Triangle graph: max IS = 1, min VC = 2 - let is_problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); - let reduction = ReduceTo::>::reduce_to(&is_problem); + let is_problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let reduction = ReduceTo::>::reduce_to(&is_problem); let vc_problem = reduction.target_problem(); // Solve the VC problem @@ -28,8 +28,8 @@ fn test_is_to_vc_reduction() { #[test] fn test_vc_to_is_reduction() { // Path graph 0-1-2: min VC = 1 (just vertex 1), max IS = 2 (vertices 0 and 2) - let vc_problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let is_problem = reduction.target_problem(); let solver = BruteForce::new(); @@ -49,14 +49,14 @@ fn test_vc_to_is_reduction() { #[test] fn test_roundtrip_is_vc_is() { - let original = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let original = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let original_solutions = solver.find_best(&original); // IS -> VC -> IS - let reduction1 = ReduceTo::>::reduce_to(&original); + let reduction1 = ReduceTo::>::reduce_to(&original); let vc = reduction1.target_problem().clone(); - let reduction2 = ReduceTo::>::reduce_to(&vc); + let reduction2 = ReduceTo::>::reduce_to(&vc); let roundtrip = 
reduction2.target_problem(); let roundtrip_solutions = solver.find_best(roundtrip); @@ -70,8 +70,8 @@ fn test_roundtrip_is_vc_is() { #[test] fn test_weighted_reduction() { // Test with weighted problems - let is_problem = IndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![10, 20, 30]); - let reduction = ReduceTo::>::reduce_to(&is_problem); + let is_problem = MaximumIndependentSet::with_weights(3, vec![(0, 1), (1, 2)], vec![10, 20, 30]); + let reduction = ReduceTo::>::reduce_to(&is_problem); let vc_problem = reduction.target_problem(); // Weights should be preserved @@ -80,8 +80,8 @@ fn test_weighted_reduction() { #[test] fn test_source_and_target_size() { - let is_problem = IndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); - let reduction = ReduceTo::>::reduce_to(&is_problem); + let is_problem = MaximumIndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let reduction = ReduceTo::>::reduce_to(&is_problem); let source_size = reduction.source_size(); let target_size = reduction.target_size(); diff --git a/src/unit_tests/rules/vertexcovering_setcovering.rs b/src/unit_tests/rules/minimumvertexcover_minimumsetcovering.rs similarity index 75% rename from src/unit_tests/rules/vertexcovering_setcovering.rs rename to src/unit_tests/rules/minimumvertexcover_minimumsetcovering.rs index c69a4c91e..852324291 100644 --- a/src/unit_tests/rules/vertexcovering_setcovering.rs +++ b/src/unit_tests/rules/minimumvertexcover_minimumsetcovering.rs @@ -8,8 +8,8 @@ fn test_vc_to_sc_basic() { // Vertex 0 covers edge 0 // Vertex 1 covers edges 0 and 1 // Vertex 2 covers edge 1 - let vc_problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_problem = reduction.target_problem(); // Check the sets are constructed correctly @@ -28,8 +28,8 @@ fn test_vc_to_sc_basic() { fn 
test_vc_to_sc_triangle() { // Triangle graph: 3 vertices, 3 edges // Edge indices: (0,1)->0, (1,2)->1, (0,2)->2 - let vc_problem = VertexCovering::::new(3, vec![(0, 1), (1, 2), (0, 2)]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_problem = reduction.target_problem(); assert_eq!(sc_problem.universe_size(), 3); @@ -44,15 +44,15 @@ fn test_vc_to_sc_triangle() { #[test] fn test_vc_to_sc_solution_extraction() { - let vc_problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_problem = reduction.target_problem(); - // Solve the SetCovering problem + // Solve the MinimumSetCovering problem let solver = BruteForce::new(); let sc_solutions = solver.find_best(sc_problem); - // Extract solutions back to VertexCovering + // Extract solutions back to MinimumVertexCover let vc_solutions: Vec<_> = sc_solutions .iter() .map(|s| reduction.extract_solution(s)) @@ -71,7 +71,7 @@ fn test_vc_to_sc_solution_extraction() { #[test] fn test_vc_to_sc_optimality_preservation() { // Test that optimal solutions are preserved through reduction - let vc_problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let vc_problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); // Solve VC directly @@ -79,7 +79,7 @@ fn test_vc_to_sc_optimality_preservation() { let direct_size = direct_solutions[0].iter().sum::(); // Solve via reduction - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_solutions = solver.find_best(reduction.target_problem()); let reduced_solutions: Vec<_> = sc_solutions .iter() @@ -94,8 +94,8 @@ fn 
test_vc_to_sc_optimality_preservation() { #[test] fn test_vc_to_sc_weighted() { // Weighted problem: weights should be preserved - let vc_problem = VertexCovering::with_weights(3, vec![(0, 1), (1, 2)], vec![10, 1, 10]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::with_weights(3, vec![(0, 1), (1, 2)], vec![10, 1, 10]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_problem = reduction.target_problem(); // Weights should be preserved @@ -114,8 +114,8 @@ fn test_vc_to_sc_weighted() { #[test] fn test_vc_to_sc_empty_graph() { // Graph with no edges - let vc_problem = VertexCovering::::new(3, vec![]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(3, vec![]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_problem = reduction.target_problem(); assert_eq!(sc_problem.universe_size(), 0); @@ -129,8 +129,8 @@ fn test_vc_to_sc_empty_graph() { #[test] fn test_vc_to_sc_source_target_size() { - let vc_problem = VertexCovering::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let source_size = reduction.source_size(); let target_size = reduction.target_size(); @@ -145,8 +145,8 @@ fn test_vc_to_sc_source_target_size() { fn test_vc_to_sc_star_graph() { // Star graph: center vertex 0 connected to all others // Edges: (0,1), (0,2), (0,3) - let vc_problem = VertexCovering::::new(4, vec![(0, 1), (0, 2), (0, 3)]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(4, vec![(0, 1), (0, 2), (0, 3)]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_problem = reduction.target_problem(); // Vertex 0 should cover all 3 edges @@ -165,8 +165,8 @@ fn test_vc_to_sc_star_graph() { #[test] fn 
test_vc_to_sc_all_solutions_valid() { // Ensure all solutions extracted from SC are valid VC solutions - let vc_problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (0, 2), (2, 3)]); - let reduction = ReduceTo::>::reduce_to(&vc_problem); + let vc_problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (0, 2), (2, 3)]); + let reduction = ReduceTo::>::reduce_to(&vc_problem); let sc_problem = reduction.target_problem(); let solver = BruteForce::new(); diff --git a/src/unit_tests/rules/vertexcovering_qubo.rs b/src/unit_tests/rules/minimumvertexcover_qubo.rs similarity index 84% rename from src/unit_tests/rules/vertexcovering_qubo.rs rename to src/unit_tests/rules/minimumvertexcover_qubo.rs index 83416b1cf..1d2c5e092 100644 --- a/src/unit_tests/rules/vertexcovering_qubo.rs +++ b/src/unit_tests/rules/minimumvertexcover_qubo.rs @@ -5,7 +5,7 @@ use crate::solvers::{BruteForce, Solver}; fn test_vertexcovering_to_qubo_closed_loop() { // Cycle C4: 0-1-2-3-0 (4 vertices, 4 edges) // Minimum VC = 2 vertices (e.g., {0, 2} or {1, 3}) - let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); + let vc = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); let reduction = ReduceTo::>::reduce_to(&vc); let qubo = reduction.target_problem(); @@ -22,7 +22,7 @@ fn test_vertexcovering_to_qubo_closed_loop() { #[test] fn test_vertexcovering_to_qubo_triangle() { // Triangle K3: minimum VC = 2 (any two vertices) - let vc = VertexCovering::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let vc = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2), (0, 2)]); let reduction = ReduceTo::>::reduce_to(&vc); let qubo = reduction.target_problem(); @@ -40,7 +40,7 @@ fn test_vertexcovering_to_qubo_triangle() { fn test_vertexcovering_to_qubo_star() { // Star graph: center vertex 0 connected to 1, 2, 3 // Minimum VC = {0} (just the center) - let vc = VertexCovering::::new(4, vec![(0, 1), (0, 2), (0, 3)]); + let vc = MinimumVertexCover::::new(4, vec![(0, 1), (0, 2), (0, 3)]); 
let reduction = ReduceTo::>::reduce_to(&vc); let qubo = reduction.target_problem(); @@ -56,7 +56,7 @@ fn test_vertexcovering_to_qubo_star() { #[test] fn test_vertexcovering_to_qubo_sizes() { - let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); + let vc = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); let reduction = ReduceTo::>::reduce_to(&vc); let source_size = reduction.source_size(); diff --git a/src/unit_tests/rules/registry.rs b/src/unit_tests/rules/registry.rs index 5e7eb3d33..7b129be22 100644 --- a/src/unit_tests/rules/registry.rs +++ b/src/unit_tests/rules/registry.rs @@ -120,5 +120,5 @@ fn test_reduction_entries_registered() { // Check specific reductions exist assert!(entries .iter() - .any(|e| e.source_name == "IndependentSet" && e.target_name == "VertexCovering")); + .any(|e| e.source_name == "MaximumIndependentSet" && e.target_name == "MinimumVertexCover")); } diff --git a/src/unit_tests/rules/sat_independentset.rs b/src/unit_tests/rules/sat_maximumindependentset.rs similarity index 89% rename from src/unit_tests/rules/sat_independentset.rs rename to src/unit_tests/rules/sat_maximumindependentset.rs index 235e2f661..b7c198ab2 100644 --- a/src/unit_tests/rules/sat_independentset.rs +++ b/src/unit_tests/rules/sat_maximumindependentset.rs @@ -42,7 +42,7 @@ fn test_boolvar_complement() { fn test_simple_sat_to_is() { // Simple SAT: (x1) - one clause with one literal let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); // Should have 1 vertex (one literal) @@ -57,7 +57,7 @@ fn test_two_clause_sat_to_is() { // This is unsatisfiable let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1]), CNFClause::new(vec![-1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); // 
Should have 2 vertices @@ -85,7 +85,7 @@ fn test_satisfiable_formula() { CNFClause::new(vec![1, -2]), // x1 OR NOT x2 ], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); // Should have 6 vertices (2 literals per clause, 3 clauses) @@ -126,7 +126,7 @@ fn test_unsatisfiable_formula() { // SAT: (x1) AND (NOT x1) - unsatisfiable let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1]), CNFClause::new(vec![-1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); let solver = BruteForce::new(); @@ -154,7 +154,7 @@ fn test_three_sat_example() { ], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); // Should have 9 vertices (3 literals per clause, 3 clauses) @@ -179,7 +179,7 @@ fn test_three_sat_example() { fn test_extract_solution_basic() { // Simple case: (x1 OR x2) let sat = Satisfiability::::new(2, vec![CNFClause::new(vec![1, 2])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); // Select vertex 0 (literal x1) let is_sol = vec![1, 0]; @@ -196,7 +196,7 @@ fn test_extract_solution_basic() { fn test_extract_solution_with_negation() { // (NOT x1) - selecting NOT x1 means x1 should be false let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![-1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_sol = vec![1]; let sat_sol = reduction.extract_solution(&is_sol); @@ -207,7 +207,7 @@ fn test_extract_solution_with_negation() { fn test_clique_edges_in_clause() { // A clause with 3 literals should form a clique (3 edges) let sat = Satisfiability::::new(3, vec![CNFClause::new(vec![1, 2, 3])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); 
let is_problem = reduction.target_problem(); // 3 vertices, 3 edges (complete graph K3) @@ -228,7 +228,7 @@ fn test_complement_edges_across_clauses() { CNFClause::new(vec![2]), ], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); assert_eq!(is_problem.num_vertices(), 3); @@ -241,7 +241,7 @@ fn test_source_and_target_size() { 3, vec![CNFClause::new(vec![1, 2]), CNFClause::new(vec![-1, 3])], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let source_size = reduction.source_size(); let target_size = reduction.target_size(); @@ -255,7 +255,7 @@ fn test_source_and_target_size() { fn test_empty_sat() { // Empty SAT (trivially satisfiable) let sat = Satisfiability::::new(0, vec![]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); assert_eq!(is_problem.num_vertices(), 0); @@ -276,7 +276,7 @@ fn test_sat_is_solution_correspondence() { let direct_sat_solutions = sat_solver.find_best(&sat); // Solve via reduction - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let is_problem = reduction.target_problem(); let is_solutions = sat_solver.find_best(is_problem); @@ -303,7 +303,7 @@ fn test_sat_is_solution_correspondence() { #[test] fn test_literals_accessor() { let sat = Satisfiability::::new(2, vec![CNFClause::new(vec![1, -2])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let literals = reduction.literals(); assert_eq!(literals.len(), 2); diff --git a/src/unit_tests/rules/sat_dominatingset.rs b/src/unit_tests/rules/sat_minimumdominatingset.rs similarity index 88% rename from src/unit_tests/rules/sat_dominatingset.rs rename to src/unit_tests/rules/sat_minimumdominatingset.rs index 1b188267b..e36eb4401 100644 --- 
a/src/unit_tests/rules/sat_dominatingset.rs +++ b/src/unit_tests/rules/sat_minimumdominatingset.rs @@ -6,7 +6,7 @@ use crate::solvers::{BruteForce, Solver}; fn test_simple_sat_to_ds() { // Simple SAT: (x1) - one variable, one clause let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); // Should have 3 vertices (variable gadget) + 1 clause vertex = 4 vertices @@ -22,7 +22,7 @@ fn test_simple_sat_to_ds() { fn test_two_variable_sat_to_ds() { // SAT: (x1 OR x2) let sat = Satisfiability::::new(2, vec![CNFClause::new(vec![1, 2])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); // 2 variables * 3 = 6 gadget vertices + 1 clause vertex = 7 @@ -46,7 +46,7 @@ fn test_satisfiable_formula() { CNFClause::new(vec![-1, 2]), // NOT x1 OR x2 ], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); // Solve the dominating set problem @@ -75,7 +75,7 @@ fn test_unsatisfiable_formula() { // SAT: (x1) AND (NOT x1) - unsatisfiable let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1]), CNFClause::new(vec![-1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); // Vertices: 3 (gadget) + 2 (clauses) = 5 @@ -120,7 +120,7 @@ fn test_three_sat_example() { ], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); // 3 variables * 3 = 9 gadget vertices + 3 clauses = 12 @@ -153,7 +153,7 @@ fn test_three_sat_example() { fn test_extract_solution_positive_literal() { // (x1) - select positive literal let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1])]); - let 
reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); // Solution: select vertex 0 (positive literal x1) // This dominates vertices 1, 2 (gadget) and vertex 3 (clause) @@ -166,7 +166,7 @@ fn test_extract_solution_positive_literal() { fn test_extract_solution_negative_literal() { // (NOT x1) - select negative literal let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![-1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); // Solution: select vertex 1 (negative literal NOT x1) // This dominates vertices 0, 2 (gadget) and vertex 3 (clause) @@ -179,7 +179,7 @@ fn test_extract_solution_negative_literal() { fn test_extract_solution_dummy() { // (x1 OR x2) where only x1 matters let sat = Satisfiability::::new(2, vec![CNFClause::new(vec![1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); // Select: vertex 0 (x1 positive) and vertex 5 (x2 dummy) // Vertex 0 dominates: itself, 1, 2, and clause 6 @@ -195,7 +195,7 @@ fn test_source_and_target_size() { 3, vec![CNFClause::new(vec![1, 2]), CNFClause::new(vec![-1, 3])], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let source_size = reduction.source_size(); let target_size = reduction.target_size(); @@ -210,7 +210,7 @@ fn test_source_and_target_size() { fn test_empty_sat() { // Empty SAT (trivially satisfiable) let sat = Satisfiability::::new(0, vec![]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); assert_eq!(ds_problem.num_vertices(), 0); @@ -223,7 +223,7 @@ fn test_empty_sat() { fn test_multiple_literals_same_variable() { // Clause with repeated variable: (x1 OR NOT x1) - tautology let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1, -1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = 
ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); // 3 gadget vertices + 1 clause vertex = 4 @@ -248,7 +248,7 @@ fn test_sat_ds_solution_correspondence() { let direct_sat_solutions = sat_solver.find_best(&sat); // Solve via reduction - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); let ds_solutions = sat_solver.find_best(ds_problem); @@ -283,7 +283,7 @@ fn test_sat_ds_solution_correspondence() { #[test] fn test_accessors() { let sat = Satisfiability::::new(2, vec![CNFClause::new(vec![1, -2])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); assert_eq!(reduction.num_literals(), 2); assert_eq!(reduction.num_clauses(), 1); @@ -293,7 +293,7 @@ fn test_accessors() { fn test_extract_solution_too_many_selected() { // Test that extract_solution handles invalid (non-minimal) dominating sets let sat = Satisfiability::::new(1, vec![CNFClause::new(vec![1])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); // Select all 4 vertices (more than num_literals=1) let ds_sol = vec![1, 1, 1, 1]; @@ -306,7 +306,7 @@ fn test_extract_solution_too_many_selected() { fn test_negated_variable_connection() { // (NOT x1 OR NOT x2) - both negated let sat = Satisfiability::::new(2, vec![CNFClause::new(vec![-1, -2])]); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ds_problem = reduction.target_problem(); // 2 * 3 = 6 gadget vertices + 1 clause = 7 diff --git a/src/unit_tests/testing/macros.rs b/src/unit_tests/testing/macros.rs index d4cd0894f..8afe1fdb4 100644 --- a/src/unit_tests/testing/macros.rs +++ b/src/unit_tests/testing/macros.rs @@ -5,7 +5,7 @@ use crate::topology::SimpleGraph; #[test] fn test_quick_problem_test_macro() { quick_problem_test!( - IndependentSet, + MaximumIndependentSet, new(3, vec![(0, 1), (1, 2)]), solution: 
[1, 0, 1], expected_size: 2, @@ -13,7 +13,7 @@ fn test_quick_problem_test_macro() { ); quick_problem_test!( - IndependentSet, + MaximumIndependentSet, new(3, vec![(0, 1), (1, 2)]), solution: [1, 1, 0], expected_size: 2, @@ -24,8 +24,8 @@ fn test_quick_problem_test_macro() { // Test the complement_test macro complement_test! { name: test_is_vc_complement, - problem_a: IndependentSet, - problem_b: VertexCovering, + problem_a: MaximumIndependentSet, + problem_b: MinimumVertexCover, test_graphs: [ (3, [(0, 1), (1, 2)]), (4, [(0, 1), (1, 2), (2, 3), (0, 3)]), diff --git a/src/unit_tests/trait_consistency.rs b/src/unit_tests/trait_consistency.rs index 038964d9c..0a9f196a3 100644 --- a/src/unit_tests/trait_consistency.rs +++ b/src/unit_tests/trait_consistency.rs @@ -43,18 +43,18 @@ where #[test] fn test_all_problems_implement_trait_correctly() { check_problem_trait( - &IndependentSet::::new(3, vec![(0, 1)]), - "IndependentSet", + &MaximumIndependentSet::::new(3, vec![(0, 1)]), + "MaximumIndependentSet", ); check_problem_trait( - &VertexCovering::::new(3, vec![(0, 1)]), - "VertexCovering", + &MinimumVertexCover::::new(3, vec![(0, 1)]), + "MinimumVertexCover", ); check_problem_trait(&MaxCut::::new(3, vec![(0, 1, 1)]), "MaxCut"); check_problem_trait(&KColoring::<3, SimpleGraph, i32>::new(3, vec![(0, 1)]), "KColoring"); - check_problem_trait(&DominatingSet::::new(3, vec![(0, 1)]), "DominatingSet"); + check_problem_trait(&MinimumDominatingSet::::new(3, vec![(0, 1)]), "MinimumDominatingSet"); check_problem_trait(&MaximalIS::::new(3, vec![(0, 1)]), "MaximalIS"); - check_problem_trait(&Matching::::new(3, vec![(0, 1, 1)]), "Matching"); + check_problem_trait(&MaximumMatching::::new(3, vec![(0, 1, 1)]), "MaximumMatching"); check_problem_trait( &Satisfiability::::new(3, vec![CNFClause::new(vec![1])]), "SAT", @@ -64,8 +64,8 @@ fn test_all_problems_implement_trait_correctly() { "SpinGlass", ); check_problem_trait(&QUBO::from_matrix(vec![vec![1.0; 3]; 3]), "QUBO"); - 
check_problem_trait(&SetCovering::::new(3, vec![vec![0, 1]]), "SetCovering"); - check_problem_trait(&SetPacking::::new(vec![vec![0, 1]]), "SetPacking"); + check_problem_trait(&MinimumSetCovering::::new(3, vec![vec![0, 1]]), "MinimumSetCovering"); + check_problem_trait(&MaximumSetPacking::::new(vec![vec![0, 1]]), "MaximumSetPacking"); check_problem_trait(&PaintShop::new(vec!["a", "a"]), "PaintShop"); check_problem_trait(&BMF::new(vec![vec![true]], 1), "BMF"); check_problem_trait(&BicliqueCover::new(2, 2, vec![(0, 2)], 1), "BicliqueCover"); @@ -81,13 +81,13 @@ fn test_all_problems_implement_trait_correctly() { #[test] fn test_energy_modes() { // Minimization problems - assert!(VertexCovering::::new(2, vec![(0, 1)]) + assert!(MinimumVertexCover::::new(2, vec![(0, 1)]) .energy_mode() .is_minimization()); - assert!(DominatingSet::::new(2, vec![(0, 1)]) + assert!(MinimumDominatingSet::::new(2, vec![(0, 1)]) .energy_mode() .is_minimization()); - assert!(SetCovering::::new(2, vec![vec![0, 1]]) + assert!(MinimumSetCovering::::new(2, vec![vec![0, 1]]) .energy_mode() .is_minimization()); assert!(PaintShop::new(vec!["a", "a"]) @@ -111,7 +111,7 @@ fn test_energy_modes() { .is_minimization()); // Maximization problems - assert!(IndependentSet::::new(2, vec![(0, 1)]) + assert!(MaximumIndependentSet::::new(2, vec![(0, 1)]) .energy_mode() .is_maximization()); assert!(MaximalIS::::new(2, vec![(0, 1)]) @@ -120,10 +120,10 @@ fn test_energy_modes() { assert!(MaxCut::::new(2, vec![(0, 1, 1)]) .energy_mode() .is_maximization()); - assert!(Matching::::new(2, vec![(0, 1, 1)]) + assert!(MaximumMatching::::new(2, vec![(0, 1, 1)]) .energy_mode() .is_maximization()); - assert!(SetPacking::::new(vec![vec![0]]) + assert!(MaximumSetPacking::::new(vec![vec![0]]) .energy_mode() .is_maximization()); assert!(Satisfiability::::new(1, vec![CNFClause::new(vec![1])]) diff --git a/src/unit_tests/unitdiskmapping_algorithms/common.rs b/src/unit_tests/unitdiskmapping_algorithms/common.rs index 
6ca0cb0db..473eeee59 100644 --- a/src/unit_tests/unitdiskmapping_algorithms/common.rs +++ b/src/unit_tests/unitdiskmapping_algorithms/common.rs @@ -1,7 +1,7 @@ //! Common test utilities for mapping tests. use crate::models::optimization::{LinearConstraint, ObjectiveSense, ILP}; -use crate::models::IndependentSet; +use crate::models::MaximumIndependentSet; use crate::rules::unitdiskmapping::MappingResult; use crate::rules::{ReduceTo, ReductionResult}; use crate::solvers::ILPSolver; @@ -20,8 +20,8 @@ pub fn is_independent_set(edges: &[(usize, usize)], config: &[usize]) -> bool { /// Solve maximum independent set using ILP. /// Returns the size of the MIS. pub fn solve_mis(num_vertices: usize, edges: &[(usize, usize)]) -> usize { - let problem = IndependentSet::::new(num_vertices, edges.to_vec()); - let reduction = as ReduceTo>::reduce_to(&problem); + let problem = MaximumIndependentSet::::new(num_vertices, edges.to_vec()); + let reduction = as ReduceTo>::reduce_to(&problem); let solver = ILPSolver::new(); if let Some(solution) = solver.solve(reduction.target_problem()) { solution.iter().filter(|&&x| x > 0).count() @@ -32,8 +32,8 @@ pub fn solve_mis(num_vertices: usize, edges: &[(usize, usize)]) -> usize { /// Solve MIS and return the binary configuration. 
pub fn solve_mis_config(num_vertices: usize, edges: &[(usize, usize)]) -> Vec { - let problem = IndependentSet::::new(num_vertices, edges.to_vec()); - let reduction = as ReduceTo>::reduce_to(&problem); + let problem = MaximumIndependentSet::::new(num_vertices, edges.to_vec()); + let reduction = as ReduceTo>::reduce_to(&problem); let solver = ILPSolver::new(); if let Some(solution) = solver.solve(reduction.target_problem()) { solution diff --git a/src/unit_tests/variant.rs b/src/unit_tests/variant.rs index fcaba91ef..95628e574 100644 --- a/src/unit_tests/variant.rs +++ b/src/unit_tests/variant.rs @@ -31,39 +31,39 @@ fn test_const_usize_str() { #[test] fn test_variant_for_problems() { use crate::models::graph::{ - DominatingSet, IndependentSet, KColoring, Matching, MaxCut, MaximalIS, VertexCovering, + MinimumDominatingSet, MaximumIndependentSet, KColoring, MaximumMatching, MaxCut, MaximalIS, MinimumVertexCover, }; use crate::models::optimization::{SpinGlass, QUBO}; use crate::models::satisfiability::{KSatisfiability, Satisfiability}; - use crate::models::set::{SetCovering, SetPacking}; + use crate::models::set::{MinimumSetCovering, MaximumSetPacking}; use crate::models::specialized::{BicliqueCover, CircuitSAT, Factoring, PaintShop, BMF}; use crate::topology::SimpleGraph; use crate::traits::Problem; - // Test IndependentSet variants - let v = IndependentSet::::variant(); + // Test MaximumIndependentSet variants + let v = MaximumIndependentSet::::variant(); assert_eq!(v.len(), 2); assert_eq!(v[0].0, "graph"); assert_eq!(v[0].1, "SimpleGraph"); assert_eq!(v[1].0, "weight"); assert_eq!(v[1].1, "i32"); - let v = IndependentSet::::variant(); + let v = MaximumIndependentSet::::variant(); assert_eq!(v[1].1, "f64"); - // Test VertexCovering - let v = VertexCovering::::variant(); + // Test MinimumVertexCover + let v = MinimumVertexCover::::variant(); assert_eq!(v.len(), 2); assert_eq!(v[0].1, "SimpleGraph"); assert_eq!(v[1].1, "i32"); - // Test DominatingSet - let v = 
DominatingSet::::variant(); + // Test MinimumDominatingSet + let v = MinimumDominatingSet::::variant(); assert_eq!(v.len(), 2); assert_eq!(v[0].1, "SimpleGraph"); - // Test Matching - let v = Matching::::variant(); + // Test MaximumMatching + let v = MaximumMatching::::variant(); assert_eq!(v.len(), 2); assert_eq!(v[0].1, "SimpleGraph"); @@ -95,12 +95,12 @@ fn test_variant_for_problems() { let v = KSatisfiability::<3, i32>::variant(); assert_eq!(v.len(), 2); - // Test SetPacking - let v = SetPacking::::variant(); + // Test MaximumSetPacking + let v = MaximumSetPacking::::variant(); assert_eq!(v.len(), 2); - // Test SetCovering - let v = SetCovering::::variant(); + // Test MinimumSetCovering + let v = MinimumSetCovering::::variant(); assert_eq!(v.len(), 2); // Test SpinGlass diff --git a/tests/data/qubo/independentset_to_qubo.json b/tests/data/qubo/maximumindependentset_to_qubo.json similarity index 100% rename from tests/data/qubo/independentset_to_qubo.json rename to tests/data/qubo/maximumindependentset_to_qubo.json diff --git a/tests/data/qubo/setpacking_to_qubo.json b/tests/data/qubo/maximumsetpacking_to_qubo.json similarity index 100% rename from tests/data/qubo/setpacking_to_qubo.json rename to tests/data/qubo/maximumsetpacking_to_qubo.json diff --git a/tests/data/qubo/vertexcovering_to_qubo.json b/tests/data/qubo/minimumvertexcover_to_qubo.json similarity index 100% rename from tests/data/qubo/vertexcovering_to_qubo.json rename to tests/data/qubo/minimumvertexcover_to_qubo.json diff --git a/tests/suites/examples.rs b/tests/suites/examples.rs index 2bb6236dd..dae0c9711 100644 --- a/tests/suites/examples.rs +++ b/tests/suites/examples.rs @@ -21,35 +21,35 @@ fn run_example(name: &str) { fn test_all_reduction_examples() { let examples = [ "reduction_circuit_to_spinglass", - "reduction_clique_to_ilp", + "reduction_maximumclique_to_ilp", "reduction_coloring_to_ilp", "reduction_coloring_to_qubo", - "reduction_dominatingset_to_ilp", + 
"reduction_minimumdominatingset_to_ilp", "reduction_factoring_to_circuit", "reduction_factoring_to_ilp", "reduction_ilp_to_qubo", - "reduction_is_to_ilp", - "reduction_is_to_qubo", - "reduction_is_to_setpacking", - "reduction_is_to_vc", + "reduction_maximumindependentset_to_ilp", + "reduction_maximumindependentset_to_qubo", + "reduction_maximumindependentset_to_maximumsetpacking", + "reduction_maximumindependentset_to_minimumvertexcover", "reduction_ksatisfiability_to_qubo", - "reduction_matching_to_ilp", - "reduction_matching_to_setpacking", + "reduction_maximummatching_to_ilp", + "reduction_maximummatching_to_maximumsetpacking", "reduction_maxcut_to_spinglass", "reduction_qubo_to_spinglass", "reduction_sat_to_coloring", - "reduction_sat_to_dominatingset", - "reduction_sat_to_is", + "reduction_sat_to_minimumdominatingset", + "reduction_sat_to_maximumindependentset", "reduction_sat_to_ksat", - "reduction_setcovering_to_ilp", - "reduction_setpacking_to_ilp", - "reduction_setpacking_to_qubo", + "reduction_minimumsetcovering_to_ilp", + "reduction_maximumsetpacking_to_ilp", + "reduction_maximumsetpacking_to_qubo", "reduction_spinglass_to_maxcut", "reduction_spinglass_to_qubo", - "reduction_vc_to_ilp", - "reduction_vc_to_is", - "reduction_vc_to_qubo", - "reduction_vc_to_setcovering", + "reduction_minimumvertexcover_to_ilp", + "reduction_minimumvertexcover_to_maximumindependentset", + "reduction_minimumvertexcover_to_qubo", + "reduction_minimumvertexcover_to_minimumsetcovering", ]; for name in &examples { diff --git a/tests/suites/integration.rs b/tests/suites/integration.rs index 5bdbec3f5..9809598f3 100644 --- a/tests/suites/integration.rs +++ b/tests/suites/integration.rs @@ -17,7 +17,7 @@ mod all_problems_solvable { #[test] fn test_independent_set_solvable() { - let problem = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = 
solver.find_best(&problem); assert!(!solutions.is_empty()); @@ -28,7 +28,7 @@ mod all_problems_solvable { #[test] fn test_vertex_covering_solvable() { - let problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); assert!(!solutions.is_empty()); @@ -58,7 +58,7 @@ mod all_problems_solvable { #[test] fn test_dominating_set_solvable() { - let problem = DominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let problem = MinimumDominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); assert!(!solutions.is_empty()); @@ -80,7 +80,7 @@ mod all_problems_solvable { #[test] fn test_matching_solvable() { - let problem = Matching::::new(4, vec![(0, 1, 1), (1, 2, 2), (2, 3, 1)]); + let problem = MaximumMatching::::new(4, vec![(0, 1, 1), (1, 2, 2), (2, 3, 1)]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); assert!(!solutions.is_empty()); @@ -125,7 +125,7 @@ mod all_problems_solvable { #[test] fn test_set_covering_solvable() { - let problem = SetCovering::::new(5, vec![vec![0, 1, 2], vec![2, 3, 4], vec![0, 4]]); + let problem = MinimumSetCovering::::new(5, vec![vec![0, 1, 2], vec![2, 3, 4], vec![0, 4]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); assert!(!solutions.is_empty()); @@ -136,7 +136,7 @@ mod all_problems_solvable { #[test] fn test_set_packing_solvable() { - let problem = SetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![1, 2], vec![4]]); + let problem = MaximumSetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![1, 2], vec![4]]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); assert!(!solutions.is_empty()); @@ -214,8 +214,8 @@ mod problem_relationships { let edges = vec![(0, 1), (1, 2), (2, 3), (0, 3)]; let n = 4; - let is_problem = 
IndependentSet::::new(n, edges.clone()); - let vc_problem = VertexCovering::::new(n, edges); + let is_problem = MaximumIndependentSet::::new(n, edges.clone()); + let vc_problem = MinimumVertexCover::::new(n, edges); let solver = BruteForce::new(); let is_solutions = solver.find_best(&is_problem); @@ -228,14 +228,14 @@ mod problem_relationships { assert_eq!(max_is_size + min_vc_size, n); } - /// MaximalIS solutions are a subset of IndependentSet solutions (valid IS). + /// MaximalIS solutions are a subset of MaximumIndependentSet solutions (valid IS). #[test] fn test_maximal_is_is_independent_set() { let edges = vec![(0, 1), (1, 2), (2, 3)]; let n = 4; let maximal_is = MaximalIS::::new(n, edges.clone()); - let is_problem = IndependentSet::::new(n, edges); + let is_problem = MaximumIndependentSet::::new(n, edges); let solver = BruteForce::new(); let maximal_solutions = solver.find_best(&maximal_is); @@ -286,14 +286,14 @@ mod problem_relationships { } } - /// SetCovering and SetPacking on disjoint sets. + /// MinimumSetCovering and MaximumSetPacking on disjoint sets. 
#[test] fn test_set_covering_packing_disjoint() { // Three disjoint sets covering universe {0,1,2,3,4,5} let sets = vec![vec![0, 1], vec![2, 3], vec![4, 5]]; - let covering = SetCovering::::new(6, sets.clone()); - let packing = SetPacking::::new(sets); + let covering = MinimumSetCovering::::new(6, sets.clone()); + let packing = MaximumSetPacking::::new(sets); let solver = BruteForce::new(); @@ -313,7 +313,7 @@ mod edge_cases { #[test] fn test_empty_graph_independent_set() { - let problem = IndependentSet::::new(3, vec![]); + let problem = MaximumIndependentSet::::new(3, vec![]); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -325,7 +325,7 @@ mod edge_cases { fn test_complete_graph_independent_set() { // K4 - complete graph on 4 vertices let edges = vec![(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]; - let problem = IndependentSet::::new(4, edges); + let problem = MaximumIndependentSet::::new(4, edges); let solver = BruteForce::new(); let solutions = solver.find_best(&problem); @@ -377,7 +377,7 @@ mod weighted_problems { #[test] fn test_weighted_independent_set() { - let mut problem = IndependentSet::::new(3, vec![(0, 1)]); + let mut problem = MaximumIndependentSet::::new(3, vec![(0, 1)]); problem.set_weights(vec![10, 1, 1]); let solver = BruteForce::new(); @@ -395,7 +395,7 @@ mod weighted_problems { #[test] fn test_weighted_vertex_cover() { - let mut problem = VertexCovering::::new(3, vec![(0, 1), (1, 2)]); + let mut problem = MinimumVertexCover::::new(3, vec![(0, 1), (1, 2)]); problem.set_weights(vec![1, 10, 1]); let solver = BruteForce::new(); diff --git a/tests/suites/reductions.rs b/tests/suites/reductions.rs index 28fccf840..4ada170c6 100644 --- a/tests/suites/reductions.rs +++ b/tests/suites/reductions.rs @@ -6,17 +6,17 @@ use problemreductions::prelude::*; use problemreductions::topology::SimpleGraph; -/// Tests for IndependentSet <-> VertexCovering reductions. 
+/// Tests for MaximumIndependentSet <-> MinimumVertexCover reductions. mod is_vc_reductions { use super::*; #[test] fn test_is_to_vc_basic() { // Triangle graph - let is_problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let is_problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); // Reduce IS to VC - let result = ReduceTo::>::reduce_to(&is_problem); + let result = ReduceTo::>::reduce_to(&is_problem); let vc_problem = result.target_problem(); // Same graph structure @@ -37,10 +37,10 @@ mod is_vc_reductions { #[test] fn test_vc_to_is_basic() { // Path graph - let vc_problem = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let vc_problem = MinimumVertexCover::::new(4, vec![(0, 1), (1, 2), (2, 3)]); // Reduce VC to IS - let result = ReduceTo::>::reduce_to(&vc_problem); + let result = ReduceTo::>::reduce_to(&vc_problem); let is_problem = result.target_problem(); // Same graph structure @@ -60,14 +60,14 @@ mod is_vc_reductions { #[test] fn test_is_vc_roundtrip() { - let original = IndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); + let original = MaximumIndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4)]); // IS -> VC - let to_vc = ReduceTo::>::reduce_to(&original); + let to_vc = ReduceTo::>::reduce_to(&original); let vc_problem = to_vc.target_problem(); // VC -> IS - let back_to_is = ReduceTo::>::reduce_to(vc_problem); + let back_to_is = ReduceTo::>::reduce_to(vc_problem); let final_is = back_to_is.target_problem(); // Should have same structure @@ -88,9 +88,9 @@ mod is_vc_reductions { #[test] fn test_is_vc_weighted() { - let is_problem = IndependentSet::with_weights(3, vec![(0, 1)], vec![10, 1, 5]); + let is_problem = MaximumIndependentSet::with_weights(3, vec![(0, 1)], vec![10, 1, 5]); - let result = ReduceTo::>::reduce_to(&is_problem); + let result = ReduceTo::>::reduce_to(&is_problem); let vc_problem = result.target_problem(); // Weights should be preserved @@ -103,8 +103,8 @@ mod 
is_vc_reductions { let edges = vec![(0, 1), (1, 2), (2, 3), (0, 3)]; let n = 4; - let is_problem = IndependentSet::::new(n, edges.clone()); - let vc_problem = VertexCovering::::new(n, edges); + let is_problem = MaximumIndependentSet::::new(n, edges.clone()); + let vc_problem = MinimumVertexCover::::new(n, edges); let solver = BruteForce::new(); @@ -119,16 +119,16 @@ mod is_vc_reductions { } } -/// Tests for IndependentSet <-> SetPacking reductions. +/// Tests for MaximumIndependentSet <-> MaximumSetPacking reductions. mod is_sp_reductions { use super::*; #[test] fn test_is_to_sp_basic() { // Triangle graph - each vertex's incident edges become a set - let is_problem = IndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let is_problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); - let result = ReduceTo::>::reduce_to(&is_problem); + let result = ReduceTo::>::reduce_to(&is_problem); let sp_problem = result.target_problem(); // 3 sets (one per vertex) @@ -148,9 +148,9 @@ mod is_sp_reductions { fn test_sp_to_is_basic() { // Disjoint sets pack perfectly let sets = vec![vec![0, 1], vec![2, 3], vec![4]]; - let sp_problem = SetPacking::::new(sets); + let sp_problem = MaximumSetPacking::::new(sets); - let result = ReduceTo::>::reduce_to(&sp_problem); + let result = ReduceTo::>::reduce_to(&sp_problem); let is_problem = result.target_problem(); // Should have an edge for each pair of overlapping sets (none here) @@ -170,10 +170,10 @@ mod is_sp_reductions { #[test] fn test_is_sp_roundtrip() { - let original = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let original = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); // IS -> SP - let to_sp = ReduceTo::>::reduce_to(&original); + let to_sp = ReduceTo::>::reduce_to(&original); let sp_problem = to_sp.target_problem(); // Solve SP @@ -356,12 +356,12 @@ mod topology_tests { #[test] fn test_hypergraph_to_setpacking() { - // HyperGraph can be seen as a SetPacking problem + // 
HyperGraph can be seen as a MaximumSetPacking problem let hg = HyperGraph::new(5, vec![vec![0, 1, 2], vec![2, 3], vec![3, 4]]); - // Convert hyperedges to sets for SetPacking + // Convert hyperedges to sets for MaximumSetPacking let sets: Vec> = hg.edges().to_vec(); - let sp = SetPacking::::new(sets); + let sp = MaximumSetPacking::::new(sets); let solver = BruteForce::new(); let solutions = solver.find_best(&sp); @@ -382,7 +382,7 @@ mod topology_tests { // Extract edges let edges = udg.edges().to_vec(); - let is_problem = IndependentSet::::new(4, edges); + let is_problem = MaximumIndependentSet::::new(4, edges); let solver = BruteForce::new(); let solutions = solver.find_best(&is_problem); @@ -468,10 +468,10 @@ mod qubo_reductions { #[test] fn test_is_to_qubo_ground_truth() { - let json = std::fs::read_to_string("tests/data/qubo/independentset_to_qubo.json").unwrap(); + let json = std::fs::read_to_string("tests/data/qubo/maximumindependentset_to_qubo.json").unwrap(); let data: ISToQuboData = serde_json::from_str(&json).unwrap(); - let is = IndependentSet::::new( + let is = MaximumIndependentSet::::new( data.source.num_vertices, data.source.edges, ); @@ -511,10 +511,10 @@ mod qubo_reductions { #[test] fn test_vc_to_qubo_ground_truth() { let json = - std::fs::read_to_string("tests/data/qubo/vertexcovering_to_qubo.json").unwrap(); + std::fs::read_to_string("tests/data/qubo/minimumvertexcover_to_qubo.json").unwrap(); let data: VCToQuboData = serde_json::from_str(&json).unwrap(); - let vc = VertexCovering::::new( + let vc = MinimumVertexCover::::new( data.source.num_vertices, data.source.edges, ); @@ -594,10 +594,10 @@ mod qubo_reductions { #[test] fn test_setpacking_to_qubo_ground_truth() { - let json = std::fs::read_to_string("tests/data/qubo/setpacking_to_qubo.json").unwrap(); + let json = std::fs::read_to_string("tests/data/qubo/maximumsetpacking_to_qubo.json").unwrap(); let data: SPToQuboData = serde_json::from_str(&json).unwrap(); - let sp = 
SetPacking::with_weights(data.source.sets, data.source.weights); + let sp = MaximumSetPacking::with_weights(data.source.sets, data.source.weights); let reduction = ReduceTo::::reduce_to(&sp); let qubo = reduction.target_problem(); @@ -771,20 +771,20 @@ mod io_tests { #[test] fn test_serialize_reduce_deserialize() { - let original = IndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + let original = MaximumIndependentSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); // Serialize let json = to_json(&original).unwrap(); // Deserialize - let restored: IndependentSet = from_json(&json).unwrap(); + let restored: MaximumIndependentSet = from_json(&json).unwrap(); // Should have same structure assert_eq!(restored.num_vertices(), original.num_vertices()); assert_eq!(restored.num_edges(), original.num_edges()); // Reduce the restored problem - let result = ReduceTo::>::reduce_to(&restored); + let result = ReduceTo::>::reduce_to(&restored); let vc = result.target_problem(); assert_eq!(vc.num_vertices(), 4); @@ -822,8 +822,8 @@ mod end_to_end { #[test] fn test_full_pipeline_is_vc_sp() { - // Start with an IndependentSet problem - let is = IndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4), (0, 4)]); + // Start with an MaximumIndependentSet problem + let is = MaximumIndependentSet::::new(5, vec![(0, 1), (1, 2), (2, 3), (3, 4), (0, 4)]); // Solve directly let solver = BruteForce::new(); @@ -831,14 +831,14 @@ mod end_to_end { let direct_size = is_solutions[0].iter().sum::(); // Reduce to VC and solve - let to_vc = ReduceTo::>::reduce_to(&is); + let to_vc = ReduceTo::>::reduce_to(&is); let vc = to_vc.target_problem(); let vc_solutions = solver.find_best(vc); let vc_extracted = to_vc.extract_solution(&vc_solutions[0]); let via_vc_size = vc_extracted.iter().sum::(); - // Reduce to SetPacking and solve - let to_sp = ReduceTo::>::reduce_to(&is); + // Reduce to MaximumSetPacking and solve + let to_sp = ReduceTo::>::reduce_to(&is); let sp = to_sp.target_problem(); let 
sp_solutions = solver.find_best(sp); let sp_extracted = to_sp.extract_solution(&sp_solutions[0]); @@ -882,16 +882,16 @@ mod end_to_end { #[test] fn test_chain_reduction_sp_is_vc() { - // SetPacking -> IndependentSet -> VertexCovering + // MaximumSetPacking -> MaximumIndependentSet -> MinimumVertexCover let sets = vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![3]]; - let sp = SetPacking::::new(sets); + let sp = MaximumSetPacking::::new(sets); // SP -> IS - let sp_to_is = ReduceTo::>::reduce_to(&sp); + let sp_to_is = ReduceTo::>::reduce_to(&sp); let is = sp_to_is.target_problem(); // IS -> VC - let is_to_vc = ReduceTo::>::reduce_to(is); + let is_to_vc = ReduceTo::>::reduce_to(is); let vc = is_to_vc.target_problem(); // Solve VC @@ -902,7 +902,7 @@ mod end_to_end { let is_sol = is_to_vc.extract_solution(&vc_solutions[0]); let sp_sol = sp_to_is.extract_solution(&is_sol); - // Should be valid SetPacking + // Should be valid MaximumSetPacking assert!(sp.solution_size(&sp_sol).is_valid); } } From 6fc9683f0b19d9a1955e61706f0b1c2b17914099 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Tue, 10 Feb 2026 23:20:52 +0800 Subject: [PATCH 09/14] feat: add edge doc links and compound variant nodes to reduction graph Edge documentation: - Add module_path field to ReductionEntry, populated via module_path!() in the proc macro and 3 manual inventory::submit! calls - Add doc_path to EdgeJson, computed from module_path - Add edge double-click handler to navigate to reduction module rustdoc Compound variant nodes: - Replace base-node-only filtering with compound parent + variant children - Parent nodes show problem name with dashed border, children show variant (Unweighted/Weighted/etc.) - Edges connect variant nodes directly, preserving full reduction detail - Update all event handlers (tooltip, path selection, edge click) Also fix problem-def Typst function to accept (name, title, body) matching all 16 call sites, and update CLAUDE.md/rules documentation. 
Co-Authored-By: Claude Opus 4.6 --- .claude/CLAUDE.md | 27 +++-- .claude/rules/adding-models.md | 13 +- .claude/rules/adding-reductions.md | 6 +- .claude/rules/documentation.md | 58 +++++---- docs/paper/reduction_graph.json | 60 ++++++--- docs/paper/reductions.typ | 19 ++- docs/src/introduction.md | 148 +++++++++++++++++------ docs/src/reductions/reduction_graph.json | 60 ++++++--- problemreductions-macros/src/lib.rs | 1 + src/rules/coloring_ilp.rs | 1 + src/rules/factoring_ilp.rs | 1 + src/rules/graph.rs | 20 ++- src/rules/registry.rs | 3 + src/rules/sat_ksat.rs | 1 + src/unit_tests/rules/registry.rs | 7 ++ 15 files changed, 293 insertions(+), 132 deletions(-) diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md index b693e8e9a..dbf4ed9bc 100644 --- a/.claude/CLAUDE.md +++ b/.claude/CLAUDE.md @@ -47,7 +47,7 @@ make test clippy export-graph # Must pass before PR ``` Problem (core trait - all problems must implement) │ -├── const NAME: &'static str // Problem name, e.g., "IndependentSet" +├── const NAME: &'static str // Problem name, e.g., "MaximumIndependentSet" ├── type GraphType: GraphMarker // Graph topology marker ├── type Weight: NumericWeight // Weight type (i32, f64, Unweighted) ├── type Size // Objective value type @@ -75,20 +75,33 @@ ConstraintSatisfactionProblem : Problem (extension for CSPs) - Graph types: SimpleGraph, GridGraph, UnitDiskGraph, Hypergraph - Weight types: `Unweighted` (marker), `i32`, `f64` +### Problem Names +Problem types use explicit optimization prefixes: +- `MaximumIndependentSet`, `MaximumClique`, `MaximumMatching`, `MaximumSetPacking` +- `MinimumVertexCover`, `MinimumDominatingSet`, `MinimumSetCovering` +- No prefix: `MaxCut`, `SpinGlass`, `QUBO`, `ILP`, `Satisfiability`, `KSatisfiability`, `CircuitSAT`, `Factoring`, `MaximalIS` + ### Problem Variant IDs Reduction graph nodes use variant IDs: `ProblemName[/GraphType][/Weighted]` -- Base: `IndependentSet` (SimpleGraph, unweighted) -- Graph variant: `IndependentSet/GridGraph` -- 
Weighted variant: `IndependentSet/Weighted` -- Both: `IndependentSet/GridGraph/Weighted` +- Base: `MaximumIndependentSet` (SimpleGraph, unweighted) +- Graph variant: `MaximumIndependentSet/GridGraph` +- Weighted variant: `MaximumIndependentSet/Weighted` +- Both: `MaximumIndependentSet/GridGraph/Weighted` ## Conventions ### File Naming -- Reduction files: `src/rules/_.rs` -- Model files: `src/models//.rs` +- Reduction files: `src/rules/_.rs` (e.g., `maximumindependentset_qubo.rs`) +- Model files: `src/models//.rs` (e.g., `maximum_independent_set.rs`) +- Example files: `examples/reduction__to_.rs` - Test naming: `test__to__closed_loop` +### Paper (docs/paper/reductions.typ) +- `problem-def(name, title, body)` — defines a problem with auto-generated schema, reductions list, and label `` +- `reduction-rule(source, target, ...)` — generates a theorem with label `` and registers in `covered-rules` state +- Completeness warnings auto-check that all JSON graph nodes/edges are covered in the paper +- `display-name` dict maps `ProblemName` to display text + ## Contributing See `.claude/rules/` for detailed guides: - `adding-reductions.md` - How to add reduction rules diff --git a/.claude/rules/adding-models.md b/.claude/rules/adding-models.md index 4088da2ce..fd98a9574 100644 --- a/.claude/rules/adding-models.md +++ b/.claude/rules/adding-models.md @@ -36,9 +36,9 @@ pub use my_problem::MyProblem; ## 3. Categories Place models in appropriate category: -- `src/models/satisfiability/` - SAT, K-SAT, CircuitSAT -- `src/models/graph/` - IndependentSet, VertexCovering, Coloring, etc. -- `src/models/set/` - SetCovering, SetPacking +- `src/models/satisfiability/` - Satisfiability, KSatisfiability, CircuitSAT +- `src/models/graph/` - MaximumIndependentSet, MinimumVertexCover, KColoring, etc. +- `src/models/set/` - MinimumSetCovering, MaximumSetPacking - `src/models/optimization/` - SpinGlass, QUBO, ILP ## 4. 
Required Traits @@ -47,8 +47,11 @@ Place models in appropriate category: - `Problem` - Core trait with `num_variables()`, `problem_size()`, `is_valid_solution()` - Consider `ConstraintSatisfactionProblem` if applicable -## 5. Documentation -Document in `docs/paper/reductions.typ` +## 5. Naming +Use explicit optimization prefixes: `Maximum` for maximization, `Minimum` for minimization (e.g., `MaximumIndependentSet`, `MinimumVertexCover`). + +## 6. Documentation +Document in `docs/paper/reductions.typ` using `#problem-def("ProblemName", "Display Title")[...]` ## Anti-patterns - Don't create models without JSON serialization support diff --git a/.claude/rules/adding-reductions.md b/.claude/rules/adding-reductions.md index 20411ac84..b92664b55 100644 --- a/.claude/rules/adding-reductions.md +++ b/.claude/rules/adding-reductions.md @@ -20,7 +20,7 @@ Before writing any Rust code, follow this workflow: # Example: generate QUBO test data cd scripts && uv run python generate_qubo_tests.py ``` -3. **Create a practical example** — design a small, explainable instance for `examples/` (e.g., "wireless tower placement" for IndependentSet, "map coloring" for Coloring). This example will also appear in the `docs/paper/reductions.typ`. +3. **Create a practical example** — design a small, explainable instance for `examples/` (e.g., "wireless tower placement" for MaximumIndependentSet, "map coloring" for KColoring). This example will also appear in the `docs/paper/reductions.typ`. 4. **Write the implementation plan** — save to `docs/plans/` using `superpowers:writing-plans`. The plan must include implementation details from the brainstorming session (formulas, penalty terms, matrix construction, variable indexing). ## 1. Implementation @@ -84,9 +84,9 @@ Add a round-trip demo to `examples/` showing a practical, explainable instance: ## 4. 
Documentation Update `docs/paper/reductions.typ` (see `rules/documentation.md` for the pattern): -- Add theorem + proof sketch +- Add `reduction-rule("Source", "Target", ...)` theorem with proof sketch - Add Rust code example from the example program -- Add to summary table with overhead and citation +- Add `display-name` entry if the problem is new The goal is to 1. prove the correctness of the reduction to human beings. 2. provide a minimal working example to the readers. diff --git a/.claude/rules/documentation.md b/.claude/rules/documentation.md index 7cabfe0bd..d29234c40 100644 --- a/.claude/rules/documentation.md +++ b/.claude/rules/documentation.md @@ -7,34 +7,48 @@ paths: The technical paper (`docs/paper/reductions.typ`) must include: -1. **Table of Contents** - Auto-generated outline of all sections -2. **Problem Data Structures** - Rust struct with fields in a code block -3. **Reduction Examples** - Minimal working example showing reduce → solve → extract +1. **Problem Definitions** — using `problem-def` wrapper +2. **Reduction Theorems** — using `reduction-rule` function +3. **Reduction Examples** — minimal working example showing reduce → solve → extract -## Pattern +## Adding a Problem Definition ```typst -#definition("Problem Name")[ +#problem-def("MaximumIndependentSet", "Maximum Independent Set (MIS)")[ Mathematical definition... ] +``` -// Rust data structure -```rust -pub struct ProblemName { - field1: Type1, - field2: Type2, -} -`` ` +This auto-generates: +- A label `` for cross-references +- The problem's schema (fields from Rust struct) +- The list of available reductions -#theorem[ - *(Source → Target)* Reduction description... 
-] +Also add an entry to the `display-name` dictionary: +```typst +"MaximumIndependentSet": "MIS", +``` + +## Adding a Reduction Theorem -// Minimal working example from closed-loop tests -```rust -let source = SourceProblem::new(...); -let reduction = ReduceTo::::reduce_to(&source); -let target = reduction.target_problem(); -// ... solve and extract -`` ` +```typst +#reduction-rule( + "MaximumIndependentSet", "QUBO", + example: "maximumindependentset_to_qubo", + overhead: (n: 0, m: 1), +)[ + Proof sketch... +] ``` + +This auto-generates: +- A theorem label `` +- References to source/target problem definitions (if they exist) +- Registration in `covered-rules` state for completeness checking +- The example code block from `examples/reduction_.rs` + +## Completeness Warnings + +The paper auto-checks completeness: +- After Problem Definitions: warns if JSON graph nodes are missing from `display-name` +- After Reductions section: warns if JSON graph edges are missing from `covered-rules` diff --git a/docs/paper/reduction_graph.json b/docs/paper/reduction_graph.json index 6d05579bd..d05d9f2b2 100644 --- a/docs/paper/reduction_graph.json +++ b/docs/paper/reduction_graph.json @@ -325,7 +325,8 @@ "field": "num_interactions", "formula": "num_assignments" } - ] + ], + "doc_path": "rules/circuit_spinglass/index.html" }, { "source": { @@ -348,7 +349,8 @@ "field": "num_gates", "formula": "num_bits_first * num_bits_second" } - ] + ], + "doc_path": "rules/factoring_circuit/index.html" }, { "source": { @@ -375,7 +377,8 @@ "field": "num_constraints", "formula": "3 * num_bits_first * num_bits_second + num_bits_first + num_bits_second + 1" } - ] + ], + "doc_path": "rules/factoring_ilp/index.html" }, { "source": { @@ -398,7 +401,8 @@ "field": "num_vars", "formula": "num_vars" } - ] + ], + "doc_path": "rules/ilp_qubo/index.html" }, { "source": { @@ -426,7 +430,8 @@ "field": "num_constraints", "formula": "num_vertices + num_edges * num_colors" } - ] + ], + "doc_path": 
"rules/coloring_ilp/index.html" }, { "source": { @@ -449,7 +454,8 @@ "field": "num_vars", "formula": "num_vertices * num_colors" } - ] + ], + "doc_path": "rules/coloring_qubo/index.html" }, { "source": { @@ -472,7 +478,8 @@ "field": "num_vars", "formula": "num_vars" } - ] + ], + "doc_path": "rules/ksatisfiability_qubo/index.html" }, { "source": { @@ -495,7 +502,8 @@ "field": "num_vars", "formula": "num_vertices" } - ] + ], + "doc_path": "rules/maximumindependentset_qubo/index.html" }, { "source": { @@ -522,7 +530,8 @@ "field": "num_elements", "formula": "num_vertices" } - ] + ], + "doc_path": "rules/maximummatching_maximumsetpacking/index.html" }, { "source": { @@ -549,7 +558,8 @@ "field": "num_edges", "formula": "num_sets" } - ] + ], + "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" }, { "source": { @@ -572,7 +582,8 @@ "field": "num_vars", "formula": "num_sets" } - ] + ], + "doc_path": "rules/maximumsetpacking_qubo/index.html" }, { "source": { @@ -599,7 +610,8 @@ "field": "num_edges", "formula": "num_edges" } - ] + ], + "doc_path": "rules/minimumvertexcover_maximumindependentset/index.html" }, { "source": { @@ -626,7 +638,8 @@ "field": "num_elements", "formula": "num_edges" } - ] + ], + "doc_path": "rules/minimumvertexcover_minimumsetcovering/index.html" }, { "source": { @@ -649,7 +662,8 @@ "field": "num_vars", "formula": "num_vertices" } - ] + ], + "doc_path": "rules/minimumvertexcover_qubo/index.html" }, { "source": { @@ -676,7 +690,8 @@ "field": "num_colors", "formula": "3" } - ] + ], + "doc_path": "rules/sat_coloring/index.html" }, { "source": { @@ -703,7 +718,8 @@ "field": "num_vars", "formula": "num_vars + num_literals" } - ] + ], + "doc_path": "rules/sat_ksat/index.html" }, { "source": { @@ -730,7 +746,8 @@ "field": "num_edges", "formula": "num_literals^2" } - ] + ], + "doc_path": "rules/sat_maximumindependentset/index.html" }, { "source": { @@ -757,7 +774,8 @@ "field": "num_edges", "formula": "3 * num_vars + num_literals" } - ] + ], 
+ "doc_path": "rules/sat_minimumdominatingset/index.html" }, { "source": { @@ -784,7 +802,8 @@ "field": "num_edges", "formula": "num_interactions" } - ] + ], + "doc_path": "rules/spinglass_maxcut/index.html" }, { "source": { @@ -807,7 +826,8 @@ "field": "num_vars", "formula": "num_spins" } - ] + ], + "doc_path": "rules/spinglass_qubo/index.html" } ] } \ No newline at end of file diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index 13387f837..a8b30ea40 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -25,8 +25,8 @@ // Problem display names for theorem headers #let display-name = ( - "MaximumIndependentSet": "MIS", - "MinimumVertexCover": "MVC", + "MaximumIndependentSet": "Maximum Independent Set", + "MinimumVertexCover": "Minimum Vertex Cover", "MaxCut": "Max-Cut", "KColoring": "Coloring", "MinimumDominatingSet": "Min Dominating Set", @@ -149,12 +149,14 @@ ) // Problem definition wrapper: auto-adds schema, reductions list, and label -#let problem-def(name, title, body) = { +#let problem-def(name, body) = { + let lbl = label("def:" + name) + let title = display-name.at(name) [#definition(title)[ #body #render-schema(name) #render-reductions(name) - ]] + ] #lbl] } // Find edge in graph-data by source/target names @@ -210,14 +212,7 @@ covered-rules.update(old => old + ((source, target),)) [#theorem[ - *(#src-disp #arrow #tgt-disp)* #theorem-body - #context { - let refs = () - if query(src-lbl).len() > 0 { refs.push(ref(src-lbl)) } - if source != target and query(tgt-lbl).len() > 0 { refs.push(ref(tgt-lbl)) } - if refs.len() == 1 { [_Problem:_ #refs.at(0).] } - else if refs.len() > 1 { [_Problems:_ #refs.join(", ").] 
} - } + *(*#context { if query(src-lbl).len() > 0 { link(src-lbl)[#src-disp] } else [#src-disp] }* #arrow *#context { if query(tgt-lbl).len() > 0 { link(tgt-lbl)[#tgt-disp] } else [#tgt-disp] }*)* #theorem-body #if overhead != none { linebreak(); format-overhead(overhead) } ] #thm-lbl] diff --git a/docs/src/introduction.md b/docs/src/introduction.md index f7499ad0c..f565c5df1 100644 --- a/docs/src/introduction.md +++ b/docs/src/introduction.md @@ -4,15 +4,15 @@ A Rust library for reducing NP-hard problems. ## Overview -**problemreductions** provides implementations of various NP-hard computational problems and reduction rules between them. It is designed for algorithm research, education, and quantum optimization studies. +**problemreductions** provides implementations of various computational hard problems and reduction rules between them. It is designed for algorithm research, education, and quantum optimization studies. For theoretical background and correctness proofs, see the [PDF manual](https://codingthrust.github.io/problem-reductions/reductions.pdf). ## Reduction Graph -
+
-
+
Graph Set Optimization @@ -20,14 +20,14 @@ For theoretical background and correctness proofs, see the [PDF manual](https:// Specialized
- Click a node to start path selection + Click a node to start path selection
-
- Click two nodes to find a reduction path. Double-click a node to view its API docs. Scroll to zoom, drag to pan. +
+ Click two variant nodes to find a reduction path. Double-click a node or edge to view its API docs. Scroll to zoom, drag to pan.
- +