diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md index a4ad24222..74415bfc2 100644 --- a/.claude/CLAUDE.md +++ b/.claude/CLAUDE.md @@ -81,8 +81,9 @@ enum Direction { Maximize, Minimize } - `ReductionResult` provides `target_problem()` and `extract_solution()` - `Solver::find_best()` → `Option>` for optimization problems; `Solver::find_satisfying()` → `Option>` for `Metric = bool` - `BruteForce::find_all_best()` / `find_all_satisfying()` return `Vec>` for all optimal/satisfying solutions -- Graph types: SimpleGraph, GridGraph, UnitDiskGraph, Hypergraph -- Weight types: `Unweighted` (marker), `i32`, `f64` +- Graph types: SimpleGraph, GridGraph, UnitDiskGraph, Triangular, HyperGraph +- Weight types: `One` (unit weight marker), `i32`, `f64` — all implement `WeightElement` trait +- `WeightElement` trait: `type Sum: NumericSize` + `fn to_sum(&self)` — converts weight to a summable numeric type - Weight management via inherent methods (`weights()`, `set_weights()`, `is_weighted()`), not traits - `NumericSize` supertrait bundles common numeric bounds (`Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static`) @@ -93,11 +94,11 @@ Problem types use explicit optimization prefixes: - No prefix: `MaxCut`, `SpinGlass`, `QUBO`, `ILP`, `Satisfiability`, `KSatisfiability`, `CircuitSAT`, `Factoring`, `MaximalIS`, `PaintShop`, `BicliqueCover`, `BMF` ### Problem Variant IDs -Reduction graph nodes use variant IDs: `ProblemName[/GraphType][/Weighted]` -- Base: `MaximumIndependentSet` (SimpleGraph, unweighted) -- Graph variant: `MaximumIndependentSet/GridGraph` -- Weighted variant: `MaximumIndependentSet/Weighted` -- Both: `MaximumIndependentSet/GridGraph/Weighted` +Reduction graph nodes use variant key-value pairs from `Problem::variant()`: +- Base: `MaximumIndependentSet` (empty variant = defaults) +- Graph variant: `MaximumIndependentSet {graph: "GridGraph", weight: "i32"}` +- Weight variant: `MaximumIndependentSet {graph: "SimpleGraph", weight: "f64"}` +- Nodes 
come exclusively from `#[reduction]` registrations; natural edges between same-name variants are inferred from the graph/weight subtype partial order ## Conventions diff --git a/.claude/skills/issue-to-pr.md b/.claude/skills/issue-to-pr.md index c17ea1d3c..021f0f4c1 100644 --- a/.claude/skills/issue-to-pr.md +++ b/.claude/skills/issue-to-pr.md @@ -49,7 +49,7 @@ Present issue summary to user. Check that the issue template is fully filled out: - For **[Model]** issues: A clear mathematical definition, Type specification, Variables and fields, The complexity clarification, verify an existing solver can solve it, or a solving strategy is provided, A detailed example for human. -- For **[Rule]** issues: Source, Target, Reference to verify information, Implementable reduction algorithm, Test dataset generation method, Size overhead, A clear example for human. +- For **[Rule]** issues: Source, Target, Reference to verify information, Implementable reduction algorithm, Test dataset generation method, Size overhead, A reduction example for human to verify the reduction is correct. Please put a high standard on the example: it must be in tutorial style with clear intuition and be easy to understand. Verify facts provided by the user, feel free to ask user questions. If any piece is missing or unclear, comment on the issue via `gh issue comment --body "..."` to ask the user to clarify. Then stop and wait — do NOT proceed until the issue is complete. 
diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index 1ff262445..d163e99bb 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -44,6 +44,7 @@ "CircuitSAT": [CircuitSAT], "Factoring": [Factoring], "GridGraph": [GridGraph MIS], + "Triangular": [Triangular MIS], ) // Definition label: "def:" — each definition block must have a matching label @@ -60,15 +61,15 @@ // Extract reductions for a problem from graph-data (returns (name, label) pairs) #let get-reductions-to(problem-name) = { graph-data.edges - .filter(e => e.source.name == problem-name) - .map(e => (name: e.target.name, lbl: reduction-label(e.source.name, e.target.name))) + .filter(e => graph-data.nodes.at(e.source).name == problem-name) + .map(e => (name: graph-data.nodes.at(e.target).name, lbl: reduction-label(graph-data.nodes.at(e.source).name, graph-data.nodes.at(e.target).name))) .dedup(key: e => e.name) } #let get-reductions-from(problem-name) = { graph-data.edges - .filter(e => e.target.name == problem-name) - .map(e => (name: e.source.name, lbl: reduction-label(e.source.name, e.target.name))) + .filter(e => graph-data.nodes.at(e.target).name == problem-name) + .map(e => (name: graph-data.nodes.at(e.source).name, lbl: reduction-label(graph-data.nodes.at(e.source).name, graph-data.nodes.at(e.target).name))) .dedup(key: e => e.name) } @@ -166,9 +167,9 @@ // Find edge in graph-data by source/target names #let find-edge(source, target) = { - let edge = graph-data.edges.find(e => e.source.name == source and e.target.name == target) + let edge = graph-data.edges.find(e => graph-data.nodes.at(e.source).name == source and graph-data.nodes.at(e.target).name == target) if edge == none { - edge = graph-data.edges.find(e => e.source.name == target and e.target.name == source) + edge = graph-data.edges.find(e => graph-data.nodes.at(e.source).name == target and graph-data.nodes.at(e.target).name == source) } edge } @@ -205,9 +206,9 @@ ) = { let arrow = sym.arrow.r let edge = 
find-edge(source, target) - let src-disp = if edge != none { variant-display(edge.source) } + let src-disp = if edge != none { variant-display(graph-data.nodes.at(edge.source)) } else { display-name.at(source) } - let tgt-disp = if edge != none { variant-display(edge.target) } + let tgt-disp = if edge != none { variant-display(graph-data.nodes.at(edge.target)) } else { display-name.at(target) } let src-lbl = label("def:" + source) let tgt-lbl = label("def:" + target) @@ -851,10 +852,10 @@ The following reductions to Integer Linear Programming are straightforward formu *Example: Petersen Graph.*#footnote[Generated using `cargo run --example export_petersen_mapping` from the accompanying code repository.] The Petersen graph ($n=10$, MIS$=4$) maps to a $30 times 42$ King's subgraph with 219 nodes and overhead $Delta = 89$. Solving MIS on the grid yields $"MIS"(G_"grid") = 4 + 89 = 93$. The weighted and unweighted KSG mappings share identical grid topology (same node positions and edges); only the vertex weights differ. With triangular lattice encoding @nguyen2023, the same graph maps to a $42 times 60$ grid with 395 nodes and overhead $Delta = 375$, giving $"MIS"(G_"tri") = 4 + 375 = 379$. // Load JSON data -#let petersen = json("petersen_source.json") -#let square_weighted = json("petersen_square_weighted.json") -#let square_unweighted = json("petersen_square_unweighted.json") -#let triangular_mapping = json("petersen_triangular.json") +#let petersen = json("static/petersen_source.json") +#let square_weighted = json("static/petersen_square_weighted.json") +#let square_unweighted = json("static/petersen_square_unweighted.json") +#let triangular_mapping = json("static/petersen_triangular.json") #figure( grid( @@ -884,6 +885,14 @@ The following reductions to Integer Linear Programming are straightforward formu caption: [Unit disk mappings of the Petersen graph. 
Blue: weight 1, red: weight 2, green: weight 3.], ) +#reduction-rule("MaximumIndependentSet", "Triangular")[ + @nguyen2023 Any MIS problem on a general graph $G$ can be reduced to MIS on a weighted triangular lattice graph with at most quadratic overhead in the number of vertices. +][ + _Construction._ Same copy-line method as the KSG mapping, but uses a triangular lattice instead of a square grid. Crossing and simplifier gadgets are adapted for triangular geometry, producing a unit disk graph on a triangular grid where edges connect nodes within unit distance under the triangular metric. + + _Overhead._ Both vertex and edge counts grow as $O(n^2)$ where $n = |V|$, matching the KSG mapping. +] + *Weighted Extension.* For MWIS, copy lines use weighted vertices (weights 1, 2, or 3). Source weights $< 1$ are added to designated "pin" vertices. *QUBO Mapping.* A QUBO problem $min bold(x)^top Q bold(x)$ maps to weighted MIS on a grid by: @@ -897,7 +906,7 @@ See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples #context { let covered = covered-rules.get() let json-edges = { - let edges = graph-data.edges.map(e => (e.source.name, e.target.name)) + let edges = graph-data.edges.map(e => (graph-data.nodes.at(e.source).name, graph-data.nodes.at(e.target).name)) let unique = () for e in edges { if unique.find(u => u.at(0) == e.at(0) and u.at(1) == e.at(1)) == none { diff --git a/docs/paper/petersen_source.json b/docs/paper/static/petersen_source.json similarity index 100% rename from docs/paper/petersen_source.json rename to docs/paper/static/petersen_source.json diff --git a/docs/paper/petersen_square_unweighted.json b/docs/paper/static/petersen_square_unweighted.json similarity index 100% rename from docs/paper/petersen_square_unweighted.json rename to docs/paper/static/petersen_square_unweighted.json diff --git a/docs/paper/petersen_square_weighted.json b/docs/paper/static/petersen_square_weighted.json similarity index 100% rename from 
docs/paper/petersen_square_weighted.json rename to docs/paper/static/petersen_square_weighted.json diff --git a/docs/paper/petersen_triangular.json b/docs/paper/static/petersen_triangular.json similarity index 100% rename from docs/paper/petersen_triangular.json rename to docs/paper/static/petersen_triangular.json diff --git a/docs/plans/2026-02-14-type-system-cleanup-design.md b/docs/plans/2026-02-14-type-system-cleanup-design.md new file mode 100644 index 000000000..6d52ee8d4 --- /dev/null +++ b/docs/plans/2026-02-14-type-system-cleanup-design.md @@ -0,0 +1,131 @@ +# Type System Cleanup Design + +## Problem + +The weight and trait system has several mathematical inconsistencies: + +1. **Weight dual role**: The type parameter `W` serves as both the per-element weight type and the accumulation/metric type. This prevents using a unit-weight type (`One`) because `One + One` can't produce `2` within the same type. + +2. **Dead abstractions**: `Unweighted(usize)` is never used as a type parameter. The `Weights` trait is implemented but never used outside its own tests. `NumericWeight` and `NumericSize` are nearly identical traits. + +3. **Missing satisfaction trait**: Satisfaction problems (SAT, CircuitSAT, KColoring, Factoring) use `Metric = bool` but have no shared trait. The `BruteForce::find_satisfying()` method uses `Problem` inline. + +## Design + +### 1. `WeightElement` trait + `One` type + +Introduce a trait that maps weight element types to their accumulation type: + +```rust +/// Maps a weight element to its sum/metric type. +pub trait WeightElement: Clone + Default + 'static { + /// The numeric type used for sums and comparisons. + type Sum: NumericSize; + /// Convert this weight element to the sum type. + fn to_sum(&self) -> Self::Sum; +} +``` + +Implementations: + +```rust +/// The constant 1. Unit weight for unweighted problems. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default, Serialize, Deserialize)] +pub struct One; + +impl WeightElement for One { + type Sum = i32; + fn to_sum(&self) -> i32 { 1 } +} + +impl WeightElement for i32 { + type Sum = i32; + fn to_sum(&self) -> i32 { *self } +} + +impl WeightElement for f64 { + type Sum = f64; + fn to_sum(&self) -> f64 { *self } +} +``` + +**Impact on problems:** + +Before: +```rust +impl Problem for MaximumIndependentSet +where W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static +{ + type Metric = SolutionSize; + fn evaluate(&self, config: &[usize]) -> SolutionSize { + let mut total = W::zero(); + for (i, &sel) in config.iter().enumerate() { + if sel == 1 { total += self.weights[i].clone(); } + } + SolutionSize::Valid(total) + } +} + +impl OptimizationProblem for MaximumIndependentSet { + type Value = W; +} +``` + +After: +```rust +impl Problem for MaximumIndependentSet +where W::Sum: PartialOrd +{ + type Metric = SolutionSize; + fn evaluate(&self, config: &[usize]) -> SolutionSize { + let mut total = W::Sum::zero(); + for (i, &sel) in config.iter().enumerate() { + if sel == 1 { total += self.weights[i].to_sum(); } + } + SolutionSize::Valid(total) + } +} + +impl OptimizationProblem for MaximumIndependentSet { + type Value = W::Sum; +} +``` + +**Variant output:** `variant()` uses `short_type_name::()` which returns `"One"`, `"i32"`, or `"f64"`. The variant label changes from `"Unweighted"` to `"One"`. + +### 2. `SatisfactionProblem` marker trait + +```rust +/// Marker trait for satisfaction (decision) problems. +pub trait SatisfactionProblem: Problem {} +``` + +Implemented by: `Satisfiability`, `KSatisfiability`, `CircuitSAT`, `KColoring`, `Factoring`. + +No new methods. Makes the problem category explicit in the type system. `BruteForce::find_satisfying()` can use `P: SatisfactionProblem` as its bound. + +### 3. Merge `NumericWeight` / `NumericSize` + +Delete `NumericWeight`. 
Keep `NumericSize` as the sole numeric bound trait: + +```rust +pub trait NumericSize: + Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static +{} +``` + +This is the bound on `WeightElement::Sum`. The extra `Bounded` requirement (vs the old `NumericWeight`) is needed for solver penalty calculations and is satisfied by `i32` and `f64`. + +### Removals + +- `Unweighted` struct (replaced by `One`) +- `Weights` trait (unused, subsumed by `WeightElement`) +- `NumericWeight` trait (merged into `NumericSize`) + +### Reduction impact + +Concrete `ReduceTo` impls change `Unweighted` references to `One`. The `ConcreteVariantEntry` registrations in `variants.rs` change `"Unweighted"` to `"One"`. The natural edge system (weight subtype hierarchy) adds `One` as a subtype of `i32`. + +### Variant impact + +The `variant()` output for unweighted problems changes from `("weight", "Unweighted")` to `("weight", "One")`. The reduction graph JSON, paper, and JavaScript visualization update accordingly. diff --git a/docs/plans/2026-02-14-type-system-cleanup-impl.md b/docs/plans/2026-02-14-type-system-cleanup-impl.md new file mode 100644 index 000000000..9795908ff --- /dev/null +++ b/docs/plans/2026-02-14-type-system-cleanup-impl.md @@ -0,0 +1,368 @@ +# Type System Cleanup Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Clean up the weight type system by introducing `WeightElement` trait and `One` type, add `SatisfactionProblem` marker trait, and delete dead abstractions. + +**Architecture:** Three independent changes: (1) `WeightElement` trait decouples weight element type from accumulation type, enabling `One` as a unit weight; (2) `SatisfactionProblem` marker trait for `Metric = bool` problems; (3) merge `NumericWeight` into `NumericSize`. 
+ +**Tech Stack:** Rust, inventory crate for registry + +**Design doc:** `docs/plans/2026-02-14-type-system-cleanup-design.md` + +--- + +### Task 1: Add `WeightElement` trait and `One` type to `types.rs` + +**Files:** +- Modify: `src/types.rs` +- Test: `src/unit_tests/types.rs` + +**Step 1: Add `WeightElement` trait and implementations after `NumericSize`** + +Add after the `NumericSize` blanket impl (after line 51): + +```rust +/// Maps a weight element to its sum/metric type. +/// +/// This decouples the per-element weight type from the accumulation type. +/// For concrete weights (`i32`, `f64`), `Sum` is the same type. +/// For the unit weight `One`, `Sum = i32`. +pub trait WeightElement: Clone + Default + 'static { + /// The numeric type used for sums and comparisons. + type Sum: NumericSize; + /// Convert this weight element to the sum type. + fn to_sum(&self) -> Self::Sum; +} + +impl WeightElement for i32 { + type Sum = i32; + fn to_sum(&self) -> i32 { + *self + } +} + +impl WeightElement for f64 { + type Sum = f64; + fn to_sum(&self) -> f64 { + *self + } +} +``` + +**Step 2: Replace `Unweighted` with `One`** + +Replace the `Unweighted` struct, its methods, Display impl, and Weights impl with: + +```rust +/// The constant 1. Unit weight for unweighted problems. +/// +/// When used as the weight type parameter `W`, indicates that all weights +/// are uniformly 1. `One::to_sum()` returns `1i32`. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default, Serialize, Deserialize)] +pub struct One; + +impl WeightElement for One { + type Sum = i32; + fn to_sum(&self) -> i32 { + 1 + } +} + +impl std::fmt::Display for One { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "One") + } +} +``` + +**Step 3: Delete dead code** + +- Delete `NumericWeight` trait and blanket impl (lines 11-26) +- Delete `Weights` trait (lines 53-67) +- Delete `Weights for Unweighted` impl (lines 93-102) +- Delete `Weights for Vec` impl (lines 104-113) +- Delete `Weights for Vec` impl (lines 115-124) + +**Step 4: Update `src/unit_tests/types.rs`** + +- Replace `test_unweighted` to test `One` instead +- Replace `test_unweighted_weights_trait` to test `WeightElement for One` +- Add tests for `WeightElement for i32` and `WeightElement for f64` + +**Step 5: Run test to verify** + +Run: `cargo test --lib types::tests` + +**Step 6: Commit** + +``` +feat: add WeightElement trait and One type, remove Unweighted/Weights/NumericWeight +``` + +--- + +### Task 2: Update `lib.rs` exports + +**Files:** +- Modify: `src/lib.rs` + +**Step 1: Update prelude and crate-level re-exports** + +In the `pub mod prelude` block (line 102), replace: +```rust +Direction, NumericSize, NumericWeight, ProblemSize, SolutionSize, Unweighted, Weights, +``` +with: +```rust +Direction, NumericSize, One, ProblemSize, SolutionSize, WeightElement, +``` + +**Step 2: Run build to check** + +Run: `cargo check --all-features` + +**Step 3: Commit** + +``` +refactor: update lib.rs exports for WeightElement/One +``` + +--- + +### Task 3: Update graph problem `Problem` and `OptimizationProblem` impls + +**Files (8 graph problems):** +- Modify: `src/models/graph/maximum_independent_set.rs` +- Modify: `src/models/graph/maximum_clique.rs` +- Modify: `src/models/graph/minimum_vertex_cover.rs` +- Modify: `src/models/graph/minimum_dominating_set.rs` +- Modify: `src/models/graph/maximal_is.rs` +- Modify: 
`src/models/graph/maximum_matching.rs` +- Modify: `src/models/graph/traveling_salesman.rs` +- Modify: `src/models/graph/max_cut.rs` + +For each file, apply the same pattern: + +**Step 1: Update `Problem` impl** + +Change trait bounds from: +```rust +impl<G, W> Problem for ProblemType<G, W> +where + G: Graph, + W: Clone + Default + PartialOrd + Num + Zero + AddAssign + 'static, +``` +to: +```rust +impl<G, W> Problem for ProblemType<G, W> +where + G: Graph, + W: WeightElement, +``` + +Change `type Metric = SolutionSize<W>` to `type Metric = SolutionSize<W::Sum>`. + +Change `evaluate()` body: replace `W::zero()` with `W::Sum::zero()`, replace `self.weights[i].clone()` with `self.weights[i].to_sum()` (in the accumulation `total +=` line). + +**Step 2: Update `OptimizationProblem` impl** + +Same bound changes. Change `type Value = W` to `type Value = W::Sum`. + +**Step 3: Update constructors and helper methods** + +Most `new()` constructors that create default weights use `vec![W::from(1); n]`, which requires a `W: From<i32>` bound — and `One` does not implement `From<i32>`. Check each constructor individually: the constructor that creates unit weights must either be specialized (or use a `WeightElement`-specific helper), or fall back to `W::default()` (the default of `One` is `One`, which is correct). + +**Alternative:** Add `From<i32> for One`: +```rust +impl From<i32> for One { + fn from(_: i32) -> Self { One } +} +``` +This allows `W::from(1)` to work for `One` — it ignores the value and returns `One`. This is mathematically sound: promoting any integer to the `One` type gives `One`. + +Add this to `types.rs` in Task 1. 
+ +**Step 4: Update `ReductionResult` impls in the same files** + +`ReductionResult` impls with generic `W` bounds need the same bound change from the long trait list to `W: WeightElement`. + +**Step 5: Run tests** + +Run: `cargo test --lib models::graph` + +**Step 6: Commit** + +``` +refactor: update graph problem impls to use WeightElement +``` + +--- + +### Task 4: Update set and optimization problem impls + +**Files (4 problems):** +- Modify: `src/models/set/maximum_set_packing.rs` +- Modify: `src/models/set/minimum_set_covering.rs` +- Modify: `src/models/optimization/qubo.rs` +- Modify: `src/models/optimization/spin_glass.rs` + +Same pattern as Task 3: update `Problem` bounds, `Metric`, `Value`, and `evaluate()` body. + +For `QUBO` and `SpinGlass`, the `W` parameter is already the numeric type (not a weight element in the vertex sense), so `WeightElement for f64` with `Sum = f64` should work directly. Verify that `W::zero()` still works via `NumericSize` bound on `W::Sum`. + +**Step 1: Apply same changes as Task 3** + +**Step 2: Run tests** + +Run: `cargo test --lib models::set models::optimization` + +**Step 3: Commit** + +``` +refactor: update set and optimization problem impls to use WeightElement +``` + +--- + +### Task 5: Update reduction rule files + +**Files (~20 reduction files):** +- All files in `src/rules/` that have generic `W` bounds on `ReductionResult` impls + +The concrete `ReduceTo` impls (from our previous work) don't need changes since they use `i32`/`f64` directly. But the generic `ReductionResult` impls need bounds updated. + +**Step 1: For each reduction file with generic `ReductionResult` impls** + +Replace: +```rust +where W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static +``` +or similar long bound lists with: +```rust +where W: WeightElement +``` + +Add `use crate::types::WeightElement;` to imports if not already present. Remove unused `num_traits` imports. 
+ +**Step 2: Run tests** + +Run: `cargo test --all-features` + +**Step 3: Commit** + +``` +refactor: update reduction rule bounds to use WeightElement +``` + +--- + +### Task 6: Update variant metadata — `"Unweighted"` to `"One"` + +**Files:** +- Modify: `src/rules/variants.rs` — replace all `"Unweighted"` with `"One"` +- Modify: `src/graph_types.rs` — replace `"Unweighted"` in weight subtype declarations +- Modify: `src/rules/registry.rs` — replace `"Unweighted"` in weight checking (if any) +- Modify: `docs/src/reductions/reduction_graph.json` — regenerated +- Modify: test files that assert on `"Unweighted"` string + +**Step 1: Replace `"Unweighted"` with `"One"` in source files** + +In `variants.rs`, `graph_types.rs`, and `registry.rs`. + +**Step 2: Update test assertions** + +In `unit_tests/rules/graph.rs`, `unit_tests/rules/registry.rs`, `unit_tests/graph_types.rs` — replace all `"Unweighted"` assertions with `"One"`. + +**Step 3: Regenerate reduction graph JSON** + +Run: `make rust-export` + +**Step 4: Run tests** + +Run: `cargo test --all-features` + +**Step 5: Commit** + +``` +refactor: rename Unweighted to One in variant metadata +``` + +--- + +### Task 7: Add `SatisfactionProblem` marker trait + +**Files:** +- Modify: `src/traits.rs` +- Modify: `src/models/satisfiability/sat.rs` +- Modify: `src/models/satisfiability/ksat.rs` +- Modify: `src/models/specialized/circuit.rs` +- Modify: `src/models/graph/kcoloring.rs` +- Modify: `src/lib.rs` (re-export) + +**Step 1: Add trait to `src/traits.rs`** + +After the `OptimizationProblem` trait: +```rust +/// Marker trait for satisfaction (decision) problems. +/// +/// Satisfaction problems evaluate configurations to `bool`: +/// `true` if the configuration satisfies all constraints, `false` otherwise. 
+pub trait SatisfactionProblem: Problem {} +``` + +**Step 2: Implement for each satisfaction problem** + +In each file, add after the `Problem` impl: +```rust +impl SatisfactionProblem for Satisfiability {} +impl SatisfactionProblem for KSatisfiability {} +impl SatisfactionProblem for CircuitSAT {} +impl SatisfactionProblem for KColoring {} +``` + +**Step 3: Add re-export in `lib.rs`** + +Add `SatisfactionProblem` to the traits re-export. + +**Step 4: Optionally update solver bounds** + +In `src/solvers/brute_force.rs` and `src/solvers/mod.rs`, change `P: Problem` to `P: SatisfactionProblem`. This is optional — the existing bound still works. + +**Step 5: Run tests** + +Run: `cargo test --all-features` + +**Step 6: Commit** + +``` +feat: add SatisfactionProblem marker trait +``` + +--- + +### Task 8: Final verification and cleanup + +**Step 1: Run full test suite** + +Run: `make test clippy` + +**Step 2: Check for any remaining `Unweighted` or `NumericWeight` references** + +Run: `rg "Unweighted|NumericWeight" src/` + +Any remaining references should be in comments/docs only — update those too. + +**Step 3: Update paper if needed** + +Check `docs/paper/reductions.typ` for `Unweighted` references. + +**Step 4: Run doc build** + +Run: `make doc` + +**Step 5: Final commit** + +``` +chore: cleanup remaining Unweighted/NumericWeight references +``` diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 5e018c802..f04c36c31 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -5,7 +5,7 @@ # User Guide - [Getting Started](./getting-started.md) -- [Architecture](./arch.md) +- [Design](./design.md) # Developer Guide diff --git a/docs/src/arch.md b/docs/src/arch.md deleted file mode 100644 index 64c62db27..000000000 --- a/docs/src/arch.md +++ /dev/null @@ -1,178 +0,0 @@ -# Architecture - -This guide covers the library internals for contributors and developers. See [Getting Started](./getting-started.md) for usage examples. - -## Module Overview - -
- -![Module Overview](static/module-overview.svg) - -
-
- -![Module Overview](static/module-overview-dark.svg) - -
- -| Module | Purpose | -|--------|---------| -| [`src/models/`](#models) | Problem type implementations (SAT, Graph, Set, Optimization) | -| [`src/rules/`](#rules) | Reduction rules with `ReduceTo` implementations | -| [`src/registry/`](#registry) | Compile-time reduction graph metadata | -| [`src/solvers/`](#solvers) | BruteForce and ILP solvers | -| `src/traits.rs` | Core `Problem` and `OptimizationProblem` traits (see [Models](#models)) | -| `src/types.rs` | Shared types: `SolutionSize`, `Direction`, `ProblemSize` (see [Models](#models)) | - -## Models - -Every problem implements `Problem`. Optimization problems additionally implement `OptimizationProblem`. - -
- -![Trait Hierarchy](static/trait-hierarchy.svg) - -
-
- -![Trait Hierarchy](static/trait-hierarchy-dark.svg) - -
- -```rust -pub trait Problem: Clone { - const NAME: &'static str; // e.g., "MaximumIndependentSet" - type Metric: Clone; // SolutionSize or bool - fn dims(&self) -> Vec; // config space: [2, 2, 2] for 3 binary vars - fn evaluate(&self, config: &[usize]) -> Self::Metric; - fn variant() -> Vec<(&'static str, &'static str)>; -} - -pub trait OptimizationProblem: Problem> { - type Value: PartialOrd + Clone; // i32, f64, etc. - fn direction(&self) -> Direction; // Maximize or Minimize -} -``` - -**Key types:** -- `SolutionSize`: `Valid(T)` for feasible solutions, `Invalid` for constraint violations -- `Direction`: `Maximize` or `Minimize` - -Problems are parameterized by graph type and weight type: - -- `MaximumIndependentSet` — graph type `G`, weight type `W` -- `Satisfiability` — CNF formula (concrete type, no parameters) -- `QUBO` — parameterized by weight type only - -**Graph types:** - -| Type | Description | -|------|-------------| -| `SimpleGraph` | Standard adjacency-based graph | -| `GridGraph` | Vertices on a regular grid | -| `UnitDiskGraph` | Edges connect vertices within a distance threshold | -| `HyperGraph` | Edges connecting any number of vertices | - -**Variant IDs** in the reduction graph follow `ProblemName[/GraphType][/Weighted]`: - -``` -MaximumIndependentSet # base variant (SimpleGraph, unweighted) -MaximumIndependentSet/GridGraph # different graph topology -MaximumIndependentSet/Weighted # weighted objective -``` - -All problem types support JSON serialization via serde: - -```rust -use problemreductions::io::{to_json, from_json}; - -let json = to_json(&problem)?; -let restored: MaximumIndependentSet = from_json(&json)?; -``` - -See [adding-models.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-models.md) for the full implementation guide. - -## Rules - -A reduction requires two pieces: - -**1. 
Result struct** — holds the target problem and extraction logic: - -```rust -#[derive(Clone)] -pub struct ReductionAToB { - target: B, - // ... mapping data for extraction -} - -impl ReductionResult for ReductionAToB { - type Source = A; - type Target = B; - - fn target_problem(&self) -> &B { &self.target } - fn extract_solution(&self, target_sol: &[usize]) -> Vec { /* ... */ } -} -``` - -**2. `ReduceTo` impl** with the `#[reduction]` macro: - -```rust -#[reduction(A -> B)] -impl ReduceTo for A { - type Result = ReductionAToB; - fn reduce_to(&self) -> Self::Result { /* ... */ } -} -``` - -The macro generates `inventory::submit!` calls for compile-time reduction graph registration. - -See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) for the full implementation guide. - -## Registry - -The reduction graph is built at compile time using the `inventory` crate: - -```rust -#[reduction(A -> B)] -impl ReduceTo for A { /* ... */ } - -// Expands to include: -// inventory::submit! { ReductionMeta { source: "A", target: "B", ... } } -``` - -**JSON exports:** -- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges -- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type - -Regenerate exports: - -```bash -cargo run --example export_graph # docs/src/reductions/reduction_graph.json (default) -cargo run --example export_graph -- output.json # custom output path -cargo run --example export_schemas # docs/src/reductions/problem_schemas.json -``` - -## Solvers - -Solvers implement the `Solver` trait: - -```rust -pub trait Solver { - fn find_best(&self, problem: &P) -> Option>; - fn find_satisfying>(&self, problem: &P) -> Option>; -} -``` - -`ILPSolver` additionally provides `solve_reduced()` for problems implementing `ReduceTo`. 
- -## Contributing - -See [Call for Contributions](./introduction.md#call-for-contributions) for the recommended issue-based workflow (no coding required). - -For manual implementation: - -- **Adding a problem:** See [adding-models.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-models.md) -- **Adding a reduction:** See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) -- **Testing requirements:** See [testing.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/testing.md) - -Run `make test clippy` before submitting PRs. diff --git a/docs/src/design.md b/docs/src/design.md new file mode 100644 index 000000000..990c884ae --- /dev/null +++ b/docs/src/design.md @@ -0,0 +1,289 @@ +# Design + +This guide covers the library internals for contributors and developers. See [Getting Started](./getting-started.md) for usage examples. + +## Module Overview + +
+ +![Module Overview](static/module-overview.svg) + +
+
+ +![Module Overview](static/module-overview-dark.svg) + +
+ +| Module | Purpose | +|--------|---------| +| [`src/models/`](#models) | Problem type implementations (SAT, Graph, Set, Optimization) | +| [`src/rules/`](#rules) | Reduction rules with `ReduceTo` implementations | +| [`src/registry/`](#registry) | Compile-time reduction graph metadata | +| [`src/solvers/`](#solvers) | BruteForce and ILP solvers | +| `src/traits.rs` | Core `Problem` and `OptimizationProblem` traits (see [Models](#models)) | +| `src/types.rs` | Shared types: `SolutionSize`, `Direction`, `ProblemSize` (see [Models](#models)) | + +## Models + +Every problem implements `Problem`. Optimization problems additionally implement `OptimizationProblem`. + +
+ +![Trait Hierarchy](static/trait-hierarchy.svg) + +
+
+ +![Trait Hierarchy](static/trait-hierarchy-dark.svg) + +
+ +```rust +pub trait Problem: Clone { + const NAME: &'static str; // e.g., "MaximumIndependentSet" + type Metric: Clone; // SolutionSize or bool + fn dims(&self) -> Vec; // config space: [2, 2, 2] for 3 binary vars + fn evaluate(&self, config: &[usize]) -> Self::Metric; + fn variant() -> Vec<(&'static str, &'static str)>; +} + +pub trait OptimizationProblem: Problem> { + type Value: PartialOrd + Clone; // i32, f64, etc. + fn direction(&self) -> Direction; // Maximize or Minimize +} +``` + +**Key types:** +- `SolutionSize`: `Valid(T)` for feasible solutions, `Invalid` for constraint violations +- `Direction`: `Maximize` or `Minimize` + +Problems are parameterized by graph type and weight type: + +- `MaximumIndependentSet` — graph type `G`, weight type `W` +- `Satisfiability` — CNF formula (concrete type, no parameters) +- `QUBO` — parameterized by weight type only + +**Graph types:** + +| Type | Description | +|------|-------------| +| `SimpleGraph` | Standard adjacency-based graph | +| `GridGraph` | Vertices on a regular grid | +| `UnitDiskGraph` | Edges connect vertices within a distance threshold | +| `Triangular` | Triangular lattice graph (subtype of UnitDiskGraph) | +| `HyperGraph` | Edges connecting any number of vertices | + +All problem types support JSON serialization via serde: + +```rust +use problemreductions::io::{to_json, from_json}; + +let json = to_json(&problem)?; +let restored: MaximumIndependentSet = from_json(&json)?; +``` + +See [adding-models.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-models.md) for the full implementation guide. + +## Variant System + +A single problem name like `MaximumIndependentSet` can have multiple **variants** — concrete instantiations that differ in graph topology, weight type, or other parameters. The variant system tracks these distinctions in the reduction graph so that reductions between specific instantiations are represented precisely. 
+ +Each variant is identified by a set of key-value pairs returned by `Problem::variant()`: + +```rust +// MaximumIndependentSet +fn variant() -> Vec<(&'static str, &'static str)> { + vec![("graph", "UnitDiskGraph"), ("weight", "One")] +} + +// KSatisfiability<3> +fn variant() -> Vec<(&'static str, &'static str)> { + vec![("k", "3")] +} +``` + +Variant nodes in the reduction graph are discovered automatically from `#[reduction]` registrations — each reduction's source and target types become nodes. Natural edges between same-name variants are inferred from the graph/weight subtype partial order (e.g., `MIS/GridGraph → MIS/SimpleGraph`). In the visualization, nodes are labeled with only the non-default fields for brevity (e.g. `MaximumIndependentSet (GridGraph)` omits the default `One`). + +### Graph Hierarchy + +Graph types form a subtype hierarchy declared in `src/graph_types.rs`: + +``` +HyperGraph (most general) +└── SimpleGraph + ├── PlanarGraph + ├── BipartiteGraph + └── UnitDiskGraph + ├── GridGraph + └── Triangular +``` + +A problem on a more specific graph type can always be treated as a problem on a more general one — a `GridGraph` *is* a `SimpleGraph`. This subtype relationship is registered at compile time: + +```rust +declare_graph_subtype!(GridGraph => UnitDiskGraph); +declare_graph_subtype!(UnitDiskGraph => SimpleGraph); +// ... +``` + +The runtime builds a transitive closure: `GridGraph` is a subtype of `UnitDiskGraph`, `SimpleGraph`, and `HyperGraph`. + +**Example: natural edge for Triangular MIS.** Suppose we have a `MaximumIndependentSet` instance — an independent set problem on a triangular lattice. Because `Triangular` is a subtype of `SimpleGraph` in the graph hierarchy, the reduction graph contains a natural edge: + +``` +MIS → MIS +``` + +This edge has identity overhead (the problem size is unchanged) and requires no code — the triangular lattice graph *is* a simple graph, so any MIS algorithm for general graphs applies directly. 
Combined with the explicit reduction `MIS → MIS` (unit disk mapping), the system can automatically chain: + +``` +MIS → MIS → MIS + (natural edge) (explicit reduction) +``` + +### Weight Hierarchy + +Weight types form a linear promotion chain: + +``` +One → i32 → f64 +``` + +An unweighted problem (using `One`, the unit-weight type) is a special case of a weighted one (all weights equal to 1), and an integer-weighted problem embeds naturally into real-weighted. This is declared in `src/graph_types.rs`: + +```rust +declare_weight_subtype!("One" => "i32"); +declare_weight_subtype!("i32" => "f64"); +``` + +### K Parameter + +`KSatisfiability` and `KColoring` use a const generic `K` mapped to a string via `const_usize_str`: + +| Rust type | Variant `k` | +|-----------|-------------| +| `KSatisfiability<2>` | `"2"` | +| `KSatisfiability<3>` | `"3"` | +| Generic `KSatisfiability` | `"N"` | + +A specific K value (e.g. `"3"`) is a subtype of the generic `"N"`, meaning any concrete K-SAT instance can be treated as a general K-SAT problem. + +### Natural Edges + +When two variants of the same problem differ only in that one is "more specific" than the other, a **natural edge** is auto-generated in the reduction graph. The edge represents the trivial identity reduction — the problem instance doesn't change, only its type annotation relaxes. + +A variant A is reducible to variant B when every field of A is at least as specific as the corresponding field of B: + +- **graph:** `is_graph_subtype(A.graph, B.graph)` — e.g. `UnitDiskGraph` ≤ `SimpleGraph` +- **weight:** `is_weight_subtype(A.weight, B.weight)` — e.g. `One` ≤ `i32` +- **k:** a concrete value is a subtype of `"N"` + +Natural edges have identity overhead: the output size equals the input size. + +### Example: Unweighted MIS on UnitDiskGraph → Weighted MIS on SimpleGraph + +Consider reducing `MaximumIndependentSet` to `MaximumIndependentSet`. 
These are two variants of the same problem, so the reduction graph connects them via natural edges: + +``` +MIS (UnitDiskGraph, One) + │ + │ graph relaxation: UnitDiskGraph ≤ SimpleGraph + ▼ +MIS (SimpleGraph, One) + │ + │ weight promotion: One ≤ i32 + ▼ +MIS (SimpleGraph, i32) +``` + +**Step 1 — Graph relaxation.** A unit disk graph is a simple graph (it just happens to have geometric structure). The MIS instance is unchanged; we simply forget the geometric embedding and treat it as a generic graph. + +**Step 2 — Weight promotion.** An unweighted MIS asks for the largest independent set (all vertices have equal value). This is equivalent to a weighted MIS where every vertex has weight 1. The instance gains uniform weights and becomes `MaximumIndependentSet`. + +Both steps are identity reductions with zero overhead — no new variables or constraints are introduced. The variant system generates these edges automatically from the declared hierarchies. + +## Rules + +A reduction requires two pieces: + +**1. Result struct** — holds the target problem and extraction logic: + +```rust +#[derive(Clone)] +pub struct ReductionAToB { + target: B, + // ... mapping data for extraction +} + +impl ReductionResult for ReductionAToB { + type Source = A; + type Target = B; + + fn target_problem(&self) -> &B { &self.target } + fn extract_solution(&self, target_sol: &[usize]) -> Vec { /* ... */ } +} +``` + +**2. `ReduceTo` impl** with the `#[reduction]` macro: + +```rust +#[reduction(A -> B)] +impl ReduceTo for A { + type Result = ReductionAToB; + fn reduce_to(&self) -> Self::Result { /* ... */ } +} +``` + +The macro generates `inventory::submit!` calls for compile-time reduction graph registration. + +See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) for the full implementation guide. 
+ +## Registry + +The reduction graph is built at compile time using the `inventory` crate: + +```rust +#[reduction(A -> B)] +impl ReduceTo for A { /* ... */ } + +// Expands to include: +// inventory::submit! { ReductionMeta { source: "A", target: "B", ... } } +``` + +**JSON exports:** +- [reduction_graph.json](reductions/reduction_graph.json) — all problem variants and reduction edges +- [problem_schemas.json](reductions/problem_schemas.json) — field definitions for each problem type + +Regenerate exports: + +```bash +cargo run --example export_graph # docs/src/reductions/reduction_graph.json (default) +cargo run --example export_graph -- output.json # custom output path +cargo run --example export_schemas # docs/src/reductions/problem_schemas.json +``` + +## Solvers + +Solvers implement the `Solver` trait: + +```rust +pub trait Solver { + fn find_best(&self, problem: &P) -> Option>; + fn find_satisfying>(&self, problem: &P) -> Option>; +} +``` + +`ILPSolver` additionally provides `solve_reduced()` for problems implementing `ReduceTo`. + +## Contributing + +See [Call for Contributions](./introduction.md#call-for-contributions) for the recommended issue-based workflow (no coding required). + +For manual implementation: + +- **Adding a problem:** See [adding-models.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-models.md) +- **Adding a reduction:** See [adding-reductions.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/adding-reductions.md) +- **Testing requirements:** See [testing.md](https://github.com/CodingThrust/problem-reductions/blob/main/.claude/rules/testing.md) + +Run `make test clippy` before submitting PRs. 
diff --git a/docs/src/introduction.md b/docs/src/introduction.md index 289a1b5a9..a36138d92 100644 --- a/docs/src/introduction.md +++ b/docs/src/introduction.md @@ -40,46 +40,60 @@ return name + '/' + keys.map(function(k) { return k + '=' + variant[k]; }).join(','); } + // Default values per variant key — omitted in concise labels + var variantDefaults = { graph: 'SimpleGraph', weight: 'One' }; + function variantLabel(variant) { - var graph = variant.graph || 'SimpleGraph'; - var weight = variant.weight || 'Unweighted'; - var extra = Object.keys(variant).filter(function(k) { return k !== 'graph' && k !== 'weight'; }); + var keys = Object.keys(variant); var parts = []; - if (graph !== 'SimpleGraph') parts.push(graph); - if (weight !== 'Unweighted') parts.push('Weighted'); - extra.forEach(function(k) { parts.push(k + '=' + variant[k]); }); + keys.forEach(function(k) { + var v = variant[k]; + if (variantDefaults[k] && v === variantDefaults[k]) return; // skip defaults + parts.push(k === 'graph' || k === 'weight' ? v : k + '=' + v); + }); return parts.length > 0 ? parts.join(', ') : 'base'; } + function fullVariantLabel(variant) { + var keys = Object.keys(variant); + if (keys.length === 0) return 'no parameters'; + var parts = []; + keys.forEach(function(k) { + parts.push(k === 'graph' || k === 'weight' ? 
variant[k] : k + '=' + variant[k]); + }); + return parts.join(', '); + } + function isBaseVariant(variant) { - var graph = variant.graph || 'SimpleGraph'; - var weight = variant.weight || 'Unweighted'; - var extra = Object.keys(variant).filter(function(k) { return k !== 'graph' && k !== 'weight'; }); - return graph === 'SimpleGraph' && weight === 'Unweighted' && extra.length === 0; + var keys = Object.keys(variant); + return keys.every(function(k) { + return variantDefaults[k] && variant[k] === variantDefaults[k]; + }); } fetch('reductions/reduction_graph.json') .then(function(r) { if (!r.ok) throw new Error('HTTP ' + r.status); return r.json(); }) .then(function(data) { - // Collect variant nodes (skip base nodes with empty variant) - var variantNodes = data.nodes.filter(function(n) { - return n.variant && Object.keys(n.variant).length > 0; - }); - - // Group by problem name + // Group all nodes by problem name var problems = {}; - variantNodes.forEach(function(n) { + data.nodes.forEach(function(n) { if (!problems[n.name]) { problems[n.name] = { category: n.category, doc_path: n.doc_path, children: [] }; } - problems[n.name].children.push(n); + // Only track nodes with non-empty variants as children; + // empty-variant nodes are base placeholders + if (n.variant && Object.keys(n.variant).length > 0) { + problems[n.name].children.push(n); + } }); // Build edges at variant level, detecting bidirectional pairs var edgeMap = {}; data.edges.forEach(function(e) { - var srcId = variantId(e.source.name, e.source.variant); - var dstId = variantId(e.target.name, e.target.variant); + var src = data.nodes[e.source]; + var dst = data.nodes[e.target]; + var srcId = variantId(src.name, src.variant); + var dstId = variantId(dst.name, dst.variant); var fwd = srcId + '->' + dstId; var rev = dstId + '->' + srcId; if (edgeMap[rev]) { edgeMap[rev].bidirectional = true; } @@ -111,11 +125,13 @@ }); var tempEdgeSet = {}; data.edges.forEach(function(e) { - var key = e.source.name + '->' + 
e.target.name; - var rev = e.target.name + '->' + e.source.name; + var srcName = data.nodes[e.source].name; + var dstName = data.nodes[e.target].name; + var key = srcName + '->' + dstName; + var rev = dstName + '->' + srcName; if (!tempEdgeSet[key] && !tempEdgeSet[rev]) { tempEdgeSet[key] = true; - tempElements.push({ data: { id: 'te_' + key, source: e.source.name, target: e.target.name } }); + tempElements.push({ data: { id: 'te_' + key, source: srcName, target: dstName } }); } }); @@ -154,11 +170,18 @@ var pi = problemInfo[name]; var pos = positions[name]; - if (pi.baseChild) { + if (info.children.length === 0) { + // No parameterized variants — single node with empty variant + var vid = variantId(name, {}); + elements.push({ + data: { id: vid, label: name, fullLabel: name + ' (no parameters)', category: info.category, doc_path: info.doc_path }, + position: { x: pos.x, y: pos.y } + }); + } else if (pi.baseChild) { // Base variant at parent position, labeled with problem name var baseId = variantId(name, pi.baseChild.variant); elements.push({ - data: { id: baseId, label: name, category: info.category, doc_path: info.doc_path }, + data: { id: baseId, label: name, fullLabel: name + ' (' + fullVariantLabel(pi.baseChild.variant) + ')', category: info.category, doc_path: info.doc_path }, position: { x: pos.x, y: pos.y } }); // Non-base variants placed below @@ -166,7 +189,7 @@ var vid = variantId(name, child.variant); var vl = variantLabel(child.variant); elements.push({ - data: { id: vid, label: name + ' (' + vl + ')', category: child.category, doc_path: child.doc_path }, + data: { id: vid, label: name + ' (' + vl + ')', fullLabel: name + ' (' + fullVariantLabel(child.variant) + ')', category: child.category, doc_path: child.doc_path }, position: { x: pos.x, y: pos.y + (i + 1) * variantOffsetY } }); }); @@ -175,7 +198,7 @@ var child = pi.nonBase[0]; var vid = variantId(name, child.variant); elements.push({ - data: { id: vid, label: name, category: child.category, 
doc_path: child.doc_path }, + data: { id: vid, label: name, fullLabel: name + ' (' + fullVariantLabel(child.variant) + ')', category: child.category, doc_path: child.doc_path }, position: { x: pos.x, y: pos.y } }); } else { @@ -184,7 +207,7 @@ var vid = variantId(name, child.variant); var vl = variantLabel(child.variant); elements.push({ - data: { id: vid, label: name + ' (' + vl + ')', category: child.category, doc_path: child.doc_path }, + data: { id: vid, label: name + ' (' + vl + ')', fullLabel: name + ' (' + fullVariantLabel(child.variant) + ')', category: child.category, doc_path: child.doc_path }, position: { x: pos.x, y: pos.y + i * variantOffsetY } }); }); @@ -240,7 +263,7 @@ var tooltip = document.getElementById('tooltip'); cy.on('mouseover', 'node', function(evt) { var d = evt.target.data(); - tooltip.innerHTML = '' + d.label + '
Category: ' + d.category + '
Double-click to view API docs'; + tooltip.innerHTML = '' + d.fullLabel + '
Double-click to view API docs'; tooltip.style.display = 'block'; }); cy.on('mousemove', 'node', function(evt) { diff --git a/docs/src/reductions/reduction_graph.json b/docs/src/reductions/reduction_graph.json index 98821b4bc..9650acd0f 100644 --- a/docs/src/reductions/reduction_graph.json +++ b/docs/src/reductions/reduction_graph.json @@ -3,15 +3,7 @@ { "name": "CircuitSAT", "variant": {}, - "category": "satisfiability", - "doc_path": "models/specialized/struct.CircuitSAT.html" - }, - { - "name": "CircuitSAT", - "variant": { - "graph": "SimpleGraph" - }, - "category": "satisfiability", + "category": "specialized", "doc_path": "models/specialized/struct.CircuitSAT.html" }, { @@ -20,30 +12,12 @@ "category": "specialized", "doc_path": "models/specialized/struct.Factoring.html" }, - { - "name": "Factoring", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - }, - "category": "specialized", - "doc_path": "models/specialized/struct.Factoring.html" - }, { "name": "ILP", "variant": {}, "category": "optimization", "doc_path": "models/optimization/struct.ILP.html" }, - { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - }, - "category": "optimization", - "doc_path": "models/optimization/struct.ILP.html" - }, { "name": "KColoring", "variant": {}, @@ -68,15 +42,6 @@ "category": "graph", "doc_path": "models/graph/struct.KColoring.html" }, - { - "name": "KColoring", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.KColoring.html" - }, { "name": "KSatisfiability", "variant": {}, @@ -86,26 +51,7 @@ { "name": "KSatisfiability", "variant": { - "k": "2", - "weight": "Unweighted" - }, - "category": "satisfiability", - "doc_path": "models/satisfiability/struct.KSatisfiability.html" - }, - { - "name": "KSatisfiability", - "variant": { - "k": "3", - "weight": "Unweighted" - }, - "category": "satisfiability", - "doc_path": "models/satisfiability/struct.KSatisfiability.html" 
- }, - { - "name": "KSatisfiability", - "variant": { - "k": "4", - "weight": "Unweighted" + "k": "2" }, "category": "satisfiability", "doc_path": "models/satisfiability/struct.KSatisfiability.html" @@ -113,8 +59,7 @@ { "name": "KSatisfiability", "variant": { - "k": "5", - "weight": "Unweighted" + "k": "3" }, "category": "satisfiability", "doc_path": "models/satisfiability/struct.KSatisfiability.html" @@ -122,8 +67,7 @@ { "name": "KSatisfiability", "variant": { - "k": "N", - "weight": "Unweighted" + "k": "N" }, "category": "satisfiability", "doc_path": "models/satisfiability/struct.KSatisfiability.html" @@ -134,20 +78,11 @@ "category": "graph", "doc_path": "models/graph/struct.MaxCut.html" }, - { - "name": "MaxCut", - "variant": { - "graph": "GridGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.MaxCut.html" - }, { "name": "MaxCut", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" }, "category": "graph", "doc_path": "models/graph/struct.MaxCut.html" @@ -177,7 +112,7 @@ "name": "MaximumIndependentSet", "variant": { "graph": "GridGraph", - "weight": "Unweighted" + "weight": "i32" }, "category": "graph", "doc_path": "models/graph/struct.MaximumIndependentSet.html" @@ -186,7 +121,7 @@ "name": "MaximumIndependentSet", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" }, "category": "graph", "doc_path": "models/graph/struct.MaximumIndependentSet.html" @@ -194,7 +129,7 @@ { "name": "MaximumIndependentSet", "variant": { - "graph": "SimpleGraph", + "graph": "Triangular", "weight": "i32" }, "category": "graph", @@ -204,7 +139,7 @@ "name": "MaximumIndependentSet", "variant": { "graph": "UnitDiskGraph", - "weight": "Unweighted" + "weight": "i32" }, "category": "graph", "doc_path": "models/graph/struct.MaximumIndependentSet.html" @@ -215,15 +150,6 @@ "category": "graph", "doc_path": "models/graph/struct.MaximumMatching.html" }, - { - "name": "MaximumMatching", - "variant": 
{ - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.MaximumMatching.html" - }, { "name": "MaximumMatching", "variant": { @@ -242,8 +168,7 @@ { "name": "MaximumSetPacking", "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" }, "category": "set", "doc_path": "models/set/struct.MaximumSetPacking.html" @@ -251,7 +176,6 @@ { "name": "MaximumSetPacking", "variant": { - "graph": "SimpleGraph", "weight": "i32" }, "category": "set", @@ -281,16 +205,6 @@ { "name": "MinimumSetCovering", "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "set", - "doc_path": "models/set/struct.MinimumSetCovering.html" - }, - { - "name": "MinimumSetCovering", - "variant": { - "graph": "SimpleGraph", "weight": "i32" }, "category": "set", @@ -302,15 +216,6 @@ "category": "graph", "doc_path": "models/graph/struct.MinimumVertexCover.html" }, - { - "name": "MinimumVertexCover", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "graph", - "doc_path": "models/graph/struct.MinimumVertexCover.html" - }, { "name": "MinimumVertexCover", "variant": { @@ -329,7 +234,6 @@ { "name": "QUBO", "variant": { - "graph": "SimpleGraph", "weight": "f64" }, "category": "optimization", @@ -341,39 +245,12 @@ "category": "satisfiability", "doc_path": "models/satisfiability/struct.Satisfiability.html" }, - { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - }, - "category": "satisfiability", - "doc_path": "models/satisfiability/struct.Satisfiability.html" - }, { "name": "SpinGlass", "variant": {}, "category": "optimization", "doc_path": "models/optimization/struct.SpinGlass.html" }, - { - "name": "SpinGlass", - "variant": { - "graph": "GridGraph", - "weight": "f64" - }, - "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" - }, - { - "name": "SpinGlass", - "variant": { - "graph": 
"SimpleGraph", - "weight": "Unweighted" - }, - "category": "optimization", - "doc_path": "models/optimization/struct.SpinGlass.html" - }, { "name": "SpinGlass", "variant": { @@ -410,19 +287,8 @@ ], "edges": [ { - "source": { - "name": "CircuitSAT", - "variant": { - "graph": "SimpleGraph" - } - }, - "target": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 0, + "target": 35, "overhead": [ { "field": "num_spins", @@ -436,19 +302,8 @@ "doc_path": "rules/circuit_spinglass/index.html" }, { - "source": { - "name": "Factoring", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "CircuitSAT", - "variant": { - "graph": "SimpleGraph" - } - }, + "source": 1, + "target": 0, "overhead": [ { "field": "num_gates", @@ -458,20 +313,8 @@ "doc_path": "rules/factoring_circuit/index.html" }, { - "source": { - "name": "Factoring", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 1, + "target": 2, "overhead": [ { "field": "num_vars", @@ -485,20 +328,8 @@ "doc_path": "rules/factoring_ilp/index.html" }, { - "source": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, - "target": { - "name": "QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 2, + "target": 31, "overhead": [ { "field": "num_vars", @@ -508,20 +339,8 @@ "doc_path": "rules/ilp_qubo/index.html" }, { - "source": { - "name": "KColoring", - "variant": { - "graph": "SimpleGraph", - "k": "3" - } - }, - "target": { - "name": "KColoring", - "variant": { - "graph": "SimpleGraph", - "k": "N" - } - }, + "source": 4, + "target": 5, "overhead": [ { "field": "num_vertices", @@ -535,70 +354,34 @@ "doc_path": "" }, { - "source": { - "name": "KColoring", - "variant": { - "graph": "SimpleGraph", - "k": "N" - } - }, - "target": { - "name": "QUBO", - 
"variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 5, + "target": 2, "overhead": [ { "field": "num_vars", "formula": "num_vertices * num_colors" + }, + { + "field": "num_constraints", + "formula": "num_vertices + num_edges * num_colors" } ], - "doc_path": "rules/coloring_qubo/index.html" + "doc_path": "rules/coloring_ilp/index.html" }, { - "source": { - "name": "KColoring", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 5, + "target": 31, "overhead": [ { "field": "num_vars", "formula": "num_vertices * num_colors" - }, - { - "field": "num_constraints", - "formula": "num_vertices + num_edges * num_colors" } ], - "doc_path": "rules/coloring_ilp/index.html" + "doc_path": "rules/coloring_qubo/index.html" }, { - "source": { - "name": "KSatisfiability", - "variant": { - "k": "2", - "weight": "Unweighted" - } - }, - "target": { - "name": "KSatisfiability", - "variant": { - "k": "N", - "weight": "Unweighted" - } - }, + "source": 7, + "target": 9, "overhead": [ { "field": "num_clauses", @@ -612,20 +395,8 @@ "doc_path": "" }, { - "source": { - "name": "KSatisfiability", - "variant": { - "k": "2", - "weight": "Unweighted" - } - }, - "target": { - "name": "QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 7, + "target": 31, "overhead": [ { "field": "num_vars", @@ -635,20 +406,8 @@ "doc_path": "rules/ksatisfiability_qubo/index.html" }, { - "source": { - "name": "KSatisfiability", - "variant": { - "k": "3", - "weight": "Unweighted" - } - }, - "target": { - "name": "KSatisfiability", - "variant": { - "k": "N", - "weight": "Unweighted" - } - }, + "source": 8, + "target": 9, "overhead": [ { "field": "num_clauses", @@ -662,20 +421,8 @@ "doc_path": "" }, { - "source": { - "name": "KSatisfiability", - "variant": { - "k": "3", - "weight": "Unweighted" - } - }, - "target": { - "name": 
"QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 8, + "target": 31, "overhead": [ { "field": "num_vars", @@ -685,20 +432,8 @@ "doc_path": "rules/ksatisfiability_qubo/index.html" }, { - "source": { - "name": "KSatisfiability", - "variant": { - "k": "4", - "weight": "Unweighted" - } - }, - "target": { - "name": "KSatisfiability", - "variant": { - "k": "N", - "weight": "Unweighted" - } - }, + "source": 9, + "target": 32, "overhead": [ { "field": "num_clauses", @@ -709,77 +444,41 @@ "formula": "num_vars" } ], - "doc_path": "" + "doc_path": "rules/sat_ksat/index.html" }, { - "source": { - "name": "KSatisfiability", - "variant": { - "k": "5", - "weight": "Unweighted" - } - }, - "target": { - "name": "KSatisfiability", - "variant": { - "k": "N", - "weight": "Unweighted" - } - }, + "source": 11, + "target": 35, "overhead": [ { - "field": "num_clauses", - "formula": "num_clauses" + "field": "num_spins", + "formula": "num_vertices" }, { - "field": "num_vars", - "formula": "num_vars" + "field": "num_interactions", + "formula": "num_edges" } ], - "doc_path": "" + "doc_path": "rules/spinglass_maxcut/index.html" }, { - "source": { - "name": "KSatisfiability", - "variant": { - "k": "N", - "weight": "Unweighted" - } - }, - "target": { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 13, + "target": 2, "overhead": [ { - "field": "num_clauses", - "formula": "num_clauses" + "field": "num_vars", + "formula": "num_vertices" }, { - "field": "num_vars", - "formula": "num_vars" + "field": "num_constraints", + "formula": "num_vertices^2" } ], - "doc_path": "rules/sat_ksat/index.html" + "doc_path": "rules/maximumclique_ilp/index.html" }, { - "source": { - "name": "MaxCut", - "variant": { - "graph": "GridGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaxCut", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 15, + "target": 16, 
"overhead": [ { "field": "num_vertices", @@ -793,47 +492,23 @@ "doc_path": "" }, { - "source": { - "name": "MaxCut", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 15, + "target": 18, "overhead": [ { - "field": "num_spins", + "field": "num_vertices", "formula": "num_vertices" }, { - "field": "num_interactions", + "field": "num_edges", "formula": "num_edges" } ], - "doc_path": "rules/spinglass_maxcut/index.html" + "doc_path": "" }, { - "source": { - "name": "MaximumClique", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 16, + "target": 2, "overhead": [ { "field": "num_vars", @@ -841,107 +516,59 @@ }, { "field": "num_constraints", - "formula": "num_vertices^2" + "formula": "num_edges" } ], - "doc_path": "rules/maximumclique_ilp/index.html" + "doc_path": "rules/maximumindependentset_ilp/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "GridGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 16, + "target": 15, "overhead": [ { "field": "num_vertices", - "formula": "num_vertices" + "formula": "num_vertices * num_vertices" }, { "field": "num_edges", - "formula": "num_edges" + "formula": "num_vertices * num_vertices" } ], - "doc_path": "" + "doc_path": "rules/maximumindependentset_gridgraph/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "GridGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 16, + "target": 17, "overhead": [ { "field": "num_vertices", - "formula": 
"num_vertices" + "formula": "num_vertices * num_vertices" }, { "field": "num_edges", - "formula": "num_edges" + "formula": "num_vertices * num_vertices" } ], - "doc_path": "" + "doc_path": "rules/maximumindependentset_triangular/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "GridGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "UnitDiskGraph", - "weight": "Unweighted" - } - }, + "source": 16, + "target": 23, "overhead": [ { - "field": "num_vertices", + "field": "num_sets", "formula": "num_vertices" }, { - "field": "num_edges", - "formula": "num_edges" + "field": "num_elements", + "formula": "num_vertices" } ], - "doc_path": "" + "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 16, + "target": 29, "overhead": [ { "field": "num_vertices", @@ -952,50 +579,22 @@ "formula": "num_edges" } ], - "doc_path": "" + "doc_path": "rules/minimumvertexcover_maximumindependentset/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumSetPacking", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 16, + "target": 31, "overhead": [ { - "field": "num_sets", - "formula": "num_vertices" - }, - { - "field": "num_elements", + "field": "num_vars", "formula": "num_vertices" } ], - "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" + "doc_path": "rules/maximumindependentset_qubo/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - 
"name": "MinimumVertexCover", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 17, + "target": 16, "overhead": [ { "field": "num_vertices", @@ -1006,100 +605,41 @@ "formula": "num_edges" } ], - "doc_path": "rules/minimumvertexcover_maximumindependentset/index.html" + "doc_path": "rules/natural/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 17, + "target": 18, "overhead": [ { - "field": "num_vars", + "field": "num_vertices", "formula": "num_vertices" }, { - "field": "num_constraints", + "field": "num_edges", "formula": "num_edges" } ], - "doc_path": "rules/maximumindependentset_ilp/index.html" - }, - { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, - "overhead": [ - { - "field": "num_vars", - "formula": "num_vertices" - } - ], - "doc_path": "rules/maximumindependentset_qubo/index.html" + "doc_path": "" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "UnitDiskGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 18, + "target": 15, "overhead": [ { "field": "num_vertices", - "formula": "num_vertices" + "formula": "num_vertices * num_vertices" }, { "field": "num_edges", - "formula": "num_edges" + "formula": "num_vertices * num_vertices" } ], - "doc_path": "" + "doc_path": "rules/maximumindependentset_gridgraph/index.html" }, { - "source": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "UnitDiskGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - 
"graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 18, + "target": 16, "overhead": [ { "field": "num_vertices", @@ -1113,65 +653,8 @@ "doc_path": "" }, { - "source": { - "name": "MaximumMatching", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumMatching", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "overhead": [], - "doc_path": "" - }, - { - "source": { - "name": "MaximumMatching", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumSetPacking", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "overhead": [ - { - "field": "num_sets", - "formula": "num_edges" - }, - { - "field": "num_elements", - "formula": "num_vertices" - } - ], - "doc_path": "rules/maximummatching_maximumsetpacking/index.html" - }, - { - "source": { - "name": "MaximumMatching", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 20, + "target": 2, "overhead": [ { "field": "num_vars", @@ -1185,74 +668,34 @@ "doc_path": "rules/maximummatching_ilp/index.html" }, { - "source": { - "name": "MaximumSetPacking", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 20, + "target": 23, "overhead": [ { - "field": "num_vertices", - "formula": "num_sets" + "field": "num_sets", + "formula": "num_edges" }, { - "field": "num_edges", - "formula": "num_sets" + "field": "num_elements", + "formula": "num_vertices" } ], - "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" + "doc_path": "rules/maximummatching_maximumsetpacking/index.html" }, { - "source": { - "name": "MaximumSetPacking", - "variant": { - "graph": "SimpleGraph", - "weight": 
"Unweighted" - } - }, - "target": { - "name": "MaximumSetPacking", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 22, + "target": 31, "overhead": [ { - "field": "num_sets", + "field": "num_vars", "formula": "num_sets" - }, - { - "field": "num_elements", - "formula": "num_elements" } ], - "doc_path": "" + "doc_path": "rules/maximumsetpacking_qubo/index.html" }, { - "source": { - "name": "MaximumSetPacking", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 23, + "target": 2, "overhead": [ { "field": "num_vars", @@ -1266,70 +709,23 @@ "doc_path": "rules/maximumsetpacking_ilp/index.html" }, { - "source": { - "name": "MaximumSetPacking", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 23, + "target": 16, "overhead": [ { - "field": "num_vars", + "field": "num_vertices", "formula": "num_sets" - } - ], - "doc_path": "rules/maximumsetpacking_qubo/index.html" - }, - { - "source": { - "name": "MinimumDominatingSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, - "overhead": [ - { - "field": "num_vars", - "formula": "num_vertices" }, { - "field": "num_constraints", - "formula": "num_vertices" + "field": "num_edges", + "formula": "num_sets" } ], - "doc_path": "rules/minimumdominatingset_ilp/index.html" + "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" }, { - "source": { - "name": "MinimumSetCovering", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MinimumSetCovering", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 23, + "target": 22, "overhead": [ { "field": 
"num_sets", @@ -1343,101 +739,53 @@ "doc_path": "" }, { - "source": { - "name": "MinimumSetCovering", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 25, + "target": 2, "overhead": [ { "field": "num_vars", - "formula": "num_sets" + "formula": "num_vertices" }, { "field": "num_constraints", - "formula": "universe_size" + "formula": "num_vertices" } ], - "doc_path": "rules/minimumsetcovering_ilp/index.html" + "doc_path": "rules/minimumdominatingset_ilp/index.html" }, { - "source": { - "name": "MinimumVertexCover", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 27, + "target": 2, "overhead": [ { - "field": "num_vertices", - "formula": "num_vertices" + "field": "num_vars", + "formula": "num_sets" }, { - "field": "num_edges", - "formula": "num_edges" + "field": "num_constraints", + "formula": "universe_size" } ], - "doc_path": "rules/minimumvertexcover_maximumindependentset/index.html" + "doc_path": "rules/minimumsetcovering_ilp/index.html" }, { - "source": { - "name": "MinimumVertexCover", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MinimumSetCovering", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 29, + "target": 2, "overhead": [ { - "field": "num_sets", + "field": "num_vars", "formula": "num_vertices" }, { - "field": "num_elements", + "field": "num_constraints", "formula": "num_edges" } ], - "doc_path": "rules/minimumvertexcover_minimumsetcovering/index.html" + "doc_path": "rules/minimumvertexcover_ilp/index.html" }, { - "source": { - "name": "MinimumVertexCover", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MinimumVertexCover", 
- "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 29, + "target": 16, "overhead": [ { "field": "num_vertices", @@ -1448,50 +796,26 @@ "formula": "num_edges" } ], - "doc_path": "" + "doc_path": "rules/minimumvertexcover_maximumindependentset/index.html" }, { - "source": { - "name": "MinimumVertexCover", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 29, + "target": 27, "overhead": [ { - "field": "num_vars", + "field": "num_sets", "formula": "num_vertices" }, { - "field": "num_constraints", + "field": "num_elements", "formula": "num_edges" } ], - "doc_path": "rules/minimumvertexcover_ilp/index.html" + "doc_path": "rules/minimumvertexcover_minimumsetcovering/index.html" }, { - "source": { - "name": "MinimumVertexCover", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 29, + "target": 31, "overhead": [ { "field": "num_vars", @@ -1501,20 +825,8 @@ "doc_path": "rules/minimumvertexcover_qubo/index.html" }, { - "source": { - "name": "QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, - "target": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 31, + "target": 34, "overhead": [ { "field": "num_spins", @@ -1524,20 +836,8 @@ "doc_path": "rules/spinglass_qubo/index.html" }, { - "source": { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "KColoring", - "variant": { - "graph": "SimpleGraph", - "k": "3" - } - }, + "source": 32, + "target": 4, "overhead": [ { "field": "num_vertices", @@ -1551,47 +851,8 @@ "doc_path": "rules/sat_coloring/index.html" }, { - "source": { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - 
"weight": "Unweighted" - } - }, - "target": { - "name": "KSatisfiability", - "variant": { - "k": "3", - "weight": "Unweighted" - } - }, - "overhead": [ - { - "field": "num_clauses", - "formula": "num_clauses + num_literals" - }, - { - "field": "num_vars", - "formula": "num_vars + num_literals" - } - ], - "doc_path": "rules/sat_ksat/index.html" - }, - { - "source": { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "KSatisfiability", - "variant": { - "k": "4", - "weight": "Unweighted" - } - }, + "source": 32, + "target": 8, "overhead": [ { "field": "num_clauses", @@ -1605,47 +866,8 @@ "doc_path": "rules/sat_ksat/index.html" }, { - "source": { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "KSatisfiability", - "variant": { - "k": "5", - "weight": "Unweighted" - } - }, - "overhead": [ - { - "field": "num_clauses", - "formula": "num_clauses + num_literals" - }, - { - "field": "num_vars", - "formula": "num_vars + num_literals" - } - ], - "doc_path": "rules/sat_ksat/index.html" - }, - { - "source": { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaximumIndependentSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 32, + "target": 16, "overhead": [ { "field": "num_vertices", @@ -1659,20 +881,8 @@ "doc_path": "rules/sat_maximumindependentset/index.html" }, { - "source": { - "name": "Satisfiability", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MinimumDominatingSet", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, + "source": 32, + "target": 25, "overhead": [ { "field": "num_vertices", @@ -1686,47 +896,19 @@ "doc_path": "rules/sat_minimumdominatingset/index.html" }, { - "source": { - "name": "SpinGlass", - "variant": { - "graph": 
"GridGraph", - "weight": "f64" - } - }, - "target": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 34, + "target": 31, "overhead": [ { - "field": "num_spins", + "field": "num_vars", "formula": "num_spins" - }, - { - "field": "num_interactions", - "formula": "num_interactions" } ], - "doc_path": "" + "doc_path": "rules/spinglass_qubo/index.html" }, { - "source": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "MaxCut", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, + "source": 35, + "target": 11, "overhead": [ { "field": "num_vertices", @@ -1740,97 +922,8 @@ "doc_path": "rules/spinglass_maxcut/index.html" }, { - "source": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, - "overhead": [ - { - "field": "num_spins", - "formula": "num_spins" - }, - { - "field": "num_interactions", - "formula": "num_interactions" - } - ], - "doc_path": "" - }, - { - "source": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "Unweighted" - } - }, - "target": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "overhead": [ - { - "field": "num_spins", - "formula": "num_spins" - }, - { - "field": "num_interactions", - "formula": "num_interactions" - } - ], - "doc_path": "" - }, - { - "source": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, - "target": { - "name": "QUBO", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, - "overhead": [ - { - "field": "num_vars", - "formula": "num_spins" - } - ], - "doc_path": "rules/spinglass_qubo/index.html" - }, - { - "source": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" 
- } - }, - "target": { - "name": "SpinGlass", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 35, + "target": 34, "overhead": [ { "field": "num_spins", @@ -1844,20 +937,8 @@ "doc_path": "" }, { - "source": { - "name": "TravelingSalesman", - "variant": { - "graph": "SimpleGraph", - "weight": "i32" - } - }, - "target": { - "name": "ILP", - "variant": { - "graph": "SimpleGraph", - "weight": "f64" - } - }, + "source": 37, + "target": 2, "overhead": [ { "field": "num_vars", diff --git a/examples/export_petersen_mapping.rs b/examples/export_petersen_mapping.rs index 62ea97063..4ffe78f80 100644 --- a/examples/export_petersen_mapping.rs +++ b/examples/export_petersen_mapping.rs @@ -23,10 +23,10 @@ //! ``` //! //! ## Outputs -//! - `docs/paper/petersen_source.json` - The original Petersen graph -//! - `docs/paper/petersen_square_weighted.json` - Weighted King's subgraph -//! - `docs/paper/petersen_square_unweighted.json` - Unweighted King's subgraph -//! - `docs/paper/petersen_triangular.json` - Weighted triangular lattice +//! - `docs/paper/static/petersen_source.json` - The original Petersen graph +//! - `docs/paper/static/petersen_square_weighted.json` - Weighted King's subgraph +//! - `docs/paper/static/petersen_square_unweighted.json` - Unweighted King's subgraph +//! - `docs/paper/static/petersen_triangular.json` - Weighted triangular lattice //! //! See docs/paper/reductions.typ for the full reduction specification. 
@@ -119,7 +119,7 @@ fn main() { edges: petersen_edges.clone(), mis: petersen_mis, }; - write_json(&source, Path::new("docs/paper/petersen_source.json")); + write_json(&source, Path::new("docs/paper/static/petersen_source.json")); println!("\n=== Mapping to Grid Graphs ===\n"); @@ -146,7 +146,7 @@ fn main() { ); write_json( &square_weighted, - Path::new("docs/paper/petersen_square_weighted.json"), + Path::new("docs/paper/static/petersen_square_weighted.json"), ); // Map to unweighted King's subgraph (square lattice) @@ -172,7 +172,7 @@ fn main() { ); write_json( &square_unweighted, - Path::new("docs/paper/petersen_square_unweighted.json"), + Path::new("docs/paper/static/petersen_square_unweighted.json"), ); // Map to weighted triangular lattice @@ -198,7 +198,7 @@ fn main() { ); write_json( &triangular_weighted, - Path::new("docs/paper/petersen_triangular.json"), + Path::new("docs/paper/static/petersen_triangular.json"), ); println!("\n=== Summary ===\n"); diff --git a/examples/reduction_maximumsetpacking_to_qubo.rs b/examples/reduction_maximumsetpacking_to_qubo.rs index ea6c80f96..22564e93b 100644 --- a/examples/reduction_maximumsetpacking_to_qubo.rs +++ b/examples/reduction_maximumsetpacking_to_qubo.rs @@ -44,7 +44,7 @@ pub fn run() { vec![1, 3, 5], // S4 (overlaps S0, S1, S2) vec![0, 4, 7], // S5 (overlaps S0, S1, S3) ]; - let sp = MaximumSetPacking::::new(sets.clone()); + let sp = MaximumSetPacking::::new(sets.clone()); // Reduce to QUBO let reduction = ReduceTo::::reduce_to(&sp); diff --git a/problemreductions-macros/src/lib.rs b/problemreductions-macros/src/lib.rs index e1396075c..f4b5b5afe 100644 --- a/problemreductions-macros/src/lib.rs +++ b/problemreductions-macros/src/lib.rs @@ -12,23 +12,16 @@ use syn::{parse_macro_input, GenericArgument, ItemImpl, Path, PathArguments, Typ /// Attribute macro for automatic reduction registration. /// /// Parses a `ReduceTo` impl block and generates the corresponding `inventory::submit!` -/// call. 
Variant fields are derived from `Problem::variant()` when possible. +/// call. Variant fields are derived from `Problem::variant()`. Const generics like `K` +/// are substituted with `usize::MAX` (maps to `"N"` via `const_usize_str`). /// -/// # Variant Derivation -/// -/// - **Types without type generics** (e.g., `KColoring`): calls -/// `Problem::variant()` at runtime. Const generics like `K` are substituted with -/// `usize::MAX` (maps to `"N"` via `const_usize_str`). -/// - **Types with type generics** (e.g., `MaxCut`): falls back to -/// constructing `("graph", ...), ("weight", ...)` from type parameter analysis. +/// **Type generics are not supported** — all `ReduceTo` impls must use concrete types. +/// If you need a reduction for a generic problem, write separate impls for each concrete +/// type combination. /// /// # Attributes /// /// - `overhead = { expr }` — overhead specification (required for non-trivial reductions) -/// - `source_graph = "..."` — override source graph type (fallback path only) -/// - `target_graph = "..."` — override target graph type (fallback path only) -/// - `source_weighted = bool` — override source weight (fallback path only) -/// - `target_weighted = bool` — override target weight (fallback path only) #[proc_macro_attribute] pub fn reduction(attr: TokenStream, item: TokenStream) -> TokenStream { let attrs = parse_macro_input!(attr as ReductionAttrs); @@ -42,44 +35,18 @@ pub fn reduction(attr: TokenStream, item: TokenStream) -> TokenStream { /// Parsed attributes from #[reduction(...)] struct ReductionAttrs { - source_graph: Option, - target_graph: Option, - source_weighted: Option, - target_weighted: Option, overhead: Option, } impl syn::parse::Parse for ReductionAttrs { fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut attrs = ReductionAttrs { - source_graph: None, - target_graph: None, - source_weighted: None, - target_weighted: None, - overhead: None, - }; + let mut attrs = ReductionAttrs { overhead: None 
}; while !input.is_empty() { let ident: syn::Ident = input.parse()?; input.parse::()?; match ident.to_string().as_str() { - "source_graph" => { - let lit: syn::LitStr = input.parse()?; - attrs.source_graph = Some(lit.value()); - } - "target_graph" => { - let lit: syn::LitStr = input.parse()?; - attrs.target_graph = Some(lit.value()); - } - "source_weighted" => { - let lit: syn::LitBool = input.parse()?; - attrs.source_weighted = Some(lit.value()); - } - "target_weighted" => { - let lit: syn::LitBool = input.parse()?; - attrs.target_weighted = Some(lit.value()); - } "overhead" => { let content; syn::braced!(content in input); @@ -198,40 +165,26 @@ fn rewrite_const_generics(ty: &Type, const_generics: &HashSet) -> Type { /// Generate the variant fn body for a type. /// -/// If the type has no type generics: calls `Problem::variant()` with const generic -/// sentinels. Otherwise falls back to manual `("graph", ...), ("weight", ...)` construction. +/// Calls `Problem::variant()` with const generic sentinels. +/// Errors if the type uses any type generics — all `ReduceTo` impls must be concrete. fn make_variant_fn_body( ty: &Type, const_generics: &HashSet, type_generics: &HashSet, - graph_override: Option<&str>, - weighted_override: Option, -) -> TokenStream2 { +) -> syn::Result { if type_uses_type_generics(ty, type_generics) { - // Fallback: construct variant manually from type parameter analysis - let graph = graph_override - .map(|s| s.to_string()) - .or_else(|| extract_graph_type(ty)) - .unwrap_or_else(|| "SimpleGraph".to_string()); - let weight = weighted_override - .map(|w| { - if w { - "i32".to_string() - } else { - "Unweighted".to_string() - } - }) - .unwrap_or_else(|| { - extract_weight_type(ty) - .map(|t| get_weight_name(&t)) - .unwrap_or_else(|| "Unweighted".to_string()) - }); - quote! 
{ vec![("graph", #graph), ("weight", #weight)] } - } else { - // Call Problem::variant() with const generic sentinels - let rewritten = rewrite_const_generics(ty, const_generics); - quote! { <#rewritten as crate::traits::Problem>::variant() } + let used: Vec<_> = type_generics.iter().cloned().collect(); + return Err(syn::Error::new_spanned( + ty, + format!( + "#[reduction] does not support type generics (found: {}). \ + Make the ReduceTo impl concrete by specifying explicit types.", + used.join(", ") + ), + )); } + let rewritten = rewrite_const_generics(ty, const_generics); + Ok(quote! { <#rewritten as crate::traits::Problem>::variant() }) } /// Generate the reduction entry code @@ -263,20 +216,8 @@ fn generate_reduction_entry( let type_generics = collect_type_generic_names(&impl_block.generics); // Generate variant fn bodies - let source_variant_body = make_variant_fn_body( - source_type, - &const_generics, - &type_generics, - attrs.source_graph.as_deref(), - attrs.source_weighted, - ); - let target_variant_body = make_variant_fn_body( - &target_type, - &const_generics, - &type_generics, - attrs.target_graph.as_deref(), - attrs.target_weighted, - ); + let source_variant_body = make_variant_fn_body(source_type, &const_generics, &type_generics)?; + let target_variant_body = make_variant_fn_body(&target_type, &const_generics, &type_generics)?; // Generate overhead or use default let overhead = attrs.overhead.clone().unwrap_or_else(|| { @@ -326,124 +267,3 @@ fn extract_target_from_trait(path: &Path) -> syn::Result { "Expected ReduceTo with type parameter", )) } - -// --- Fallback helpers for types with type generics --- - -/// Extract graph type from type parameters (first parameter in `Problem` order) -fn extract_graph_type(ty: &Type) -> Option { - match ty { - Type::Path(type_path) => { - let segment = type_path.path.segments.last()?; - if let PathArguments::AngleBracketed(args) = &segment.arguments { - for arg in args.args.iter() { - if let 
GenericArgument::Type(Type::Path(inner_path)) = arg { - let name = inner_path - .path - .segments - .last() - .map(|s| s.ident.to_string())?; - // Skip generic params (single uppercase letter) - if name.len() == 1 - && name - .chars() - .next() - .map(|c| c.is_ascii_uppercase()) - .unwrap_or(false) - { - return None; - } - // Skip known weight types - if is_weight_type(&name) { - return None; - } - return Some(name); - } - } - } - None - } - _ => None, - } -} - -/// Check if a type name is a known weight type -fn is_weight_type(name: &str) -> bool { - ["i32", "i64", "f32", "f64", "Unweighted"].contains(&name) -} - -/// Extract weight type from type parameters. -fn extract_weight_type(ty: &Type) -> Option { - match ty { - Type::Path(type_path) => { - let segment = type_path.path.segments.last()?; - if let PathArguments::AngleBracketed(args) = &segment.arguments { - let type_args: Vec<_> = args - .args - .iter() - .filter_map(|arg| { - if let GenericArgument::Type(t) = arg { - Some(t) - } else { - None - } - }) - .collect(); - - match type_args.len() { - 1 => { - let first = type_args[0]; - if let Type::Path(inner_path) = first { - let name = inner_path.path.segments.last()?.ident.to_string(); - if is_weight_type(&name) { - return Some(first.clone()); - } - } - None - } - 2 => { - let second = type_args[1]; - if let Type::Path(inner_path) = second { - let name = inner_path.path.segments.last()?.ident.to_string(); - if is_weight_type(&name) { - return Some(second.clone()); - } - } - None - } - _ => None, - } - } else { - None - } - } - _ => None, - } -} - -/// Get weight type name as a string for the variant. -/// Single-letter uppercase names are treated as generic type parameters -/// and default to "Unweighted". 
-fn get_weight_name(ty: &Type) -> String { - match ty { - Type::Path(type_path) => { - let name = type_path - .path - .segments - .last() - .map(|s| s.ident.to_string()) - .unwrap_or_else(|| "Unweighted".to_string()); - if name.len() == 1 - && name - .chars() - .next() - .map(|c| c.is_ascii_uppercase()) - .unwrap_or(false) - { - "Unweighted".to_string() - } else { - name - } - } - _ => "Unweighted".to_string(), - } -} diff --git a/src/export.rs b/src/export.rs index c246307bc..fa0232197 100644 --- a/src/export.rs +++ b/src/export.rs @@ -19,7 +19,7 @@ use std::path::Path; pub struct ProblemSide { /// Problem name matching `Problem::NAME` (e.g., `"MaximumIndependentSet"`). pub problem: String, - /// Variant attributes (e.g., `{"graph": "SimpleGraph", "weight": "Unweighted"}`). + /// Variant attributes (e.g., `{"graph": "SimpleGraph", "weight": "One"}`). pub variant: HashMap, /// Problem-specific instance data (edges, matrix, clauses, etc.). pub instance: serde_json::Value, diff --git a/src/graph_types.rs b/src/graph_types.rs index 2a6f58bd4..6334403ea 100644 --- a/src/graph_types.rs +++ b/src/graph_types.rs @@ -40,6 +40,11 @@ impl GraphMarker for BipartiteGraph {} pub struct GridGraph; impl GraphMarker for GridGraph {} +/// Triangular lattice graph - a unit disk graph on a triangular grid. +#[derive(Debug, Clone, Copy, Default)] +pub struct Triangular; +impl GraphMarker for Triangular {} + /// Hypergraph - most general graph type. Edges can connect any number of vertices. #[derive(Debug, Clone, Copy, Default)] pub struct HyperGraph; @@ -78,6 +83,9 @@ macro_rules! 
declare_graph_subtype { declare_graph_subtype!(GridGraph => UnitDiskGraph); declare_graph_subtype!(GridGraph => SimpleGraph); declare_graph_subtype!(GridGraph => HyperGraph); +declare_graph_subtype!(Triangular => UnitDiskGraph); +declare_graph_subtype!(Triangular => SimpleGraph); +declare_graph_subtype!(Triangular => HyperGraph); declare_graph_subtype!(UnitDiskGraph => SimpleGraph); declare_graph_subtype!(UnitDiskGraph => HyperGraph); declare_graph_subtype!(PlanarGraph => SimpleGraph); @@ -108,9 +116,9 @@ macro_rules! declare_weight_subtype { } // Weight type hierarchy (with transitive relationships): -// Unweighted (most restrictive) => i32 => f64 (most general) -declare_weight_subtype!("Unweighted" => "i32"); -declare_weight_subtype!("Unweighted" => "f64"); // transitive +// One (most restrictive) => i32 => f64 (most general) +declare_weight_subtype!("One" => "i32"); +declare_weight_subtype!("One" => "f64"); // transitive declare_weight_subtype!("i32" => "f64"); #[cfg(test)] diff --git a/src/lib.rs b/src/lib.rs index f71bab44c..6c844fe99 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -85,8 +85,8 @@ pub mod prelude { }; pub use crate::error::{ProblemError, Result}; pub use crate::models::graph::{ - TravelingSalesman, KColoring, MaxCut, MaximalIS, MaximumClique, MaximumIndependentSet, - MaximumMatching, MinimumDominatingSet, MinimumVertexCover, + KColoring, MaxCut, MaximalIS, MaximumClique, MaximumIndependentSet, MaximumMatching, + MinimumDominatingSet, MinimumVertexCover, TravelingSalesman, }; pub use crate::models::optimization::{ Comparison, LinearConstraint, ObjectiveSense, SpinGlass, VarBounds, ILP, QUBO, @@ -97,9 +97,9 @@ pub mod prelude { pub use crate::registry::{ComplexityClass, ProblemInfo, ProblemMetadata}; pub use crate::rules::{ReduceTo, ReductionResult}; pub use crate::solvers::{BruteForce, Solver}; - pub use crate::traits::{OptimizationProblem, Problem}; + pub use crate::traits::{OptimizationProblem, Problem, SatisfactionProblem}; pub use 
crate::types::{ - Direction, NumericSize, NumericWeight, ProblemSize, SolutionSize, Unweighted, Weights, + Direction, NumericSize, One, ProblemSize, SolutionSize, Unweighted, WeightElement, }; } @@ -107,8 +107,10 @@ pub mod prelude { pub use error::{ProblemError, Result}; pub use registry::{ComplexityClass, ProblemInfo}; pub use solvers::{BruteForce, Solver}; -pub use traits::{OptimizationProblem, Problem}; -pub use types::{Direction, NumericSize, ProblemSize, SolutionSize, Unweighted, Weights}; +pub use traits::{OptimizationProblem, Problem, SatisfactionProblem}; +pub use types::{ + Direction, NumericSize, One, ProblemSize, SolutionSize, Unweighted, WeightElement, +}; // Re-export proc macro for reduction registration pub use problemreductions_macros::reduction; diff --git a/src/models/graph/kcoloring.rs b/src/models/graph/kcoloring.rs index 8550b442a..aae55b488 100644 --- a/src/models/graph/kcoloring.rs +++ b/src/models/graph/kcoloring.rs @@ -5,12 +5,13 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; -use crate::traits::Problem; +use crate::traits::{Problem, SatisfactionProblem}; use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "KColoring", + module_path: module_path!(), description: "Find valid k-coloring of a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -118,7 +119,7 @@ where fn variant() -> Vec<(&'static str, &'static str)> { vec![ ("k", crate::variant::const_usize_str::()), - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ] } @@ -131,6 +132,8 @@ where } } +impl SatisfactionProblem for KColoring {} + /// Check if a coloring is valid for a graph. 
pub fn is_valid_coloring( num_vertices: usize, diff --git a/src/models/graph/max_cut.rs b/src/models/graph/max_cut.rs index f886d4735..40328cd12 100644 --- a/src/models/graph/max_cut.rs +++ b/src/models/graph/max_cut.rs @@ -6,12 +6,14 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "MaxCut", + module_path: module_path!(), description: "Find maximum weight cut in a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The graph with edge weights" }, @@ -192,20 +194,14 @@ impl MaxCut { impl Problem for MaxCut where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MaxCut"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -214,7 +210,7 @@ where vec![2; self.graph.num_vertices()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { // All cuts are valid, so always return Valid let partition: Vec = config.iter().map(|&c| c != 0).collect(); SolutionSize::Valid(cut_size(&self.graph, &self.edge_weights, &partition)) @@ -224,15 +220,9 @@ where impl OptimizationProblem for MaxCut where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Maximize @@ -245,15 +235,15 @@ where /// * 
`graph` - The graph structure /// * `edge_weights` - Weights for each edge (same order as `graph.edges()`) /// * `partition` - Boolean slice indicating which set each vertex belongs to -pub fn cut_size(graph: &G, edge_weights: &[W], partition: &[bool]) -> W +pub fn cut_size(graph: &G, edge_weights: &[W], partition: &[bool]) -> W::Sum where G: Graph, - W: Clone + num_traits::Zero + std::ops::AddAssign, + W: WeightElement, { - let mut total = W::zero(); + let mut total = W::Sum::zero(); for ((u, v), weight) in graph.edges().iter().zip(edge_weights.iter()) { if *u < partition.len() && *v < partition.len() && partition[*u] != partition[*v] { - total += weight.clone(); + total += weight.to_sum(); } } total diff --git a/src/models/graph/maximal_is.rs b/src/models/graph/maximal_is.rs index 8160020fa..fe05b27f4 100644 --- a/src/models/graph/maximal_is.rs +++ b/src/models/graph/maximal_is.rs @@ -6,12 +6,14 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; inventory::submit! 
{ ProblemSchemaEntry { name: "MaximalIS", + module_path: module_path!(), description: "Find maximum weight maximal independent set", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -183,20 +185,14 @@ impl MaximalIS { impl Problem for MaximalIS where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MaximalIS"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -205,14 +201,14 @@ where vec![2; self.graph.num_vertices()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !self.is_maximal(config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (i, &selected) in config.iter().enumerate() { if selected == 1 { - total += self.weights[i].clone(); + total += self.weights[i].to_sum(); } } SolutionSize::Valid(total) @@ -222,15 +218,9 @@ where impl OptimizationProblem for MaximalIS where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Maximize diff --git a/src/models/graph/maximum_clique.rs b/src/models/graph/maximum_clique.rs index 213ef7c83..4b886765c 100644 --- a/src/models/graph/maximum_clique.rs +++ b/src/models/graph/maximum_clique.rs @@ -6,12 +6,14 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, 
WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "MaximumClique", + module_path: module_path!(), description: "Find maximum weight clique in a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -30,7 +32,7 @@ inventory::submit! { /// # Type Parameters /// /// * `G` - The graph type (e.g., `SimpleGraph`, `GridGraph`, `UnitDiskGraph`) -/// * `W` - The weight type (e.g., `i32`, `f64`, `Unweighted`) +/// * `W` - The weight type (e.g., `i32`, `f64`, `One`) /// /// # Example /// @@ -161,20 +163,14 @@ impl MaximumClique { impl Problem for MaximumClique where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MaximumClique"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -183,14 +179,14 @@ where vec![2; self.graph.num_vertices()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !is_clique_config(&self.graph, config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (i, &selected) in config.iter().enumerate() { if selected == 1 { - total += self.weights[i].clone(); + total += self.weights[i].to_sum(); } } SolutionSize::Valid(total) @@ -200,15 +196,9 @@ where impl OptimizationProblem for MaximumClique where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Maximize diff --git a/src/models/graph/maximum_independent_set.rs 
b/src/models/graph/maximum_independent_set.rs index efd19550d..20c75736f 100644 --- a/src/models/graph/maximum_independent_set.rs +++ b/src/models/graph/maximum_independent_set.rs @@ -6,12 +6,14 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "MaximumIndependentSet", + module_path: module_path!(), description: "Find maximum weight independent set in a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -30,7 +32,7 @@ inventory::submit! { /// # Type Parameters /// /// * `G` - The graph type (e.g., `SimpleGraph`, `GridGraph`, `UnitDiskGraph`) -/// * `W` - The weight type (e.g., `i32`, `f64`, `Unweighted`) +/// * `W` - The weight type (e.g., `i32`, `f64`, `One`) /// /// # Example /// @@ -161,20 +163,14 @@ impl MaximumIndependentSet { impl Problem for MaximumIndependentSet where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MaximumIndependentSet"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -183,14 +179,14 @@ where vec![2; self.graph.num_vertices()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !is_independent_set_config(&self.graph, config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (i, &selected) in config.iter().enumerate() { if selected == 
1 { - total += self.weights[i].clone(); + total += self.weights[i].to_sum(); } } SolutionSize::Valid(total) @@ -200,15 +196,9 @@ where impl OptimizationProblem for MaximumIndependentSet where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Maximize diff --git a/src/models/graph/maximum_matching.rs b/src/models/graph/maximum_matching.rs index d3d72edb4..b471114e0 100644 --- a/src/models/graph/maximum_matching.rs +++ b/src/models/graph/maximum_matching.rs @@ -6,13 +6,15 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; use std::collections::HashMap; inventory::submit! { ProblemSchemaEntry { name: "MaximumMatching", + module_path: module_path!(), description: "Find maximum weight matching in a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -29,7 +31,7 @@ inventory::submit! 
{ /// # Type Parameters /// /// * `G` - The graph type (e.g., `SimpleGraph`, `GridGraph`, `UnitDiskGraph`) -/// * `W` - The weight type (e.g., `i32`, `f64`, `Unweighted`) +/// * `W` - The weight type (e.g., `i32`, `f64`, `One`) /// /// # Example /// @@ -204,20 +206,14 @@ impl MaximumMatching { impl Problem for MaximumMatching where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MaximumMatching"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -226,15 +222,15 @@ where vec![2; self.graph.num_edges()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !self.is_valid_matching(config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (idx, &selected) in config.iter().enumerate() { if selected == 1 { if let Some(w) = self.edge_weights.get(idx) { - total += w.clone(); + total += w.to_sum(); } } } @@ -245,15 +241,9 @@ where impl OptimizationProblem for MaximumMatching where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Maximize diff --git a/src/models/graph/minimum_dominating_set.rs b/src/models/graph/minimum_dominating_set.rs index eb4a2e9ae..824328041 100644 --- a/src/models/graph/minimum_dominating_set.rs +++ b/src/models/graph/minimum_dominating_set.rs @@ -6,13 +6,15 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, 
SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; use std::collections::HashSet; inventory::submit! { ProblemSchemaEntry { name: "MinimumDominatingSet", + module_path: module_path!(), description: "Find minimum weight dominating set in a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -177,20 +179,14 @@ impl MinimumDominatingSet { impl Problem for MinimumDominatingSet where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MinimumDominatingSet"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -199,14 +195,14 @@ where vec![2; self.graph.num_vertices()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !self.is_dominating(config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (i, &selected) in config.iter().enumerate() { if selected == 1 { - total += self.weights[i].clone(); + total += self.weights[i].to_sum(); } } SolutionSize::Valid(total) @@ -216,15 +212,9 @@ where impl OptimizationProblem for MinimumDominatingSet where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Minimize diff --git a/src/models/graph/minimum_vertex_cover.rs b/src/models/graph/minimum_vertex_cover.rs index f126da6df..5773a0230 100644 --- a/src/models/graph/minimum_vertex_cover.rs +++ 
b/src/models/graph/minimum_vertex_cover.rs @@ -6,12 +6,14 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "MinimumVertexCover", + module_path: module_path!(), description: "Find minimum weight vertex cover in a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -144,20 +146,14 @@ impl MinimumVertexCover { impl Problem for MinimumVertexCover where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MinimumVertexCover"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -166,14 +162,14 @@ where vec![2; self.graph.num_vertices()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !is_vertex_cover_config(&self.graph, config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (i, &selected) in config.iter().enumerate() { if selected == 1 { - total += self.weights[i].clone(); + total += self.weights[i].to_sum(); } } SolutionSize::Valid(total) @@ -183,15 +179,9 @@ where impl OptimizationProblem for MinimumVertexCover where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Minimize diff 
--git a/src/models/graph/mod.rs b/src/models/graph/mod.rs index 37054fffe..7dd9d8052 100644 --- a/src/models/graph/mod.rs +++ b/src/models/graph/mod.rs @@ -11,7 +11,6 @@ //! - [`MaximumMatching`]: Maximum weight matching //! - [`TravelingSalesman`]: Traveling Salesman (minimum weight Hamiltonian cycle) -mod traveling_salesman; mod kcoloring; mod max_cut; mod maximal_is; @@ -20,8 +19,8 @@ mod maximum_independent_set; mod maximum_matching; mod minimum_dominating_set; mod minimum_vertex_cover; +mod traveling_salesman; -pub use traveling_salesman::{is_hamiltonian_cycle, TravelingSalesman}; pub use kcoloring::{is_valid_coloring, KColoring}; pub use max_cut::{cut_size, MaxCut}; pub use maximal_is::{is_maximal_independent_set, MaximalIS}; @@ -30,3 +29,4 @@ pub use maximum_independent_set::{is_independent_set, MaximumIndependentSet}; pub use maximum_matching::{is_matching, MaximumMatching}; pub use minimum_dominating_set::{is_dominating_set, MinimumDominatingSet}; pub use minimum_vertex_cover::{is_vertex_cover, MinimumVertexCover}; +pub use traveling_salesman::{is_hamiltonian_cycle, TravelingSalesman}; diff --git a/src/models/graph/traveling_salesman.rs b/src/models/graph/traveling_salesman.rs index 40ad11e7e..494bde838 100644 --- a/src/models/graph/traveling_salesman.rs +++ b/src/models/graph/traveling_salesman.rs @@ -6,12 +6,14 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; inventory::submit! 
{ ProblemSchemaEntry { name: "TravelingSalesman", + module_path: module_path!(), description: "Find minimum weight Hamiltonian cycle in a graph (Traveling Salesman Problem)", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The underlying graph G=(V,E)" }, @@ -164,20 +166,14 @@ impl TravelingSalesman { impl Problem for TravelingSalesman where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "TravelingSalesman"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -186,15 +182,15 @@ where vec![2; self.graph.num_edges()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !self.is_valid_hamiltonian_cycle(config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (idx, &selected) in config.iter().enumerate() { if selected == 1 { if let Some(w) = self.edge_weights.get(idx) { - total += w.clone(); + total += w.to_sum(); } } } @@ -205,15 +201,9 @@ where impl OptimizationProblem for TravelingSalesman where G: Graph, - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Minimize diff --git a/src/models/mod.rs b/src/models/mod.rs index 5691e145b..55e532d28 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -18,8 +18,8 @@ pub mod specialized; // Re-export commonly used types pub use graph::{ - TravelingSalesman, KColoring, MaxCut, MaximalIS, MaximumIndependentSet, MaximumMatching, - MinimumDominatingSet, MinimumVertexCover, + KColoring, MaxCut, 
MaximalIS, MaximumIndependentSet, MaximumMatching, MinimumDominatingSet, + MinimumVertexCover, TravelingSalesman, }; pub use optimization::{SpinGlass, QUBO}; pub use satisfiability::{CNFClause, Satisfiability}; diff --git a/src/models/optimization/ilp.rs b/src/models/optimization/ilp.rs index 80de1ea7b..14e708e01 100644 --- a/src/models/optimization/ilp.rs +++ b/src/models/optimization/ilp.rs @@ -11,6 +11,7 @@ use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "ILP", + module_path: module_path!(), description: "Optimize linear objective subject to linear constraints", fields: &[ FieldInfo { name: "num_vars", type_name: "usize", description: "Number of integer variables" }, @@ -350,7 +351,7 @@ impl Problem for ILP { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "f64")] + vec![] } } diff --git a/src/models/optimization/qubo.rs b/src/models/optimization/qubo.rs index 5b20bcbef..803248f7b 100644 --- a/src/models/optimization/qubo.rs +++ b/src/models/optimization/qubo.rs @@ -4,12 +4,13 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; use serde::{Deserialize, Serialize}; inventory::submit! 
{ ProblemSchemaEntry { name: "QUBO", + module_path: module_path!(), description: "Minimize quadratic unconstrained binary objective", fields: &[ FieldInfo { name: "num_vars", type_name: "usize", description: "Number of binary variables" }, @@ -144,48 +145,41 @@ where impl Problem for QUBO where - W: Clone - + Default + W: WeightElement + PartialOrd + num_traits::Num + num_traits::Zero + num_traits::Bounded + std::ops::AddAssign - + std::ops::Mul - + 'static, + + std::ops::Mul, { const NAME: &'static str = "QUBO"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn dims(&self) -> Vec { vec![2; self.num_vars] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { - SolutionSize::Valid(self.evaluate(config)) + fn evaluate(&self, config: &[usize]) -> SolutionSize { + SolutionSize::Valid(self.evaluate(config).to_sum()) } fn variant() -> Vec<(&'static str, &'static str)> { - vec![ - ("graph", "SimpleGraph"), - ("weight", crate::variant::short_type_name::()), - ] + vec![("weight", crate::variant::short_type_name::())] } } impl OptimizationProblem for QUBO where - W: Clone - + Default + W: WeightElement + PartialOrd + num_traits::Num + num_traits::Zero + num_traits::Bounded + std::ops::AddAssign - + std::ops::Mul - + 'static, + + std::ops::Mul, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Minimize diff --git a/src/models/optimization/spin_glass.rs b/src/models/optimization/spin_glass.rs index 2c771bc73..c717b44c6 100644 --- a/src/models/optimization/spin_glass.rs +++ b/src/models/optimization/spin_glass.rs @@ -5,12 +5,13 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::topology::{Graph, SimpleGraph}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; use serde::{Deserialize, Serialize}; inventory::submit! 
{ ProblemSchemaEntry { name: "SpinGlass", + module_path: module_path!(), description: "Minimize Ising Hamiltonian on a graph", fields: &[ FieldInfo { name: "graph", type_name: "G", description: "The interaction graph" }, @@ -197,32 +198,30 @@ where impl Problem for SpinGlass where G: Graph, - W: Clone - + Default + W: WeightElement + PartialOrd + num_traits::Num + num_traits::Zero + num_traits::Bounded + std::ops::AddAssign + std::ops::Mul - + From - + 'static, + + From, { const NAME: &'static str = "SpinGlass"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn dims(&self) -> Vec { vec![2; self.graph.num_vertices()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { let spins = Self::config_to_spins(config); - SolutionSize::Valid(self.compute_energy(&spins)) + SolutionSize::Valid(self.compute_energy(&spins).to_sum()) } fn variant() -> Vec<(&'static str, &'static str)> { vec![ - ("graph", crate::variant::short_type_name::()), + ("graph", G::NAME), ("weight", crate::variant::short_type_name::()), ] } @@ -231,18 +230,16 @@ where impl OptimizationProblem for SpinGlass where G: Graph, - W: Clone - + Default + W: WeightElement + PartialOrd + num_traits::Num + num_traits::Zero + num_traits::Bounded + std::ops::AddAssign + std::ops::Mul - + From - + 'static, + + From, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Minimize diff --git a/src/models/satisfiability/ksat.rs b/src/models/satisfiability/ksat.rs index 26ce049fd..1ffa921e9 100644 --- a/src/models/satisfiability/ksat.rs +++ b/src/models/satisfiability/ksat.rs @@ -6,7 +6,7 @@ //! MaxKSatisfiability type (if available). use crate::registry::{FieldInfo, ProblemSchemaEntry}; -use crate::traits::Problem; +use crate::traits::{Problem, SatisfactionProblem}; use serde::{Deserialize, Serialize}; use super::CNFClause; @@ -14,6 +14,7 @@ use super::CNFClause; inventory::submit! 
{ ProblemSchemaEntry { name: "KSatisfiability", + module_path: module_path!(), description: "SAT with exactly k literals per clause", fields: &[ FieldInfo { name: "num_vars", type_name: "usize", description: "Number of Boolean variables" }, @@ -150,13 +151,12 @@ impl Problem for KSatisfiability { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![ - ("k", crate::variant::const_usize_str::()), - ("weight", "Unweighted"), - ] + vec![("k", crate::variant::const_usize_str::())] } } +impl SatisfactionProblem for KSatisfiability {} + #[cfg(test)] #[path = "../../unit_tests/models/satisfiability/ksat.rs"] mod tests; diff --git a/src/models/satisfiability/sat.rs b/src/models/satisfiability/sat.rs index a8eae3abd..b27b2bf4a 100644 --- a/src/models/satisfiability/sat.rs +++ b/src/models/satisfiability/sat.rs @@ -6,12 +6,13 @@ //! the separate MaxSatisfiability type (if available). use crate::registry::{FieldInfo, ProblemSchemaEntry}; -use crate::traits::Problem; +use crate::traits::{Problem, SatisfactionProblem}; use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "Satisfiability", + module_path: module_path!(), description: "Find satisfying assignment for CNF formula", fields: &[ FieldInfo { name: "num_vars", type_name: "usize", description: "Number of Boolean variables" }, @@ -181,10 +182,12 @@ impl Problem for Satisfiability { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "Unweighted")] + vec![] } } +impl SatisfactionProblem for Satisfiability {} + /// Check if an assignment satisfies a SAT formula. 
/// /// # Arguments diff --git a/src/models/set/maximum_set_packing.rs b/src/models/set/maximum_set_packing.rs index 0ccfa0dec..9ace61a7b 100644 --- a/src/models/set/maximum_set_packing.rs +++ b/src/models/set/maximum_set_packing.rs @@ -5,13 +5,15 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; use std::collections::HashSet; inventory::submit! { ProblemSchemaEntry { name: "MaximumSetPacking", + module_path: module_path!(), description: "Find maximum weight collection of disjoint sets", fields: &[ FieldInfo { name: "sets", type_name: "Vec>", description: "Collection of sets over a universe" }, @@ -119,53 +121,38 @@ impl MaximumSetPacking { impl Problem for MaximumSetPacking where - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MaximumSetPacking"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn dims(&self) -> Vec { vec![2; self.sets.len()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { if !is_valid_packing(&self.sets, config) { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (i, &selected) in config.iter().enumerate() { if selected == 1 { - total += self.weights[i].clone(); + total += self.weights[i].to_sum(); } } SolutionSize::Valid(total) } fn variant() -> Vec<(&'static str, &'static str)> { - vec![ - ("graph", "SimpleGraph"), - ("weight", crate::variant::short_type_name::()), - ] + vec![("weight", crate::variant::short_type_name::())] } } impl OptimizationProblem for MaximumSetPacking where - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - 
+ 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Maximize diff --git a/src/models/set/minimum_set_covering.rs b/src/models/set/minimum_set_covering.rs index 067faeedc..01d4fac2d 100644 --- a/src/models/set/minimum_set_covering.rs +++ b/src/models/set/minimum_set_covering.rs @@ -5,13 +5,15 @@ use crate::registry::{FieldInfo, ProblemSchemaEntry}; use crate::traits::{OptimizationProblem, Problem}; -use crate::types::{Direction, SolutionSize}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use num_traits::Zero; use serde::{Deserialize, Serialize}; use std::collections::HashSet; inventory::submit! { ProblemSchemaEntry { name: "MinimumSetCovering", + module_path: module_path!(), description: "Find minimum weight collection covering the universe", fields: &[ FieldInfo { name: "universe_size", type_name: "usize", description: "Size of the universe U" }, @@ -129,56 +131,41 @@ impl MinimumSetCovering { impl Problem for MinimumSetCovering where - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { const NAME: &'static str = "MinimumSetCovering"; - type Metric = SolutionSize; + type Metric = SolutionSize; fn dims(&self) -> Vec { vec![2; self.sets.len()] } - fn evaluate(&self, config: &[usize]) -> SolutionSize { + fn evaluate(&self, config: &[usize]) -> SolutionSize { let covered = self.covered_elements(config); let is_valid = covered.len() == self.universe_size && (0..self.universe_size).all(|e| covered.contains(&e)); if !is_valid { return SolutionSize::Invalid; } - let mut total = W::zero(); + let mut total = W::Sum::zero(); for (i, &selected) in config.iter().enumerate() { if selected == 1 { - total += self.weights[i].clone(); + total += self.weights[i].to_sum(); } } SolutionSize::Valid(total) } fn variant() -> Vec<(&'static str, &'static str)> { - vec![ - ("graph", "SimpleGraph"), - ("weight", 
crate::variant::short_type_name::()), - ] + vec![("weight", crate::variant::short_type_name::())] } } impl OptimizationProblem for MinimumSetCovering where - W: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static, + W: WeightElement, { - type Value = W; + type Value = W::Sum; fn direction(&self) -> Direction { Direction::Minimize diff --git a/src/models/specialized/biclique_cover.rs b/src/models/specialized/biclique_cover.rs index 3c3053795..4f9492dd4 100644 --- a/src/models/specialized/biclique_cover.rs +++ b/src/models/specialized/biclique_cover.rs @@ -12,6 +12,7 @@ use std::collections::HashSet; inventory::submit! { ProblemSchemaEntry { name: "BicliqueCover", + module_path: module_path!(), description: "Cover bipartite edges with k bicliques", fields: &[ FieldInfo { name: "left_size", type_name: "usize", description: "Vertices in left partition" }, @@ -225,7 +226,7 @@ impl Problem for BicliqueCover { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "i32")] + vec![] } } diff --git a/src/models/specialized/bmf.rs b/src/models/specialized/bmf.rs index 6eac46ad5..e546ac4c4 100644 --- a/src/models/specialized/bmf.rs +++ b/src/models/specialized/bmf.rs @@ -12,6 +12,7 @@ use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "BMF", + module_path: module_path!(), description: "Boolean matrix factorization", fields: &[ FieldInfo { name: "matrix", type_name: "Vec>", description: "Target boolean matrix A" }, @@ -205,7 +206,7 @@ impl Problem for BMF { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "i32")] + vec![] } } diff --git a/src/models/specialized/circuit.rs b/src/models/specialized/circuit.rs index f07869126..ddc1d5dd1 100644 --- a/src/models/specialized/circuit.rs +++ b/src/models/specialized/circuit.rs @@ -4,13 +4,14 @@ //! 
The goal is to find variable assignments that satisfy the circuit constraints. use crate::registry::{FieldInfo, ProblemSchemaEntry}; -use crate::traits::Problem; +use crate::traits::{Problem, SatisfactionProblem}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; inventory::submit! { ProblemSchemaEntry { name: "CircuitSAT", + module_path: module_path!(), description: "Find satisfying input to a boolean circuit", fields: &[ FieldInfo { name: "circuit", type_name: "Circuit", description: "The boolean circuit" }, @@ -278,10 +279,12 @@ impl Problem for CircuitSAT { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph")] + vec![] } } +impl SatisfactionProblem for CircuitSAT {} + #[cfg(test)] #[path = "../../unit_tests/models/specialized/circuit.rs"] mod tests; diff --git a/src/models/specialized/factoring.rs b/src/models/specialized/factoring.rs index bb831aa0c..490f30257 100644 --- a/src/models/specialized/factoring.rs +++ b/src/models/specialized/factoring.rs @@ -11,6 +11,7 @@ use serde::{Deserialize, Serialize}; inventory::submit! { ProblemSchemaEntry { name: "Factoring", + module_path: module_path!(), description: "Factor a composite integer into two factors", fields: &[ FieldInfo { name: "m", type_name: "usize", description: "Bits for first factor" }, @@ -133,7 +134,7 @@ impl Problem for Factoring { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "i32")] + vec![] } } diff --git a/src/models/specialized/paintshop.rs b/src/models/specialized/paintshop.rs index 7a518964e..84c104c24 100644 --- a/src/models/specialized/paintshop.rs +++ b/src/models/specialized/paintshop.rs @@ -14,6 +14,7 @@ use std::collections::{HashMap, HashSet}; inventory::submit! 
{ ProblemSchemaEntry { name: "PaintShop", + module_path: module_path!(), description: "Minimize color changes in paint shop sequence", fields: &[ FieldInfo { name: "sequence_indices", type_name: "Vec", description: "Car sequence as indices" }, @@ -173,7 +174,7 @@ impl Problem for PaintShop { } fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "i32")] + vec![] } } diff --git a/src/registry/schema.rs b/src/registry/schema.rs index 3af6ea57d..4a362ec1e 100644 --- a/src/registry/schema.rs +++ b/src/registry/schema.rs @@ -7,6 +7,8 @@ use serde::Serialize; pub struct ProblemSchemaEntry { /// Problem name (e.g., "MaximumIndependentSet"). pub name: &'static str, + /// Module path from `module_path!()` (e.g., "problemreductions::models::graph::maximum_independent_set"). + pub module_path: &'static str, /// Human-readable description. pub description: &'static str, /// Struct fields. diff --git a/src/rules/coloring_ilp.rs b/src/rules/coloring_ilp.rs index aba47033f..f35d539d1 100644 --- a/src/rules/coloring_ilp.rs +++ b/src/rules/coloring_ilp.rs @@ -72,11 +72,8 @@ where ]) } )] -impl ReduceTo for KColoring -where - G: Graph, -{ - type Result = ReductionKColoringToILP; +impl ReduceTo for KColoring { + type Result = ReductionKColoringToILP; fn reduce_to(&self) -> Self::Result { let num_vertices = self.num_vertices(); diff --git a/src/rules/graph.rs b/src/rules/graph.rs index fe630ee61..6b4649a3a 100644 --- a/src/rules/graph.rs +++ b/src/rules/graph.rs @@ -10,24 +10,16 @@ use crate::graph_types::{GraphSubtypeEntry, WeightSubtypeEntry}; use crate::rules::cost::PathCostFn; -use crate::rules::registry::{ConcreteVariantEntry, ReductionEntry, ReductionOverhead}; +use crate::rules::registry::{ReductionEntry, ReductionOverhead}; use crate::types::ProblemSize; use ordered_float::OrderedFloat; use petgraph::algo::all_simple_paths; use petgraph::graph::{DiGraph, NodeIndex}; use petgraph::visit::EdgeRef; use serde::Serialize; -use 
std::any::TypeId; use std::cmp::Reverse; use std::collections::{BinaryHeap, HashMap, HashSet}; -// Register concrete variants for problems that support non-SimpleGraph graph types. -// These generate additional nodes in the JSON export. -inventory::submit! { ConcreteVariantEntry { name: "MaximumIndependentSet", variant_fn: || vec![("graph", "GridGraph"), ("weight", "Unweighted")] } } -inventory::submit! { ConcreteVariantEntry { name: "MaximumIndependentSet", variant_fn: || vec![("graph", "UnitDiskGraph"), ("weight", "Unweighted")] } } -inventory::submit! { ConcreteVariantEntry { name: "MaxCut", variant_fn: || vec![("graph", "GridGraph"), ("weight", "Unweighted")] } } -inventory::submit! { ConcreteVariantEntry { name: "SpinGlass", variant_fn: || vec![("graph", "GridGraph"), ("weight", "f64")] } } - /// JSON-serializable representation of the reduction graph. #[derive(Debug, Clone, Serialize)] pub struct ReductionGraphJson { @@ -37,6 +29,18 @@ pub struct ReductionGraphJson { pub edges: Vec, } +impl ReductionGraphJson { + /// Get the source node of an edge. + pub fn source_node(&self, edge: &EdgeJson) -> &NodeJson { + &self.nodes[edge.source] + } + + /// Get the target node of an edge. + pub fn target_node(&self, edge: &EdgeJson) -> &NodeJson { + &self.nodes[edge.target] + } +} + /// A node in the reduction graph JSON. #[derive(Debug, Clone, Serialize)] pub struct NodeJson { @@ -50,13 +54,11 @@ pub struct NodeJson { pub doc_path: String, } -/// Reference to a problem variant in an edge. -#[derive(Debug, Clone, Serialize, PartialEq, Eq, Hash)] -pub struct VariantRef { - /// Base problem name. - pub name: String, - /// Variant attributes as key-value pairs. - pub variant: std::collections::BTreeMap, +/// Internal reference to a problem variant, used during edge construction. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct VariantRef { + name: String, + variant: std::collections::BTreeMap, } /// A single output field in the reduction overhead. 
@@ -71,10 +73,10 @@ pub struct OverheadFieldJson { /// An edge in the reduction graph JSON. #[derive(Debug, Clone, Serialize)] pub struct EdgeJson { - /// Source problem variant. - pub source: VariantRef, - /// Target problem variant. - pub target: VariantRef, + /// Index into the `nodes` array for the source problem variant. + pub source: usize, + /// Index into the `nodes` array for the target problem variant. + pub target: usize, /// Reduction overhead: output size as polynomials of input size. pub overhead: Vec, /// Relative rustdoc path for the reduction module. @@ -165,8 +167,6 @@ pub struct ReductionGraph { graph: DiGraph<&'static str, ReductionEdge>, /// Map from base type name to node index. name_indices: HashMap<&'static str, NodeIndex>, - /// Map from TypeId to base type name (for generic API compatibility). - type_to_name: HashMap, /// Graph hierarchy: subtype -> set of supertypes (transitively closed). graph_hierarchy: HashMap<&'static str, HashSet<&'static str>>, /// Weight hierarchy: subtype -> set of supertypes (transitively closed). 
@@ -178,7 +178,6 @@ impl ReductionGraph { pub fn new() -> Self { let mut graph = DiGraph::new(); let mut name_indices = HashMap::new(); - let mut type_to_name = HashMap::new(); // Build graph hierarchy from GraphSubtypeEntry registrations let graph_hierarchy = Self::build_graph_hierarchy(); @@ -186,10 +185,7 @@ impl ReductionGraph { // Build weight hierarchy from WeightSubtypeEntry registrations let weight_hierarchy = Self::build_weight_hierarchy(); - // First, register all problem types (for TypeId mapping) - Self::register_types(&mut graph, &mut name_indices, &mut type_to_name); - - // Then, register reductions from inventory (auto-discovery) + // Register reductions from inventory (auto-discovery) for entry in inventory::iter:: { // Ensure source node exists if !name_indices.contains_key(entry.source_name) { @@ -223,7 +219,6 @@ impl ReductionGraph { Self { graph, name_indices, - type_to_name, graph_hierarchy, weight_hierarchy, } @@ -315,64 +310,6 @@ impl ReductionGraph { supertypes } - fn register_types( - graph: &mut DiGraph<&'static str, ReductionEdge>, - name_indices: &mut HashMap<&'static str, NodeIndex>, - type_to_name: &mut HashMap, - ) { - // Register a problem type with its base name. - // Multiple concrete types can map to the same base name. - macro_rules! register { - ($($ty:ty => $base_name:expr),* $(,)?) => { - $( - // Map TypeId to base name - type_to_name.insert(TypeId::of::<$ty>(), $base_name); - - // Only add node if not already present - if !name_indices.contains_key($base_name) { - let idx = graph.add_node($base_name); - name_indices.insert($base_name, idx); - } - )* - }; - } - - use crate::models::graph::*; - use crate::models::optimization::*; - use crate::models::satisfiability::*; - use crate::models::set::*; - use crate::models::specialized::*; - use crate::topology::SimpleGraph; - - // Register problem types - multiple concrete types can share a base name - register! 
{ - // Graph problems - MaximumIndependentSet => "MaximumIndependentSet", - MaximumIndependentSet => "MaximumIndependentSet", - MinimumVertexCover => "MinimumVertexCover", - MinimumVertexCover => "MinimumVertexCover", - MaxCut => "MaxCut", - MaxCut => "MaxCut", - MaximumMatching => "MaximumMatching", - MinimumDominatingSet => "MinimumDominatingSet", - KColoring<3, SimpleGraph> => "KColoring", - // Set problems - MaximumSetPacking => "MaximumSetPacking", - MinimumSetCovering => "MinimumSetCovering", - // Optimization problems - SpinGlass => "SpinGlass", - SpinGlass => "SpinGlass", - QUBO => "QUBO", - ILP => "ILP", - // Satisfiability problems - Satisfiability => "Satisfiability", - KSatisfiability<3> => "KSatisfiability", - CircuitSAT => "CircuitSAT", - // Specialized - Factoring => "Factoring", - } - } - /// Check if `sub` is a subtype of `sup` (or equal). pub fn is_graph_subtype(&self, sub: &str, sup: &str) -> bool { sub == sup @@ -519,19 +456,12 @@ impl ReductionGraph { /// Find all paths from source to target type. /// - /// Uses type-erased names, so `find_paths::, SpinGlass>()` + /// Uses `Problem::NAME` for lookup, so `find_paths::, SpinGlass>()` /// will find paths even though the weight types differ. - pub fn find_paths(&self) -> Vec { - let src_name = match self.type_to_name.get(&TypeId::of::()) { - Some(&name) => name, - None => return vec![], - }; - let dst_name = match self.type_to_name.get(&TypeId::of::()) { - Some(&name) => name, - None => return vec![], - }; - - self.find_paths_by_name(src_name, dst_name) + pub fn find_paths( + &self, + ) -> Vec { + self.find_paths_by_name(S::NAME, T::NAME) } /// Find all paths between problem types by name. @@ -563,7 +493,9 @@ impl ReductionGraph { } /// Find the shortest path from source to target type. 
- pub fn find_shortest_path(&self) -> Option { + pub fn find_shortest_path( + &self, + ) -> Option { let paths = self.find_paths::(); paths.into_iter().min_by_key(|p| p.len()) } @@ -575,17 +507,10 @@ impl ReductionGraph { } /// Check if a direct reduction exists from S to T. - pub fn has_direct_reduction(&self) -> bool { - let src_name = match self.type_to_name.get(&TypeId::of::()) { - Some(&name) => name, - None => return false, - }; - let dst_name = match self.type_to_name.get(&TypeId::of::()) { - Some(&name) => name, - None => return false, - }; - - self.has_direct_reduction_by_name(src_name, dst_name) + pub fn has_direct_reduction( + &self, + ) -> bool { + self.has_direct_reduction_by_name(S::NAME, T::NAME) } /// Check if a direct reduction exists by name. @@ -704,8 +629,15 @@ impl ReductionGraph { /// /// This method generates nodes for each variant based on the registered reductions. pub fn to_json(&self) -> ReductionGraphJson { + use crate::registry::ProblemSchemaEntry; use crate::rules::registry::ReductionEntry; + // Build name → module_path lookup from ProblemSchemaEntry inventory + let schema_modules: HashMap<&str, &str> = inventory::iter:: + .into_iter() + .map(|entry| (entry.name, entry.module_path)) + .collect(); + // Collect all unique nodes (name + variant combination) let mut node_set: HashSet<(String, std::collections::BTreeMap)> = HashSet::new(); @@ -729,31 +661,40 @@ impl ReductionGraph { )); } - // Also collect nodes from ConcreteVariantEntry registrations - for entry in inventory::iter:: { - let variant = (entry.variant_fn)(); - node_set.insert((entry.name.to_string(), Self::variant_to_map(&variant))); - } - - // Build nodes with categories and doc paths + // Build nodes with categories and doc paths derived from ProblemSchemaEntry.module_path let mut nodes: Vec = node_set .iter() .map(|(name, variant)| { - let category = Self::categorize_type(name); - let doc_path = Self::compute_doc_path(name); + let (category, doc_path) = + if let 
Some(&mod_path) = schema_modules.get(name.as_str()) { + ( + Self::category_from_module_path(mod_path), + Self::doc_path_from_module_path(mod_path, name), + ) + } else { + ("other".to_string(), String::new()) + }; NodeJson { name: name.clone(), variant: variant.clone(), - category: category.to_string(), + category, doc_path, } }) .collect(); nodes.sort_by(|a, b| (&a.name, &a.variant).cmp(&(&b.name, &b.variant))); - // Collect edges: each reduction is a separate directed edge + // Build node index lookup: (name, variant) -> index in sorted nodes vec + let node_index: HashMap<(&str, &std::collections::BTreeMap), usize> = nodes + .iter() + .enumerate() + .map(|(i, n)| ((n.name.as_str(), &n.variant), i)) + .collect(); + + // Collect edges as (VariantRef, VariantRef) pairs first, then resolve to indices let mut edge_set: HashSet<(VariantRef, VariantRef)> = HashSet::new(); - let mut edge_data: Vec<(VariantRef, VariantRef, ReductionOverhead, String)> = Vec::new(); + let mut edge_data: Vec<(VariantRef, VariantRef, Vec, String)> = + Vec::new(); for entry in inventory::iter:: { let source_variant = entry.source_variant(); @@ -764,41 +705,17 @@ impl ReductionGraph { if edge_set.insert(key) { let overhead = entry.overhead(); let doc_path = Self::module_path_to_doc_path(entry.module_path); - edge_data.push((src_ref, dst_ref, overhead, doc_path)); - } - } - - // Build edges - let mut edges: Vec = edge_data - .into_iter() - .map(|(src, dst, overhead, doc_path)| EdgeJson { - source: src, - target: dst, - overhead: overhead + let overhead_fields = overhead .output_size .iter() .map(|(field, poly)| OverheadFieldJson { field: field.to_string(), formula: poly.to_string(), }) - .collect(), - doc_path, - }) - .collect(); - edges.sort_by(|a, b| { - ( - &a.source.name, - &a.source.variant, - &a.target.name, - &a.target.variant, - ) - .cmp(&( - &b.source.name, - &b.source.variant, - &b.target.name, - &b.target.variant, - )) - }); + .collect(); + edge_data.push((src_ref, dst_ref, 
overhead_fields, doc_path)); + } + } // Auto-generate natural edges between same-name variant nodes. // A natural edge exists from A to B when all variant fields of A are @@ -821,11 +738,11 @@ impl ReductionGraph { // Use edges where the problem is the TARGET, since the overhead fields // describe the target problem's size dimensions. let mut fields_by_problem: HashMap> = HashMap::new(); - for edge in &edges { - if !edge.overhead.is_empty() { + for (_, dst, overhead, _) in &edge_data { + if !overhead.is_empty() { fields_by_problem - .entry(edge.target.name.clone()) - .or_insert_with(|| edge.overhead.iter().map(|o| o.field.clone()).collect()); + .entry(dst.name.clone()) + .or_insert_with(|| overhead.iter().map(|o| o.field.clone()).collect()); } } @@ -858,35 +775,39 @@ impl ReductionGraph { }) .unwrap_or_default(); - edges.push(EdgeJson { - source: src_ref, - target: dst_ref, - overhead, - doc_path: String::new(), - }); + edge_data.push((src_ref, dst_ref, overhead, String::new())); } } } } } - - // Re-sort after adding natural edges - edges.sort_by(|a, b| { - ( - &a.source.name, - &a.source.variant, - &a.target.name, - &a.target.variant, - ) - .cmp(&( - &b.source.name, - &b.source.variant, - &b.target.name, - &b.target.variant, - )) - }); } + // Sort edge data by source/target names for deterministic output + edge_data.sort_by(|a, b| { + (&a.0.name, &a.0.variant, &a.1.name, &a.1.variant).cmp(&( + &b.0.name, + &b.0.variant, + &b.1.name, + &b.1.variant, + )) + }); + + // Resolve VariantRefs to node indices + let edges: Vec = edge_data + .into_iter() + .map(|(src, dst, overhead, doc_path)| { + let src_idx = node_index[&(src.name.as_str(), &src.variant)]; + let dst_idx = node_index[&(dst.name.as_str(), &dst.variant)]; + EdgeJson { + source: src_idx, + target: dst_idx, + overhead, + doc_path, + } + }) + .collect(); + ReductionGraphJson { nodes, edges } } @@ -914,49 +835,34 @@ impl ReductionGraph { format!("{}/index.html", stripped.replace("::", "/")) } - /// Compute the 
rustdoc path for a problem type. - /// Maps name → actual Rust module location (which may differ from the visualization category). - fn compute_doc_path(name: &str) -> String { - let module = match name { - "MaximumIndependentSet" - | "MaximalIS" - | "MinimumVertexCover" - | "MinimumDominatingSet" - | "KColoring" - | "MaximumMatching" - | "MaxCut" - | "MaximumClique" - | "TravelingSalesman" => "graph", - "Satisfiability" | "KSatisfiability" => "satisfiability", - "SpinGlass" | "QUBO" | "ILP" => "optimization", - "MinimumSetCovering" | "MaximumSetPacking" => "set", - _ => "specialized", - }; - format!("models/{module}/struct.{name}.html") - } - - /// Categorize a type name into a problem category. - fn categorize_type(name: &str) -> &'static str { - if name.contains("MaximumIndependentSet") - || name.contains("VertexCover") - || name.contains("MaxCut") - || name.contains("Coloring") - || name.contains("MinimumDominatingSet") - || name.contains("MaximumMatching") - || name.contains("MaximumClique") - || name.contains("TravelingSalesman") - { - "graph" - } else if name.contains("MaximumSetPacking") || name.contains("SetCover") { - "set" - } else if name.contains("SpinGlass") || name.contains("QUBO") || name.contains("ILP") { - "optimization" - } else if name.contains("Satisfiability") || name.contains("SAT") { - "satisfiability" - } else if name.contains("Factoring") || name.contains("Circuit") { - "specialized" + /// Extract the category from a module path. + /// + /// E.g., `"problemreductions::models::graph::maximum_independent_set"` → `"graph"`. + fn category_from_module_path(module_path: &str) -> String { + // Expected format: "problemreductions::models::::" + let parts: Vec<&str> = module_path.split("::").collect(); + // parts = ["problemreductions", "models", "graph", "maximum_independent_set"] + if parts.len() >= 3 { + parts[2].to_string() + } else { + "other".to_string() + } + } + + /// Build the rustdoc path from a module path and problem name. 
+ /// + /// E.g., `"problemreductions::models::graph::maximum_independent_set"`, `"MaximumIndependentSet"` + /// → `"models/graph/struct.MaximumIndependentSet.html"`. + fn doc_path_from_module_path(module_path: &str, name: &str) -> String { + let stripped = module_path + .strip_prefix("problemreductions::") + .unwrap_or(module_path); + // stripped = "models::graph::maximum_independent_set" + // We need "models/graph/struct.MaximumIndependentSet.html" + if let Some(parent) = stripped.rsplit_once("::").map(|(p, _)| p) { + format!("{}/struct.{}.html", parent.replace("::", "/"), name) } else { - "other" + format!("struct.{}.html", name) } } } diff --git a/src/rules/maximumindependentset_gridgraph.rs b/src/rules/maximumindependentset_gridgraph.rs new file mode 100644 index 000000000..62ac6b653 --- /dev/null +++ b/src/rules/maximumindependentset_gridgraph.rs @@ -0,0 +1,118 @@ +//! Reduction from MaximumIndependentSet on SimpleGraph/UnitDiskGraph to GridGraph +//! using the King's Subgraph (KSG) unit disk mapping. +//! +//! Maps an arbitrary graph's MIS problem to an equivalent weighted MIS on a grid graph. + +use crate::models::graph::MaximumIndependentSet; +use crate::poly; +use crate::reduction; +use crate::rules::registry::ReductionOverhead; +use crate::rules::traits::{ReduceTo, ReductionResult}; +use crate::rules::unitdiskmapping::ksg; +use crate::topology::{GridGraph, SimpleGraph, UnitDiskGraph}; + +/// Result of reducing MIS on SimpleGraph to MIS on GridGraph. 
+#[derive(Debug, Clone)] +pub struct ReductionISSimpleToGrid { + target: MaximumIndependentSet, i32>, + mapping_result: ksg::MappingResult, +} + +impl ReductionResult for ReductionISSimpleToGrid { + type Source = MaximumIndependentSet; + type Target = MaximumIndependentSet, i32>; + + fn target_problem(&self) -> &Self::Target { + &self.target + } + + fn extract_solution(&self, target_solution: &[usize]) -> Vec { + self.mapping_result.map_config_back(target_solution) + } +} + +#[reduction( + overhead = { + ReductionOverhead::new(vec![ + ("num_vertices", poly!(num_vertices * num_vertices)), + ("num_edges", poly!(num_vertices * num_vertices)), + ]) + } +)] +impl ReduceTo, i32>> + for MaximumIndependentSet +{ + type Result = ReductionISSimpleToGrid; + + fn reduce_to(&self) -> Self::Result { + let n = self.num_vertices(); + let edges = self.edges(); + let result = ksg::map_unweighted(n, &edges); + let weights: Vec = result + .grid_graph + .nodes() + .iter() + .map(|node| node.weight) + .collect(); + let target = MaximumIndependentSet::from_graph(result.grid_graph.clone(), weights); + ReductionISSimpleToGrid { + target, + mapping_result: result, + } + } +} + +/// Result of reducing MIS on UnitDiskGraph to MIS on GridGraph. 
+#[derive(Debug, Clone)] +pub struct ReductionISUnitDiskToGrid { + target: MaximumIndependentSet, i32>, + mapping_result: ksg::MappingResult, +} + +impl ReductionResult for ReductionISUnitDiskToGrid { + type Source = MaximumIndependentSet; + type Target = MaximumIndependentSet, i32>; + + fn target_problem(&self) -> &Self::Target { + &self.target + } + + fn extract_solution(&self, target_solution: &[usize]) -> Vec { + self.mapping_result.map_config_back(target_solution) + } +} + +#[reduction( + overhead = { + ReductionOverhead::new(vec![ + ("num_vertices", poly!(num_vertices * num_vertices)), + ("num_edges", poly!(num_vertices * num_vertices)), + ]) + } +)] +impl ReduceTo, i32>> + for MaximumIndependentSet +{ + type Result = ReductionISUnitDiskToGrid; + + fn reduce_to(&self) -> Self::Result { + let n = self.num_vertices(); + let edges = self.edges(); + let result = ksg::map_unweighted(n, &edges); + let weights: Vec = result + .grid_graph + .nodes() + .iter() + .map(|node| node.weight) + .collect(); + let target = MaximumIndependentSet::from_graph(result.grid_graph.clone(), weights); + ReductionISUnitDiskToGrid { + target, + mapping_result: result, + } + } +} + +#[cfg(test)] +#[path = "../unit_tests/rules/maximumindependentset_gridgraph.rs"] +mod tests; diff --git a/src/rules/maximumindependentset_maximumsetpacking.rs b/src/rules/maximumindependentset_maximumsetpacking.rs index 0732517bc..92527167f 100644 --- a/src/rules/maximumindependentset_maximumsetpacking.rs +++ b/src/rules/maximumindependentset_maximumsetpacking.rs @@ -10,9 +10,8 @@ use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::topology::SimpleGraph; -use num_traits::{Bounded, Num, Zero}; +use crate::types::WeightElement; use std::collections::HashSet; -use std::ops::AddAssign; /// Result of reducing MaximumIndependentSet to MaximumSetPacking. 
#[derive(Debug, Clone)] @@ -22,7 +21,7 @@ pub struct ReductionISToSP { impl ReductionResult for ReductionISToSP where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static, + W: WeightElement, { type Source = MaximumIndependentSet; type Target = MaximumSetPacking; @@ -45,11 +44,8 @@ where ]) } )] -impl ReduceTo> for MaximumIndependentSet -where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionISToSP; +impl ReduceTo> for MaximumIndependentSet { + type Result = ReductionISToSP; fn reduce_to(&self) -> Self::Result { let edges = self.edges(); @@ -76,7 +72,7 @@ pub struct ReductionSPToIS { impl ReductionResult for ReductionSPToIS where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static, + W: WeightElement, { type Source = MaximumSetPacking; type Target = MaximumIndependentSet; @@ -99,11 +95,8 @@ where ]) } )] -impl ReduceTo> for MaximumSetPacking -where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionSPToIS; +impl ReduceTo> for MaximumSetPacking { + type Result = ReductionSPToIS; fn reduce_to(&self) -> Self::Result { let sets = self.sets(); diff --git a/src/rules/maximumindependentset_triangular.rs b/src/rules/maximumindependentset_triangular.rs new file mode 100644 index 000000000..a779b1702 --- /dev/null +++ b/src/rules/maximumindependentset_triangular.rs @@ -0,0 +1,68 @@ +//! Reduction from MaximumIndependentSet on SimpleGraph to Triangular lattice +//! using the weighted triangular unit disk mapping. +//! +//! Maps an arbitrary graph's MIS problem to an equivalent weighted MIS on a +//! triangular lattice grid graph. 
+ +use crate::models::graph::MaximumIndependentSet; +use crate::poly; +use crate::reduction; +use crate::rules::registry::ReductionOverhead; +use crate::rules::traits::{ReduceTo, ReductionResult}; +use crate::rules::unitdiskmapping::ksg; +use crate::rules::unitdiskmapping::triangular; +use crate::topology::{SimpleGraph, Triangular}; + +/// Result of reducing MIS on SimpleGraph to MIS on Triangular. +#[derive(Debug, Clone)] +pub struct ReductionISSimpleToTriangular { + target: MaximumIndependentSet, + mapping_result: ksg::MappingResult, +} + +impl ReductionResult for ReductionISSimpleToTriangular { + type Source = MaximumIndependentSet; + type Target = MaximumIndependentSet; + + fn target_problem(&self) -> &Self::Target { + &self.target + } + + fn extract_solution(&self, target_solution: &[usize]) -> Vec { + self.mapping_result.map_config_back(target_solution) + } +} + +#[reduction( + overhead = { + ReductionOverhead::new(vec![ + ("num_vertices", poly!(num_vertices * num_vertices)), + ("num_edges", poly!(num_vertices * num_vertices)), + ]) + } +)] +impl ReduceTo> for MaximumIndependentSet { + type Result = ReductionISSimpleToTriangular; + + fn reduce_to(&self) -> Self::Result { + let n = self.num_vertices(); + let edges = self.edges(); + let result = triangular::map_weighted(n, &edges); + let weights: Vec = result + .grid_graph + .nodes() + .iter() + .map(|node| node.weight) + .collect(); + let grid = Triangular::new(result.grid_graph.clone()); + let target = MaximumIndependentSet::from_graph(grid, weights); + ReductionISSimpleToTriangular { + target, + mapping_result: result, + } + } +} + +#[cfg(test)] +#[path = "../unit_tests/rules/maximumindependentset_triangular.rs"] +mod tests; diff --git a/src/rules/maximummatching_maximumsetpacking.rs b/src/rules/maximummatching_maximumsetpacking.rs index 9f60923e5..f1c74890f 100644 --- a/src/rules/maximummatching_maximumsetpacking.rs +++ b/src/rules/maximummatching_maximumsetpacking.rs @@ -9,9 +9,8 @@ use crate::poly; use 
crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::topology::Graph; -use num_traits::{Bounded, Num, Zero}; -use std::ops::AddAssign; +use crate::topology::{Graph, SimpleGraph}; +use crate::types::WeightElement; /// Result of reducing MaximumMatching to MaximumSetPacking. #[derive(Debug, Clone)] @@ -23,7 +22,7 @@ pub struct ReductionMatchingToSP { impl ReductionResult for ReductionMatchingToSP where G: Graph, - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static, + W: WeightElement, { type Source = MaximumMatching; type Target = MaximumSetPacking; @@ -46,12 +45,8 @@ where ]) } )] -impl ReduceTo> for MaximumMatching -where - G: Graph, - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionMatchingToSP; +impl ReduceTo> for MaximumMatching { + type Result = ReductionMatchingToSP; fn reduce_to(&self) -> Self::Result { let edges = self.edges(); diff --git a/src/rules/maximumsetpacking_qubo.rs b/src/rules/maximumsetpacking_qubo.rs index 3496eafbb..2e5a48c0b 100644 --- a/src/rules/maximumsetpacking_qubo.rs +++ b/src/rules/maximumsetpacking_qubo.rs @@ -12,19 +12,15 @@ use crate::poly; use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; -use crate::types::NumericWeight; -use std::marker::PhantomData; - -/// Result of reducing MaximumSetPacking to QUBO. +/// Result of reducing `MaximumSetPacking` to `QUBO`. 
#[derive(Debug, Clone)] -pub struct ReductionSPToQUBO { +pub struct ReductionSPToQUBO { target: QUBO, - _phantom: PhantomData, } -impl> ReductionResult for ReductionSPToQUBO { - type Source = MaximumSetPacking; +impl ReductionResult for ReductionSPToQUBO { + type Source = MaximumSetPacking; type Target = QUBO; fn target_problem(&self) -> &Self::Target { @@ -37,26 +33,22 @@ impl> ReductionResult for Red } #[reduction( - source_weighted = true, overhead = { ReductionOverhead::new(vec![("num_vars", poly!(num_sets))]) } )] -impl> ReduceTo> - for MaximumSetPacking -{ - type Result = ReductionSPToQUBO; +impl ReduceTo> for MaximumSetPacking { + type Result = ReductionSPToQUBO; fn reduce_to(&self) -> Self::Result { let n = self.num_sets(); let weights = self.weights_ref(); - let total_weight: f64 = weights.iter().map(|w| w.clone().into()).sum(); + let total_weight: f64 = weights.iter().sum(); let penalty = 1.0 + total_weight; let mut matrix = vec![vec![0.0; n]; n]; // Diagonal: -w_i for i in 0..n { - let w: f64 = weights[i].clone().into(); - matrix[i][i] = -w; + matrix[i][i] = -weights[i]; } // Off-diagonal: P for overlapping pairs @@ -67,7 +59,6 @@ impl> ReduceTo> ReductionSPToQUBO { target: QUBO::from_matrix(matrix), - _phantom: PhantomData, } } } diff --git a/src/rules/minimumvertexcover_maximumindependentset.rs b/src/rules/minimumvertexcover_maximumindependentset.rs index 0a3311836..c8367c288 100644 --- a/src/rules/minimumvertexcover_maximumindependentset.rs +++ b/src/rules/minimumvertexcover_maximumindependentset.rs @@ -8,8 +8,7 @@ use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::topology::SimpleGraph; -use num_traits::{Bounded, Num, Zero}; -use std::ops::AddAssign; +use crate::types::WeightElement; /// Result of reducing MaximumIndependentSet to MinimumVertexCover. 
#[derive(Debug, Clone)] @@ -19,7 +18,7 @@ pub struct ReductionISToVC { impl ReductionResult for ReductionISToVC where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static, + W: WeightElement, { type Source = MaximumIndependentSet; type Target = MinimumVertexCover; @@ -43,11 +42,8 @@ where ]) } )] -impl ReduceTo> for MaximumIndependentSet -where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionISToVC; +impl ReduceTo> for MaximumIndependentSet { + type Result = ReductionISToVC; fn reduce_to(&self) -> Self::Result { let target = MinimumVertexCover::with_weights( @@ -67,7 +63,7 @@ pub struct ReductionVCToIS { impl ReductionResult for ReductionVCToIS where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static, + W: WeightElement, { type Source = MinimumVertexCover; type Target = MaximumIndependentSet; @@ -90,11 +86,8 @@ where ]) } )] -impl ReduceTo> for MinimumVertexCover -where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionVCToIS; +impl ReduceTo> for MinimumVertexCover { + type Result = ReductionVCToIS; fn reduce_to(&self) -> Self::Result { let target = MaximumIndependentSet::with_weights( diff --git a/src/rules/minimumvertexcover_minimumsetcovering.rs b/src/rules/minimumvertexcover_minimumsetcovering.rs index 2291b5c17..b8b707e8d 100644 --- a/src/rules/minimumvertexcover_minimumsetcovering.rs +++ b/src/rules/minimumvertexcover_minimumsetcovering.rs @@ -10,8 +10,7 @@ use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::topology::SimpleGraph; -use num_traits::{Bounded, Num, Zero}; -use std::ops::AddAssign; +use crate::types::WeightElement; /// Result of reducing MinimumVertexCover to MinimumSetCovering. 
#[derive(Debug, Clone)] @@ -21,7 +20,7 @@ pub struct ReductionVCToSC { impl ReductionResult for ReductionVCToSC where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + 'static, + W: WeightElement, { type Source = MinimumVertexCover; type Target = MinimumSetCovering; @@ -45,11 +44,8 @@ where ]) } )] -impl ReduceTo> for MinimumVertexCover -where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionVCToSC; +impl ReduceTo> for MinimumVertexCover { + type Result = ReductionVCToSC; fn reduce_to(&self) -> Self::Result { let edges = self.edges(); diff --git a/src/rules/mod.rs b/src/rules/mod.rs index 64e2f6c52..9bf45d5da 100644 --- a/src/rules/mod.rs +++ b/src/rules/mod.rs @@ -13,13 +13,16 @@ mod coloring_qubo; mod factoring_circuit; mod graph; mod ksatisfiability_qubo; +mod maximumindependentset_gridgraph; mod maximumindependentset_maximumsetpacking; mod maximumindependentset_qubo; +mod maximumindependentset_triangular; mod maximummatching_maximumsetpacking; mod maximumsetpacking_qubo; mod minimumvertexcover_maximumindependentset; mod minimumvertexcover_minimumsetcovering; mod minimumvertexcover_qubo; +mod natural; mod sat_coloring; mod sat_ksat; mod sat_maximumindependentset; @@ -63,8 +66,10 @@ pub use graph::{ EdgeJson, NodeJson, ReductionEdge, ReductionGraph, ReductionGraphJson, ReductionPath, }; pub use ksatisfiability_qubo::{Reduction3SATToQUBO, ReductionKSatToQUBO}; +pub use maximumindependentset_gridgraph::{ReductionISSimpleToGrid, ReductionISUnitDiskToGrid}; pub use maximumindependentset_maximumsetpacking::{ReductionISToSP, ReductionSPToIS}; pub use maximumindependentset_qubo::ReductionISToQUBO; +pub use maximumindependentset_triangular::ReductionISSimpleToTriangular; pub use maximummatching_maximumsetpacking::ReductionMatchingToSP; pub use maximumsetpacking_qubo::ReductionSPToQUBO; pub use minimumvertexcover_maximumindependentset::{ReductionISToVC, ReductionVCToIS}; @@ -76,7 +81,46 
@@ pub use sat_maximumindependentset::{BoolVar, ReductionSATToIS}; pub use sat_minimumdominatingset::ReductionSATToDS; pub use spinglass_maxcut::{ReductionMaxCutToSG, ReductionSGToMaxCut}; pub use spinglass_qubo::{ReductionQUBOToSG, ReductionSGToQUBO}; -pub use traits::{ReduceTo, ReductionResult}; +pub use traits::{ReduceTo, ReductionAutoCast, ReductionResult}; + +/// Generates a natural-edge `ReduceTo` impl for graph subtype relaxation. +/// +/// When graph type `$SubGraph` is a subtype of `$SuperGraph`, a problem on +/// the subgraph can be trivially solved as the same problem on the supergraph. +/// This macro stamps out the concrete `#[reduction]` impl with identity overhead +/// and uses [`ReductionAutoCast`] for the identity solution mapping. +/// +/// # Example +/// +/// ```text +/// impl_natural_reduction!(MaximumIndependentSet, Triangular, SimpleGraph, i32); +/// // Generates: ReduceTo> for MIS +/// ``` +#[macro_export] +macro_rules! impl_natural_reduction { + ($Problem:ident, $SubGraph:ty, $SuperGraph:ty, $Weight:ty) => { + #[reduction( + overhead = { + $crate::rules::registry::ReductionOverhead::new(vec![ + ("num_vertices", $crate::poly!(num_vertices)), + ("num_edges", $crate::poly!(num_edges)), + ]) + } + )] + impl $crate::rules::ReduceTo<$Problem<$SuperGraph, $Weight>> + for $Problem<$SubGraph, $Weight> + { + type Result = $crate::rules::ReductionAutoCast>; + + fn reduce_to(&self) -> Self::Result { + use $crate::topology::GraphCast; + let graph: $SuperGraph = self.graph().cast_graph(); + let target = $Problem::from_graph(graph, self.weights()); + $crate::rules::ReductionAutoCast::new(target) + } + } + }; +} #[cfg(feature = "ilp")] pub use coloring_ilp::{ReductionColoringToILP, ReductionKColoringToILP}; diff --git a/src/rules/natural.rs b/src/rules/natural.rs new file mode 100644 index 000000000..f617fdf97 --- /dev/null +++ b/src/rules/natural.rs @@ -0,0 +1,19 @@ +//! Natural-edge reductions via graph subtype relaxation. +//! +//! 
These reductions are trivial: a problem on a specific graph type +//! (e.g., `Triangular`) can always be solved as the same problem on a +//! more general graph type (e.g., `SimpleGraph`), since the specific +//! graph *is* a general graph. The solution mapping is identity. +//! +//! Each reduction is generated by [`impl_natural_reduction!`]. + +use crate::impl_natural_reduction; +use crate::models::graph::MaximumIndependentSet; +use crate::reduction; +use crate::topology::{SimpleGraph, Triangular}; + +impl_natural_reduction!(MaximumIndependentSet, Triangular, SimpleGraph, i32); + +#[cfg(all(test, feature = "ilp"))] +#[path = "../unit_tests/rules/natural.rs"] +mod tests; diff --git a/src/rules/registry.rs b/src/rules/registry.rs index 476ee5026..532b6cbe8 100644 --- a/src/rules/registry.rs +++ b/src/rules/registry.rs @@ -72,12 +72,12 @@ impl ReductionEntry { let source_unweighted = source .iter() .find(|(k, _)| *k == "weight") - .map(|(_, v)| *v == "Unweighted") + .map(|(_, v)| *v == "One") .unwrap_or(true); let target_unweighted = target .iter() .find(|(k, _)| *k == "weight") - .map(|(_, v)| *v == "Unweighted") + .map(|(_, v)| *v == "One") .unwrap_or(true); source_unweighted && target_unweighted } @@ -98,15 +98,6 @@ impl std::fmt::Debug for ReductionEntry { inventory::collect!(ReductionEntry); -/// A registered concrete problem variant (for JSON export nodes). -/// Variants registered here appear as nodes even without explicit reduction rules. -pub struct ConcreteVariantEntry { - pub name: &'static str, - pub variant_fn: fn() -> Vec<(&'static str, &'static str)>, -} - -inventory::collect!(ConcreteVariantEntry); - #[cfg(test)] #[path = "../unit_tests/rules/registry.rs"] mod tests; diff --git a/src/rules/sat_ksat.rs b/src/rules/sat_ksat.rs index d79476506..687134a6b 100644 --- a/src/rules/sat_ksat.rs +++ b/src/rules/sat_ksat.rs @@ -113,11 +113,11 @@ fn add_clause_to_ksat( macro_rules! 
impl_sat_to_ksat { ($k:expr) => { #[reduction(overhead = { - ReductionOverhead::new(vec![ - ("num_clauses", poly!(num_clauses) + poly!(num_literals)), - ("num_vars", poly!(num_vars) + poly!(num_literals)), - ]) - })] + ReductionOverhead::new(vec![ + ("num_clauses", poly!(num_clauses) + poly!(num_literals)), + ("num_vars", poly!(num_vars) + poly!(num_literals)), + ]) + })] impl ReduceTo> for Satisfiability { type Result = ReductionSATToKSAT<$k>; @@ -144,10 +144,8 @@ macro_rules! impl_sat_to_ksat { }; } -// Implement for common K values +// Implement for K=3 (the canonical NP-complete case) impl_sat_to_ksat!(3); -impl_sat_to_ksat!(4); -impl_sat_to_ksat!(5); /// Result of reducing K-SAT to general SAT. /// diff --git a/src/rules/spinglass_maxcut.rs b/src/rules/spinglass_maxcut.rs index fadd366c9..8c314fe65 100644 --- a/src/rules/spinglass_maxcut.rs +++ b/src/rules/spinglass_maxcut.rs @@ -10,8 +10,8 @@ use crate::reduction; use crate::rules::registry::ReductionOverhead; use crate::rules::traits::{ReduceTo, ReductionResult}; use crate::topology::SimpleGraph; -use num_traits::{Bounded, Num, Zero}; -use std::ops::AddAssign; +use crate::types::WeightElement; +use num_traits::Zero; /// Result of reducing MaxCut to SpinGlass. 
#[derive(Debug, Clone)] @@ -21,7 +21,14 @@ pub struct ReductionMaxCutToSG { impl ReductionResult for ReductionMaxCutToSG where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, + W: WeightElement + + PartialOrd + + num_traits::Num + + num_traits::Zero + + num_traits::Bounded + + std::ops::AddAssign + + std::ops::Mul + + From, { type Source = MaxCut; type Target = SpinGlass; @@ -43,11 +50,8 @@ where ]) } )] -impl ReduceTo> for MaxCut -where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionMaxCutToSG; +impl ReduceTo> for MaxCut { + type Result = ReductionMaxCutToSG; fn reduce_to(&self) -> Self::Result { let n = self.num_vertices(); @@ -70,15 +74,15 @@ where // MaxCut wants to maximize edges cut, SpinGlass minimizes energy. // When J > 0 (antiferromagnetic), opposite spins lower energy. // So maximizing cut = minimizing Ising energy with J = w. - let interactions: Vec<((usize, usize), W)> = edges_with_weights + let interactions: Vec<((usize, usize), i32)> = edges_with_weights .into_iter() .map(|(u, v, w)| ((u, v), w)) .collect(); // No onsite terms for pure MaxCut - let onsite = vec![W::zero(); n]; + let onsite = vec![0i32; n]; - let target = SpinGlass::::new(n, interactions, onsite); + let target = SpinGlass::::new(n, interactions, onsite); ReductionMaxCutToSG { target } } @@ -94,7 +98,14 @@ pub struct ReductionSGToMaxCut { impl ReductionResult for ReductionSGToMaxCut where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, + W: WeightElement + + PartialOrd + + num_traits::Num + + num_traits::Zero + + num_traits::Bounded + + std::ops::AddAssign + + std::ops::Mul + + From, { type Source = SpinGlass; type Target = MaxCut; @@ -129,11 +140,8 @@ where ]) } )] -impl ReduceTo> for SpinGlass -where - W: Clone + Default + PartialOrd + Num + Zero + Bounded + AddAssign + From + 'static, -{ - type Result = ReductionSGToMaxCut; +impl ReduceTo> 
for SpinGlass { + type Result = ReductionSGToMaxCut; fn reduce_to(&self) -> Self::Result { let n = self.num_spins(); @@ -161,7 +169,7 @@ where for (i, h) in fields.iter().enumerate() { if !h.is_zero() { edges.push((i, n)); - weights.push(h.clone()); + weights.push(*h); } } } diff --git a/src/rules/traits.rs b/src/rules/traits.rs index 2a49a9474..a5d243666 100644 --- a/src/rules/traits.rs +++ b/src/rules/traits.rs @@ -1,6 +1,7 @@ //! Core traits for problem reductions. use crate::traits::Problem; +use std::marker::PhantomData; /// Result of reducing a source problem to a target problem. /// @@ -60,6 +61,41 @@ pub trait ReduceTo: Problem { fn reduce_to(&self) -> Self::Result; } +/// Generic reduction result for natural-edge (subtype) reductions. +/// +/// Used when a problem on a specific graph type is trivially reducible to +/// the same problem on a more general graph type (e.g., `MIS` → +/// `MIS`). The solution mapping is identity — vertex indices +/// are preserved. +#[derive(Debug, Clone)] +pub struct ReductionAutoCast { + target: T, + _phantom: PhantomData, +} + +impl ReductionAutoCast { + /// Create a new auto-cast reduction result. 
+ pub fn new(target: T) -> Self { + Self { + target, + _phantom: PhantomData, + } + } +} + +impl ReductionResult for ReductionAutoCast { + type Source = S; + type Target = T; + + fn target_problem(&self) -> &Self::Target { + &self.target + } + + fn extract_solution(&self, target_solution: &[usize]) -> Vec { + target_solution.to_vec() + } +} + #[cfg(test)] #[path = "../unit_tests/rules/traits.rs"] mod tests; diff --git a/src/rules/travelingsalesman_ilp.rs b/src/rules/travelingsalesman_ilp.rs index cf31e7784..b721ef8af 100644 --- a/src/rules/travelingsalesman_ilp.rs +++ b/src/rules/travelingsalesman_ilp.rs @@ -109,8 +109,12 @@ impl ReduceTo for TravelingSalesman { let n = self.num_vertices(); let graph = self.graph(); let edges_with_weights = self.edges(); - let source_edges: Vec<(usize, usize)> = edges_with_weights.iter().map(|&(u, v, _)| (u, v)).collect(); - let edge_weights: Vec = edges_with_weights.iter().map(|&(_, _, w)| w as f64).collect(); + let source_edges: Vec<(usize, usize)> = + edges_with_weights.iter().map(|&(u, v, _)| (u, v)).collect(); + let edge_weights: Vec = edges_with_weights + .iter() + .map(|&(_, _, w)| w as f64) + .collect(); let m = source_edges.len(); // Variable layout: @@ -124,7 +128,8 @@ impl ReduceTo for TravelingSalesman { let num_vars = num_x + num_y; let x_idx = |v: usize, k: usize| -> usize { v * n + k }; - let y_idx = |edge: usize, k: usize, dir: usize| -> usize { num_x + edge * 2 * n + 2 * k + dir }; + let y_idx = + |edge: usize, k: usize, dir: usize| -> usize { num_x + edge * 2 * n + 2 * k + dir }; let bounds = vec![VarBounds::binary(); num_vars]; let mut constraints = Vec::new(); @@ -174,7 +179,10 @@ impl ReduceTo for TravelingSalesman { let xu = x_idx(u, k); let xv_next = x_idx(v, k_next); constraints.push(LinearConstraint::le(vec![(y_fwd, 1.0), (xu, -1.0)], 0.0)); - constraints.push(LinearConstraint::le(vec![(y_fwd, 1.0), (xv_next, -1.0)], 0.0)); + constraints.push(LinearConstraint::le( + vec![(y_fwd, 1.0), (xv_next, -1.0)], + 
0.0, + )); constraints.push(LinearConstraint::ge( vec![(y_fwd, 1.0), (xu, -1.0), (xv_next, -1.0)], -1.0, @@ -185,7 +193,10 @@ impl ReduceTo for TravelingSalesman { let xv = x_idx(v, k); let xu_next = x_idx(u, k_next); constraints.push(LinearConstraint::le(vec![(y_rev, 1.0), (xv, -1.0)], 0.0)); - constraints.push(LinearConstraint::le(vec![(y_rev, 1.0), (xu_next, -1.0)], 0.0)); + constraints.push(LinearConstraint::le( + vec![(y_rev, 1.0), (xu_next, -1.0)], + 0.0, + )); constraints.push(LinearConstraint::ge( vec![(y_rev, 1.0), (xv, -1.0), (xu_next, -1.0)], -1.0, @@ -202,7 +213,13 @@ impl ReduceTo for TravelingSalesman { } } - let target = ILP::new(num_vars, bounds, constraints, objective, ObjectiveSense::Minimize); + let target = ILP::new( + num_vars, + bounds, + constraints, + objective, + ObjectiveSense::Minimize, + ); ReductionTSPToILP { target, diff --git a/src/topology/graph.rs b/src/topology/graph.rs index 04d8d4191..fc8b8386d 100644 --- a/src/topology/graph.rs +++ b/src/topology/graph.rs @@ -82,6 +82,24 @@ pub trait Graph: Clone + Send + Sync + 'static { } } +/// Trait for casting a graph to a supertype in the graph hierarchy. +/// +/// When `A: GraphCast`, graph `A` can be losslessly converted to graph `B` +/// by extracting the adjacency structure. This enables natural-edge reductions +/// where a problem on a specific graph type is solved by treating it as a more +/// general graph. +pub trait GraphCast: Graph { + /// Convert this graph to the target graph type. + fn cast_graph(&self) -> Target; +} + +/// Any graph can be cast to a `SimpleGraph` by extracting vertices and edges. +impl GraphCast for G { + fn cast_graph(&self) -> SimpleGraph { + SimpleGraph::new(self.num_vertices(), self.edges()) + } +} + /// A simple unweighted undirected graph. /// /// This is the default graph type for most problems. 
It wraps petgraph's diff --git a/src/topology/mod.rs b/src/topology/mod.rs index 198e9ca0a..bfde7523e 100644 --- a/src/topology/mod.rs +++ b/src/topology/mod.rs @@ -26,10 +26,12 @@ mod graph; mod grid_graph; mod hypergraph; pub mod small_graphs; +mod triangular; mod unit_disk_graph; -pub use graph::{Graph, SimpleGraph}; +pub use graph::{Graph, GraphCast, SimpleGraph}; pub use grid_graph::{GridGraph, GridNode, GridType}; pub use hypergraph::HyperGraph; pub use small_graphs::{available_graphs, smallgraph}; +pub use triangular::Triangular; pub use unit_disk_graph::UnitDiskGraph; diff --git a/src/topology/triangular.rs b/src/topology/triangular.rs new file mode 100644 index 000000000..f1d20ece5 --- /dev/null +++ b/src/topology/triangular.rs @@ -0,0 +1,57 @@ +//! Triangular lattice graph — a weighted unit disk graph on a triangular grid. +//! +//! This is a newtype wrapper around [`GridGraph`] with triangular geometry, +//! exposed as a distinct graph type for the reduction system. + +use super::graph::Graph; +use super::grid_graph::GridGraph; +use serde::{Deserialize, Serialize}; + +/// A triangular lattice graph. +/// +/// Wraps a [`GridGraph`] that uses triangular lattice geometry. +/// This is a subtype of `UnitDiskGraph` — all triangular lattice graphs +/// are unit disk graphs (and therefore also simple graphs). +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct Triangular(GridGraph); + +impl Triangular { + /// Create a new Triangular graph from a GridGraph. + pub fn new(grid_graph: GridGraph) -> Self { + Self(grid_graph) + } + + /// Get a reference to the inner GridGraph. + pub fn grid_graph(&self) -> &GridGraph { + &self.0 + } + + /// Get the nodes of the graph. 
+ pub fn nodes(&self) -> &[super::grid_graph::GridNode] { + self.0.nodes() + } +} + +impl Graph for Triangular { + const NAME: &'static str = "Triangular"; + + fn num_vertices(&self) -> usize { + self.0.num_vertices() + } + + fn num_edges(&self) -> usize { + self.0.num_edges() + } + + fn edges(&self) -> Vec<(usize, usize)> { + Graph::edges(&self.0) + } + + fn has_edge(&self, u: usize, v: usize) -> bool { + self.0.has_edge(u, v) + } + + fn neighbors(&self, v: usize) -> Vec { + self.0.neighbors(v) + } +} diff --git a/src/traits.rs b/src/traits.rs index ab13abf3e..635718c0c 100644 --- a/src/traits.rs +++ b/src/traits.rs @@ -36,6 +36,12 @@ pub trait OptimizationProblem: Problem crate::types::Direction; } +/// Marker trait for satisfaction (decision) problems. +/// +/// Satisfaction problems evaluate configurations to `bool`: +/// `true` if the configuration satisfies all constraints, `false` otherwise. +pub trait SatisfactionProblem: Problem {} + #[cfg(test)] #[path = "unit_tests/traits.rs"] mod tests; diff --git a/src/types.rs b/src/types.rs index 64f5346f4..d962d112c 100644 --- a/src/types.rs +++ b/src/types.rs @@ -3,28 +3,6 @@ use serde::{Deserialize, Serialize}; use std::fmt; -/// Marker trait for numeric weight types. -/// -/// Weight subsumption uses Rust's `From` trait: -/// - `i32 → f64` is valid (`From` for f64 exists) -/// - `f64 → i32` is invalid (no lossless conversion) -pub trait NumericWeight: - Clone + Default + PartialOrd + num_traits::Num + num_traits::Zero + std::ops::AddAssign + 'static -{ -} - -// Blanket implementation for any type satisfying the bounds -impl NumericWeight for T where - T: Clone - + Default - + PartialOrd - + num_traits::Num - + num_traits::Zero - + std::ops::AddAssign - + 'static -{ -} - /// Bound for objective value types (i32, f64, etc.) pub trait NumericSize: Clone @@ -50,85 +28,61 @@ impl NumericSize for T where { } -/// Trait for weight storage. Separates weight storage from objective value type. 
-pub trait Weights: Clone + 'static { - /// Name for variant metadata (e.g., "Unweighted", "`Weighted`"). - const NAME: &'static str; - /// The objective/metric type derived from these weights. - type Size: NumericSize; - /// Get the weight at a given index. - fn weight(&self, index: usize) -> Self::Size; - /// Number of weights. - fn len(&self) -> usize; - /// Whether the weight vector is empty. - fn is_empty(&self) -> bool { - self.len() == 0 - } -} - -/// Marker type for unweighted problems. +/// Maps a weight element to its sum/metric type. /// -/// When constructed with `Unweighted(n)`, it represents `n` unit weights (all equal to 1). -/// When constructed with `Unweighted` (the zero-sized default), it serves as a type marker. -/// -/// # Example -/// -/// ``` -/// use problemreductions::types::{Unweighted, Weights}; -/// -/// let w = Unweighted(5); -/// assert_eq!(w.len(), 5); -/// assert_eq!(w.weight(0), 1); -/// ``` -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default, Serialize, Deserialize)] -pub struct Unweighted(pub usize); +/// This decouples the per-element weight type from the accumulation type. +/// For concrete weights (`i32`, `f64`), `Sum` is the same type. +/// For the unit weight `One`, `Sum = i32`. +pub trait WeightElement: Clone + Default + 'static { + /// The numeric type used for sums and comparisons. + type Sum: NumericSize; + /// Convert this weight element to the sum type. + fn to_sum(&self) -> Self::Sum; +} -impl Unweighted { - /// Returns 1 for any index (all weights are unit). 
- pub fn get(&self, _index: usize) -> i32 { - 1 +impl WeightElement for i32 { + type Sum = i32; + fn to_sum(&self) -> i32 { + *self } } -impl Weights for Unweighted { - const NAME: &'static str = "Unweighted"; - type Size = i32; - fn weight(&self, _index: usize) -> i32 { - 1 - } - fn len(&self) -> usize { - self.0 +impl WeightElement for f64 { + type Sum = f64; + fn to_sum(&self) -> f64 { + *self } } -impl Weights for Vec { - const NAME: &'static str = "Weighted"; - type Size = i32; - fn weight(&self, index: usize) -> i32 { - self[index] - } - fn len(&self) -> usize { - self.len() +/// The constant 1. Unit weight for unweighted problems. +/// +/// When used as the weight type parameter `W`, indicates that all weights +/// are uniformly 1. `One::to_sum()` returns `1i32`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default, Serialize, Deserialize)] +pub struct One; + +impl WeightElement for One { + type Sum = i32; + fn to_sum(&self) -> i32 { + 1 } } -impl Weights for Vec { - const NAME: &'static str = "Weighted"; - type Size = f64; - fn weight(&self, index: usize) -> f64 { - self[index] - } - fn len(&self) -> usize { - self.len() +impl std::fmt::Display for One { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "One") } } -impl std::fmt::Display for Unweighted { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "Unweighted") +impl From for One { + fn from(_: i32) -> Self { + One } } +/// Backward-compatible alias for `One`. +pub type Unweighted = One; + /// Result of evaluating a constrained optimization problem. 
/// /// For optimization problems with constraints (like MaximumIndependentSet), diff --git a/src/unit_tests/graph_types.rs b/src/unit_tests/graph_types.rs index ad7e56b56..c52e64620 100644 --- a/src/unit_tests/graph_types.rs +++ b/src/unit_tests/graph_types.rs @@ -120,11 +120,11 @@ fn test_weight_subtype_entries_registered() { let entries: Vec<_> = inventory::iter::().collect(); assert!(entries .iter() - .any(|e| e.subtype == "Unweighted" && e.supertype == "i32")); + .any(|e| e.subtype == "One" && e.supertype == "i32")); assert!(entries .iter() .any(|e| e.subtype == "i32" && e.supertype == "f64")); assert!(entries .iter() - .any(|e| e.subtype == "Unweighted" && e.supertype == "f64")); + .any(|e| e.subtype == "One" && e.supertype == "f64")); } diff --git a/src/unit_tests/models/graph/traveling_salesman.rs b/src/unit_tests/models/graph/traveling_salesman.rs index 8c6fb8026..0a797ab99 100644 --- a/src/unit_tests/models/graph/traveling_salesman.rs +++ b/src/unit_tests/models/graph/traveling_salesman.rs @@ -9,8 +9,12 @@ fn test_traveling_salesman_creation() { let problem = TravelingSalesman::::new( 4, vec![ - (0, 1, 10), (0, 2, 15), (0, 3, 20), - (1, 2, 35), (1, 3, 25), (2, 3, 30), + (0, 1, 10), + (0, 2, 15), + (0, 3, 20), + (1, 2, 35), + (1, 3, 25), + (2, 3, 30), ], ); assert_eq!(problem.num_vertices(), 4); @@ -34,8 +38,12 @@ fn test_traveling_salesman_weighted() { let problem = TravelingSalesman::::new( 4, vec![ - (0, 1, 10), (0, 2, 15), (0, 3, 20), - (1, 2, 35), (1, 3, 25), (2, 3, 30), + (0, 1, 10), + (0, 2, 15), + (0, 3, 20), + (1, 2, 35), + (1, 3, 25), + (2, 3, 30), ], ); assert!(problem.is_weighted()); @@ -58,8 +66,12 @@ fn test_evaluate_invalid_degree() { let problem = TravelingSalesman::::new( 4, vec![ - (0, 1, 10), (0, 2, 15), (0, 3, 20), - (1, 2, 35), (1, 3, 25), (2, 3, 30), + (0, 1, 10), + (0, 2, 15), + (0, 3, 20), + (1, 2, 35), + (1, 3, 25), + (2, 3, 30), ], ); // edges: 0-1, 0-2, 0-3, 1-2, 1-3, 2-3 @@ -72,10 +84,7 @@ fn 
test_evaluate_invalid_not_connected() { // 6 vertices, two disjoint triangles: 0-1-2-0 and 3-4-5-3 let problem = TravelingSalesman::::unweighted( 6, - vec![ - (0, 1), (1, 2), (0, 2), - (3, 4), (4, 5), (3, 5), - ], + vec![(0, 1), (1, 2), (0, 2), (3, 4), (4, 5), (3, 5)], ); // Select all 6 edges: two disjoint cycles, not a single Hamiltonian cycle assert_eq!(problem.evaluate(&[1, 1, 1, 1, 1, 1]), SolutionSize::Invalid); @@ -106,8 +115,12 @@ fn test_brute_force_k4() { let problem = TravelingSalesman::::new( 4, vec![ - (0, 1, 10), (0, 2, 15), (0, 3, 20), - (1, 2, 35), (1, 3, 25), (2, 3, 30), + (0, 1, 10), + (0, 2, 15), + (0, 3, 20), + (1, 2, 35), + (1, 3, 25), + (2, 3, 30), ], ); let solver = BruteForce::new(); @@ -122,10 +135,8 @@ fn test_brute_force_k4() { #[test] fn test_brute_force_path_graph_no_solution() { // Instance 2 from issue: path graph, no Hamiltonian cycle exists - let problem = TravelingSalesman::::unweighted( - 4, - vec![(0, 1), (1, 2), (2, 3)], - ); + let problem = + TravelingSalesman::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); let solver = BruteForce::new(); let solutions = solver.find_all_best(&problem); assert!(solutions.is_empty()); @@ -159,10 +170,8 @@ fn test_brute_force_bipartite_no_solution() { #[test] fn test_direction() { - let problem = TravelingSalesman::::unweighted( - 3, - vec![(0, 1), (1, 2), (0, 2)], - ); + let problem = + TravelingSalesman::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); assert_eq!(problem.direction(), Direction::Minimize); } @@ -183,29 +192,21 @@ fn test_is_hamiltonian_cycle_function() { &[true, true, true] )); // Path: not a cycle - assert!(!is_hamiltonian_cycle( - 3, - &[(0, 1), (1, 2)], - &[true, true] - )); + assert!(!is_hamiltonian_cycle(3, &[(0, 1), (1, 2)], &[true, true])); } #[test] fn test_set_weights() { - let mut problem = TravelingSalesman::::unweighted( - 3, - vec![(0, 1), (1, 2), (0, 2)], - ); + let mut problem = + TravelingSalesman::::unweighted(3, vec![(0, 1), (1, 2), (0, 2)]); 
problem.set_weights(vec![5, 10, 15]); assert_eq!(problem.weights(), vec![5, 10, 15]); } #[test] fn test_edges() { - let problem = TravelingSalesman::::new( - 3, - vec![(0, 1, 10), (1, 2, 20), (0, 2, 30)], - ); + let problem = + TravelingSalesman::::new(3, vec![(0, 1, 10), (1, 2, 20), (0, 2, 30)]); let edges = problem.edges(); assert_eq!(edges.len(), 3); } @@ -228,10 +229,8 @@ fn test_from_graph_unit_weights() { #[test] fn test_brute_force_triangle_weighted() { // Triangle with weights: unique Hamiltonian cycle using all edges - let problem = TravelingSalesman::::new( - 3, - vec![(0, 1, 5), (1, 2, 10), (0, 2, 15)], - ); + let problem = + TravelingSalesman::::new(3, vec![(0, 1, 5), (1, 2, 10), (0, 2, 15)]); let solver = BruteForce::new(); let solutions = solver.find_all_best(&problem); assert_eq!(solutions.len(), 1); diff --git a/src/unit_tests/rules/circuit_spinglass.rs b/src/unit_tests/rules/circuit_spinglass.rs index 1ab289689..7a6553044 100644 --- a/src/unit_tests/rules/circuit_spinglass.rs +++ b/src/unit_tests/rules/circuit_spinglass.rs @@ -1,14 +1,13 @@ use super::*; use crate::models::specialized::Circuit; use crate::solvers::BruteForce; -use crate::types::NumericSize; +use crate::types::{NumericSize, WeightElement}; use num_traits::Num; /// Verify a gadget has the correct ground states. 
fn verify_gadget_truth_table(gadget: &LogicGadget, expected: &[(Vec, Vec)]) where - W: Clone - + Default + W: WeightElement + PartialOrd + Num + Zero diff --git a/src/unit_tests/rules/graph.rs b/src/unit_tests/rules/graph.rs index f2702e06d..9011e05ba 100644 --- a/src/unit_tests/rules/graph.rs +++ b/src/unit_tests/rules/graph.rs @@ -138,14 +138,14 @@ fn test_to_json() { assert!(json.edges.len() >= 10); // Check that IS -> VC and VC -> IS both exist as separate directed edges - let is_to_vc = json - .edges - .iter() - .any(|e| e.source.name == "MaximumIndependentSet" && e.target.name == "MinimumVertexCover"); - let vc_to_is = json - .edges - .iter() - .any(|e| e.source.name == "MinimumVertexCover" && e.target.name == "MaximumIndependentSet"); + let is_to_vc = json.edges.iter().any(|e| { + json.source_node(e).name == "MaximumIndependentSet" + && json.target_node(e).name == "MinimumVertexCover" + }); + let vc_to_is = json.edges.iter().any(|e| { + json.source_node(e).name == "MinimumVertexCover" + && json.target_node(e).name == "MaximumIndependentSet" + }); assert!(is_to_vc, "Should have IS -> VC edge"); assert!(vc_to_is, "Should have VC -> IS edge"); } @@ -170,66 +170,56 @@ fn test_to_json_string() { } #[test] -fn test_categorize_type() { - // Graph problems - assert_eq!( - ReductionGraph::categorize_type("MaximumIndependentSet"), - "graph" - ); - assert_eq!( - ReductionGraph::categorize_type("MinimumVertexCover"), - "graph" - ); +fn test_category_from_module_path() { assert_eq!( - ReductionGraph::categorize_type("MaxCut"), + ReductionGraph::category_from_module_path( + "problemreductions::models::graph::maximum_independent_set" + ), "graph" ); - assert_eq!(ReductionGraph::categorize_type("KColoring"), "graph"); assert_eq!( - ReductionGraph::categorize_type("MinimumDominatingSet"), - "graph" + ReductionGraph::category_from_module_path( + "problemreductions::models::set::minimum_set_covering" + ), + "set" ); assert_eq!( - 
ReductionGraph::categorize_type("MaximumMatching"), - "graph" + ReductionGraph::category_from_module_path("problemreductions::models::optimization::qubo"), + "optimization" ); - - // Set problems assert_eq!( - ReductionGraph::categorize_type("MaximumSetPacking"), - "set" + ReductionGraph::category_from_module_path("problemreductions::models::satisfiability::sat"), + "satisfiability" ); assert_eq!( - ReductionGraph::categorize_type("MinimumSetCovering"), - "set" + ReductionGraph::category_from_module_path( + "problemreductions::models::specialized::factoring" + ), + "specialized" ); - - // Optimization + // Fallback for unexpected format assert_eq!( - ReductionGraph::categorize_type("SpinGlass"), - "optimization" + ReductionGraph::category_from_module_path("foo::bar"), + "other" ); - assert_eq!(ReductionGraph::categorize_type("QUBO"), "optimization"); +} - // Satisfiability - assert_eq!( - ReductionGraph::categorize_type("Satisfiability"), - "satisfiability" - ); +#[test] +fn test_doc_path_from_module_path() { assert_eq!( - ReductionGraph::categorize_type("KSatisfiability<3>"), - "satisfiability" + ReductionGraph::doc_path_from_module_path( + "problemreductions::models::graph::maximum_independent_set", + "MaximumIndependentSet" + ), + "models/graph/struct.MaximumIndependentSet.html" ); assert_eq!( - ReductionGraph::categorize_type("CircuitSAT"), - "satisfiability" + ReductionGraph::doc_path_from_module_path( + "problemreductions::models::optimization::qubo", + "QUBO" + ), + "models/optimization/struct.QUBO.html" ); - - // Specialized - assert_eq!(ReductionGraph::categorize_type("Factoring"), "specialized"); - - // Unknown - assert_eq!(ReductionGraph::categorize_type("UnknownProblem"), "other"); } #[test] @@ -370,62 +360,37 @@ fn test_to_json_file() { } #[test] -fn test_has_direct_reduction_unregistered_types() { - // Test with a type that's not registered in the graph - struct UnregisteredType; - +fn test_unknown_name_returns_empty() { let graph = 
ReductionGraph::new(); - // Source type not registered - assert!( - !graph.has_direct_reduction::>() - ); + // Unknown source + assert!(!graph.has_direct_reduction_by_name("UnknownProblem", "MaximumIndependentSet")); + // Unknown target + assert!(!graph.has_direct_reduction_by_name("MaximumIndependentSet", "UnknownProblem")); + // Both unknown + assert!(!graph.has_direct_reduction_by_name("UnknownA", "UnknownB")); - // Target type not registered - assert!( - !graph.has_direct_reduction::, UnregisteredType>() - ); + // find_paths with unknown name + assert!(graph + .find_paths_by_name("UnknownProblem", "MaximumIndependentSet") + .is_empty()); + assert!(graph + .find_paths_by_name("MaximumIndependentSet", "UnknownProblem") + .is_empty()); - // Both types not registered - assert!(!graph.has_direct_reduction::()); + // find_shortest_path with unknown name + assert!(graph + .find_shortest_path_by_name("UnknownProblem", "MaximumIndependentSet") + .is_none()); } #[test] -fn test_find_paths_unregistered_source() { - struct UnregisteredType; - +fn test_category_derived_from_schema() { + // CircuitSAT's category is derived from its ProblemSchemaEntry module_path let graph = ReductionGraph::new(); - let paths = graph.find_paths::>(); - assert!(paths.is_empty()); -} - -#[test] -fn test_find_paths_unregistered_target() { - struct UnregisteredType; - - let graph = ReductionGraph::new(); - let paths = graph.find_paths::, UnregisteredType>(); - assert!(paths.is_empty()); -} - -#[test] -fn test_find_shortest_path_no_path() { - struct UnregisteredType; - - let graph = ReductionGraph::new(); - let path = - graph.find_shortest_path::>(); - assert!(path.is_none()); -} - -#[test] -fn test_categorize_circuit_as_specialized() { - // CircuitSAT should be categorized as specialized (contains "Circuit") - assert_eq!( - ReductionGraph::categorize_type("CircuitSAT"), - "satisfiability" - ); - // It contains "SAT" so it goes to satisfiability + let json = graph.to_json(); + let circuit = 
json.nodes.iter().find(|n| n.name == "CircuitSAT").unwrap(); + assert_eq!(circuit.category, "specialized"); } #[test] @@ -434,26 +399,24 @@ fn test_directed_edge_pairs() { let json = graph.to_json(); // IS <-> VC: both directions should exist as separate edges - let is_to_vc = json - .edges - .iter() - .any(|e| e.source.name == "MaximumIndependentSet" && e.target.name == "MinimumVertexCover"); - let vc_to_is = json - .edges - .iter() - .any(|e| e.source.name == "MinimumVertexCover" && e.target.name == "MaximumIndependentSet"); + let is_to_vc = json.edges.iter().any(|e| { + json.source_node(e).name == "MaximumIndependentSet" + && json.target_node(e).name == "MinimumVertexCover" + }); + let vc_to_is = json.edges.iter().any(|e| { + json.source_node(e).name == "MinimumVertexCover" + && json.target_node(e).name == "MaximumIndependentSet" + }); assert!(is_to_vc, "Should have IS -> VC edge"); assert!(vc_to_is, "Should have VC -> IS edge"); // Factoring -> CircuitSAT: only forward direction - let factoring_to_circuit = json - .edges - .iter() - .any(|e| e.source.name == "Factoring" && e.target.name == "CircuitSAT"); - let circuit_to_factoring = json - .edges - .iter() - .any(|e| e.source.name == "CircuitSAT" && e.target.name == "Factoring"); + let factoring_to_circuit = json.edges.iter().any(|e| { + json.source_node(e).name == "Factoring" && json.target_node(e).name == "CircuitSAT" + }); + let circuit_to_factoring = json.edges.iter().any(|e| { + json.source_node(e).name == "CircuitSAT" && json.target_node(e).name == "Factoring" + }); assert!(factoring_to_circuit, "Should have Factoring -> CircuitSAT"); assert!( !circuit_to_factoring, @@ -665,8 +628,8 @@ fn test_find_cheapest_path_unknown_target() { #[test] fn test_reduction_edge_struct() { let edge = ReductionEdge { - source_variant: vec![("graph", "PlanarGraph"), ("weight", "Unweighted")], - target_variant: vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], + source_variant: vec![("graph", "PlanarGraph"), 
("weight", "One")], + target_variant: vec![("graph", "SimpleGraph"), ("weight", "One")], overhead: ReductionOverhead::default(), }; @@ -678,7 +641,7 @@ fn test_reduction_edge_struct() { fn test_reduction_edge_default_graph() { // When no "graph" key is present, default to SimpleGraph let edge = ReductionEdge { - source_variant: vec![("weight", "Unweighted")], + source_variant: vec![("weight", "One")], target_variant: vec![], overhead: ReductionOverhead::default(), }; @@ -721,10 +684,10 @@ fn test_weight_hierarchy_built() { let hierarchy = graph.weight_hierarchy(); assert!( hierarchy - .get("Unweighted") + .get("One") .map(|s| s.contains("i32")) .unwrap_or(false), - "Unweighted should have i32 as supertype" + "One should have i32 as supertype" ); assert!( hierarchy @@ -735,10 +698,10 @@ fn test_weight_hierarchy_built() { ); assert!( hierarchy - .get("Unweighted") + .get("One") .map(|s| s.contains("f64")) .unwrap_or(false), - "Unweighted should transitively have f64 as supertype" + "One should transitively have f64 as supertype" ); } @@ -748,17 +711,17 @@ fn test_is_weight_subtype() { // Reflexive assert!(graph.is_weight_subtype("i32", "i32")); - assert!(graph.is_weight_subtype("Unweighted", "Unweighted")); + assert!(graph.is_weight_subtype("One", "One")); // Direct - assert!(graph.is_weight_subtype("Unweighted", "i32")); + assert!(graph.is_weight_subtype("One", "i32")); assert!(graph.is_weight_subtype("i32", "f64")); // Transitive - assert!(graph.is_weight_subtype("Unweighted", "f64")); + assert!(graph.is_weight_subtype("One", "f64")); // Not supertypes - assert!(!graph.is_weight_subtype("i32", "Unweighted")); + assert!(!graph.is_weight_subtype("i32", "One")); assert!(!graph.is_weight_subtype("f64", "i32")); } @@ -783,8 +746,8 @@ fn test_to_json_edges_have_variants() { // Check that edges have source and target variant refs for edge in &json.edges { - assert!(!edge.source.name.is_empty()); - assert!(!edge.target.name.is_empty()); + 
assert!(!json.source_node(edge).name.is_empty()); + assert!(!json.target_node(edge).name.is_empty()); } } @@ -802,7 +765,8 @@ fn test_json_variant_content() { // Find an edge involving MaximumIndependentSet (could be source or target) let is_edge = json.edges.iter().find(|e| { - e.source.name == "MaximumIndependentSet" || e.target.name == "MaximumIndependentSet" + json.source_node(e).name == "MaximumIndependentSet" + || json.target_node(e).name == "MaximumIndependentSet" }); assert!( is_edge.is_some(), @@ -811,7 +775,7 @@ fn test_json_variant_content() { } #[test] -fn test_concrete_variant_nodes_in_json() { +fn test_reduction_variant_nodes_in_json() { let graph = ReductionGraph::new(); let json = graph.to_json(); @@ -828,11 +792,7 @@ fn test_concrete_variant_nodes_in_json() { }); assert!(mis_unitdisk, "MIS/UnitDiskGraph node should exist"); - let maxcut_gridgraph = json - .nodes - .iter() - .any(|n| n.name == "MaxCut" && n.variant.get("graph") == Some(&"GridGraph".to_string())); - assert!(maxcut_gridgraph, "MaxCut/GridGraph node should exist"); + // MaxCut/GridGraph was removed (orphan with no reduction path) } #[test] @@ -842,10 +802,10 @@ fn test_natural_edge_graph_relaxation() { // MIS/GridGraph -> MIS/SimpleGraph should exist (graph type relaxation) let has_edge = json.edges.iter().any(|e| { - e.source.name == "MaximumIndependentSet" - && e.target.name == "MaximumIndependentSet" - && e.source.variant.get("graph") == Some(&"GridGraph".to_string()) - && e.target.variant.get("graph") == Some(&"SimpleGraph".to_string()) + json.source_node(e).name == "MaximumIndependentSet" + && json.target_node(e).name == "MaximumIndependentSet" + && json.source_node(e).variant.get("graph") == Some(&"GridGraph".to_string()) + && json.target_node(e).variant.get("graph") == Some(&"SimpleGraph".to_string()) }); assert!( has_edge, @@ -854,40 +814,38 @@ fn test_natural_edge_graph_relaxation() { } #[test] -fn test_natural_edge_gridgraph_to_unitdisk() { +fn 
test_natural_edge_triangular_to_simplegraph() { let graph = ReductionGraph::new(); let json = graph.to_json(); - // MIS/GridGraph -> MIS/UnitDiskGraph should exist + // MIS/Triangular -> MIS/SimpleGraph should exist (Triangular is a subtype of SimpleGraph) let has_edge = json.edges.iter().any(|e| { - e.source.name == "MaximumIndependentSet" - && e.target.name == "MaximumIndependentSet" - && e.source.variant.get("graph") == Some(&"GridGraph".to_string()) - && e.target.variant.get("graph") == Some(&"UnitDiskGraph".to_string()) + json.source_node(e).name == "MaximumIndependentSet" + && json.target_node(e).name == "MaximumIndependentSet" + && json.source_node(e).variant.get("graph") == Some(&"Triangular".to_string()) + && json.target_node(e).variant.get("graph") == Some(&"SimpleGraph".to_string()) }); assert!( has_edge, - "Natural edge MIS/GridGraph -> MIS/UnitDiskGraph should exist" + "Natural edge MIS/Triangular -> MIS/SimpleGraph should exist" ); } #[test] -fn test_natural_edge_weight_promotion() { +fn test_natural_edge_gridgraph_to_unitdisk() { let graph = ReductionGraph::new(); let json = graph.to_json(); - // MIS{SimpleGraph, Unweighted} -> MIS{SimpleGraph, i32} should exist + // MIS/GridGraph -> MIS/UnitDiskGraph should exist let has_edge = json.edges.iter().any(|e| { - e.source.name == "MaximumIndependentSet" - && e.target.name == "MaximumIndependentSet" - && e.source.variant.get("graph") == Some(&"SimpleGraph".to_string()) - && e.target.variant.get("graph") == Some(&"SimpleGraph".to_string()) - && e.source.variant.get("weight") == Some(&"Unweighted".to_string()) - && e.target.variant.get("weight") == Some(&"i32".to_string()) + json.source_node(e).name == "MaximumIndependentSet" + && json.target_node(e).name == "MaximumIndependentSet" + && json.source_node(e).variant.get("graph") == Some(&"GridGraph".to_string()) + && json.target_node(e).variant.get("graph") == Some(&"UnitDiskGraph".to_string()) }); assert!( has_edge, - "Natural edge MIS/Unweighted -> MIS/i32 
should exist" + "Natural edge MIS/GridGraph -> MIS/UnitDiskGraph should exist" ); } @@ -896,16 +854,18 @@ fn test_no_natural_edge_wrong_direction() { let graph = ReductionGraph::new(); let json = graph.to_json(); - // MIS/SimpleGraph -> MIS/GridGraph should NOT exist (wrong direction) - let has_edge = json.edges.iter().any(|e| { - e.source.name == "MaximumIndependentSet" - && e.target.name == "MaximumIndependentSet" - && e.source.variant.get("graph") == Some(&"SimpleGraph".to_string()) - && e.target.variant.get("graph") == Some(&"GridGraph".to_string()) + // No NATURAL edge from SimpleGraph -> GridGraph (wrong direction for graph relaxation). + // A real reduction edge from SimpleGraph -> GridGraph may exist (unit disk mapping). + let has_natural_edge = json.edges.iter().any(|e| { + json.source_node(e).name == "MaximumIndependentSet" + && json.target_node(e).name == "MaximumIndependentSet" + && json.source_node(e).variant.get("graph") == Some(&"SimpleGraph".to_string()) + && json.target_node(e).variant.get("graph") == Some(&"GridGraph".to_string()) + && e.doc_path.is_empty() // natural edges have empty doc_path }); assert!( - !has_edge, - "Should NOT have MIS/SimpleGraph -> MIS/GridGraph" + !has_natural_edge, + "Should NOT have natural edge MIS/SimpleGraph -> MIS/GridGraph" ); } @@ -916,12 +876,12 @@ fn test_no_natural_self_edge() { // No self-edges (same variant to same variant) for edge in &json.edges { - if edge.source.name == edge.target.name { + if json.source_node(edge).name == json.target_node(edge).name { assert!( - edge.source.variant != edge.target.variant, + json.source_node(edge).variant != json.target_node(edge).variant, "Should not have self-edge: {} {:?}", - edge.source.name, - edge.source.variant + json.source_node(edge).name, + json.source_node(edge).variant ); } } @@ -934,12 +894,12 @@ fn test_natural_edge_has_identity_overhead() { // Find a natural edge and verify its overhead is identity (field == formula) let natural_edge = 
json.edges.iter().find(|e| { - e.source.name == "MaximumIndependentSet" - && e.target.name == "MaximumIndependentSet" - && e.source.variant.get("graph") == Some(&"GridGraph".to_string()) - && e.target.variant.get("graph") == Some(&"SimpleGraph".to_string()) - && e.source.variant.get("weight") == Some(&"Unweighted".to_string()) - && e.target.variant.get("weight") == Some(&"Unweighted".to_string()) + json.source_node(e).name == "MaximumIndependentSet" + && json.target_node(e).name == "MaximumIndependentSet" + && json.source_node(e).variant.get("graph") == Some(&"GridGraph".to_string()) + && json.target_node(e).variant.get("graph") == Some(&"SimpleGraph".to_string()) + && json.source_node(e).variant.get("weight") == Some(&"i32".to_string()) + && json.target_node(e).variant.get("weight") == Some(&"i32".to_string()) }); assert!(natural_edge.is_some(), "Natural edge should exist"); let edge = natural_edge.unwrap(); diff --git a/src/unit_tests/rules/maximumindependentset_gridgraph.rs b/src/unit_tests/rules/maximumindependentset_gridgraph.rs new file mode 100644 index 000000000..dfc39cda3 --- /dev/null +++ b/src/unit_tests/rules/maximumindependentset_gridgraph.rs @@ -0,0 +1,71 @@ +use super::*; +use crate::models::graph::MaximumIndependentSet; +use crate::solvers::BruteForce; +use crate::topology::{SimpleGraph, UnitDiskGraph}; + +#[test] +fn test_mis_simple_to_grid_closed_loop() { + // Triangle graph: 3 vertices, 3 edges + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2), (0, 2)]); + let result = ReduceTo::, i32>>::reduce_to(&problem); + let target = result.target_problem(); + + // The grid graph should have more vertices than the original + assert!(target.num_vertices() > 3); + + // Find best solution on the grid graph using brute force + let solver = BruteForce::new(); + let grid_solutions = solver.find_all_best(target); + assert!(!grid_solutions.is_empty()); + + // Map solution back + let original_solution = result.extract_solution(&grid_solutions[0]); 
+ assert_eq!(original_solution.len(), 3); + + // For a triangle, MIS size is 1 + let size: usize = original_solution.iter().sum(); + assert_eq!(size, 1, "Max IS in triangle should be 1"); +} + +#[test] +fn test_mis_simple_to_grid_path_graph() { + // Path graph: 0-1-2 + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let result = ReduceTo::, i32>>::reduce_to(&problem); + let target = result.target_problem(); + + let solver = BruteForce::new(); + let grid_solutions = solver.find_all_best(target); + assert!(!grid_solutions.is_empty()); + + let original_solution = result.extract_solution(&grid_solutions[0]); + + // Path of 3 vertices has MIS size 2 (vertices 0 and 2) + let size: usize = original_solution.iter().sum(); + assert_eq!(size, 2, "Max IS in path should be 2"); +} + +#[test] +fn test_mis_unitdisk_to_grid_closed_loop() { + // Create a UnitDiskGraph: 3 points where 0-1 are close, 2 is far + let udg = UnitDiskGraph::new(vec![(0.0, 0.0), (0.5, 0.0), (3.0, 0.0)], 1.0); + // Only edge is 0-1 (distance 0.5 <= 1.0), vertex 2 is isolated + assert_eq!(udg.num_edges(), 1); + + let problem = MaximumIndependentSet::::from_graph(udg, vec![1, 1, 1]); + let result = ReduceTo::, i32>>::reduce_to(&problem); + let target = result.target_problem(); + + assert!(target.num_vertices() >= 3); + + let solver = BruteForce::new(); + let grid_solutions = solver.find_all_best(target); + assert!(!grid_solutions.is_empty()); + + let original_solution = result.extract_solution(&grid_solutions[0]); + assert_eq!(original_solution.len(), 3); + + // MIS should be size 2 (one from {0,1} + vertex 2) + let size: usize = original_solution.iter().sum(); + assert_eq!(size, 2, "Max IS should be 2"); +} diff --git a/src/unit_tests/rules/maximumindependentset_triangular.rs b/src/unit_tests/rules/maximumindependentset_triangular.rs new file mode 100644 index 000000000..bfe54b02a --- /dev/null +++ b/src/unit_tests/rules/maximumindependentset_triangular.rs @@ -0,0 +1,59 @@ +use 
super::*; +use crate::models::graph::MaximumIndependentSet; +use crate::topology::{Graph, SimpleGraph, Triangular}; + +#[test] +fn test_mis_simple_to_triangular_closed_loop() { + // Path graph: 0-1-2 + let problem = MaximumIndependentSet::::new(3, vec![(0, 1), (1, 2)]); + let result = ReduceTo::>::reduce_to(&problem); + let target = result.target_problem(); + + // The triangular graph should have more vertices than the original + assert!(target.num_vertices() > 3); + + // Map a trivial zero solution back to verify dimensions + let zero_config = vec![0; target.num_vertices()]; + let original_solution = result.extract_solution(&zero_config); + assert_eq!(original_solution.len(), 3); +} + +#[test] +fn test_mis_simple_to_triangular_graph_methods() { + // Single edge graph: 0-1 + let problem = MaximumIndependentSet::::new(2, vec![(0, 1)]); + let result = ReduceTo::>::reduce_to(&problem); + let target = result.target_problem(); + let graph = target.graph(); + + // Exercise all Graph trait methods on the Triangular type + let n = graph.num_vertices(); + assert!(n > 2); + + let m = graph.num_edges(); + assert!(m > 0); + + let edges = graph.edges(); + assert_eq!(edges.len(), m); + + // Check edges are consistent with has_edge + for &(u, v) in &edges { + assert!(graph.has_edge(u, v)); + assert!(graph.has_edge(v, u)); // symmetric + } + + // Check neighbors are consistent with edges + for v in 0..n { + let nbrs = graph.neighbors(v); + for &u in &nbrs { + assert!(graph.has_edge(v, u)); + } + } + + // Exercise Triangular-specific methods + let nodes = graph.nodes(); + assert_eq!(nodes.len(), n); + + let inner = graph.grid_graph(); + assert_eq!(inner.num_vertices(), n); +} diff --git a/src/unit_tests/rules/maximumsetpacking_qubo.rs b/src/unit_tests/rules/maximumsetpacking_qubo.rs index d8f9846c6..b0274d0fa 100644 --- a/src/unit_tests/rules/maximumsetpacking_qubo.rs +++ b/src/unit_tests/rules/maximumsetpacking_qubo.rs @@ -7,7 +7,7 @@ fn test_setpacking_to_qubo_closed_loop() { // 
3 sets: {0,2}, {1,2}, {0,3} // Overlaps: (0,1) share element 2, (0,2) share element 0 // Max packing: sets 1 and 2 → {1,2} and {0,3} (no overlap) - let sp = MaximumSetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); let reduction = ReduceTo::>::reduce_to(&sp); let qubo = reduction.target_problem(); @@ -24,7 +24,7 @@ fn test_setpacking_to_qubo_closed_loop() { #[test] fn test_setpacking_to_qubo_disjoint() { // Disjoint sets: all can be packed - let sp = MaximumSetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 1], vec![2, 3], vec![4]]); let reduction = ReduceTo::>::reduce_to(&sp); let qubo = reduction.target_problem(); @@ -42,7 +42,7 @@ fn test_setpacking_to_qubo_disjoint() { #[test] fn test_setpacking_to_qubo_all_overlap() { // All sets overlap: only 1 can be selected - let sp = MaximumSetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![0, 3]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 1], vec![0, 2], vec![0, 3]]); let reduction = ReduceTo::>::reduce_to(&sp); let qubo = reduction.target_problem(); @@ -58,7 +58,7 @@ fn test_setpacking_to_qubo_all_overlap() { #[test] fn test_setpacking_to_qubo_structure() { - let sp = MaximumSetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); + let sp = MaximumSetPacking::::new(vec![vec![0, 2], vec![1, 2], vec![0, 3]]); let reduction = ReduceTo::>::reduce_to(&sp); let qubo = reduction.target_problem(); diff --git a/src/unit_tests/rules/natural.rs b/src/unit_tests/rules/natural.rs new file mode 100644 index 000000000..c5d1bf9d5 --- /dev/null +++ b/src/unit_tests/rules/natural.rs @@ -0,0 +1,49 @@ +use crate::models::graph::MaximumIndependentSet; +use crate::rules::{ReduceTo, ReductionResult}; +use crate::solvers::ILPSolver; +use crate::topology::{SimpleGraph, Triangular}; +use crate::traits::Problem; + +#[test] +fn test_mis_triangular_to_simple_closed_loop() { + // Petersen 
graph: 10 vertices, 15 edges, max IS = 4 + let source = MaximumIndependentSet::::new( + 10, + vec![ + (0, 1), (1, 2), (2, 3), (3, 4), (4, 0), // outer cycle + (5, 7), (7, 9), (9, 6), (6, 8), (8, 5), // inner pentagram + (0, 5), (1, 6), (2, 7), (3, 8), (4, 9), // spokes + ], + ); + + // SimpleGraph → Triangular (unit disk mapping) + let to_tri = ReduceTo::>::reduce_to(&source); + let tri_problem = to_tri.target_problem(); + + // Triangular → SimpleGraph (natural edge: graph subtype relaxation) + let to_simple = ReduceTo::>::reduce_to(tri_problem); + let simple_problem = to_simple.target_problem(); + + // Graph structure is preserved by identity cast + assert_eq!(simple_problem.num_vertices(), tri_problem.num_vertices()); + assert_eq!(simple_problem.num_edges(), tri_problem.num_edges()); + + // Solve with ILP on the relaxed SimpleGraph problem + let solver = ILPSolver::new(); + let solution = solver.solve_reduced(simple_problem).expect("ILP should find a solution"); + + // Identity mapping: solution is unchanged + let extracted = to_simple.extract_solution(&solution); + assert_eq!(extracted, solution); + + // Extracted solution is valid on the Triangular problem + let metric = tri_problem.evaluate(&extracted); + assert!(metric.is_valid()); + + // Map back through the full chain to the original Petersen graph + let original_solution = to_tri.extract_solution(&extracted); + let original_metric = source.evaluate(&original_solution); + assert!(original_metric.is_valid()); + // Petersen graph max IS = 4 + assert_eq!(original_solution.iter().sum::(), 4); +} diff --git a/src/unit_tests/rules/registry.rs b/src/unit_tests/rules/registry.rs index 99519935f..d51b8d32f 100644 --- a/src/unit_tests/rules/registry.rs +++ b/src/unit_tests/rules/registry.rs @@ -23,8 +23,8 @@ fn test_reduction_entry_overhead() { let entry = ReductionEntry { source_name: "TestSource", target_name: "TestTarget", - source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], - 
target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], + source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "One")], + target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "One")], overhead_fn: || ReductionOverhead::new(vec![("n", poly!(2 * n))]), module_path: "test::module", }; @@ -40,8 +40,8 @@ fn test_reduction_entry_debug() { let entry = ReductionEntry { source_name: "A", target_name: "B", - source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], - target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], + source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "One")], + target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "One")], overhead_fn: || ReductionOverhead::default(), module_path: "test::module", }; @@ -56,8 +56,8 @@ fn test_is_base_reduction_unweighted() { let entry = ReductionEntry { source_name: "A", target_name: "B", - source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], - target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], + source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "One")], + target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "One")], overhead_fn: || ReductionOverhead::default(), module_path: "test::module", }; @@ -70,7 +70,7 @@ fn test_is_base_reduction_source_weighted() { source_name: "A", target_name: "B", source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "i32")], - target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], + target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "One")], overhead_fn: || ReductionOverhead::default(), module_path: "test::module", }; @@ -82,7 +82,7 @@ fn test_is_base_reduction_target_weighted() { let entry = ReductionEntry { source_name: "A", target_name: "B", - source_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "Unweighted")], + source_variant_fn: || 
vec![("graph", "SimpleGraph"), ("weight", "One")], target_variant_fn: || vec![("graph", "SimpleGraph"), ("weight", "f64")], overhead_fn: || ReductionOverhead::default(), module_path: "test::module", diff --git a/src/unit_tests/rules/sat_ksat.rs b/src/unit_tests/rules/sat_ksat.rs index 9775c061e..137919a6b 100644 --- a/src/unit_tests/rules/sat_ksat.rs +++ b/src/unit_tests/rules/sat_ksat.rs @@ -218,22 +218,23 @@ fn test_roundtrip_sat_3sat_sat() { } #[test] -fn test_sat_to_4sat() { +fn test_sat_to_3sat_mixed_clause_types() { + // Test padding, exact-size, and splitting all at once let sat = Satisfiability::new( 4, vec![ CNFClause::new(vec![1, 2]), // Needs padding - CNFClause::new(vec![1, 2, 3, 4]), // Exact + CNFClause::new(vec![1, 2, 3]), // Exact CNFClause::new(vec![1, 2, 3, 4, -1]), // Needs splitting ], ); - let reduction = ReduceTo::>::reduce_to(&sat); + let reduction = ReduceTo::>::reduce_to(&sat); let ksat = reduction.target_problem(); - // All clauses should have exactly 4 literals + // All clauses should have exactly 3 literals for clause in ksat.clauses() { - assert_eq!(clause.len(), 4); + assert_eq!(clause.len(), 3); } } diff --git a/src/unit_tests/rules/travelingsalesman_ilp.rs b/src/unit_tests/rules/travelingsalesman_ilp.rs index 76d4654f6..c7fb97736 100644 --- a/src/unit_tests/rules/travelingsalesman_ilp.rs +++ b/src/unit_tests/rules/travelingsalesman_ilp.rs @@ -6,10 +6,8 @@ use crate::types::SolutionSize; #[test] fn test_reduction_creates_valid_ilp_c4() { // C4 cycle: 4 vertices, 4 edges. Unique Hamiltonian cycle (the cycle itself). 
- let problem = TravelingSalesman::::unweighted( - 4, - vec![(0, 1), (1, 2), (2, 3), (3, 0)], - ); + let problem = + TravelingSalesman::::unweighted(4, vec![(0, 1), (1, 2), (2, 3), (3, 0)]); let reduction: ReductionTSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -26,10 +24,8 @@ fn test_reduction_creates_valid_ilp_c4() { #[test] fn test_reduction_c4_closed_loop() { // C4 cycle with unit weights: optimal tour cost = 4 - let problem = TravelingSalesman::::unweighted( - 4, - vec![(0, 1), (1, 2), (2, 3), (3, 0)], - ); + let problem = + TravelingSalesman::::unweighted(4, vec![(0, 1), (1, 2), (2, 3), (3, 0)]); let reduction: ReductionTSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -49,8 +45,12 @@ fn test_reduction_k4_weighted_closed_loop() { let problem = TravelingSalesman::::new( 4, vec![ - (0, 1, 10), (0, 2, 15), (0, 3, 20), - (1, 2, 35), (1, 3, 25), (2, 3, 30), + (0, 1, 10), + (0, 2, 15), + (0, 3, 20), + (1, 2, 35), + (1, 3, 25), + (2, 3, 30), ], ); @@ -68,7 +68,10 @@ fn test_reduction_k4_weighted_closed_loop() { let ilp_metric = problem.evaluate(&extracted); assert!(ilp_metric.is_valid()); - assert_eq!(ilp_metric, bf_metric, "ILP and brute force must agree on optimal cost"); + assert_eq!( + ilp_metric, bf_metric, + "ILP and brute force must agree on optimal cost" + ); } #[test] @@ -93,26 +96,25 @@ fn test_reduction_c5_unweighted_closed_loop() { #[test] fn test_no_hamiltonian_cycle_infeasible() { // Path graph 0-1-2-3: no Hamiltonian cycle exists - let problem = TravelingSalesman::::unweighted( - 4, - vec![(0, 1), (1, 2), (2, 3)], - ); + let problem = + TravelingSalesman::::unweighted(4, vec![(0, 1), (1, 2), (2, 3)]); let reduction: ReductionTSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); let ilp_solver = ILPSolver::new(); let result = ilp_solver.solve(ilp); - assert!(result.is_none(), "Path graph should have no Hamiltonian cycle (infeasible ILP)"); + assert!( + 
result.is_none(), + "Path graph should have no Hamiltonian cycle (infeasible ILP)" + ); } #[test] fn test_solution_extraction_structure() { // C4 cycle: verify extraction produces correct edge selection format - let problem = TravelingSalesman::::unweighted( - 4, - vec![(0, 1), (1, 2), (2, 3), (3, 0)], - ); + let problem = + TravelingSalesman::::unweighted(4, vec![(0, 1), (1, 2), (2, 3), (3, 0)]); let reduction: ReductionTSPToILP = ReduceTo::::reduce_to(&problem); let ilp = reduction.target_problem(); @@ -132,13 +134,19 @@ fn test_solve_reduced() { let problem = TravelingSalesman::::new( 4, vec![ - (0, 1, 10), (0, 2, 15), (0, 3, 20), - (1, 2, 35), (1, 3, 25), (2, 3, 30), + (0, 1, 10), + (0, 2, 15), + (0, 3, 20), + (1, 2, 35), + (1, 3, 25), + (2, 3, 30), ], ); let ilp_solver = ILPSolver::new(); - let solution = ilp_solver.solve_reduced(&problem).expect("solve_reduced should work"); + let solution = ilp_solver + .solve_reduced(&problem) + .expect("solve_reduced should work"); let metric = problem.evaluate(&solution); assert!(metric.is_valid()); diff --git a/src/unit_tests/types.rs b/src/unit_tests/types.rs index 28c2a30c6..ceb7ee389 100644 --- a/src/unit_tests/types.rs +++ b/src/unit_tests/types.rs @@ -39,23 +39,23 @@ fn test_solution_size_map() { } #[test] -fn test_unweighted() { - let uw = Unweighted(0); - // Test get() method - assert_eq!(uw.get(0), 1); - assert_eq!(uw.get(100), 1); - assert_eq!(uw.get(usize::MAX), 1); +fn test_one() { + let one = One; // Test Display - assert_eq!(format!("{}", uw), "Unweighted"); + assert_eq!(format!("{}", one), "One"); // Test Clone, Copy, Default - let uw2 = uw; - let _uw3 = uw2; // Copy works (no clone needed) - let _uw4: Unweighted = Default::default(); + let one2 = one; + let _one3 = one2; // Copy works (no clone needed) + let _one4: One = Default::default(); // Test PartialEq - assert_eq!(Unweighted(0), Unweighted(0)); + assert_eq!(One, One); + + // Test From + let from_int: One = One::from(42); + assert_eq!(from_int, One); 
} #[test] @@ -88,16 +88,6 @@ fn test_problem_size_display() { assert_eq!(format!("{}", single), "ProblemSize{n: 5}"); } -#[test] -fn test_numeric_weight_impls() { - fn assert_numeric_weight() {} - - assert_numeric_weight::(); - assert_numeric_weight::(); - assert_numeric_weight::(); - assert_numeric_weight::(); -} - #[test] fn test_numeric_size_blanket_impl() { fn assert_numeric_size() {} @@ -107,29 +97,37 @@ fn test_numeric_size_blanket_impl() { } #[test] -fn test_unweighted_weights_trait() { - let w = Unweighted(5); - assert_eq!(w.len(), 5); - assert_eq!(w.weight(0), 1); - assert_eq!(w.weight(4), 1); - assert_eq!(Unweighted::NAME, "Unweighted"); +fn test_weight_element_one() { + let one = One; + assert_eq!(one.to_sum(), 1); + + // Verify associated type + fn assert_weight_element() {} + assert_weight_element::(); } #[test] -fn test_vec_i32_weights_trait() { - let w = vec![3, 1, 4]; - assert_eq!(w.len(), 3); - assert_eq!(w.weight(0), 3); - assert_eq!(w.weight(2), 4); - assert_eq!( as Weights>::NAME, "Weighted"); +fn test_weight_element_i32() { + let w: i32 = 42; + assert_eq!(w.to_sum(), 42); + + let zero: i32 = 0; + assert_eq!(zero.to_sum(), 0); + + let neg: i32 = -5; + assert_eq!(neg.to_sum(), -5); } #[test] -fn test_vec_f64_weights_trait() { - let w = vec![1.5, 2.5]; - assert_eq!(w.len(), 2); - assert_eq!(w.weight(1), 2.5); - assert_eq!( as Weights>::NAME, "Weighted"); +fn test_weight_element_f64() { + let w: f64 = 3.15; + assert_eq!(w.to_sum(), 3.15); + + let zero: f64 = 0.0; + assert_eq!(zero.to_sum(), 0.0); + + let neg: f64 = -2.5; + assert_eq!(neg.to_sum(), -2.5); } #[test] diff --git a/src/unit_tests/variant.rs b/src/unit_tests/variant.rs index 6ffc2608f..35d1c0dd3 100644 --- a/src/unit_tests/variant.rs +++ b/src/unit_tests/variant.rs @@ -90,23 +90,26 @@ fn test_variant_for_problems() { assert_eq!(v.len(), 2); assert_eq!(v[0].1, "SimpleGraph"); - // Test Satisfiability + // Test Satisfiability (no type parameters) let v = Satisfiability::variant(); - 
assert_eq!(v.len(), 2); + assert_eq!(v.len(), 0); - // Test KSatisfiability + // Test KSatisfiability (const K parameter only) let v = KSatisfiability::<3>::variant(); - assert_eq!(v.len(), 2); + assert_eq!(v.len(), 1); + assert_eq!(v[0], ("k", "3")); - // Test MaximumSetPacking + // Test MaximumSetPacking (weight parameter only) let v = MaximumSetPacking::::variant(); - assert_eq!(v.len(), 2); + assert_eq!(v.len(), 1); + assert_eq!(v[0], ("weight", "i32")); - // Test MinimumSetCovering + // Test MinimumSetCovering (weight parameter only) let v = MinimumSetCovering::::variant(); - assert_eq!(v.len(), 2); + assert_eq!(v.len(), 1); + assert_eq!(v[0], ("weight", "i32")); - // Test SpinGlass + // Test SpinGlass (graph + weight parameters) let v = SpinGlass::::variant(); assert_eq!(v.len(), 2); assert_eq!(v[1].1, "f64"); @@ -114,33 +117,28 @@ fn test_variant_for_problems() { let v = SpinGlass::::variant(); assert_eq!(v[1].1, "i32"); - // Test QUBO + // Test QUBO (weight parameter only) let v = QUBO::::variant(); - assert_eq!(v.len(), 2); - assert_eq!(v[1].1, "f64"); + assert_eq!(v.len(), 1); + assert_eq!(v[0], ("weight", "f64")); - // Test CircuitSAT + // Test CircuitSAT (no type parameters) let v = CircuitSAT::variant(); - assert_eq!(v.len(), 1); + assert_eq!(v.len(), 0); // Test Factoring (no type parameters) let v = Factoring::variant(); - assert_eq!(v.len(), 2); - assert_eq!(v[0].1, "SimpleGraph"); - assert_eq!(v[1].1, "i32"); + assert_eq!(v.len(), 0); // Test BicliqueCover (no type parameters) let v = BicliqueCover::variant(); - assert_eq!(v.len(), 2); - assert_eq!(v[0].1, "SimpleGraph"); + assert_eq!(v.len(), 0); // Test BMF (no type parameters) let v = BMF::variant(); - assert_eq!(v.len(), 2); - assert_eq!(v[0].1, "SimpleGraph"); + assert_eq!(v.len(), 0); // Test PaintShop (no type parameters) let v = PaintShop::variant(); - assert_eq!(v.len(), 2); - assert_eq!(v[0].1, "SimpleGraph"); + assert_eq!(v.len(), 0); } diff --git 
a/tests/data/qubo/maximumindependentset_to_qubo.json b/tests/data/qubo/maximumindependentset_to_qubo.json index 571dba17f..164d36c83 100644 --- a/tests/data/qubo/maximumindependentset_to_qubo.json +++ b/tests/data/qubo/maximumindependentset_to_qubo.json @@ -1 +1 @@ -{"problem":"IndependentSet","source":{"num_vertices":4,"edges":[[0,1],[1,2],[2,3],[0,3]],"penalty":8.0},"qubo_matrix":[[-1.0,8.0,0.0,8.0],[0.0,-1.0,8.0,0.0],[0.0,0.0,-1.0,8.0],[0.0,0.0,0.0,-1.0]],"qubo_num_vars":4,"qubo_optimal":{"value":-2.0,"configs":[[0,1,0,1],[1,0,1,0]]}} \ No newline at end of file +{"problem":"MaximumIndependentSet","source":{"num_vertices":4,"edges":[[0,1],[1,2],[2,3],[0,3]],"penalty":8.0},"qubo_matrix":[[-1.0,8.0,0.0,8.0],[0.0,-1.0,8.0,0.0],[0.0,0.0,-1.0,8.0],[0.0,0.0,0.0,-1.0]],"qubo_num_vars":4,"qubo_optimal":{"value":-2.0,"configs":[[0,1,0,1],[1,0,1,0]]}} \ No newline at end of file diff --git a/tests/data/qubo/maximumsetpacking_to_qubo.json b/tests/data/qubo/maximumsetpacking_to_qubo.json index b9b86e2e4..fefda55fd 100644 --- a/tests/data/qubo/maximumsetpacking_to_qubo.json +++ b/tests/data/qubo/maximumsetpacking_to_qubo.json @@ -1 +1 @@ -{"problem":"SetPacking","source":{"sets":[[0,2],[1,2],[0,3]],"num_elements":4,"weights":[1.0,2.0,1.5],"penalty":8.0},"qubo_matrix":[[-1.0,4.0,4.0],[4.0,-2.0,0.0],[4.0,0.0,-1.5]],"qubo_num_vars":3,"qubo_optimal":{"value":-3.5,"configs":[[0,1,1]]}} \ No newline at end of file +{"problem":"MaximumSetPacking","source":{"sets":[[0,2],[1,2],[0,3]],"num_elements":4,"weights":[1.0,2.0,1.5],"penalty":8.0},"qubo_matrix":[[-1.0,4.0,4.0],[4.0,-2.0,0.0],[4.0,0.0,-1.5]],"qubo_num_vars":3,"qubo_optimal":{"value":-3.5,"configs":[[0,1,1]]}} \ No newline at end of file diff --git a/tests/data/qubo/minimumvertexcover_to_qubo.json b/tests/data/qubo/minimumvertexcover_to_qubo.json index 06479d1eb..064fe7bbf 100644 --- a/tests/data/qubo/minimumvertexcover_to_qubo.json +++ b/tests/data/qubo/minimumvertexcover_to_qubo.json @@ -1 +1 @@ 
-{"problem":"VertexCovering","source":{"num_vertices":4,"edges":[[0,1],[1,2],[2,3],[0,3],[0,2]],"penalty":8.0},"qubo_matrix":[[-23.0,4.0,4.0,4.0],[4.0,-15.0,4.0,0.0],[4.0,4.0,-23.0,4.0],[4.0,0.0,4.0,-15.0]],"qubo_num_vars":4,"qubo_optimal":{"value":-38.0,"configs":[[1,0,1,0]]}} \ No newline at end of file +{"problem":"MinimumVertexCover","source":{"num_vertices":4,"edges":[[0,1],[1,2],[2,3],[0,3],[0,2]],"penalty":8.0},"qubo_matrix":[[-23.0,4.0,4.0,4.0],[4.0,-15.0,4.0,0.0],[4.0,4.0,-23.0,4.0],[4.0,0.0,4.0,-15.0]],"qubo_num_vars":4,"qubo_optimal":{"value":-38.0,"configs":[[1,0,1,0]]}} \ No newline at end of file