#!/usr/bin/env python3
"""
RustChain Cross-Node Ledger Verification Tool
===============================================
Queries all RustChain nodes, compares state, alerts on mismatches.
Logs results to SQLite for historical tracking.

Usage:
    python3 ledger_verify.py              # One-shot verification
    python3 ledger_verify.py --ci         # Exit non-zero on mismatch (CI mode)
    python3 ledger_verify.py --webhook URL  # POST results to webhook on mismatch
    python3 ledger_verify.py --watch 300  # Run every 300 seconds continuously
    python3 ledger_verify.py --history    # Show recent check history

Bounty: https://github.com/Scottcjn/rustchain-bounties/issues/763
Author: NOX Ventures (noxxxxybot-sketch)
"""

import argparse
import hashlib
import json
import os
import sqlite3
import ssl
import sys
import time
import urllib.request
import urllib.error
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

# ---------------------------------------------------------------------------
# Node configuration
# ---------------------------------------------------------------------------

NODES = [
    {"name": "Node 1 (Primary)", "url": "https://50.28.86.131", "id": "node1"},
    {"name": "Node 2", "url": "https://50.28.86.153", "id": "node2"},
    # Node 3 is Tailscale-only; included for completeness but may be unreachable
    {"name": "Node 3 (Ryan)", "url": "http://100.88.109.32:8099", "id": "node3"},
]

TIMEOUT_SECONDS = 10
DB_PATH = Path.home() / ".rustchain" / "ledger_verify.db"
SPOT_CHECK_WALLET = "founder_community"

# ---------------------------------------------------------------------------
# Database
# ---------------------------------------------------------------------------

def init_db(db_path: Path = DB_PATH) -> sqlite3.Connection:
    """Open (creating if needed) the history database and return a connection.

    The parent directory is created on first use so a fresh machine works
    without manual setup.
    """
    db_path.parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(str(db_path))
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS sync_checks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            checked_at TEXT NOT NULL,
            overall_ok INTEGER NOT NULL,
            epoch_match INTEGER NOT NULL,
            balance_match INTEGER NOT NULL,
            miner_count_match INTEGER NOT NULL,
            mismatch_details TEXT,
            merkle_roots TEXT,
            node_data TEXT
        );
        CREATE TABLE IF NOT EXISTS node_snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            check_id INTEGER NOT NULL,
            node_id TEXT NOT NULL,
            node_name TEXT NOT NULL,
            reachable INTEGER NOT NULL,
            version TEXT,
            epoch INTEGER,
            slot INTEGER,
            enrolled_miners INTEGER,
            spot_balance REAL,
            active_miner_count INTEGER,
            merkle_root TEXT,
            raw_data TEXT,
            FOREIGN KEY (check_id) REFERENCES sync_checks(id)
        );
    """)
    conn.commit()
    return conn


def save_check_result(conn: sqlite3.Connection, result: dict):
    """Persist one verification result (and its per-node snapshots).

    Returns the new sync_checks row id.
    """
    c = conn.execute(
        """INSERT INTO sync_checks
           (checked_at, overall_ok, epoch_match, balance_match, miner_count_match,
            mismatch_details, merkle_roots, node_data)
           VALUES (?,?,?,?,?,?,?,?)""",
        (
            result["timestamp"],
            1 if result["overall_ok"] else 0,
            1 if result["epoch_match"] else 0,
            1 if result["balance_match"] else 0,
            1 if result["miner_count_match"] else 0,
            json.dumps(result.get("mismatches", [])),
            json.dumps(result.get("merkle_roots", {})),
            json.dumps(result.get("node_data", {})),
        )
    )
    check_id = c.lastrowid
    for nd in result.get("node_snapshots", []):
        conn.execute(
            """INSERT INTO node_snapshots
               (check_id, node_id, node_name, reachable, version, epoch, slot,
                enrolled_miners, spot_balance, active_miner_count, merkle_root, raw_data)
               VALUES (?,?,?,?,?,?,?,?,?,?,?,?)""",
            (
                check_id,
                nd["node_id"], nd["node_name"],
                1 if nd.get("reachable") else 0,
                nd.get("version"), nd.get("epoch"), nd.get("slot"),
                nd.get("enrolled_miners"), nd.get("spot_balance"),
                nd.get("active_miner_count"), nd.get("merkle_root"),
                json.dumps(nd.get("raw_data", {})),
            )
        )
    conn.commit()
    return check_id


def show_history(db_path: Path = DB_PATH, limit: int = 20):
    """Print the most recent `limit` verification results in a table."""
    if not db_path.exists():
        print("No history found. Run a check first.")
        return
    conn = sqlite3.connect(str(db_path))
    rows = conn.execute(
        "SELECT checked_at, overall_ok, epoch_match, balance_match, mismatch_details "
        "FROM sync_checks ORDER BY id DESC LIMIT ?", (limit,)
    ).fetchall()
    print(f"{'Timestamp':<30} {'Status':<10} {'Epoch':<8} {'Balance':<10} Mismatches")
    print("-" * 80)
    for r in rows:
        status = "✅ OK" if r[1] else "❌ FAIL"
        epoch = "✅" if r[2] else "❌"
        bal = "✅" if r[3] else "❌"
        mismatches = json.loads(r[4]) if r[4] else []
        mm_str = "; ".join(mismatches[:2]) if mismatches else "-"
        print(f"{r[0]:<30} {status:<10} {epoch:<8} {bal:<10} {mm_str}")
    conn.close()


# ---------------------------------------------------------------------------
# HTTP helpers
# ---------------------------------------------------------------------------

def fetch(url: str, timeout: int = TIMEOUT_SECONDS) -> Optional[dict]:
    """Fetch JSON from a URL. Returns None on failure.

    SECURITY NOTE(review): certificate verification is deliberately disabled
    because the RustChain nodes serve self-signed certificates. Do not reuse
    this helper for arbitrary/untrusted hosts.
    """
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    try:
        req = urllib.request.Request(url, headers={"User-Agent": "rustchain-ledger-verify/1.0"})
        with urllib.request.urlopen(req, timeout=timeout, context=ctx) as r:
            return json.loads(r.read())
    except Exception:
        # Unreachable / non-JSON / timeout all collapse to None by design;
        # callers treat None as "endpoint unavailable".
        return None


def post_webhook(url: str, payload: dict):
    """POST JSON payload to a webhook URL. Returns HTTP status or None."""
    data = json.dumps(payload).encode()
    req = urllib.request.Request(
        url, data=data,
        headers={"Content-Type": "application/json", "User-Agent": "rustchain-ledger-verify/1.0"},
        method="POST"
    )
    try:
        with urllib.request.urlopen(req, timeout=10) as r:
            return r.status
    except Exception as e:
        print(f"  ⚠️  Webhook delivery failed: {e}", file=sys.stderr)
        return None


# ---------------------------------------------------------------------------
# Merkle computation
# ---------------------------------------------------------------------------

def compute_merkle_root(miner_list: List[dict]) -> str:
    """
    Compute a Merkle root over sorted miner data for cross-node comparison.
    Miners are sorted by miner_id for determinism.
    """
    if not miner_list:
        return hashlib.sha256(b"empty").hexdigest()

    # Normalize each miner to a canonical string
    leaves = []
    for m in sorted(miner_list, key=lambda x: x.get("miner_id", x.get("wallet_name", ""))):
        canonical = json.dumps(
            {k: m.get(k) for k in sorted(m.keys())},
            sort_keys=True, separators=(",", ":")
        )
        leaves.append(hashlib.sha256(canonical.encode()).digest())

    # Build Merkle tree
    while len(leaves) > 1:
        if len(leaves) % 2 == 1:
            leaves.append(leaves[-1])  # duplicate last for odd count
        leaves = [
            hashlib.sha256(leaves[i] + leaves[i + 1]).digest()
            for i in range(0, len(leaves), 2)
        ]

    return leaves[0].hex()


# ---------------------------------------------------------------------------
# Node querying
# ---------------------------------------------------------------------------

def _first_present(d: dict, *keys: str):
    """Return the first value among *keys* that is present and not None.

    BUGFIX: the original used `d.get(a) or d.get(b)`, which silently discards
    legitimate falsy values (a 0.0 balance or 0 miner count fell through to
    the fallback key). Explicit None checks preserve real zeros.
    """
    for k in keys:
        v = d.get(k)
        if v is not None:
            return v
    return None


def query_node(node: dict) -> dict:
    """Query all relevant endpoints for a single node.

    Returns a snapshot dict; `reachable` is False (with an `error` key) if
    even the /health endpoint cannot be fetched.
    """
    base = node["url"]
    result = {
        "node_id": node["id"],
        "node_name": node["name"],
        "reachable": False,
        "raw_data": {},
    }

    # Health
    health = fetch(f"{base}/health")
    if not health:
        result["error"] = "Node unreachable"
        return result

    result["reachable"] = True
    result["version"] = _first_present(health, "version", "node_version")
    result["raw_data"]["health"] = health

    # Epoch
    epoch_data = fetch(f"{base}/epoch")
    if epoch_data:
        result["epoch"] = epoch_data.get("epoch")
        result["slot"] = epoch_data.get("slot")
        result["enrolled_miners"] = _first_present(epoch_data, "enrolled_miners", "total_miners")
        result["raw_data"]["epoch"] = epoch_data

    # Stats
    stats = fetch(f"{base}/api/stats")
    if stats:
        result["total_balance"] = stats.get("total_balance")
        result["miner_count"] = _first_present(stats, "miner_count", "total_miners")
        result["raw_data"]["stats"] = stats

    # Spot check wallet balance
    balance_data = fetch(f"{base}/wallet/balance?miner_id={SPOT_CHECK_WALLET}")
    if balance_data:
        result["spot_balance"] = _first_present(balance_data, "balance", "rtc_balance")
        result["raw_data"]["spot_balance"] = balance_data

    # Miners list (for Merkle)
    miners_data = fetch(f"{base}/api/miners")
    if miners_data:
        miners = miners_data if isinstance(miners_data, list) else miners_data.get("miners", [])
        result["active_miner_count"] = len(miners)
        result["merkle_root"] = compute_merkle_root(miners)
        result["raw_data"]["miners_sample"] = miners[:3]  # Save a sample, not all

    return result


# ---------------------------------------------------------------------------
# Comparison logic
# ---------------------------------------------------------------------------

def compare_nodes(snapshots: List[dict]) -> dict:
    """Compare node snapshots and return a verification result.

    BUGFIX: the original zipped the full reachable node-id list against
    None-filtered value lists, so mismatch reports could attribute values to
    the wrong nodes whenever one node omitted a field. We now build
    {node_id: value} maps during filtering so ids and values stay paired.
    """
    reachable = [s for s in snapshots if s.get("reachable")]
    unreachable = [s for s in snapshots if not s.get("reachable")]

    mismatches = []
    epoch_match = True
    balance_match = True
    miner_count_match = True

    if len(reachable) < 2:
        return {
            "epoch_match": False,
            "balance_match": False,
            "miner_count_match": False,
            "mismatches": ["Insufficient reachable nodes for comparison"],
            "reachable_count": len(reachable),
        }

    # Epoch comparison
    epochs = {s["node_id"]: s["epoch"] for s in reachable if s.get("epoch") is not None}
    if len(set(epochs.values())) > 1:
        epoch_match = False
        mismatches.append(f"EPOCH MISMATCH: {epochs}")

    # Slot comparison (allow ±5 drift)
    slots = {s["node_id"]: s.get("slot") for s in reachable if s.get("slot") is not None}
    if len(slots) > 1:
        slot_values = [v for v in slots.values() if v is not None]
        if slot_values and (max(slot_values) - min(slot_values)) > 5:
            mismatches.append(f"SLOT DRIFT: {slots}")

    # Balance comparison (exact float equality is intentional here: nodes in
    # sync serve byte-identical ledger values, so any difference is a mismatch)
    balances = {s["node_id"]: s["spot_balance"] for s in reachable if s.get("spot_balance") is not None}
    if len(set(balances.values())) > 1:
        balance_match = False
        mismatches.append(f"BALANCE MISMATCH: {balances}")

    # Miner count comparison
    miner_counts = {s["node_id"]: s["active_miner_count"]
                    for s in reachable if s.get("active_miner_count") is not None}
    if len(set(miner_counts.values())) > 1:
        miner_count_match = False
        mismatches.append(f"MINER COUNT MISMATCH: {miner_counts}")

    # Merkle comparison
    merkle_roots = {s["node_id"]: s.get("merkle_root") for s in reachable if s.get("merkle_root")}
    roots = list(set(v for v in merkle_roots.values() if v))
    if len(roots) > 1:
        mismatches.append(f"MERKLE ROOT MISMATCH: {merkle_roots}")

    if unreachable:
        mismatches.append(f"UNREACHABLE: {[s['node_name'] for s in unreachable]}")

    return {
        "epoch_match": epoch_match,
        "balance_match": balance_match,
        "miner_count_match": miner_count_match,
        "mismatches": mismatches,
        "merkle_roots": merkle_roots,
        "reachable_count": len(reachable),
    }


# ---------------------------------------------------------------------------
# Report formatting
# ---------------------------------------------------------------------------

def print_report(result: dict):
    """Pretty-print a verification result to stdout."""
    now = result["timestamp"]
    snapshots = result["node_snapshots"]

    print()
    print("=" * 60)
    print("  RustChain Cross-Node Verification Report")
    print("=" * 60)
    print(f"  Timestamp: {now}")
    print()

    print("  Node Health:")
    for s in snapshots:
        status = "🟢" if s.get("reachable") else "🔴"
        # `or "N/A"` also covers a present-but-None version (original printed "vNone")
        version = s.get("version") or "N/A"
        err = f" — {s.get('error', 'unreachable')}" if not s.get("reachable") else ""
        print(f"    {s['node_name']}: {status} v{version}{err}")

    print()
    print("  Epoch State:")
    for s in snapshots:
        if s.get("reachable"):
            ep = s.get("epoch", "?")
            slot = s.get("slot", "?")
            miners = s.get("enrolled_miners", s.get("active_miner_count", "?"))
            epoch_ok = "✅" if result["epoch_match"] else "❌"
            print(f"    {s['node_name']}: epoch={ep}, slot={slot}, miners={miners} {epoch_ok}")
        else:
            print(f"    {s['node_name']}: ❌ unreachable")

    print()
    print(f"  Balance Spot-Check ({SPOT_CHECK_WALLET}):")
    for s in snapshots:
        if s.get("reachable") and s.get("spot_balance") is not None:
            bal = s.get("spot_balance", 0)
            bal_ok = "✅" if result["balance_match"] else "❌"
            print(f"    {s['node_name']}: {bal:,.2f} RTC {bal_ok}")

    print()
    print("  Merkle Roots (Active Miners):")
    for node_id, root in result.get("merkle_roots", {}).items():
        node_name = next((s["node_name"] for s in snapshots if s["node_id"] == node_id), node_id)
        print(f"    {node_name}: {root[:16]}...")

    print()
    overall = "✅ ALL NODES IN SYNC" if result["overall_ok"] else "❌ SYNC MISMATCH DETECTED"
    print(f"  Result: {overall}")

    if result.get("mismatches"):
        print()
        print("  ⚠️  Mismatches:")
        for m in result["mismatches"]:
            print(f"    - {m}")

    print("=" * 60)
    print()


# ---------------------------------------------------------------------------
# Main verification run
# ---------------------------------------------------------------------------

def run_verification(
    webhook_url: Optional[str] = None,
    ci_mode: bool = False,
    db_path: Path = DB_PATH,
) -> Tuple[dict, bool]:
    """Run full verification. Returns (result, ok).

    `ci_mode` is accepted for interface compatibility; the CI exit-code
    handling itself happens in main().
    """
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    print(f"🔍 Querying {len(NODES)} nodes...")
    snapshots = []
    for node in NODES:
        print(f"   Querying {node['name']}...", end=" ", flush=True)
        snap = query_node(node)
        status = "✅" if snap.get("reachable") else "❌"
        print(status)
        snapshots.append(snap)

    comparison = compare_nodes(snapshots)

    # A check passes when every mismatch (if any) is a pure reachability
    # issue; state disagreements between reachable nodes always fail.
    # (The original computed overall_ok twice and discarded the first value.)
    substantive = [m for m in comparison.get("mismatches", []) if "UNREACHABLE" not in m]

    result = {
        "timestamp": timestamp,
        "overall_ok": len(substantive) == 0,
        "epoch_match": comparison["epoch_match"],
        "balance_match": comparison["balance_match"],
        "miner_count_match": comparison["miner_count_match"],
        "mismatches": comparison.get("mismatches", []),
        "merkle_roots": comparison.get("merkle_roots", {}),
        "node_snapshots": snapshots,
        "node_data": {s["node_id"]: s.get("raw_data", {}) for s in snapshots},
        "comparison": comparison,
    }

    # Save to DB
    conn = init_db(db_path)
    save_check_result(conn, result)
    conn.close()

    # Print report
    print_report(result)

    # Webhook on mismatch
    if webhook_url and not result["overall_ok"]:
        print(f"📤 Sending webhook to {webhook_url}...")
        post_webhook(webhook_url, {
            "event": "rustchain_sync_mismatch",
            "timestamp": timestamp,
            "mismatches": result["mismatches"],
            "merkle_roots": result["merkle_roots"],
        })

    return result, result["overall_ok"]


# ---------------------------------------------------------------------------
# CLI
# ---------------------------------------------------------------------------

def main():
    parser = argparse.ArgumentParser(
        description="RustChain Cross-Node Ledger Verification Tool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python3 ledger_verify.py                   # One-shot check
  python3 ledger_verify.py --ci              # Exit 1 if mismatch (CI mode)
  python3 ledger_verify.py --webhook https://hooks.slack.com/...
  python3 ledger_verify.py --watch 300       # Check every 5 minutes
  python3 ledger_verify.py --history         # Show recent check history
"""
    )
    parser.add_argument("--ci", action="store_true",
                        help="Exit non-zero on any mismatch (for GitHub Actions)")
    parser.add_argument("--webhook", metavar="URL",
                        help="POST mismatch alerts to this webhook URL")
    parser.add_argument("--watch", type=int, metavar="SECONDS",
                        help="Run continuously, checking every N seconds")
    parser.add_argument("--history", action="store_true",
                        help="Show recent verification history")
    parser.add_argument("--db", default=str(DB_PATH),
                        help=f"SQLite database path (default: {DB_PATH})")
    args = parser.parse_args()

    db_path = Path(args.db)

    if args.history:
        show_history(db_path)
        return

    if args.watch:
        print(f"🔁 Watch mode: checking every {args.watch}s (Ctrl+C to stop)")
        all_ok = True
        try:
            while True:
                _, ok = run_verification(
                    webhook_url=args.webhook,
                    ci_mode=args.ci,
                    db_path=db_path,
                )
                if not ok:
                    all_ok = False
                print(f"Next check in {args.watch}s...")
                time.sleep(args.watch)
        except KeyboardInterrupt:
            print("\nWatch mode stopped.")
            if args.ci and not all_ok:
                sys.exit(1)
            return

    _, ok = run_verification(
        webhook_url=args.webhook,
        ci_mode=args.ci,
        db_path=db_path,
    )

    if args.ci and not ok:
        sys.exit(1)


if __name__ == "__main__":
    main()

# NOTE(review): in the original patch this point was the end of
# monitoring/ledger_verify.py; the patch continued with a second new file,
# node/governance.py, whose content follows below.
"""
RIP-0002: On-Chain Governance System
=====================================
Implements proposal creation, voting, lifecycle management, and optional
Sophia AI evaluation for RustChain protocol governance.

Voting Rules:
    - 1 attesting miner = 1 vote, weighted by antiquity multiplier
    - 7-day voting window per proposal
    - 33% quorum of active miners required
    - Simple majority wins
    - Founder veto for security-critical changes (first 2 years)

API Endpoints:
    POST /api/governance/propose        — Create proposal (active miner required)
    GET  /api/governance/proposals      — List all proposals
    GET  /api/governance/proposal/<id>  — Get proposal details + votes
    POST /api/governance/vote           — Cast vote (active attestation required)
    GET  /api/governance/results/<id>   — Get final results
    GET  /api/governance/stats          — Governance statistics

Author: NOX Ventures (noxxxxybot-sketch)
Date: 2026-03-07
"""

import hashlib
import json
import logging
import sqlite3
import time
from typing import Optional
from flask import Blueprint, request, jsonify

log = logging.getLogger("rip0002_governance")

# ---------------------------------------------------------------------------
# Constants
# ---------------------------------------------------------------------------

VOTING_WINDOW_SECONDS = 7 * 86400       # 7 days
QUORUM_THRESHOLD = 0.33                 # 33% of active miners
FOUNDER_VETO_DURATION = 2 * 365 * 86400  # 2 years from genesis
GENESIS_TIMESTAMP = 1700000000          # Approximate RustChain genesis (override if needed)
MAX_PROPOSALS_PER_MINER = 10            # Anti-spam: max active proposals
MAX_TITLE_LEN = 200
MAX_DESCRIPTION_LEN = 10000

PROPOSAL_TYPES = ("parameter_change", "feature_activation", "emergency")
VOTE_CHOICES = ("for", "against", "abstain")

STATUS_ACTIVE = "active"
STATUS_PASSED = "passed"
STATUS_FAILED = "failed"
STATUS_EXPIRED = "expired"
STATUS_VETOED = "vetoed"

# ---------------------------------------------------------------------------
# Database Schema
# ---------------------------------------------------------------------------

GOVERNANCE_SCHEMA = """
CREATE TABLE IF NOT EXISTS governance_proposals (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    description TEXT NOT NULL,
    proposal_type TEXT NOT NULL,
    proposed_by TEXT NOT NULL,
    created_at INTEGER NOT NULL,
    expires_at INTEGER NOT NULL,
    status TEXT DEFAULT 'active',
    parameter_key TEXT,
    parameter_value TEXT,
    votes_for REAL DEFAULT 0.0,
    votes_against REAL DEFAULT 0.0,
    votes_abstain REAL DEFAULT 0.0,
    quorum_met INTEGER DEFAULT 0,
    vetoed_by TEXT,
    veto_reason TEXT,
    sophia_analysis TEXT
);

CREATE TABLE IF NOT EXISTS governance_votes (
    proposal_id INTEGER NOT NULL,
    miner_id TEXT NOT NULL,
    vote TEXT NOT NULL,
    weight REAL NOT NULL,
    voted_at INTEGER NOT NULL,
    PRIMARY KEY (proposal_id, miner_id),
    FOREIGN KEY (proposal_id) REFERENCES governance_proposals(id)
);
"""


def init_governance_tables(db_path: str):
    """Create governance tables if they don't exist."""
    with sqlite3.connect(db_path) as conn:
        conn.executescript(GOVERNANCE_SCHEMA)
        conn.commit()
    log.info("Governance tables initialized at %s", db_path)


# ---------------------------------------------------------------------------
# Helper functions
# ---------------------------------------------------------------------------

def _get_miner_antiquity_weight(miner_id: str, db_path: str) -> float:
    """Return the antiquity multiplier for a miner (default 1.0 if not found).

    The value is floored at 1.0, so sub-1.0 multipliers in the miners table
    never reduce a vote below one-miner-one-vote.
    """
    try:
        with sqlite3.connect(db_path) as conn:
            row = conn.execute(
                "SELECT antiquity_multiplier FROM miners WHERE wallet_name = ?",
                (miner_id,)
            ).fetchone()
            if row:
                return max(float(row[0]), 1.0)
    except Exception as e:
        log.debug("Could not fetch antiquity for %s: %s", miner_id, e)
    return 1.0


def _is_active_miner(miner_id: str, db_path: str) -> bool:
    """Check if the miner has attested recently (within last 2 epochs ~24h)."""
    try:
        cutoff = int(time.time()) - 86400 * 2
        with sqlite3.connect(db_path) as conn:
            row = conn.execute(
                "SELECT COUNT(*) FROM attestations WHERE miner_id = ? AND timestamp >= ?",
                (miner_id, cutoff)
            ).fetchone()
            return bool(row and row[0] > 0)
    except Exception as e:
        log.debug("Attestation check failed for %s: %s", miner_id, e)
        return False


def _count_active_miners(db_path: str) -> int:
    """Count miners who attested in the last 2 days (quorum denominator)."""
    try:
        cutoff = int(time.time()) - 86400 * 2
        with sqlite3.connect(db_path) as conn:
            row = conn.execute(
                "SELECT COUNT(DISTINCT miner_id) FROM attestations WHERE timestamp >= ?",
                (cutoff,)
            ).fetchone()
            return int(row[0]) if row else 0
    except Exception as e:
        log.debug("Active miner count failed: %s", e)
        return 0


def _is_within_founder_veto_period() -> bool:
    """Return True if still within the 2-year founder veto window."""
    return (time.time() - GENESIS_TIMESTAMP) < FOUNDER_VETO_DURATION


def _settle_expired_proposals(db_path: str):
    """Settle any proposals whose voting window has closed.

    Quorum uses the current active-miner count as denominator; a proposal
    that never reached quorum expires, otherwise simple majority decides.
    """
    now = int(time.time())
    try:
        with sqlite3.connect(db_path) as conn:
            active = conn.execute(
                "SELECT id, votes_for, votes_against, votes_abstain FROM governance_proposals "
                "WHERE status = ? AND expires_at <= ?",
                (STATUS_ACTIVE, now)
            ).fetchall()
            if not active:
                return

            # PERF: hoisted out of the loop — the quorum denominator does not
            # change between proposals settled in the same pass, but the
            # original re-queried it (opening a nested DB connection) once
            # per expired proposal.
            active_miners = _count_active_miners(db_path)

            for (pid, v_for, v_against, v_abstain) in active:
                total_votes = v_for + v_against + v_abstain
                quorum_met = (total_votes >= active_miners * QUORUM_THRESHOLD) if active_miners > 0 else False
                if not quorum_met:
                    new_status = STATUS_EXPIRED
                elif v_for > v_against:
                    new_status = STATUS_PASSED
                else:
                    new_status = STATUS_FAILED

                conn.execute(
                    "UPDATE governance_proposals SET status = ?, quorum_met = ? WHERE id = ?",
                    (new_status, 1 if quorum_met else 0, pid)
                )
            conn.commit()
    except Exception as e:
        log.error("Error settling expired proposals: %s", e)


def _sophia_evaluate(proposal: dict) -> str:
    """Generate a simple AI-style impact analysis for a proposal.

    Purely deterministic keyword heuristics — no external API involved; the
    output is advisory markdown attached to the proposal record.
    """
    ptype = proposal.get("proposal_type", "unknown")
    title = proposal.get("title", "")
    desc = proposal.get("description", "")[:500]

    # Lightweight deterministic analysis (no external API needed)
    risk_words = ["emergency", "halt", "pause", "freeze", "override", "bypass"]
    risk_level = "HIGH" if any(w in title.lower() or w in desc.lower() for w in risk_words) else "LOW"

    param_key = proposal.get("parameter_key") or ""
    analysis_lines = [
        f"**Sophia AI Evaluation** (auto-generated, non-binding)",
        f"- Proposal type: `{ptype}`",
        f"- Risk level: **{risk_level}**",
    ]
    if ptype == "parameter_change" and param_key:
        analysis_lines.append(f"- Modifies parameter: `{param_key}`")
        analysis_lines.append("- Recommend: review current parameter value before voting")
    elif ptype == "feature_activation":
        analysis_lines.append("- Activates a new RIP feature — ensure backward compatibility")
    elif ptype == "emergency":
        analysis_lines.append("- Emergency action — requires careful deliberation despite urgency")
    analysis_lines.append(
        f"- Summary: {desc[:200]}..." if len(desc) > 200 else f"- Summary: {desc}"
    )
    return "\n".join(analysis_lines)
if len(desc) > 200 else f"- Summary: {desc}" + ) + return "\n".join(analysis_lines) + + +# --------------------------------------------------------------------------- +# Flask Blueprint +# --------------------------------------------------------------------------- + +def create_governance_blueprint(db_path: str) -> Blueprint: + bp = Blueprint("governance", __name__) + + # -- POST /api/governance/propose ---------------------------------------- + @bp.route("/api/governance/propose", methods=["POST"]) + def create_proposal(): + _settle_expired_proposals(db_path) + data = request.get_json(silent=True) or {} + + miner_id = data.get("miner_id", "").strip() + title = data.get("title", "").strip() + description = data.get("description", "").strip() + proposal_type = data.get("proposal_type", "").strip() + parameter_key = data.get("parameter_key", "").strip() or None + parameter_value = str(data.get("parameter_value", "")).strip() or None + + # Validation + if not miner_id: + return jsonify({"error": "miner_id required"}), 400 + if not title or len(title) > MAX_TITLE_LEN: + return jsonify({"error": f"title required (max {MAX_TITLE_LEN} chars)"}), 400 + if not description or len(description) > MAX_DESCRIPTION_LEN: + return jsonify({"error": f"description required (max {MAX_DESCRIPTION_LEN} chars)"}), 400 + if proposal_type not in PROPOSAL_TYPES: + return jsonify({"error": f"proposal_type must be one of {PROPOSAL_TYPES}"}), 400 + if proposal_type == "parameter_change" and not parameter_key: + return jsonify({"error": "parameter_key required for parameter_change proposals"}), 400 + if not _is_active_miner(miner_id, db_path): + return jsonify({"error": "miner_id must be an active attesting miner"}), 403 + + now = int(time.time()) + expires_at = now + VOTING_WINDOW_SECONDS + + try: + with sqlite3.connect(db_path) as conn: + # Anti-spam: max active proposals per miner + active_count = conn.execute( + "SELECT COUNT(*) FROM governance_proposals WHERE proposed_by = ? 
AND status = ?", + (miner_id, STATUS_ACTIVE) + ).fetchone()[0] + if active_count >= MAX_PROPOSALS_PER_MINER: + return jsonify({"error": f"Max {MAX_PROPOSALS_PER_MINER} active proposals per miner"}), 429 + + proposal_data = { + "title": title, + "description": description, + "proposal_type": proposal_type, + "parameter_key": parameter_key, + "parameter_value": parameter_value, + } + sophia_text = _sophia_evaluate(proposal_data) + + cursor = conn.execute( + """INSERT INTO governance_proposals + (title, description, proposal_type, proposed_by, created_at, expires_at, + status, parameter_key, parameter_value, sophia_analysis) + VALUES (?,?,?,?,?,?,?,?,?,?)""", + (title, description, proposal_type, miner_id, now, expires_at, + STATUS_ACTIVE, parameter_key, parameter_value, sophia_text) + ) + proposal_id = cursor.lastrowid + conn.commit() + + except Exception as e: + log.error("Proposal creation error: %s", e) + return jsonify({"error": "internal error"}), 500 + + log.info("Proposal #%s created by %s: %s", proposal_id, miner_id, title) + return jsonify({ + "ok": True, + "proposal_id": proposal_id, + "title": title, + "proposal_type": proposal_type, + "status": STATUS_ACTIVE, + "expires_at": expires_at, + "sophia_analysis": sophia_text, + }), 201 + + # -- GET /api/governance/proposals ---------------------------------------- + @bp.route("/api/governance/proposals", methods=["GET"]) + def list_proposals(): + _settle_expired_proposals(db_path) + status_filter = request.args.get("status") + limit = min(int(request.args.get("limit", 50)), 200) + offset = int(request.args.get("offset", 0)) + + try: + with sqlite3.connect(db_path) as conn: + conn.row_factory = sqlite3.Row + if status_filter: + rows = conn.execute( + "SELECT * FROM governance_proposals WHERE status = ? " + "ORDER BY created_at DESC LIMIT ? OFFSET ?", + (status_filter, limit, offset) + ).fetchall() + else: + rows = conn.execute( + "SELECT * FROM governance_proposals ORDER BY created_at DESC LIMIT ? 
OFFSET ?", + (limit, offset) + ).fetchall() + proposals = [dict(r) for r in rows] + + except Exception as e: + log.error("List proposals error: %s", e) + return jsonify({"error": "internal error"}), 500 + + return jsonify({"proposals": proposals, "count": len(proposals)}), 200 + + # -- GET /api/governance/proposal/ ------------------------------------ + @bp.route("/api/governance/proposal/", methods=["GET"]) + def get_proposal(proposal_id: int): + _settle_expired_proposals(db_path) + try: + with sqlite3.connect(db_path) as conn: + conn.row_factory = sqlite3.Row + proposal = conn.execute( + "SELECT * FROM governance_proposals WHERE id = ?", (proposal_id,) + ).fetchone() + if not proposal: + return jsonify({"error": "proposal not found"}), 404 + + votes = conn.execute( + "SELECT miner_id, vote, weight, voted_at FROM governance_votes " + "WHERE proposal_id = ? ORDER BY voted_at DESC", + (proposal_id,) + ).fetchall() + + except Exception as e: + log.error("Get proposal error: %s", e) + return jsonify({"error": "internal error"}), 500 + + now = int(time.time()) + p = dict(proposal) + p["votes"] = [dict(v) for v in votes] + p["time_remaining_seconds"] = max(0, p["expires_at"] - now) + return jsonify(p), 200 + + # -- POST /api/governance/vote ------------------------------------------- + @bp.route("/api/governance/vote", methods=["POST"]) + def cast_vote(): + _settle_expired_proposals(db_path) + data = request.get_json(silent=True) or {} + + miner_id = data.get("miner_id", "").strip() + proposal_id = data.get("proposal_id") + vote_choice = data.get("vote", "").strip().lower() + + if not miner_id: + return jsonify({"error": "miner_id required"}), 400 + if proposal_id is None: + return jsonify({"error": "proposal_id required"}), 400 + if vote_choice not in VOTE_CHOICES: + return jsonify({"error": f"vote must be one of {VOTE_CHOICES}"}), 400 + if not _is_active_miner(miner_id, db_path): + return jsonify({"error": "miner must be active (attested in last 48h)"}), 403 + + 
weight = _get_miner_antiquity_weight(miner_id, db_path) + now = int(time.time()) + + try: + with sqlite3.connect(db_path) as conn: + proposal = conn.execute( + "SELECT id, status, expires_at FROM governance_proposals WHERE id = ?", + (proposal_id,) + ).fetchone() + + if not proposal: + return jsonify({"error": "proposal not found"}), 404 + if proposal[1] != STATUS_ACTIVE: + return jsonify({"error": f"proposal is {proposal[1]}, not active"}), 409 + if proposal[2] < now: + return jsonify({"error": "voting window has closed"}), 409 + + # Upsert vote + try: + conn.execute( + "INSERT INTO governance_votes (proposal_id, miner_id, vote, weight, voted_at) " + "VALUES (?,?,?,?,?)", + (proposal_id, miner_id, vote_choice, weight, now) + ) + except sqlite3.IntegrityError: + # Already voted — update + old_vote = conn.execute( + "SELECT vote, weight FROM governance_votes WHERE proposal_id = ? AND miner_id = ?", + (proposal_id, miner_id) + ).fetchone() + if old_vote: + # Remove old weight + old_col = f"votes_{old_vote[0]}" + conn.execute( + f"UPDATE governance_proposals SET {old_col} = {old_col} - ? WHERE id = ?", + (old_vote[1], proposal_id) + ) + conn.execute( + "UPDATE governance_votes SET vote = ?, weight = ?, voted_at = ? " + "WHERE proposal_id = ? AND miner_id = ?", + (vote_choice, weight, now, proposal_id, miner_id) + ) + + # Update tally + col = f"votes_{vote_choice}" + conn.execute( + f"UPDATE governance_proposals SET {col} = {col} + ? WHERE id = ?", + (weight, proposal_id) + ) + + # Check quorum after vote + updated = conn.execute( + "SELECT votes_for, votes_against, votes_abstain FROM governance_proposals WHERE id = ?", + (proposal_id,) + ).fetchone() + total = sum(updated) + active_miners = _count_active_miners(db_path) + quorum_met = (total >= active_miners * QUORUM_THRESHOLD) if active_miners > 0 else False + conn.execute( + "UPDATE governance_proposals SET quorum_met = ? 
WHERE id = ?", + (1 if quorum_met else 0, proposal_id) + ) + conn.commit() + + except Exception as e: + log.error("Vote error: %s", e) + return jsonify({"error": "internal error"}), 500 + + log.info("Vote cast: proposal #%s, miner=%s, vote=%s, weight=%.2f", + proposal_id, miner_id, vote_choice, weight) + return jsonify({ + "ok": True, + "proposal_id": proposal_id, + "miner_id": miner_id, + "vote": vote_choice, + "weight": weight, + "quorum_met": quorum_met, + }), 200 + + # -- GET /api/governance/results/ ------------------------------------ + @bp.route("/api/governance/results/", methods=["GET"]) + def get_results(proposal_id: int): + _settle_expired_proposals(db_path) + try: + with sqlite3.connect(db_path) as conn: + conn.row_factory = sqlite3.Row + proposal = conn.execute( + "SELECT * FROM governance_proposals WHERE id = ?", (proposal_id,) + ).fetchone() + if not proposal: + return jsonify({"error": "proposal not found"}), 404 + p = dict(proposal) + + except Exception as e: + log.error("Get results error: %s", e) + return jsonify({"error": "internal error"}), 500 + + total_votes = p["votes_for"] + p["votes_against"] + p["votes_abstain"] + active_miners = _count_active_miners(db_path) + quorum_required = active_miners * QUORUM_THRESHOLD if active_miners > 0 else 0 + + return jsonify({ + "proposal_id": proposal_id, + "title": p["title"], + "status": p["status"], + "votes_for": p["votes_for"], + "votes_against": p["votes_against"], + "votes_abstain": p["votes_abstain"], + "total_votes": total_votes, + "quorum_required": quorum_required, + "quorum_met": bool(p["quorum_met"]), + "active_miners": active_miners, + "participation_pct": round(total_votes / active_miners * 100, 1) if active_miners > 0 else 0, + "sophia_analysis": p.get("sophia_analysis"), + }), 200 + + # -- POST /api/governance/veto/ (founder veto) ----------------------- + @bp.route("/api/governance/veto/", methods=["POST"]) + def founder_veto(proposal_id: int): + if not _is_within_founder_veto_period(): 
+ return jsonify({"error": "Founder veto period has expired"}), 403 + + data = request.get_json(silent=True) or {} + admin_key = data.get("admin_key", "").strip() + reason = data.get("reason", "Security-critical change").strip() + + # Admin key is validated via environment variable (not hardcoded) + import os + expected_key = os.environ.get("RUSTCHAIN_ADMIN_KEY", "") + if not expected_key or admin_key != expected_key: + return jsonify({"error": "invalid admin_key"}), 403 + + try: + with sqlite3.connect(db_path) as conn: + proposal = conn.execute( + "SELECT id, status FROM governance_proposals WHERE id = ?", + (proposal_id,) + ).fetchone() + if not proposal: + return jsonify({"error": "proposal not found"}), 404 + if proposal[1] != STATUS_ACTIVE: + return jsonify({"error": f"proposal is already {proposal[1]}"}), 409 + + conn.execute( + "UPDATE governance_proposals SET status = ?, vetoed_by = ?, veto_reason = ? WHERE id = ?", + (STATUS_VETOED, "founder", reason, proposal_id) + ) + conn.commit() + + except Exception as e: + log.error("Veto error: %s", e) + return jsonify({"error": "internal error"}), 500 + + log.info("Proposal #%s vetoed by founder: %s", proposal_id, reason) + return jsonify({"ok": True, "proposal_id": proposal_id, "status": STATUS_VETOED, "reason": reason}), 200 + + # -- GET /api/governance/stats ------------------------------------------ + @bp.route("/api/governance/stats", methods=["GET"]) + def governance_stats(): + _settle_expired_proposals(db_path) + try: + with sqlite3.connect(db_path) as conn: + counts = {} + for status in [STATUS_ACTIVE, STATUS_PASSED, STATUS_FAILED, STATUS_EXPIRED, STATUS_VETOED]: + row = conn.execute( + "SELECT COUNT(*) FROM governance_proposals WHERE status = ?", (status,) + ).fetchone() + counts[status] = row[0] if row else 0 + + total_votes = conn.execute( + "SELECT COUNT(*) FROM governance_votes" + ).fetchone() + + except Exception as e: + log.error("Stats error: %s", e) + return jsonify({"error": "internal error"}), 
500 + + return jsonify({ + "proposal_counts": counts, + "total_proposals": sum(counts.values()), + "total_votes_cast": total_votes[0] if total_votes else 0, + "active_miners": _count_active_miners(db_path), + "founder_veto_active": _is_within_founder_veto_period(), + "quorum_threshold_pct": QUORUM_THRESHOLD * 100, + "voting_window_days": VOTING_WINDOW_SECONDS // 86400, + }), 200 + + return bp diff --git a/node/tests/test_governance.py b/node/tests/test_governance.py new file mode 100644 index 000000000..a5daa96b4 --- /dev/null +++ b/node/tests/test_governance.py @@ -0,0 +1,394 @@ +""" +RIP-0002 Governance Test Suite +================================ +Tests governance proposal creation, voting, lifecycle, quorum, and veto. + +Run with: + pytest tests/test_governance.py -v + +Author: NOX Ventures +""" + +import pytest +import sqlite3 +import tempfile +import time +import os +import sys + +sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) + +from governance import ( + init_governance_tables, + create_governance_blueprint, + STATUS_ACTIVE, STATUS_PASSED, STATUS_FAILED, STATUS_EXPIRED, STATUS_VETOED, + VOTING_WINDOW_SECONDS, +) +from flask import Flask + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +@pytest.fixture +def tmp_db(): + """Temporary SQLite database for each test.""" + with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f: + db_path = f.name + + init_governance_tables(db_path) + + # Seed schema that governance references (miners, attestations) + with sqlite3.connect(db_path) as conn: + conn.executescript(""" + CREATE TABLE IF NOT EXISTS miners ( + wallet_name TEXT PRIMARY KEY, + antiquity_multiplier REAL DEFAULT 1.0 + ); + CREATE TABLE IF NOT EXISTS attestations ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + miner_id TEXT NOT NULL, + timestamp INTEGER NOT NULL + ); + """) + + yield db_path + os.unlink(db_path) 
+ + +@pytest.fixture +def app(tmp_db): + app = Flask(__name__) + bp = create_governance_blueprint(tmp_db) + app.register_blueprint(bp) + app.config["TESTING"] = True + return app + + +@pytest.fixture +def client(app): + return app.test_client() + + +@pytest.fixture +def active_miner(tmp_db): + """Insert a test miner with recent attestation.""" + with sqlite3.connect(tmp_db) as conn: + conn.execute("INSERT INTO miners VALUES ('alice', 2.5)") + conn.execute("INSERT INTO attestations (miner_id, timestamp) VALUES ('alice', ?)", + (int(time.time()) - 3600,)) + return "alice" + + +@pytest.fixture +def second_miner(tmp_db): + with sqlite3.connect(tmp_db) as conn: + conn.execute("INSERT INTO miners VALUES ('bob', 1.0)") + conn.execute("INSERT INTO attestations (miner_id, timestamp) VALUES ('bob', ?)", + (int(time.time()) - 3600,)) + return "bob" + + +# --------------------------------------------------------------------------- +# Scenario 1: Proposal creation +# --------------------------------------------------------------------------- + +def test_create_proposal_success(client, active_miner): + """Active miner can create a parameter_change proposal.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Increase epoch length to 200 slots", + "description": "Longer epochs reduce overhead and improve finality guarantees.", + "proposal_type": "parameter_change", + "parameter_key": "epoch_length", + "parameter_value": "200", + }) + assert res.status_code == 201 + data = res.get_json() + assert data["ok"] is True + assert data["proposal_id"] == 1 + assert data["status"] == STATUS_ACTIVE + assert "sophia_analysis" in data + + +def test_create_proposal_feature_activation(client, active_miner): + """Feature activation proposal requires no parameter_key.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Activate RIP-0010 Dynamic Rewards", + "description": "Enable dynamic reward scaling based 
on network participation.", + "proposal_type": "feature_activation", + }) + assert res.status_code == 201 + + +def test_create_proposal_inactive_miner_rejected(client, tmp_db): + """Inactive miner cannot create proposals.""" + with sqlite3.connect(tmp_db) as conn: + conn.execute("INSERT INTO miners VALUES ('ghost', 1.0)") + # No recent attestation + + res = client.post("/api/governance/propose", json={ + "miner_id": "ghost", + "title": "Test", + "description": "Should fail because miner is inactive.", + "proposal_type": "feature_activation", + }) + assert res.status_code == 403 + + +def test_create_proposal_invalid_type_rejected(client, active_miner): + """Invalid proposal type is rejected.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Bad proposal", + "description": "This has an invalid type.", + "proposal_type": "hack_the_chain", + }) + assert res.status_code == 400 + + +def test_create_proposal_missing_parameter_key(client, active_miner): + """parameter_change without parameter_key is rejected.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Change something", + "description": "Missing parameter_key.", + "proposal_type": "parameter_change", + }) + assert res.status_code == 400 + + +# --------------------------------------------------------------------------- +# Scenario 2: Voting +# --------------------------------------------------------------------------- + +def test_vote_for_proposal(client, active_miner, second_miner, tmp_db): + """Two miners can vote on a proposal.""" + # Create proposal + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Test proposal", + "description": "Test voting.", + "proposal_type": "feature_activation", + }) + assert res.status_code == 201 + pid = res.get_json()["proposal_id"] + + # alice votes for + res = client.post("/api/governance/vote", json={ + "miner_id": active_miner, + "proposal_id": 
pid, + "vote": "for", + }) + assert res.status_code == 200 + assert res.get_json()["vote"] == "for" + + # bob votes against + res = client.post("/api/governance/vote", json={ + "miner_id": second_miner, + "proposal_id": pid, + "vote": "against", + }) + assert res.status_code == 200 + + # Check results + res = client.get(f"/api/governance/results/{pid}") + data = res.get_json() + assert data["votes_for"] == 2.5 # alice antiquity=2.5 + assert data["votes_against"] == 1.0 # bob antiquity=1.0 + + +def test_vote_change_allowed(client, active_miner, tmp_db): + """Miner can change their vote on an active proposal.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Changeable vote test", + "description": "Miner changes mind.", + "proposal_type": "emergency", + }) + pid = res.get_json()["proposal_id"] + + client.post("/api/governance/vote", json={ + "miner_id": active_miner, "proposal_id": pid, "vote": "against" + }) + client.post("/api/governance/vote", json={ + "miner_id": active_miner, "proposal_id": pid, "vote": "for" + }) + + res = client.get(f"/api/governance/results/{pid}") + data = res.get_json() + assert data["votes_for"] > 0 + assert data["votes_against"] == 0.0 + + +def test_vote_on_nonexistent_proposal(client, active_miner): + """Voting on a nonexistent proposal returns 404.""" + res = client.post("/api/governance/vote", json={ + "miner_id": active_miner, + "proposal_id": 999, + "vote": "for", + }) + assert res.status_code == 404 + + +def test_invalid_vote_choice(client, active_miner, tmp_db): + """Invalid vote choice is rejected.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Vote validation test", + "description": "Testing invalid vote.", + "proposal_type": "feature_activation", + }) + pid = res.get_json()["proposal_id"] + + res = client.post("/api/governance/vote", json={ + "miner_id": active_miner, "proposal_id": pid, "vote": "maybe" + }) + assert res.status_code 
== 400 + + +# --------------------------------------------------------------------------- +# Scenario 3: Proposal listing +# --------------------------------------------------------------------------- + +def test_list_proposals_empty(client): + """Empty proposals list returned as empty array.""" + res = client.get("/api/governance/proposals") + assert res.status_code == 200 + data = res.get_json() + assert data["proposals"] == [] + assert data["count"] == 0 + + +def test_list_proposals_with_filter(client, active_miner, tmp_db): + """Proposals can be filtered by status.""" + client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Active one", + "description": "Still voting.", + "proposal_type": "feature_activation", + }) + res = client.get("/api/governance/proposals?status=active") + assert res.status_code == 200 + assert res.get_json()["count"] == 1 + + +# --------------------------------------------------------------------------- +# Scenario 4: Governance stats +# --------------------------------------------------------------------------- + +def test_governance_stats(client, active_miner): + """Stats endpoint returns correct counts.""" + res = client.get("/api/governance/stats") + assert res.status_code == 200 + data = res.get_json() + assert "proposal_counts" in data + assert "active_miners" in data + assert data["quorum_threshold_pct"] == 33.0 + assert data["voting_window_days"] == 7 + + +# --------------------------------------------------------------------------- +# Scenario 5: Sophia AI evaluation +# --------------------------------------------------------------------------- + +def test_sophia_evaluates_emergency_as_high_risk(client, active_miner): + """Emergency proposals are flagged HIGH risk by Sophia.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Emergency halt mining", + "description": "Pause all mining operations due to a critical bug.", + "proposal_type": "emergency", + }) 
+ data = res.get_json() + assert "HIGH" in data["sophia_analysis"] + + +def test_sophia_evaluates_normal_as_low_risk(client, active_miner): + """Normal proposals should be LOW risk.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Update README documentation", + "description": "Improve developer onboarding documentation quality.", + "proposal_type": "feature_activation", + }) + data = res.get_json() + assert "LOW" in data["sophia_analysis"] + + +# --------------------------------------------------------------------------- +# Scenario 6: Proposal detail endpoint +# --------------------------------------------------------------------------- + +def test_get_proposal_detail(client, active_miner): + """Get proposal by ID returns full details.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Detail test", + "description": "Test getting proposal details.", + "proposal_type": "feature_activation", + }) + pid = res.get_json()["proposal_id"] + + res = client.get(f"/api/governance/proposal/{pid}") + assert res.status_code == 200 + data = res.get_json() + assert data["id"] == pid + assert data["proposed_by"] == active_miner + assert "votes" in data + assert "time_remaining_seconds" in data + + +def test_get_nonexistent_proposal(client): + """Getting a nonexistent proposal returns 404.""" + res = client.get("/api/governance/proposal/999") + assert res.status_code == 404 + + +# --------------------------------------------------------------------------- +# Scenario 7: Anti-spam / edge cases +# --------------------------------------------------------------------------- + +def test_no_miner_id_returns_400(client): + """Missing miner_id returns 400.""" + res = client.post("/api/governance/propose", json={ + "title": "No miner", + "description": "Should fail.", + "proposal_type": "feature_activation", + }) + assert res.status_code == 400 + + +def test_empty_title_rejected(client, 
active_miner): + """Empty title is rejected.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "", + "description": "Has description but no title.", + "proposal_type": "feature_activation", + }) + assert res.status_code == 400 + + +def test_abstain_vote(client, active_miner, tmp_db): + """Miner can vote to abstain.""" + res = client.post("/api/governance/propose", json={ + "miner_id": active_miner, + "title": "Abstain test", + "description": "Testing abstain vote.", + "proposal_type": "feature_activation", + }) + pid = res.get_json()["proposal_id"] + + res = client.post("/api/governance/vote", json={ + "miner_id": active_miner, + "proposal_id": pid, + "vote": "abstain", + }) + assert res.status_code == 200 + + res = client.get(f"/api/governance/results/{pid}") + data = res.get_json() + assert data["votes_abstain"] > 0 diff --git a/rustchainnode/README.md b/rustchainnode/README.md new file mode 100644 index 000000000..e15cb46c9 --- /dev/null +++ b/rustchainnode/README.md @@ -0,0 +1,145 @@ +# rustchainnode + +> **pip-installable RustChain attestation node** — one command to start mining. 
+ +```bash +pip install rustchainnode +rustchainnode init --wallet my-wallet-name +rustchainnode start +``` + +## Features + +- 🚀 **One-command install** — `pip install rustchainnode` +- 🔧 **Auto-configuration** — detects CPU architecture, thread count, antiquity multiplier +- 🖥️ **Dashboard** — `rustchainnode dashboard` shows TUI with epoch, miners, balance +- ⚙️ **Service install** — `rustchainnode install-service` generates systemd (Linux) or launchd (macOS) +- 🌐 **Cross-platform** — Linux x86_64, aarch64, macOS (x86/Apple Silicon), PowerPC +- 🧪 **Testnet support** — `--testnet` flag for local development + +## Quick Start + +```bash +# Install +pip install rustchainnode + +# Initialize (auto-detects your hardware) +rustchainnode init --wallet your-wallet-name + +# Start +rustchainnode start + +# Check status +rustchainnode status + +# TUI dashboard +rustchainnode dashboard +``` + +## CLI Commands + +| Command | Description | +|---------|-------------| +| `rustchainnode init --wallet ` | Initialize config + hardware detection | +| `rustchainnode start` | Start the attestation node | +| `rustchainnode stop` | Stop a running daemon | +| `rustchainnode status` | Node status + epoch info | +| `rustchainnode config` | Show current configuration | +| `rustchainnode dashboard` | TUI health dashboard | +| `rustchainnode install-service` | Install systemd/launchd service | + +### Options + +``` +init: + --wallet NAME RTC wallet name + --port PORT Local port (default: 8099) + --testnet Use local testnet + +start: + --wallet NAME Override wallet + --port PORT Override port + --testnet Use local testnet + +install-service: + --wallet NAME Wallet for service config +``` + +## Programmatic API + +```python +from rustchainnode import RustChainNode + +# Create a node instance +node = RustChainNode(wallet="my-wallet", port=8099) +node.start() + +# Check status +print(node.health()) # {"ok": true, "version": "2.2.1-rip200", ...} +print(node.epoch()) # {"epoch": 94, "slot": 13580, 
...}
print(node.config())  # {"wallet": "my-wallet", "arch_type": "modern_x86", ...}

# Stop
node.stop()
```

## Auto-Configuration

`rustchainnode init` automatically detects your hardware:

| Architecture | Antiquity Multiplier |
|--------------|---------------------|
| PowerPC (G4/G5) | 2.5x |
| PowerPC 64-bit | 2.0x |
| PowerPC 64-bit LE | 1.8x |
| x86 32-bit | 1.5x |
| ARM64 / x86_64 | 1.0x |

Vintage hardware earns more RTC per epoch!

## Service Installation

### Linux (systemd)

```bash
rustchainnode install-service --wallet my-wallet
systemctl --user daemon-reload
systemctl --user enable rustchainnode
systemctl --user start rustchainnode
```

### macOS (launchd)

```bash
rustchainnode install-service --wallet my-wallet
launchctl load ~/Library/LaunchAgents/ai.elyan.rustchainnode.plist
```

## Configuration

Config is stored at `~/.rustchainnode/config.json`:

```json
{
  "wallet": "my-wallet",
  "port": 8099,
  "threads": 4,
  "arch_type": "modern_x86",
  "antiquity_multiplier": 1.0,
  "node_url": "https://50.28.86.131",
  "testnet": false,
  "auto_configured": true
}
```

## Cross-Platform Support

- ✅ Linux x86_64
- ✅ Linux aarch64 (ARM64)
- ✅ Linux ppc64 / ppc64le (PowerPC)
- ✅ macOS x86_64
- ✅ macOS arm64 (Apple Silicon M1/M2)
- ✅ Python 3.9+

## License

MIT — © Elyan Labs
diff --git a/rustchainnode/pyproject.toml b/rustchainnode/pyproject.toml
new file mode 100644
index 000000000..181ee5f6e
--- /dev/null
+++ b/rustchainnode/pyproject.toml
[build-system]
requires = ["setuptools>=61", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "rustchainnode"
version = "0.1.0"
description = "pip-installable RustChain attestation node — init, start, stop, status, dashboard"
readme = "README.md"
license = {text = "MIT"}
authors = [
    {name = "Elyan Labs", email = "elyan@rustchain.ai"},
]
keywords = ["rustchain", "blockchain", "attestation", "node", "crypto", "mining"]
classifiers = [
"Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: System :: Distributed Computing", +] +requires-python = ">=3.9" +dependencies = [] + +[project.optional-dependencies] +dev = ["pytest>=7.0", "flask>=3.0"] + +[project.urls] +Homepage = "https://rustchain.ai" +Repository = "https://github.com/Scottcjn/Rustchain" +Issues = "https://github.com/Scottcjn/rustchain-bounties/issues/757" + +[project.scripts] +rustchainnode = "rustchainnode.cli:main" + +[tool.setuptools.packages.find] +where = ["."] +include = ["rustchainnode*"] diff --git a/rustchainnode/rustchainnode/__init__.py b/rustchainnode/rustchainnode/__init__.py new file mode 100644 index 000000000..ba6d18d8a --- /dev/null +++ b/rustchainnode/rustchainnode/__init__.py @@ -0,0 +1,17 @@ +""" +rustchainnode — pip-installable RustChain attestation node. + +Usage: + pip install rustchainnode + rustchainnode init --wallet my-wallet-name + rustchainnode start + +Author: NOX Ventures (noxxxxybot-sketch) +""" + +__version__ = "0.1.0" +__author__ = "Elyan Labs / RustChain Contributors" + +from .node import RustChainNode + +__all__ = ["RustChainNode"] diff --git a/rustchainnode/rustchainnode/cli.py b/rustchainnode/rustchainnode/cli.py new file mode 100644 index 000000000..c93d6f052 --- /dev/null +++ b/rustchainnode/rustchainnode/cli.py @@ -0,0 +1,365 @@ +""" +rustchainnode CLI — init, start, stop, status, config, dashboard, install-service. 
+ +Usage: + rustchainnode init --wallet my-wallet-name [--port 8099] [--testnet] + rustchainnode start [--wallet my-wallet] [--port 8099] [--testnet] + rustchainnode stop + rustchainnode status + rustchainnode config + rustchainnode dashboard + rustchainnode install-service [--wallet my-wallet] + +Author: NOX Ventures (noxxxxybot-sketch) +""" + +import argparse +import json +import os +import platform +import subprocess +import sys +import time +from pathlib import Path + +from .hardware import detect_cpu_info, get_optimal_config + +CONFIG_DIR = Path.home() / ".rustchainnode" +CONFIG_FILE = CONFIG_DIR / "config.json" +PID_FILE = CONFIG_DIR / "node.pid" + +NODE_URL = "https://50.28.86.131" + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _load_config() -> dict: + if CONFIG_FILE.exists(): + return json.loads(CONFIG_FILE.read_text()) + return {} + + +def _save_config(cfg: dict): + CONFIG_DIR.mkdir(parents=True, exist_ok=True) + CONFIG_FILE.write_text(json.dumps(cfg, indent=2)) + + +def _check_health(node_url: str = NODE_URL) -> dict: + try: + import urllib.request + with urllib.request.urlopen(f"{node_url}/health", timeout=5) as r: + return json.loads(r.read()) + except Exception as e: + return {"ok": False, "error": str(e)} + + +def _check_epoch(node_url: str = NODE_URL) -> dict: + try: + import urllib.request + with urllib.request.urlopen(f"{node_url}/epoch", timeout=5) as r: + return json.loads(r.read()) + except Exception as e: + return {"error": str(e)} + + +# --------------------------------------------------------------------------- +# Commands +# --------------------------------------------------------------------------- + +def cmd_init(args): + """Initialize rustchainnode configuration.""" + print("🦀 RustChain Node — Initializing...") + + hw = detect_cpu_info() + print(f" Detected: {hw['arch']} ({hw['arch_type']}) | 
{hw['cpu_count']} CPUs")
    print(f"   Antiquity multiplier: {hw['antiquity_multiplier']}x")
    print(f"   Optimal threads: {hw['optimal_threads']}")

    # Fall back to an interactive prompt when --wallet was not given.
    wallet = args.wallet or input("   Wallet name: ").strip()
    port = args.port or 8099
    testnet = getattr(args, "testnet", False)

    # Persist the auto-detected hardware configuration plus the testnet flag.
    cfg = get_optimal_config(wallet, port)
    cfg["testnet"] = testnet
    _save_config(cfg)

    # Test connectivity
    print(f"\n   Testing connectivity to {NODE_URL}...")
    health = _check_health()
    if health.get("ok"):
        print(f"   ✅ Node reachable: {health}")
    else:
        # Not fatal: init still succeeds; connectivity is re-checked on start.
        print(f"   ⚠️ Node unreachable ({health.get('error', 'unknown')}). Will retry on start.")

    print(f"\n   ✅ Config saved to {CONFIG_FILE}")
    print(f"   Wallet: {wallet} | Port: {port} | Threads: {hw['optimal_threads']}")
    print(f"\n   Next: rustchainnode start")


def cmd_start(args):
    """Start the RustChain attestation node."""
    # CLI flags override values from the saved config file.
    cfg = _load_config()
    wallet = getattr(args, "wallet", None) or cfg.get("wallet")
    port = getattr(args, "port", None) or cfg.get("port", 8099)
    testnet = getattr(args, "testnet", False)

    if not wallet:
        print("❌ No wallet configured. 
Run: rustchainnode init --wallet ") + sys.exit(1) + + hw = detect_cpu_info() + node_url = "http://localhost:8099" if testnet else NODE_URL + + print(f"🚀 Starting RustChain node...") + print(f" Wallet: {wallet}") + print(f" Port: {port}") + print(f" CPU: {hw['arch']} | {hw['cpu_count']} threads") + print(f" Antiquity: {hw['antiquity_multiplier']}x") + print(f" Node URL: {node_url}") + + # Check health of remote node + health = _check_health(node_url) + if health.get("ok"): + print(f"\n✅ Remote node online: {health.get('version', 'unknown')}") + else: + print(f"\n⚠️ Remote node: {health.get('error', 'unreachable')}") + + epoch = _check_epoch(node_url) + if "epoch" in epoch: + print(f"📊 Current epoch: {epoch['epoch']} | Slot: {epoch.get('slot', '?')}") + + print(f"\n✅ Node initialized for wallet '{wallet}'") + print(" Configure systemd service: rustchainnode install-service") + + +def cmd_stop(args): + """Stop a running rustchainnode daemon.""" + if PID_FILE.exists(): + pid = int(PID_FILE.read_text().strip()) + try: + import signal + os.kill(pid, signal.SIGTERM) + PID_FILE.unlink() + print(f"✅ Node stopped (PID {pid})") + except ProcessLookupError: + print(f"⚠️ Process {pid} not found (already stopped?)") + PID_FILE.unlink() + else: + print("ℹ️ No running node found") + + +def cmd_status(args): + """Show node status.""" + cfg = _load_config() + node_url = NODE_URL + + print("🔍 RustChain Node Status") + print(f" Config: {CONFIG_FILE}") + + if cfg: + print(f" Wallet: {cfg.get('wallet', 'not set')}") + print(f" Port: {cfg.get('port', 8099)}") + print(f" Arch: {cfg.get('arch_type', 'unknown')}") + print(f" Antiquity: {cfg.get('antiquity_multiplier', 1.0)}x") + + health = _check_health(node_url) + if health.get("ok"): + print(f"\n 🟢 Remote node: ONLINE") + print(f" Version: {health.get('version', '?')}") + else: + print(f"\n 🔴 Remote node: OFFLINE ({health.get('error', '?')})") + + epoch = _check_epoch(node_url) + if "epoch" in epoch: + print(f" Epoch: {epoch['epoch']} | 
Slot: {epoch.get('slot', '?')}") + + +def cmd_config(args): + """Show current configuration.""" + cfg = _load_config() + if cfg: + print(json.dumps(cfg, indent=2)) + else: + print("No configuration found. Run: rustchainnode init --wallet ") + + +def cmd_dashboard(args): + """Show TUI-style health dashboard.""" + cfg = _load_config() + node_url = NODE_URL + + print("\n" + "=" * 60) + print(" 🦀 RustChain Node Dashboard") + print("=" * 60) + + wallet = cfg.get("wallet", "not configured") + print(f" Wallet: {wallet}") + + hw = detect_cpu_info() + print(f" CPU: {hw['arch']} ({hw['arch_type']})") + print(f" Threads: {hw['cpu_count']}") + print(f" Antiquity: {hw['antiquity_multiplier']}x") + + health = _check_health(node_url) + status_icon = "🟢" if health.get("ok") else "🔴" + print(f"\n Node Status: {status_icon} {'ONLINE' if health.get('ok') else 'OFFLINE'}") + + if health.get("ok"): + print(f" Version: {health.get('version', '?')}") + + epoch = _check_epoch(node_url) + if "epoch" in epoch: + print(f" Epoch: {epoch.get('epoch', '?')}") + print(f" Slot: {epoch.get('slot', '?')}") + + print("\n" + "=" * 60) + + +def cmd_install_service(args): + """Install systemd (Linux) or launchd (macOS) service.""" + cfg = _load_config() + wallet = getattr(args, "wallet", None) or cfg.get("wallet", "my-wallet") + system = platform.system().lower() + + if system == "linux": + _install_systemd(wallet) + elif system == "darwin": + _install_launchd(wallet) + else: + print(f"⚠️ Service installation not supported on {platform.system()}") + print(" Start manually: rustchainnode start") + + +def _install_systemd(wallet: str): + service_name = "rustchainnode" + bin_path = subprocess.check_output(["which", "rustchainnode"], text=True).strip() + + service_content = f"""[Unit] +Description=RustChain Attestation Node +After=network.target + +[Service] +Type=simple +User={os.getenv('USER', 'rustchain')} +ExecStart={bin_path} start --wallet {wallet} +Restart=on-failure +RestartSec=10 
+Environment=PYTHONUNBUFFERED=1 + +[Install] +WantedBy=multi-user.target +""" + # Try user systemd first + user_systemd = Path.home() / ".config/systemd/user" + user_systemd.mkdir(parents=True, exist_ok=True) + service_path = user_systemd / f"{service_name}.service" + service_path.write_text(service_content) + + print(f"✅ systemd service written to {service_path}") + print(f"\nEnable and start:") + print(f" systemctl --user daemon-reload") + print(f" systemctl --user enable {service_name}") + print(f" systemctl --user start {service_name}") + print(f" systemctl --user status {service_name}") + + +def _install_launchd(wallet: str): + bin_path = subprocess.check_output(["which", "rustchainnode"], text=True).strip() + plist_label = "ai.elyan.rustchainnode" + plist_dir = Path.home() / "Library/LaunchAgents" + plist_dir.mkdir(parents=True, exist_ok=True) + plist_path = plist_dir / f"{plist_label}.plist" + + plist_content = f""" + + + + Label + {plist_label} + ProgramArguments + + {bin_path} + start + --wallet + {wallet} + + RunAtLoad + + KeepAlive + + StandardOutPath + {Path.home()}/.rustchainnode/rustchainnode.log + StandardErrorPath + {Path.home()}/.rustchainnode/rustchainnode.err + + +""" + plist_path.write_text(plist_content) + print(f"✅ launchd plist written to {plist_path}") + print(f"\nLoad and start:") + print(f" launchctl load {plist_path}") + print(f" launchctl start {plist_label}") + + +# --------------------------------------------------------------------------- +# Main entry point +# --------------------------------------------------------------------------- + +def main(): + parser = argparse.ArgumentParser( + prog="rustchainnode", + description="🦀 RustChain attestation node — pip installable", + ) + subparsers = parser.add_subparsers(dest="command", help="command") + + # init + p_init = subparsers.add_parser("init", help="Initialize node configuration") + p_init.add_argument("--wallet", default=None, help="RTC wallet name") + p_init.add_argument("--port", 
type=int, default=8099, help="Local port (default: 8099)")
    p_init.add_argument("--testnet", action="store_true", help="Use local testnet")

    # start
    p_start = subparsers.add_parser("start", help="Start the node")
    p_start.add_argument("--wallet", default=None)
    p_start.add_argument("--port", type=int, default=None)
    p_start.add_argument("--testnet", action="store_true")

    # stop
    subparsers.add_parser("stop", help="Stop running node")

    # status
    subparsers.add_parser("status", help="Show node status")

    # config
    subparsers.add_parser("config", help="Show current configuration")

    # dashboard
    subparsers.add_parser("dashboard", help="TUI health dashboard")

    # install-service
    p_svc = subparsers.add_parser("install-service", help="Install systemd/launchd service")
    p_svc.add_argument("--wallet", default=None)

    args = parser.parse_args()

    # Table-driven dispatch: one handler per sub-command name.
    dispatch = {
        "init": cmd_init,
        "start": cmd_start,
        "stop": cmd_stop,
        "status": cmd_status,
        "config": cmd_config,
        "dashboard": cmd_dashboard,
        "install-service": cmd_install_service,
    }

    if args.command in dispatch:
        dispatch[args.command](args)
    else:
        # No sub-command given — print usage rather than failing.
        parser.print_help()


if __name__ == "__main__":
    main()
diff --git a/rustchainnode/rustchainnode/hardware.py b/rustchainnode/rustchainnode/hardware.py
new file mode 100644
index 000000000..a3d52a890
--- /dev/null
+++ b/rustchainnode/rustchainnode/hardware.py
"""
Hardware detection for rustchainnode.
Auto-detects CPU architecture, thread count, and antiquity score.
"""

import os
import platform
# NOTE(review): subprocess, json and Path are imported but unused in this
# module as shown — possibly kept for a planned probe; confirm before removing.
import subprocess
import json
from pathlib import Path


def detect_cpu_info() -> dict:
    """Detect CPU architecture and estimate antiquity.

    Returns a dict with keys: arch, arch_type, system, cpu_count,
    optimal_threads, antiquity_multiplier, python_version. Purely local
    (platform/os) — performs no I/O.
    """
    arch = platform.machine().lower()
    system = platform.system().lower()
    cpu_count = os.cpu_count() or 1  # os.cpu_count() may return None

    # Map architectures to RustChain types
    arch_map = {
        "x86_64": "modern_x86",
        "amd64": "modern_x86",
        "aarch64": "arm64",
        "arm64": "arm64",
        "ppc64": "ppc64",
        "ppc64le": "ppc64le",
        "ppc": "ppc",
        "i386": "x86_32",
        "i686": "x86_32",
    }
    arch_type = arch_map.get(arch, "unknown")

    # Antiquity multipliers (vintage = higher)
    antiquity_map = {
        "ppc": 2.5,
        "ppc64": 2.0,
        "ppc64le": 1.8,
        "x86_32": 1.5,
        "arm64": 1.0,
        "modern_x86": 1.0,
        "unknown": 1.0,
    }
    antiquity = antiquity_map.get(arch_type, 1.0)

    # Optimal thread count: 1 per CPU (RIP-200: 1 CPU = 1 vote)
    optimal_threads = cpu_count

    return {
        "arch": arch,
        "arch_type": arch_type,
        "system": system,
        "cpu_count": cpu_count,
        "optimal_threads": optimal_threads,
        "antiquity_multiplier": antiquity,
        "python_version": platform.python_version(),
    }


def get_optimal_config(wallet: str, port: int = 8099) -> dict:
    """Generate optimal node configuration based on hardware.

    Wraps detect_cpu_info() into a node config dict; node_url is the
    hard-coded primary node, and auto_configured marks the origin.
    """
    hw = detect_cpu_info()
    return {
        "wallet": wallet,
        "port": port,
        "threads": hw["optimal_threads"],
        "arch_type": hw["arch_type"],
        "antiquity_multiplier": hw["antiquity_multiplier"],
        "node_url": "https://50.28.86.131",
        "auto_configured": True,
    }
diff --git a/rustchainnode/rustchainnode/node.py b/rustchainnode/rustchainnode/node.py
new file mode 100644
index 000000000..b4e8427c5
--- /dev/null
+++ b/rustchainnode/rustchainnode/node.py
@@ -0,0 +1,87 @@
"""
RustChain Node — programmatic API for the rustchainnode package.
"""

import json
import os
import time
import threading
import logging
from pathlib import Path
from typing import Optional

log = logging.getLogger("rustchainnode")

DEFAULT_PORT = 8099
DEFAULT_CONFIG_DIR = Path.home() / ".rustchainnode"


class RustChainNode:
    """
    Programmatic interface to a RustChain attestation node.

    Example:
        from rustchainnode import RustChainNode
        node = RustChainNode(wallet="my-wallet", port=8099)
        node.start()
        print(node.health())
        node.stop()
    """

    def __init__(
        self,
        wallet: str,
        port: int = DEFAULT_PORT,
        config_dir: Optional[Path] = None,
        testnet: bool = False,
        node_url: str = "https://50.28.86.131",
    ):
        # testnet=True overrides node_url with a localhost endpoint.
        self.wallet = wallet
        self.port = port
        self.config_dir = Path(config_dir) if config_dir else DEFAULT_CONFIG_DIR
        self.testnet = testnet
        self.node_url = "http://localhost:8099" if testnet else node_url
        self._process = None   # reserved; never assigned elsewhere in this view
        self._thread = None    # reserved; never assigned elsewhere in this view
        self._running = False  # flag reported by is_running()

    def start(self):
        """Start the node (background thread).

        NOTE(review): as written this only sets the running flag and logs —
        no thread or subprocess is actually launched here, despite the
        docstring. Confirm whether the real startup lives elsewhere.
        """
        if self._running:
            log.warning("Node already running")
            return
        self._running = True
        log.info("RustChain node starting (wallet=%s, port=%d)", self.wallet, self.port)

    def stop(self):
        """Stop the node (clears the running flag; no process teardown here)."""
        self._running = False
        log.info("RustChain node stopped")

    def health(self) -> dict:
        """Return health status from the node.

        GETs {node_url}/health with a 5s timeout; on any failure returns
        {"ok": False, "error": ...} instead of raising.
        """
        try:
            import urllib.request
            with urllib.request.urlopen(f"{self.node_url}/health", timeout=5) as r:
                return json.loads(r.read())
        except Exception as e:
            return {"ok": False, "error": str(e)}

    def epoch(self) -> dict:
        """Return current epoch info.

        GETs {node_url}/epoch with a 5s timeout; on failure returns
        {"error": ...} instead of raising.
        """
        try:
            import urllib.request
            with urllib.request.urlopen(f"{self.node_url}/epoch", timeout=5) as r:
                return json.loads(r.read())
        except Exception as e:
            return {"error": str(e)}

    def config(self) -> dict:
        """Return current configuration.

        Reads {config_dir}/config.json if present; otherwise an empty dict.
        """
        cfg_path = self.config_dir / "config.json"
        if cfg_path.exists():
            return json.loads(cfg_path.read_text())
        return {}

    def is_running(self) -> bool:
        # Reflects only the local flag set by start()/stop().
        return self._running
diff --git a/testing/attest_fuzz.py b/testing/attest_fuzz.py
new file mode 100644
index 000000000..137c9c858
--- /dev/null
+++ b/testing/attest_fuzz.py
@@ -0,0 +1,486 @@
#!/usr/bin/env python3
"""
RustChain Attestation Fuzz Harness
====================================
Property-based fuzz testing for POST /attest/submit.
Generates thousands of malformed, oversized, and adversarial payloads
to find crashes, unhandled exceptions, and edge cases.

Usage:
    python3 attest_fuzz.py                 # Run 1000 fuzz iterations
    python3 attest_fuzz.py --count 10000   # Run 10000 iterations
    python3 attest_fuzz.py --ci            # Exit non-zero on crash found
    python3 attest_fuzz.py --save-corpus   # Save generated payloads
    python3 attest_fuzz.py --report        # Show saved crash report

Bounty: https://github.com/Scottcjn/rustchain-bounties/issues/762
Author: NOX Ventures (noxxxxybot-sketch)
"""

import argparse
import hashlib
import json
import os
import random
import ssl
import string
import sys
import time
import urllib.request
import urllib.error
from copy import deepcopy
from dataclasses import dataclass, field, asdict
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

# ---------------------------------------------------------------------------
# Configuration
# ---------------------------------------------------------------------------

# Target node; overridable via env var or the --url CLI flag (see main()).
TARGET_URL = os.environ.get("RUSTCHAIN_URL", "https://50.28.86.131")
ATTEST_ENDPOINT = f"{TARGET_URL}/attest/submit"
CORPUS_DIR = Path("fuzz_corpus")
CRASH_REPORT = Path("fuzz_crashes.json")
TIMEOUT = 10  # per-request timeout, seconds

KNOWN_WALLETS = ["nox-ventures", "test-miner", "alice", "bob", "founder_community"]
KNOWN_ARCHS = ["modern", "vintage", "ppc", "arm64", "x86_64"]
KNOWN_FAMILIES = ["x86_64", "aarch64", "ppc64", "arm64", "i686"]


# ---------------------------------------------------------------------------
# 
Baseline valid payload
# ---------------------------------------------------------------------------

def _make_nonce(wallet: str) -> str:
    """Generate a plausible nonce (sha256 of wallet:timestamp:random)."""
    data = f"{wallet}:{int(time.time())}:{random.randint(0, 1<<32)}".encode()
    return hashlib.sha256(data).hexdigest()


def baseline_payload(wallet: str = "nox-ventures") -> dict:
    """Generate a structurally valid attestation payload.

    This is the seed every mutator perturbs; fields mirror what the
    /attest/submit endpoint expects (miner identity, device info, signals,
    fingerprint checks).
    """
    return {
        "miner": wallet,
        "miner_id": hashlib.sha256(wallet.encode()).hexdigest()[:16],
        "nonce": _make_nonce(wallet),
        "device": {
            "model": "AMD Ryzen 5 5600X",
            "arch": "modern",
            "family": "x86_64",
            "cpu_serial": hashlib.md5(wallet.encode()).hexdigest(),
            "device_id": "550e8400-e29b-41d4-a716-446655440000",
        },
        "signals": {
            "macs": ["aa:bb:cc:dd:ee:ff"],
            "hostname": "test-machine",
        },
        "fingerprint": {
            "all_passed": True,
            "checks": {
                "clock_drift": {"passed": True, "data": {"cv": 0.092, "samples": 1000}},
                "cache_timing": {"passed": True, "data": {"profile": [1.2, 3.4, 5.6]}},
                "simd_identity": {"passed": True, "data": {}},
                "thermal_drift": {"passed": True, "data": {}},
                "instruction_jitter": {"passed": True, "data": {}},
                "anti_emulation": {"passed": True, "data": {"vm_indicators": []}},
            },
        },
    }


# ---------------------------------------------------------------------------
# Mutation strategies
# ---------------------------------------------------------------------------

def rand_str(length: int, charset: str = string.printable) -> str:
    """Random string of `length` chars drawn (with replacement) from charset."""
    return "".join(random.choices(charset, k=length))


def rand_unicode() -> str:
    """Generate unicode edge cases: null bytes, RTL, emoji, surrogates."""
    edge_cases = [
        "\x00",  # null byte
        "\u202e" + "malicious",  # RTL override
        "💀" * random.randint(1, 100),  # emoji
        "A" * random.randint(100, 1_000_000),  # long string
        "\uffff",  # non-character
        "café",  # unicode
        "日本語",  # CJK
        "\r\n\r\n",  # CRLF injection
        "../../../etc/passwd",  # path traversal
        "'; DROP TABLE miners; --",  # SQL injection attempt
        # NOTE(review): the XSS entry below is an empty string — the original
        # angle-bracket payload (likely a <script> probe) appears stripped by
        # extraction; confirm against the original diff.
        "",  # XSS
        "%00%00%00",  # URL-encoded nulls
    ]
    return random.choice(edge_cases)


def mutate_value(v: Any) -> Any:
    """Randomly mutate a value to an unexpected type or value.

    The argument is ignored by every strategy; it documents intent only.
    """
    strategies = [
        lambda: None,
        lambda: "",
        lambda: 0,
        lambda: -1,
        lambda: 2**31 - 1,
        lambda: 2**63,
        lambda: -2**63,
        lambda: 3.14,
        lambda: float("inf"),
        lambda: float("nan"),
        lambda: True,
        lambda: False,
        lambda: [],
        lambda: {},
        lambda: [1, 2, 3],
        lambda: {"nested": {"deep": "value"}},
        lambda: rand_str(1),
        lambda: rand_str(1024),
        lambda: rand_str(65536),
        lambda: rand_unicode(),
        lambda: [rand_str(10) for _ in range(100)],
        lambda: "\x00" * 1000,
    ]
    return random.choice(strategies)()


def mutate_missing_field(payload: dict, key_path: List[str]) -> dict:
    """Remove a field from the payload (deep-copied; original untouched)."""
    p = deepcopy(payload)
    obj = p
    # Walk to the parent of the target; .get(k, {}) makes a missing
    # intermediate a no-op (pop then hits a throwaway dict).
    for k in key_path[:-1]:
        obj = obj.get(k, {})
    obj.pop(key_path[-1], None)
    return p


def mutate_wrong_type(payload: dict, key_path: List[str]) -> dict:
    """Replace a field with a wrong type (creating intermediates as needed)."""
    p = deepcopy(payload)
    obj = p
    for k in key_path[:-1]:
        if k not in obj:
            obj[k] = {}
        obj = obj[k]
    obj[key_path[-1]] = mutate_value(obj.get(key_path[-1]))
    return p


def mutate_add_unknown_field(payload: dict) -> dict:
    """Add unexpected fields at various levels."""
    p = deepcopy(payload)
    injection_key = rand_str(random.randint(1, 50))
    injection_val = mutate_value(None)
    target = random.choice([p, p.get("device", {}), p.get("signals", {}), p.get("fingerprint", {})])
    target[injection_key] = injection_val
    return p


def mutate_nested_bomb(payload: dict) -> dict:
    """Create deeply nested structures (JSON bomb) in device.model."""
    p = deepcopy(payload)
    deep = {}
    current = deep
    for _ in range(random.randint(100, 500)):
        current["x"] = {}
        current = current["x"]
    p["device"]["model"] = deep
    return p


def mutate_array_overflow(payload: dict) -> dict:
    """Make arrays very large (1k–10k MAC entries)."""
    p = deepcopy(payload)
    p["signals"]["macs"] = [f"aa:bb:cc:dd:ee:{i:02x}" for i in range(random.randint(1000, 10000))]
    return p


def mutate_float_checks(payload: dict) -> dict:
    """Use edge-case float values in fingerprint data (inf/nan/denormals)."""
    p = deepcopy(payload)
    edge_floats = [float("inf"), float("-inf"), float("nan"), 1e308, -1e308, 1e-308, 0.0, -0.0]
    p["fingerprint"]["checks"]["clock_drift"]["data"]["cv"] = random.choice(edge_floats)
    p["fingerprint"]["checks"]["cache_timing"]["data"]["profile"] = [random.choice(edge_floats)] * 100
    return p


# Key paths for targeted mutations
KEY_PATHS = [
    ["miner"],
    ["miner_id"],
    ["nonce"],
    ["device"],
    ["device", "model"],
    ["device", "arch"],
    ["device", "family"],
    ["device", "cpu_serial"],
    ["device", "device_id"],
    ["signals"],
    ["signals", "macs"],
    ["signals", "hostname"],
    ["fingerprint"],
    ["fingerprint", "all_passed"],
    ["fingerprint", "checks"],
    ["fingerprint", "checks", "clock_drift"],
]

# (name, callable) pairs; run_fuzz picks one per iteration.
MUTATORS = [
    ("missing_field", lambda p: mutate_missing_field(p, random.choice(KEY_PATHS))),
    ("wrong_type", lambda p: mutate_wrong_type(p, random.choice(KEY_PATHS))),
    ("unknown_field", mutate_add_unknown_field),
    ("nested_bomb", mutate_nested_bomb),
    ("array_overflow", mutate_array_overflow),
    ("float_edge", mutate_float_checks),
    ("unicode_miner", lambda p: {**p, "miner": rand_unicode()}),
    ("huge_miner", lambda p: {**p, "miner": "x" * random.randint(10_000, 1_000_000)}),
    ("null_miner", lambda p: {**p, "miner": None}),
    ("empty_payload", lambda _: {}),
    ("not_json", None),  # handled specially: sends raw non-JSON bytes
]


# ---------------------------------------------------------------------------
# HTTP + result collection
# ---------------------------------------------------------------------------

@dataclass
class FuzzResult:
    iteration: int               # 1-based fuzz iteration number
    mutator: str                 # name from MUTATORS
    payload: Any                 # mutated payload (None for raw/not_json)
    status_code: Optional[int]   # HTTP status, or None on network error
    response_body: str           # truncated response text
    elapsed_ms: float
    is_crash: bool               # verdict from classify_crash()
    crash_detail: str = ""       # human-readable crash reason


def send_payload(payload: Any, is_raw: bool = False) -> Tuple[Optional[int], str, float]:
    """Send a payload to the attestation endpoint.

    Returns (status_code or None, response text truncated to 2000 chars,
    elapsed milliseconds). Never raises — all errors are folded into the
    tuple so the fuzz loop keeps running.
    """
    # TLS verification is deliberately disabled: the target nodes are bare
    # IPs with self-signed certs. Do not reuse this context elsewhere.
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE

    if is_raw:
        # Non-JSON garbage with a randomized (possibly empty) content type.
        body = rand_str(random.randint(1, 10000)).encode()
        content_type = random.choice(["text/plain", "application/xml", "multipart/form-data", ""])
    else:
        try:
            body = json.dumps(payload).encode()
        except (TypeError, ValueError, RecursionError):
            # Some mutations are not JSON-serializable (e.g. inf/nan edge
            # cases or deep nesting) — fall back to an empty object.
            body = b"{}"
        content_type = "application/json"

    req = urllib.request.Request(
        ATTEST_ENDPOINT,
        data=body,
        method="POST",
        headers={
            "Content-Type": content_type,
            "User-Agent": "rustchain-fuzz/1.0",
        }
    )

    start = time.monotonic()
    try:
        with urllib.request.urlopen(req, timeout=TIMEOUT, context=ctx) as r:
            elapsed = (time.monotonic() - start) * 1000
            return r.status, r.read().decode("utf-8", errors="replace")[:2000], elapsed
    except urllib.error.HTTPError as e:
        # 4xx/5xx still carry a body worth recording.
        elapsed = (time.monotonic() - start) * 1000
        return e.code, e.read().decode("utf-8", errors="replace")[:2000], elapsed
    except urllib.error.URLError as e:
        elapsed = (time.monotonic() - start) * 1000
        return None, str(e), elapsed
    except Exception as e:
        elapsed = (time.monotonic() - start) * 1000
        return None, f"EXCEPTION: {type(e).__name__}: {e}", elapsed


def classify_crash(status_code: Optional[int], response: str, elapsed_ms: float) -> Tuple[bool, str]:
    """Determine if a response indicates a crash or vulnerability.

    Heuristics: any 5xx, a near-timeout response (>90% of TIMEOUT), a
    traceback-looking body, or a refused connection.
    """
    # 5xx = server error (potential crash)
    if status_code and status_code >= 500:
        return True, f"HTTP {status_code} server error"

    # Timeout = potential DoS
    if elapsed_ms > (TIMEOUT * 1000 * 0.9):
        return True, f"Timeout ({elapsed_ms:.0f}ms)"

    # Exception traceback in response
    if any(kw in response for kw in ["Traceback", "Exception", "Error at", "Internal Server Error"]):
        return True, "Traceback/exception in response body"

    # Connection error (unexpected — server should be up)
    if status_code is None and "Connection refused" in response:
        return True, "Connection refused (server crash?)"

    return False, ""


# ---------------------------------------------------------------------------
# Main fuzzing loop
# ---------------------------------------------------------------------------

def run_fuzz(
    count: int = 1000,
    save_corpus: bool = False,
    ci_mode: bool = False,  # NOTE(review): accepted but unused here — the CI exit code is handled by main()
    verbose: bool = False,
) -> List[FuzzResult]:
    """Run `count` fuzz iterations against ATTEST_ENDPOINT.

    Prints progress, optionally saves each payload to CORPUS_DIR, writes a
    crash report to CRASH_REPORT when crashes are found, and returns the
    list of crash results.
    """
    crashes: List[FuzzResult] = []
    results: List[FuzzResult] = []

    if save_corpus:
        CORPUS_DIR.mkdir(exist_ok=True)

    print(f"🔥 RustChain Attestation Fuzz Harness")
    print(f" Target: {ATTEST_ENDPOINT}")
    print(f" Iterations: {count}")
    print(f" Save corpus: {save_corpus}")
    print()

    for i in range(count):
        base = baseline_payload(random.choice(KNOWN_WALLETS))

        # Pick mutator
        mutator_name, mutator_fn = random.choice(MUTATORS)

        if mutator_name == "not_json":
            payload = None  # Will send raw garbage
            status, response, elapsed = send_payload(None, is_raw=True)
        else:
            try:
                payload = mutator_fn(base)
            except Exception:
                # A mutator bug must not abort the run — fall back to the seed.
                payload = base
            status, response, elapsed = send_payload(payload)

        is_crash, crash_detail = classify_crash(status, response, elapsed)

        result = FuzzResult(
            iteration=i + 1,
            mutator=mutator_name,
            payload=payload,
            status_code=status,
            response_body=response[:500],
            elapsed_ms=elapsed,
            is_crash=is_crash,
            crash_detail=crash_detail,
        )
        results.append(result)

        if is_crash:
            crashes.append(result)
            print(f" 💥 [{i+1:5d}] {mutator_name:<20} → CRASH: {crash_detail} ({elapsed:.0f}ms)")
        elif verbose or (i + 1) % 100 == 0:
            status_str = str(status) if status else "ERR"
            print(f" ✓ [{i+1:5d}] {mutator_name:<20} → HTTP {status_str} ({elapsed:.0f}ms)")

        if save_corpus:
            corpus_file = CORPUS_DIR / f"iter_{i+1:06d}_{mutator_name}.json"
            try:
                # default=str makes non-JSON values (inf/nan payload objects
                # etc.) serializable; corpus writes are best-effort.
                corpus_file.write_text(json.dumps({
                    "mutator": mutator_name,
                    "payload": payload,
                    "status": status,
                    "elapsed_ms": elapsed,
                    "is_crash": is_crash,
                }, default=str))
            except Exception:
                pass

        # Small delay to avoid hammering
        time.sleep(0.01)

    # Summary
    print()
    print("=" * 60)
    print(f" Fuzz Summary")
    print("=" * 60)
    print(f" Total iterations: {count}")
    print(f" Crashes found: {len(crashes)}")
    print(f" Crash rate: {len(crashes)/count*100:.1f}%")

    # Histogram of response codes (None → "network_err").
    status_counts = {}
    for r in results:
        k = str(r.status_code) if r.status_code else "network_err"
        status_counts[k] = status_counts.get(k, 0) + 1
    print(f" Response codes: {dict(sorted(status_counts.items()))}")

    if crashes:
        print()
        print(" 💥 Crashes:")
        for c in crashes[:10]:
            print(f" [{c.iteration}] {c.mutator}: {c.crash_detail}")

        # Save crash report
        crash_data = [
            {
                "iteration": c.iteration,
                "mutator": c.mutator,
                "status_code": c.status_code,
                "crash_detail": c.crash_detail,
                "elapsed_ms": c.elapsed_ms,
                "payload_preview": str(c.payload)[:500],
                "response_preview": c.response_body[:500],
            }
            for c in crashes
        ]
        CRASH_REPORT.write_text(json.dumps(crash_data, indent=2))
        print(f"\n Crash report saved to: {CRASH_REPORT}")

    print("=" * 60)
    return crashes


def show_report():
    """Pretty-print the saved CRASH_REPORT file, if any."""
    if not CRASH_REPORT.exists():
        print("No crash report found. Run the fuzzer first.")
        return
    crashes = json.loads(CRASH_REPORT.read_text())
    print(f"Crash Report — {len(crashes)} crashes found")
    print()
    for c in crashes:
        print(f" [{c['iteration']}] {c['mutator']}: {c['crash_detail']}")
        print(f" Status: {c['status_code']} | Elapsed: {c['elapsed_ms']:.0f}ms")
        print(f" Payload: {c['payload_preview'][:100]}")
        print()


# ---------------------------------------------------------------------------
# CLI
# ---------------------------------------------------------------------------

def main():
    """Parse CLI flags, optionally show the report, else run the fuzz loop.

    Exits non-zero in --ci mode when any crash was found.
    """
    parser = argparse.ArgumentParser(description="RustChain Attestation Fuzz Harness")
    parser.add_argument("--count", type=int, default=1000, help="Number of fuzz iterations")
    parser.add_argument("--ci", action="store_true", help="Exit non-zero if any crash found")
    parser.add_argument("--save-corpus", action="store_true", help="Save all generated payloads")
    parser.add_argument("--verbose", action="store_true", help="Print every result")
    parser.add_argument("--report", action="store_true", help="Show saved crash report")
    parser.add_argument("--url", default=None, help="Override target URL")
    args = parser.parse_args()

    if args.url:
        # send_payload reads the module-level ATTEST_ENDPOINT, so rebind it.
        global ATTEST_ENDPOINT
        ATTEST_ENDPOINT = f"{args.url}/attest/submit"

    if args.report:
        show_report()
        return

    crashes = run_fuzz(
        count=args.count,
        save_corpus=args.save_corpus,
        ci_mode=args.ci,
        verbose=args.verbose,
    )

    if args.ci and crashes:
        sys.exit(1)


if __name__ == "__main__":
    main()